├── .gitignore
├── .DS_Store
├── config
│   ├── get_figma_files_list.yml
│   └── download_settings.json
├── package.json
├── utils.js
├── team_file_scanner.sh
├── team_file_download.sh
├── figma_download_file_by_url.js
├── figma_download_files_by_list.js
├── readme.md
├── get_figma_files_list.py
└── figma_actions.js

/.gitignore:
--------------------------------------------------------------------------------
1 | /node_modules
2 | /process
3 | /store
--------------------------------------------------------------------------------
/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ritds/figma_backup/HEAD/.DS_Store
--------------------------------------------------------------------------------
/config/get_figma_files_list.yml:
--------------------------------------------------------------------------------
1 | access_token: 'your-access-tokens'
2 | output_limit: 100
3 | age_limit: 1
4 | teams:
5 |   - your-team-id
6 | 
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "dependencies": {
3 |     "fs-extra": "^10.0.1",
4 |     "log4js": "^6.4.4",
5 |     "puppeteer": "^13.4.1"
6 |   }
7 | }
8 | 
--------------------------------------------------------------------------------
/utils.js:
--------------------------------------------------------------------------------
1 | function sleep(ms) {
2 |     return new Promise(resolve => {
3 |         setTimeout(resolve, ms);
4 |     })
5 | }
6 | 
7 | module.exports = {sleep}
--------------------------------------------------------------------------------
/config/download_settings.json:
--------------------------------------------------------------------------------
1 | {
2 |   "downloadTimeout": 300,
3 |   "selectorTimeout": 20000,
4 |   "navigationTimeout": 180000,
5 |   "launchTimeout": 120000,
6 |   "loginTimeout": 10000,
7 |   "pageOpenTimeout": 10000
8 | }
9 | 
--------------------------------------------------------------------------------
/team_file_scanner.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | set -eo pipefail
4 | 
5 | working_dir_path='./process'
6 | store_dir_path='./store'
7 | 
8 | downloads_dir_name=${working_dir_path}'/_downloads'
9 | get_list_log_name=${working_dir_path}'/get_figma_files_list_log.txt'
10 | download_log_name=${store_dir_path}'/download_figma_files_log.txt'
11 | partial_lists_names=${working_dir_path}'/figma_files_list*.json'
12 | 
13 | tmp_dir_path=$working_dir_path
14 | http_server_dir_path='./figma'
15 | 
16 | current_date=`date +"%Y-%m-%d"`
17 | 
18 | rm -rf ${working_dir_path}
19 | mkdir -p ${working_dir_path}/
20 | mkdir -p ${store_dir_path}/
21 | 
22 | ./get_figma_files_list.py 2>&1 | tee ${get_list_log_name}
--------------------------------------------------------------------------------
/team_file_download.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | 
3 | set -eo pipefail
4 | 
5 | partial_lists_names='./process/figma_files_list*.json'
6 | store_dir_path='./store'
7 | 
8 | list_names=`ls ${partial_lists_names}`
9 | 
10 | # Other
11 | figma_login=$1
12 | figma_password=$2
13 | 
14 | if [ -z $figma_login ]
15 | then
16 |     echo "figma login needed"
17 |     exit 1
18 | fi
19 | 
20 | if [ -z $figma_password ]
21 | then
22 |     echo "figma password needed"
23 |     exit 1
24 | fi
25 | 
26 | for list_name in ${list_names} ; do
27 |     echo -e "\n\nProcessing the file ${list_name}\n\n"
28 |     node ./figma_download_files_by_list.js "figmaLogin=${figma_login}" "figmaPassword=${figma_password}" "figmaFilesList=./${list_name}" 2>&1
29 |     sleep 30
30 | done
31 | 
--------------------------------------------------------------------------------
/figma_download_file_by_url.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/node
2 | 
3 | const {readSettings, open, login, downloadFile, close} = require('./figma_actions')
4 | 
5 | let figmaLogin = '';
6 | let figmaPassword = '';
7 | let figmaUrl = '';
8 | 
9 | const fs = require('fs');
10 | 
11 | process.argv.forEach((arg) => {
12 |     const keyValue = arg.split("=");
13 |     switch (keyValue[0]) {
14 |         case 'figmaLogin':
15 |             figmaLogin = keyValue[1];
16 |             break;
17 |         case 'figmaPassword':
18 |             figmaPassword = keyValue[1];
19 |             break;
20 |         case 'figmaUrl':
21 |             figmaUrl = keyValue[1];
22 |             break;
23 |     }
24 | });
25 | 
26 | if (!figmaLogin || !figmaPassword || !figmaUrl)
27 | {
28 |     console.log('Usage: node figma_download_file_by_url.js figmaLogin=<your login> figmaPassword=<your password> figmaUrl=<full file URL>');
29 |     process.exit(1);
30 | }
31 | 
32 | (async() => {
33 | 
34 |     const figmaFilesList = [
35 |         {
36 |             uri: figmaUrl,
37 |             path: './store/download',
38 |         }
39 |     ];
40 | 
41 |     const settings = readSettings();
42 |     const session = await open(settings);
43 |     session.figmaLogin = figmaLogin;
44 |     session.figmaPassword = figmaPassword;
45 | 
46 |     await login(session, settings);
47 | 
48 |     for(let i = 0; i < figmaFilesList.length; i++) {
49 |         const dwResult = await downloadFile(session, figmaFilesList[i], settings);
50 |         // This script downloads by URL, so the list items only carry uri/path.
51 |         console.log('download ' + figmaFilesList[i].uri + ' result: ' + dwResult);
52 |     }
53 |     close(session);
54 | })();
55 | 
--------------------------------------------------------------------------------
/figma_download_files_by_list.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/node
2 | 
3 | const {readSettings, open, login, downloadFile, close} = require('./figma_actions')
4 | 
5 | let figmaLogin = '';
6 | let figmaPassword = '';
7 | let figmaFilesListFile = '';
8 | 
9 | const fs = require('fs');
10 | 
11 | process.argv.forEach((arg) => {
12 |     const keyValue = arg.split("=");
13 |     switch (keyValue[0]) {
14 |         case 'figmaLogin':
15 |             figmaLogin = keyValue[1];
16 |             break;
17 |         case 'figmaPassword':
18 |             figmaPassword = keyValue[1];
19 |             break;
20 |         case 'figmaFilesList':
21 |             figmaFilesListFile = keyValue[1];
22 |             break;
23 |     }
24 | });
25 | 
26 | if (!figmaLogin || !figmaPassword || !figmaFilesListFile)
27 | {
28 |     console.log('Usage: node figma_download_files_by_list.js figmaLogin=<your login> figmaPassword=<your password> figmaFilesList=<path to figma_files_list JSON>');
29 |     process.exit(1);
30 | }
31 | 
32 | (async() => {
33 | 
34 |     // Read the list synchronously so it is guaranteed to be parsed
35 |     // before the download loop starts (fs.readFile is asynchronous).
36 |     const figmaFilesList = JSON.parse(fs.readFileSync(figmaFilesListFile));
37 | 
38 |     const settings = readSettings();
39 |     const session = await open(settings);
40 |     session.figmaLogin = figmaLogin;
41 |     session.figmaPassword = figmaPassword;
42 | 
43 |     await login(session, settings);
44 | 
45 |     for(let i = 0; i < figmaFilesList.length; i++) {
46 |         const dwResult = await downloadFile(session, figmaFilesList[i], settings);
47 |         console.log('download ('+(i+1)+'/'+figmaFilesList.length+') ' + figmaFilesList[i].key + ', ' + figmaFilesList[i].file + ' result: ' + dwResult);
48 |     }
49 |     close(session);
50 | })();
51 | 
52 | 
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | # A utility for creating local backups of .fig / .jam files
2 | 
3 | Requirements:
4 | 1. A bash shell
5 | 2. Node installed (tested with v16.13.0)
6 | 3. For the full mode, Python 3 installed (`get_figma_files_list.py` uses f-strings and a `python3` shebang)
7 | 
8 | ## Light mode
9 | 
10 | A simple mode is provided for downloading a single file.
11 | 
12 | To use it, from the project root:
13 | 1. Install the node dependencies (`npm i`)
14 | 2. Run `node figma_download_file_by_url.js figmaLogin=<your login> figmaPassword=<your password> figmaUrl=<full URL of the file to download>`.
15 | 
16 | In light mode all files are saved to the `./store/download` directory. If that directory is not empty, the files already in it are renamed.
17 | 
18 | ## Full mode
19 | 
20 | Requires Python and differs from the light mode as follows:
21 | 1. Downloads all files of the teams listed in `./config/get_figma_files_list.yml`
22 | 2. Checks each file's modification time in Figma and skips files that have not changed since the previous run
23 | 
24 | It works as follows (a scheduling example is given at the end of this readme):
25 | 1. Install the node dependencies (`npm i`)
26 | 2. Run the `team_file_scanner.sh` script to prepare the list of files to download. The file information is saved to `./process/figma_files_list*.json` in the following format:
27 | ```
28 | [
29 |     {
30 |         "key": "<file key>",
31 |         "project": "<project name>",
32 |         "team": "<team name>",
33 |         "file": "<file name>",
34 |         "last_modified": "<date of last modification>",
35 |         "path": "<path where the file will be saved>"
36 |     },
37 |     ...
38 |     {
39 |         "key": "<file key>",
40 |         "project": "<project name>",
41 |         "team": "<team name>",
42 |         "file": "<file name>",
43 |         "last_modified": "<date of last modification>",
44 |         "path": "<path where the file will be saved>"
45 |     }
46 | ]
47 | ```
48 | If a file already exists on disk, the time it was saved to disk is compared with its modification time in Figma. If there are no new changes, the file is excluded from the download list.
49 | On startup, `team_file_scanner.sh` deletes the previous `./process/figma_files_list*.json` files, if any.
50 | 3. To start downloading, run `team_file_download.sh <your login> <your password>`. The script iterates over the `./process/figma_files_list*.json` files and saves the downloads to `./store/TEAM <team name>/PROJECT <project name>/<file name>`. If a file was downloaded earlier, the previous version is kept with a timestamp appended to its name.
51 | 
52 | 
53 | ## Settings
54 | 
55 | 1. `./config/download_settings.json` - timeout settings for file downloads
56 |     1. `downloadTimeout` - how long to wait for a file to download, in seconds. 0 disables the timeout
57 |     2. `selectorTimeout` - how long to wait for the required menu items to appear on the page, in milliseconds
58 |     3. `navigationTimeout` - navigation timeout, in milliseconds
59 |     4. `launchTimeout` - timeout for launching headless Chrome, in milliseconds
60 |     5. `loginTimeout` - login wait timeout, in milliseconds
61 |     6. `pageOpenTimeout` - page open timeout, in milliseconds
62 | 
63 | 2. `./config/get_figma_files_list.yml` - settings for downloading team files
64 |     1. `access_token` - access token for Figma API calls
65 |     2. `teams` - list of team IDs (the ID can be found in the URL: https://www.figma.com/files/<...>/team/<team id>/...)
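
## Scheduling example

The two full-mode scripts can be chained into a single unattended run (for example from cron). The wrapper below is only a minimal sketch and is not part of the repository: the checkout path and the `FIGMA_LOGIN` / `FIGMA_PASSWORD` environment variables are illustrative assumptions to adapt to your setup.

```bash
#!/bin/bash
# Hypothetical wrapper, not included in the repo: build the download list,
# then fetch everything that changed. Both scripts use relative paths
# (./process, ./store), so they must be run from the repository root.
set -eo pipefail

cd /path/to/figma_backup    # adjust to your checkout location

./team_file_scanner.sh
./team_file_download.sh "${FIGMA_LOGIN}" "${FIGMA_PASSWORD}"
```

Keep in mind that the password is passed to `team_file_download.sh` as a plain command-line argument, so on a shared machine it is visible in the process list for the duration of the run.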
66 | 67 | -------------------------------------------------------------------------------- /get_figma_files_list.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python3 -u 2 | 3 | import json 4 | import os 5 | from datetime import datetime 6 | from pathlib import Path 7 | from urllib import request 8 | from urllib.error import HTTPError 9 | from yaml import load, Loader 10 | 11 | 12 | class FigmaFilesListGetter(object): 13 | def __init__(self, config_file_path: Path = Path('./config/get_figma_files_list.yml')): 14 | with open(config_file_path) as config_file: 15 | self.config = load(config_file, Loader) 16 | 17 | self.token_header = {'X-Figma-Token': self.config['access_token']} 18 | 19 | def _http_request( 20 | self, 21 | request_url: str, 22 | request_method: str = 'GET', 23 | request_headers: dict or None = None, 24 | request_data: bytes or None = None 25 | ) -> dict: 26 | http_request = request.Request(request_url, method=request_method) 27 | 28 | if request_headers: 29 | http_request.headers = request_headers 30 | 31 | if request_data: 32 | http_request.data = request_data 33 | 34 | try: 35 | with request.urlopen(http_request) as http_response: 36 | response_status = http_response.getcode() 37 | response_headers = http_response.info() 38 | response_data = http_response.read() 39 | 40 | except HTTPError as http_response_not_ok: 41 | response_status = http_response_not_ok.getcode() 42 | response_headers = http_response_not_ok.info() 43 | response_data = http_response_not_ok.read() 44 | 45 | return { 46 | 'status': response_status, 47 | 'headers': response_headers, 48 | 'data': response_data 49 | } 50 | 51 | def _get_team_projects(self, team_id: str) -> list: 52 | api_request_url = f'https://api.figma.com/v1/teams/{team_id}/projects' 53 | 54 | print(f'Getting team projects, requesting URL: {api_request_url}') 55 | 56 | api_response = self._http_request( 57 | api_request_url, 'GET', self.token_header) 58 | api_response_data = json.loads(api_response['data'].decode('utf-8')) 59 | 60 | print(f'Status: {api_response["status"]}') 61 | 62 | if api_response['status'] != 200 or api_response_data.get('err', None): 63 | print('Failed to perform API request') 64 | return [] 65 | 66 | team_name = api_response_data.get('name', team_id) 67 | team_projects = [] 68 | 69 | for project in api_response_data.get('projects', []): 70 | team_projects.append( 71 | { 72 | 'id': project['id'], 73 | 'team': team_name 74 | } 75 | ) 76 | 77 | return team_projects 78 | 79 | def _get_teams_projects(self) -> list: 80 | print('Getting the projects of all teams listed in config, if any, removing duplicates') 81 | 82 | teams_projects = [] 83 | 84 | for team_id in list(set(self.config.get('teams', []))): 85 | team_projects = self._get_team_projects(str(team_id)) 86 | teams_projects.extend(team_projects) 87 | 88 | return teams_projects 89 | 90 | def _merge_projects(self, teams_projects: list) -> list: 91 | print('Merging teams projects with projects listed in config, if any, removing duplicates') 92 | 93 | merged_projects = [] 94 | seen_projects_ids = set() 95 | 96 | for project in teams_projects: 97 | if project['id'] not in seen_projects_ids: 98 | merged_projects.append(project) 99 | seen_projects_ids.add(project['id']) 100 | 101 | for project_id in list(set(self.config.get('projects', []))): 102 | project_id = str(project_id) 103 | 104 | if project_id not in seen_projects_ids: 105 | merged_projects.append( 106 | { 107 | 'id': project_id, 108 | 'team': '' 109 | } 110 | ) 
111 | 112 | seen_projects_ids.add(project_id) 113 | 114 | return merged_projects 115 | 116 | def _get_project_files(self, project: dict) -> list: 117 | api_request_url = f'https://api.figma.com/v1/projects/{project["id"]}/files' 118 | 119 | print(f'Getting project files, requesting URL: {api_request_url}') 120 | 121 | api_response = self._http_request( 122 | api_request_url, 'GET', self.token_header) 123 | 124 | api_response_data = json.loads(api_response['data'].decode('utf-8')) 125 | 126 | print(f'Status: {api_response["status"]}') 127 | 128 | if api_response['status'] != 200 or api_response_data.get('err', None): 129 | print('Failed to perform API request') 130 | return [] 131 | 132 | project_name = api_response_data.get('name', project['id']) 133 | project_files = [] 134 | 135 | for file in api_response_data.get('files', []): 136 | # if not os.path.exists(f'./store/TEAM {project["team"]}'): 137 | # os.makedirs(f'./store/TEAM {project["team"]}') 138 | # if not os.path.exists(f'./store/TEAM {project["team"]}/PROJECT {project_name}/'): 139 | # os.makedirs( 140 | # f'./store/TEAM {project["team"]}/PROJECT {project_name}/') 141 | # if not os.path.exists(f'./store/TEAM {project["team"]}/PROJECT {project_name}/map/'): 142 | # os.makedirs( 143 | # f'./store/TEAM {project["team"]}/PROJECT {project_name}/map/') 144 | 145 | fileName = f'{file["name"]}' 146 | fileName = fileName.replace('/', '_') 147 | fileName = fileName.replace('?', '_') 148 | fileName = fileName.replace('"', '_') 149 | 150 | file_name_to_check = f'{fileName}.fig' 151 | map_file = f'./store/TEAM {project["team"]}/PROJECT {project_name}/map/{file["key"]}' 152 | 153 | if os.path.isfile(map_file): 154 | with open(map_file) as map_fl: 155 | tmp = map_fl.read() 156 | if tmp != 'none': 157 | file_name_to_check = tmp 158 | 159 | full_file_name = f'./store/TEAM {project["team"]}/PROJECT {project_name}/{file_name_to_check}' 160 | 161 | time = 0 162 | if os.path.isfile(full_file_name): 163 | time = os.path.getmtime(full_file_name) 164 | if os.path.isfile(map_file): 165 | os.remove(map_file) 166 | with open(map_file, "w") as text_file: 167 | text_file.write(file_name_to_check) 168 | else: 169 | file_name_to_check = f'{fileName}.jam' 170 | full_file_name = f'./store/TEAM {project["team"]}/PROJECT {project_name}/{file_name_to_check}' 171 | if os.path.isfile(full_file_name): 172 | time = os.path.getmtime(full_file_name) 173 | if os.path.isfile(map_file): 174 | os.remove(map_file) 175 | with open(map_file, "w") as text_file: 176 | text_file.write(file_name_to_check) 177 | 178 | updates = datetime.strptime( 179 | file["last_modified"], "%Y-%m-%dT%H:%M:%SZ").timestamp() 180 | print(f'time: {time}, updated: {updates}') 181 | if time < updates: 182 | print( 183 | f'File TEAM {project["team"]}/PROJECT {project_name}/{fileName}, key {file["key"]}, last modified: {file["last_modified"]} -> adding to the list') 184 | project_files.append( 185 | { 186 | 'key': file['key'], 187 | 'project': project_name, 188 | 'team': project['team'], 189 | 'file': file["name"], 190 | 'last_modified': file['last_modified'], 191 | 'path': f'./store/TEAM {project["team"]}/PROJECT {project_name}/' 192 | } 193 | ) 194 | else: 195 | print( 196 | f'File {project["team"]}/{project_name}/{file["name"]}, key {file["key"]}, last modified: {file["last_modified"]} not modifided') 197 | 198 | return project_files 199 | 200 | def _get_projects_files(self, projects: list) -> list: 201 | print('Getting the files of all projects') 202 | 203 | projects_files = [] 204 | 205 | for project 
in projects: 206 | project_files = self._get_project_files(project) 207 | projects_files.extend(project_files) 208 | 209 | return projects_files 210 | 211 | def _merge_files(self, projects_files: list) -> list: 212 | print('Merging projects files with files listed in config, if any, removing duplicates') 213 | 214 | merged_files = [] 215 | seen_files_keys = set() 216 | 217 | for file in projects_files: 218 | if file['key'] not in seen_files_keys: 219 | merged_files.append(file) 220 | seen_files_keys.add(file['key']) 221 | 222 | for file_key in list(set(self.config.get('files', []))): 223 | file_key = str(file_key) 224 | 225 | if file_key not in seen_files_keys: 226 | merged_files.append( 227 | { 228 | 'key': file_key, 229 | 'project': '', 230 | 'team': '' 231 | } 232 | ) 233 | 234 | seen_files_keys.add(file_key) 235 | 236 | return merged_files 237 | 238 | def perform(self) -> None: 239 | teams_projects = self._get_teams_projects() 240 | projects = self._merge_projects(teams_projects) 241 | projects_files = self._get_projects_files(projects) 242 | files = self._merge_files(projects_files) 243 | output_file_path = Path(self.config.get( 244 | 'output_file', './process/figma_files_list.json')) 245 | output_limit = self.config.get('output_limit', 0) 246 | 247 | if not output_limit or len(files) <= output_limit: 248 | print('Writing the single output file') 249 | 250 | with open(output_file_path, 'w', encoding='utf8') as output_file: 251 | json.dump(files, output_file, ensure_ascii=False, indent=4) 252 | 253 | else: 254 | print('Writing multiple output files with partial slices') 255 | 256 | parts_count = int((len(files) - 1) / output_limit) + 1 257 | 258 | for part_number in range(1, parts_count + 1): 259 | start = output_limit * (part_number - 1) 260 | stop = output_limit * (part_number) 261 | files_part = files[start:stop] 262 | 263 | partial_output_file_path = ( 264 | output_file_path.parent / 265 | f'{output_file_path.stem}_part_{part_number}{output_file_path.suffix}' 266 | ) 267 | 268 | with open(partial_output_file_path, 'w', encoding='utf8') as partial_output_file: 269 | json.dump(files_part, partial_output_file, 270 | ensure_ascii=False, indent=4) 271 | 272 | print('Done') 273 | 274 | 275 | if __name__ == "__main__": 276 | FigmaFilesListGetter().perform() 277 | -------------------------------------------------------------------------------- /figma_actions.js: -------------------------------------------------------------------------------- 1 | 2 | const loginPageUrl = 'https://www.figma.com/login'; 3 | const filePageBaseUrl = 'https://www.figma.com/file/'; 4 | const usernameSelector = 'input[name="email"]'; 5 | const passwordSelector = 'input[name="password"]'; 6 | const buttonSelector = 'button[type="submit"]'; 7 | const filebrowserSelector = '#filebrowser-loading-page'; 8 | 9 | const puppeteer = require('puppeteer'); 10 | const fs = require('fs'); 11 | const fse = require('fs-extra'); 12 | const {sleep} = require('./utils'); 13 | const log4js = require('log4js'); 14 | 15 | const logger = log4js.getLogger(); 16 | logger.level = "info"; 17 | log4js.configure({ 18 | appenders: { 19 | out: { type: 'stdout', layout: { 20 | type: 'pattern', 21 | pattern: '%d{yyyy-MM-dd hh:mm:ss} %m' 22 | }} 23 | }, 24 | categories: { default: { appenders: ['out'], level: 'info' } } 25 | }); 26 | 27 | function readSettings() { 28 | const settings = { 29 | downloadTimeout: 360, 30 | selectorTimeout: 5000, 31 | navigationTimeout: 180000, 32 | launchTimeout: 120000, 33 | loginTimeout: 10000, 34 | 
pageOpenTimeout: 10000, 35 | pageLoadTimeout: 60000, 36 | downloadsBaseDir: './process/_downloads', 37 | debugDir: './process/debug/', 38 | doDebug: false, 39 | saveScreenOnError: true 40 | } 41 | 42 | if (fs.existsSync('./config/download_settings.json')) { 43 | const data = fs.readFileSync('./config/download_settings.json'); 44 | newSettings = JSON.parse(data); 45 | 46 | settings.downloadTimeout = newSettings.downloadTimeout ?? settings.downloadTimeout; 47 | settings.selectorTimeout = newSettings.selectorTimeout ?? settings.selectorTimeout; 48 | settings.navigationTimeout = newSettings.navigationTimeout ?? settings.navigationTimeout; 49 | settings.launchTimeout = newSettings.launchTimeout ?? settings.launchTimeout; 50 | settings.loginTimeout = newSettings.loginTimeout ?? settings.loginTimeout; 51 | settings.pageOpenTimeout = newSettings.pageOpenTimeout ?? settings.pageOpenTimeout; 52 | settings.doDebug = newSettings.doDebug ?? settings.doDebug; 53 | settings.saveScreenOnError = newSettings.saveScreenOnError ?? settings.saveScreenOnError; 54 | settings.pageLoadTimeout = newSettings.pageLoadTimeout ?? settings.pageLoadTimeout; 55 | } 56 | 57 | return settings; 58 | } 59 | 60 | async function open(settings) { 61 | const browser = await puppeteer.launch({args: ['--no-sandbox', '--disable-setuid-sandbox', '--disable-dev-shm-usage '], timeout: settings.launchTimeout, headless: true}); 62 | const page = await browser.newPage(); 63 | page.setDefaultNavigationTimeout(settings.navigationTimeout); 64 | 65 | return {browser, page} 66 | } 67 | 68 | async function login(session, settings) { 69 | try { 70 | // Opening the login page 71 | logger.info('Opening the login page'); 72 | await session.page.goto(loginPageUrl, {waitUntil: 'networkidle2'}); 73 | 74 | 75 | // Filling the email field 76 | logger.info('Filling the email field'); 77 | await session.page.focus(usernameSelector, {delay: 500}); 78 | await session.page.keyboard.type(session.figmaLogin); 79 | 80 | 81 | // Filling the password field 82 | logger.info('Filling the password field'); 83 | await session.page.focus(passwordSelector, {delay: 500}); 84 | await session.page.keyboard.type(session.figmaPassword); 85 | 86 | 87 | // Clicking the submit button 88 | logger.info('Clicking the submit button'); 89 | await session.page.click(buttonSelector, {delay: 500}); 90 | logger.info('Waiting for an after-login page opening'); 91 | 92 | 93 | // Sleeping for 10 seconds 94 | await sleep(settings.loginTimeout); 95 | 96 | // Waiting for an after-login page loading 97 | await session.page.waitForSelector(filebrowserSelector); 98 | } catch (err) { 99 | console.error(err); 100 | process.exit(1); 101 | } 102 | } 103 | 104 | const HTTP_NOT_200 = -1; 105 | const SAVE_NOT_ALLOWED = -2; 106 | const BAD_FILE_FORMAT = -3; 107 | const CANNOT_OPEN_MAIN_MENU = -4; 108 | const CANNOT_OPEN_FILE_MENU = -5; 109 | const DOWNLOAD_FAILED = -6; 110 | const UNKNOWN_ERROR = -7; 111 | 112 | async function downloadFile(session, file, settings) { 113 | let debugDir = settings.debugDir; 114 | let page = session.page; 115 | 116 | let filePage = "" 117 | if (file.uri && file.uri.startsWith("http")) { 118 | filePage = file.uri 119 | } else if (file.uri) { 120 | filePage = filePageBaseUrl + file.uri 121 | if (!filePage.endsWith("/")) { 122 | filePage += "/" 123 | } 124 | } else { 125 | filePage = filePageBaseUrl + file.key + '/'; 126 | } 127 | logger.info('Starting to process file, url: ' + filePage); 128 | 129 | try { 130 | // Opening a file page 131 | let filePageResponse = await 
page.goto(filePage, {waitUntil: 'networkidle2'}); 132 | // Checking status code 133 | if(filePageResponse.status() !== 200) { 134 | logger.info(`Skipping the file, status = ${filePageResponse.status()}`); 135 | return HTTP_NOT_200; 136 | } 137 | // Sleeping for 10 seconds, waiting for a specific element in React-generated content 138 | await sleep(settings.pageOpenTimeout); 139 | await page.waitForSelector('[data-testid="set-tool-default"]', {timeout: settings.pageLoadTimeout}); 140 | // Checking if the file is available to save locally 141 | let content = await page.content(); 142 | 143 | if(content.includes('="Viewers can\'t copy or share this file."')) { 144 | logger.info('This file is protected against saving locally and sharing. Skipping') 145 | return SAVE_NOT_ALLOWED; 146 | } 147 | // Getting and validating page title 148 | const title = await page.title(); 149 | const fileName = title.replace(' – Figma', '').replaceAll('/', '_').replaceAll('|', '_').replaceAll('"', '_'); 150 | if(!title.endsWith(' – Figma')) { 151 | logger.info(`Title format "${title}" seems to be unrecognized, skipping the file`) 152 | if (settings.debugDir && (settings.saveScreenOnError || settings.doDebug)) { 153 | await page.screenshot({path: debugDir + fileName +'_screenshot.png', fullPage: true}); 154 | } 155 | return BAD_FILE_FORMAT; 156 | } 157 | // Getting the local path of the directory for the file to download 158 | let downloadDir = file.path; 159 | 160 | if(!downloadDir.endsWith('/')) { 161 | downloadDir += '/'; 162 | } 163 | let downloadsBaseDir = settings.downloadsBaseDir; 164 | let tmpDownloadDir = `${downloadsBaseDir}/tmp/`; 165 | 166 | for (let t = 1; t < 1000; t++) { 167 | if (!fs.existsSync(tmpDownloadDir)) { 168 | break; 169 | } 170 | tmpDownloadDir = `${downloadsBaseDir}/tmp${t}/`; 171 | } 172 | fs.mkdirSync(tmpDownloadDir, {recursive: true}); 173 | 174 | logger.info(`Tmp directory to save the file: ${tmpDownloadDir}`); 175 | 176 | fs.mkdirSync(downloadDir, {recursive: true}); 177 | 178 | // Set download behavior 179 | await page._client.send('Page.setDownloadBehavior', {behavior: 'allow', downloadPath: tmpDownloadDir}); 180 | 181 | // Debug: making screenshot and saving the page content 182 | if(settings.debugDir && settings.doDebug) { 183 | fs.writeFile(debugDir + title + '_content' + '.html', content, () => {}); 184 | await page.screenshot({path: debugDir + fileName + '_screenshot' + '.png', fullPage: true}); 185 | } 186 | 187 | 188 | await page.evaluate(_ => { 189 | const mainMenu = document.querySelector('div[data-tooltip="main-menu"]'); 190 | const clickEvt = document.createEvent("MouseEvents"); 191 | clickEvt.initEvent("mousedown", true, true); 192 | mainMenu.dispatchEvent(clickEvt); 193 | }); 194 | 195 | await sleep(500); 196 | 197 | let menuItemFileHandle = null; 198 | let box = {x: 8, y: 143}; 199 | try { 200 | menuItemFileHandle = await page.waitForSelector('div[data-testid="dropdown-option-File"]', {timeout: settings.selectorTimeout}); 201 | box = await menuItemFileHandle.boundingBox(); 202 | } catch (error) { 203 | logger.info('cannot open main menu\n', error); 204 | if(debugDir && !menuItemFileHandle && (settings.doDebug || settings.saveScreenOnError)) { 205 | fs.writeFile(debugDir + fileName + '_main_menu.html', content, () => {}); 206 | await page.screenshot({path: debugDir + fileName + '_main_menu_screenshot.png', fullPage: true}); 207 | } 208 | return CANNOT_OPEN_MAIN_MENU; 209 | } 210 | 211 | await page.mouse.move(box.x + 5, box.y + 5); 212 | await sleep(500); 213 | 214 | 
let submenuFileHandle = null 215 | let saveFileBox = {x: 0, y: 0}; 216 | try { 217 | submenuFileHandle = await page.waitForSelector('div[data-testid="dropdown-option-Save local copy…"]', {timeout: settings.selectorTimeout}); 218 | saveFileBox = await submenuFileHandle.boundingBox(); 219 | } catch (error) { 220 | if (debugDir && !submenuFileHandle && (settings.doDebug || settings.saveScreenOnError)) { 221 | fs.writeFile(debugDir + fileName + '_file_menu' + '.html', content, () => {}); 222 | await page.screenshot({path: debugDir + fileName + '_file_menu_screenshot' + '.png', fullPage: true}); 223 | } 224 | } 225 | 226 | // const saveFileBox = await submenuFileHandle.boundingBox(); 227 | if (!saveFileBox.x || !saveFileBox.y) { 228 | try { 229 | const newFileItem = await page.waitForSelector('div[data-testid="dropdown-option-New design file"]', {timeout: settings.selectorTimeout}); 230 | if (newFileItem) { 231 | logger.info('cannot save copy export not allowed') 232 | return SAVE_NOT_ALLOWED; 233 | } 234 | } catch (error) { 235 | //ignore 236 | } 237 | logger.info('cannot select Save local copy… menu item'); 238 | return CANNOT_OPEN_FILE_MENU; 239 | } 240 | 241 | await page.mouse.move(saveFileBox.x + 5, saveFileBox.y + 5); 242 | await page.mouse.click(saveFileBox.x + 5, saveFileBox.y + 5); 243 | 244 | let downloadedCheckTries = settings.downloadTimeout; 245 | 246 | let downloaded = false; 247 | let downloadError = false; 248 | 249 | for(let j = 0; downloadedCheckTries == 0 || j < downloadedCheckTries; j++) { 250 | await sleep(1000); 251 | fs.readdir(tmpDownloadDir, (err, files) => { 252 | try { 253 | let donwloadedFile = '' 254 | if (files && files.length > 0 && (files[0].toLowerCase().endsWith(".jam") || files[0].toLowerCase().endsWith(".fig"))) { 255 | donwloadedFile = files[0] 256 | } 257 | 258 | 259 | if (donwloadedFile.length) { 260 | if (fs.existsSync(downloadDir + donwloadedFile)) { 261 | const tmpFileName = donwloadedFile.lastIndexOf('.'); 262 | const now = new Date().toISOString().substring(0, 19).replaceAll('T', '_').replaceAll(':','-'); 263 | fse.moveSync(downloadDir + donwloadedFile, downloadDir + donwloadedFile.substring(0, tmpFileName) + '_' + now + donwloadedFile.substring(tmpFileName)) 264 | } 265 | fse.moveSync(tmpDownloadDir + donwloadedFile, downloadDir + donwloadedFile); 266 | 267 | if (file.key) { 268 | if (!fs.existsSync(downloadDir + 'map/')) { 269 | fs.mkdirSync(downloadDir + 'map/', {recursive: true}); 270 | } 271 | if (file.key) { 272 | if (fs.existsSync(downloadDir + 'map/' + file.key)) { 273 | fs.rmSync(downloadDir + 'map/' + file.key); 274 | } 275 | fs.writeFileSync(downloadDir + 'map/' + file.key, donwloadedFile) 276 | } 277 | } 278 | 279 | downloaded = true; 280 | } 281 | } catch (error) { 282 | logger.info(`cannot define file ${title} download`, error); 283 | downloadError = true; 284 | } 285 | }); 286 | 287 | if (downloadError) { 288 | logger.info(`Download ${title} failed`); 289 | return DOWNLOAD_FAILED; 290 | } 291 | 292 | if (downloaded) { 293 | logger.info('Download complete'); 294 | break; 295 | } 296 | 297 | if (j % 30 == 0) { 298 | logger.info(`waiting file to download for ${parseInt(j / 60)} min ${j % 60} sec.`) 299 | } 300 | 301 | if(downloadedCheckTries > 0 && j === (downloadedCheckTries - 1)) { 302 | logger.info(`File ${title} is not downloaded during timeout`) 303 | } 304 | } 305 | 306 | } catch (err) { 307 | console.error(err); 308 | return UNKNOWN_ERROR; 309 | } 310 | 311 | try { 312 | if (fs.existsSync(tmpDownloadDir)) { 313 | 
fs.rmdirSync(tmpDownloadDir); 314 | } 315 | } catch (err) { 316 | 317 | } 318 | 319 | return 0; 320 | } 321 | 322 | function close(session) { 323 | session.browser.close(); 324 | } 325 | 326 | module.exports = {readSettings, open, login, downloadFile, close} --------------------------------------------------------------------------------