├── .gitignore ├── .prettierignore ├── block.json ├── .block ├── remote.json └── development.remote.json ├── tsconfig.json ├── assets ├── json-block-run.png ├── json-block-screenshot.png ├── json-block-remix-github.png └── json-mapping-screenshot.png ├── babel.config.js ├── .eslintignore ├── .prettierrc.json ├── frontend ├── headersValuesParsingHelpers.ts ├── setAsyncCommon.ts ├── jsonToHeadersValuesParsingHelpers.ts ├── jsonParsingHelpers.ts ├── jsonFilteringHelpers.ts ├── RecordPreview.tsx ├── nonblocking.tsx ├── JsonImage.tsx ├── FieldMapping.tsx ├── SettingsStore.ts ├── FileDropper.tsx ├── supportedFieldTypes.ts ├── RecordPreviewList.tsx ├── Main.tsx ├── headersValuesMappingHelpers.ts └── ParsedDataReview.tsx ├── tsconfig.base.json ├── .eslintrc.json ├── .eslintrc.typescript-base.json ├── package.json ├── README.md └── .eslintrc-base.json /.gitignore: -------------------------------------------------------------------------------- 1 | /node_modules 2 | /.airtableblocksrc.json 3 | /build -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | **/node_modules/** 2 | **/flow-typed/** 3 | */build/** 4 | -------------------------------------------------------------------------------- /block.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "1.0", 3 | "frontendEntry": "./frontend/Main.tsx" 4 | } 5 | -------------------------------------------------------------------------------- /.block/remote.json: -------------------------------------------------------------------------------- 1 | { 2 | "blockId": "blk0vYeu36uurKBtb", 3 | "baseId": "appPvDYfRJLQr5zz7" 4 | } 5 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "../tsconfig.base.json", 3 
| "include": ["frontend/**/*"] 4 | } 5 | -------------------------------------------------------------------------------- /.block/development.remote.json: -------------------------------------------------------------------------------- 1 | { 2 | "blockId": "blk0vYeu36uurKBtb", 3 | "baseId": "appPvDYfRJLQr5zz7" 4 | } 5 | -------------------------------------------------------------------------------- /assets/json-block-run.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SiliconValleyInsight/airtable-json-block/HEAD/assets/json-block-run.png -------------------------------------------------------------------------------- /assets/json-block-screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SiliconValleyInsight/airtable-json-block/HEAD/assets/json-block-screenshot.png -------------------------------------------------------------------------------- /assets/json-block-remix-github.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SiliconValleyInsight/airtable-json-block/HEAD/assets/json-block-remix-github.png -------------------------------------------------------------------------------- /assets/json-mapping-screenshot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SiliconValleyInsight/airtable-json-block/HEAD/assets/json-mapping-screenshot.png -------------------------------------------------------------------------------- /babel.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | presets: [['@babel/preset-env', {targets: {node: 'current'}}], '@babel/preset-typescript'], 3 | }; 4 | -------------------------------------------------------------------------------- /.eslintignore: 
-------------------------------------------------------------------------------- 1 | */block_client_wrapper.js 2 | */bundle.js 3 | **/node_modules/* 4 | **/flow-typed/* 5 | */build/* 6 | web_clipper/chrome_extension/build -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "singleQuote": true, 3 | "trailingComma": "all", 4 | "bracketSpacing": false, 5 | "tabWidth": 4, 6 | "printWidth": 100 7 | } 8 | -------------------------------------------------------------------------------- /frontend/headersValuesParsingHelpers.ts: -------------------------------------------------------------------------------- 1 | import * as CSV from 'csv-string'; 2 | 3 | export function parseHeadersValuesStringSync(parsedString: string) { 4 | return CSV.parse(parsedString); 5 | } 6 | -------------------------------------------------------------------------------- /tsconfig.base.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "noEmit": true, 4 | "target": "es2018", 5 | "module": "commonjs", 6 | "esModuleInterop": true, 7 | "isolatedModules": true, 8 | "jsx": "preserve", 9 | "strict": true 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "./.eslintrc.typescript-base.json", 3 | "plugins": ["@airtable/blocks"], 4 | "rules": { 5 | "@airtable/blocks/no-throw-new": "error", 6 | "@airtable/blocks/no-node-modules-invariant": "error", 7 | "@airtable/blocks/no-error-interpolation": ["error", {"spawnError": 0, "invariant": 1}] 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /.eslintrc.typescript-base.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "extends": ["./.eslintrc-base.json", "plugin:@typescript-eslint/eslint-recommended"], 3 | "parser": "@typescript-eslint/parser", 4 | "parserOptions": { 5 | "sourceType": "module" 6 | }, 7 | "plugins": ["@typescript-eslint"], 8 | "rules": { 9 | "no-unused-vars": "off", 10 | "@typescript-eslint/no-unused-vars": ["error", {"vars": "all", "args": "none"}] 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /frontend/setAsyncCommon.ts: -------------------------------------------------------------------------------- 1 | export async function setStateAsync(context, partialState) { 2 | return new Promise((resolve, reject) => { 3 | context.setState(partialState, () => { 4 | resolve(); 5 | }); 6 | }); 7 | } 8 | 9 | export async function setTimeoutAsync(timeout) { 10 | return new Promise((resolve, reject) => { 11 | setTimeout(() => { 12 | resolve(); 13 | }, timeout); 14 | }); 15 | } 16 | -------------------------------------------------------------------------------- /frontend/jsonToHeadersValuesParsingHelpers.ts: -------------------------------------------------------------------------------- 1 | import Papa from 'papaparse'; 2 | 3 | export async function parseJsonToHeadersValuesAsync({data}) { 4 | let parsedData; 5 | 6 | const parseDataPromise = new Promise((resolve, reject) => { 7 | try { 8 | parsedData = Papa.parse(Papa.unparse(data)); 9 | resolve(); 10 | } catch (error) { 11 | console.error(error); 12 | resolve(); 13 | } 14 | }); 15 | 16 | await parseDataPromise; 17 | 18 | return parsedData; 19 | } 20 | -------------------------------------------------------------------------------- /frontend/jsonParsingHelpers.ts: -------------------------------------------------------------------------------- 1 | export async function parseJsonFileAsync(file: File) { 2 | let parsedJsonData; 3 | 4 | const parseJsonPromise = new Promise((resolve, 
reject) => { 5 | const reader = new FileReader(); 6 | reader.onload = (event) => { 7 | parsedJsonData = {}; 8 | parsedJsonData.data = JSON.parse(event.target.result); 9 | resolve(); 10 | }; 11 | 12 | reader.onerror = () => { 13 | console.error(reader.error); 14 | reader.abort(); 15 | parsedJsonData = null; 16 | resolve(); 17 | }; 18 | 19 | reader.readAsText(file); 20 | }); 21 | 22 | await parseJsonPromise; 23 | 24 | return parsedJsonData; 25 | } 26 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "private": true, 3 | "scripts": { 4 | "types": "./node_modules/.bin/tsc -b" 5 | }, 6 | "dependencies": { 7 | "@airtable/blocks": "0.0.41", 8 | "@airtable/blocks-cli": "0.0.44", 9 | "@types/classnames": "^2.2.9", 10 | "@types/jsonpath": "^0.2.0", 11 | "classnames": "2.2.5", 12 | "csv-string": "3.1.7", 13 | "invariant": "2.2.4", 14 | "jsonpath": "1.0.1", 15 | "lodash": "4.17.15", 16 | "papaparse": "4.3", 17 | "react": "^16.8.4", 18 | "react-dom": "^16.8.4", 19 | "react-virtualized": "^9.21.2" 20 | }, 21 | "devDependencies": { 22 | "@babel/core": "^7.8.4", 23 | "@babel/preset-env": "^7.8.4", 24 | "@babel/preset-typescript": "^7.8.3", 25 | "@types/jest": "^25.1.1", 26 | "@types/react": "^16.9.11", 27 | "@types/react-dom": "^16.9.4", 28 | "babel-jest": "^25.1.0", 29 | "source-map-loader": "^0.2.4", 30 | "typescript": "^3.8.3" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /frontend/jsonFilteringHelpers.ts: -------------------------------------------------------------------------------- 1 | export async function filterJsonAsync({data}) { 2 | let filteredJsonData; 3 | 4 | const filterJsonPromise = new Promise((resolve, reject) => { 5 | try { 6 | filteredJsonData = {}; 7 | filteredJsonData.data = filterArrays(data); 8 | resolve(); 9 | } catch (error) { 10 | console.error(error); 11 | 
resolve(); 12 | } 13 | }); 14 | 15 | await filterJsonPromise; 16 | 17 | return filteredJsonData; 18 | } 19 | 20 | export const filterArrays = (data) => { 21 | if (!Array.isArray(data)) { 22 | return data; 23 | } 24 | return data.map((item) => { 25 | if (Array.isArray(item)) { 26 | return item; 27 | } 28 | Object.keys(item).forEach((key) => { 29 | const value = item[key]; 30 | if (Array.isArray(value)) { // if value is an array of objects 31 | if (value.length > 0 && typeof value[1] === 'object' && value[1] !== null) { 32 | delete item[key]; 33 | } 34 | } else if (typeof value === 'object' && value !== null) { // if value is an object 35 | delete item[key]; 36 | } 37 | }); 38 | return item; 39 | }); 40 | }; 41 | -------------------------------------------------------------------------------- /frontend/RecordPreview.tsx: -------------------------------------------------------------------------------- 1 | import {RecordCard} from '@airtable/blocks/ui'; 2 | 3 | import {fieldTypes, Table} from '@airtable/blocks/models'; 4 | import React from 'react'; 5 | import _ from 'lodash'; 6 | 7 | type RecordPreviewProps = { 8 | table: Table; 9 | cellValuesByFieldId: object; 10 | width: number; 11 | className: string; 12 | style: object; 13 | }; 14 | 15 | class RecordPreview extends React.PureComponent { 16 | render() { 17 | const {table, cellValuesByFieldId, width, className, style} = this.props; 18 | 19 | // record card currently doesn't support displaying manually specified linked records 20 | const fieldIds = _.keys(cellValuesByFieldId).filter(fieldId => { 21 | const field = table.getFieldByIdIfExists(fieldId); 22 | return field.type !== fieldTypes.MULTIPLE_RECORD_LINKS; 23 | }); 24 | const fields = fieldIds.map(fieldId => table.getFieldByIdIfExists(fieldId)); 25 | 26 | return ( 27 | 34 | ); 35 | } 36 | } 37 | 38 | export default RecordPreview; 39 | -------------------------------------------------------------------------------- /frontend/nonblocking.tsx: 
-------------------------------------------------------------------------------- 1 | // Copied from the chart block. Ideally we'd be able to share code between blocks 2 | /* helper functions for iterating over an array without blocking the UI thread */ 3 | const BUDGET_MS = 10; 4 | 5 | const schedule = (callback: () => void) => window.requestAnimationFrame(callback); 6 | 7 | async function forEachAsync( 8 | array: Array, 9 | callback: (value: T, index: number, array: Array) => unknown, 10 | ): Promise { 11 | await new Promise((resolve, reject) => { 12 | let i = 0; 13 | const len = array.length; 14 | 15 | const step = () => { 16 | const stepEnd = Date.now() + BUDGET_MS; 17 | while (i < len && Date.now() < stepEnd) { 18 | try { 19 | callback(array[i], i, array); 20 | } catch (err) { 21 | reject(err); 22 | return; 23 | } 24 | i += 1; 25 | } 26 | 27 | if (i >= len) { 28 | resolve(); 29 | } else { 30 | schedule(step); 31 | } 32 | }; 33 | 34 | schedule(step); 35 | }); 36 | } 37 | 38 | async function mapAsync( 39 | array: Array, 40 | callback: (item: T, index: number, array: Array) => U, 41 | ): Promise> { 42 | let results: Array = []; 43 | await forEachAsync(array, (item, i, arr) => { 44 | results.push(callback(item, i, arr)); 45 | }); 46 | return results; 47 | } 48 | 49 | export default { 50 | forEachAsync, 51 | mapAsync, 52 | }; 53 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # 📊 JSON Import Block for Airtable 2 | ![](https://img.shields.io/badge/typescript-^3.8.3-blue) ![](https://img.shields.io/badge/@airtable/blocks-0.0.41-green) ![](https://img.shields.io/badge/@airtable/blocks--cli-0.0.44-green) 3 | 4 | Written in TypeScript. Import data from JSON files, and query with [JSONPath](https://jsonpath.com/). 5 | 6 | Built by the [SVI Team](https://svi.io). 
Featured on Airtable's [Developers Blocks](https://airtable.com/developers/blocks/examples). 7 | 8 | Looking for help with your software project? Email us to chat at [info@siliconvalleyinsight.com](mailto:info@siliconvalleyinsight.com)! 9 | 10 | ## Quick Start 11 | 12 | ### Recommended method: Remix from GitHub 13 | 14 | 1. On your Airtable base, open the Blocks sidebar on the right, and click **Install a block** 15 | 1. On the modal that opens, click **Build a custom block** 16 | 1. Select **Remix from GitHub** on the **Build a block** modal 17 | 1. Enter `JSON import Block` in the **Block name** field 18 | 1. Enter `https://github.com/SiliconValleyInsight/airtable-json-block` in the **GitHub repository** field 19 | 20 | ![Remix from GitHub](assets/json-block-remix-github.png) 21 | 22 | 1. Click **Create block**, and follow the instructions on the next screens to run and test the Block locally 23 | 24 | ### Manual method (no longer recommended) 25 | 26 | 1. Clone this git repo 27 | ```console 28 | $ git clone git@github.com:SiliconValleyInsight.com/airtable-json-block.git 29 | ``` 30 | 31 | 1. Install necessary packages with npm 32 | 33 | ```console 34 | $ cd airtable-json-block/json_import 35 | $ npm install @airtable/blocks-cli 36 | $ npm install @airtable/blocks 37 | $ npm install 38 | ``` 39 | 40 | 1. Follow [this guide](https://airtable.com/developers/blocks/guides/hello-world-tutorial) to setup a new Block for your base 41 | 1. Take note of the Block ID (format: `blkxxxxxxxxx`) and Base ID (format: `appxxxxxxxxx`) from the guide above, modify the `baseID` and `blockID` in [remote.json](.block/remote.json), and save 42 | 1. Run `$ block run` and ensure that the Block is running locally 43 | 1. 
On your newly created Block from step 3, enter `htps://localhost:9000` as the URL and click **"Start editing block"** 44 | 45 | ![Block Edit Screen](assets/json-block-run.png) 46 | 47 | ## Developing 48 | 49 | - Make sure to follow Airtable's style guides for React and TypeScript (refer to Airtable's developer docs) 50 | - Install [eslint](https://eslint.org/) and [prettier](https://prettier.io/), and make sure to run both before committing a file 51 | 52 | ## Screenshots 53 | 54 | ![JSON Import Block](assets/json-block-screenshot.png) 55 | 56 | _Block dashboard screen_ 57 | 58 | ![JSON Import Block mapping screen](assets/json-mapping-screenshot.png) 59 | 60 | _JSON import and mapping screen_ 61 | -------------------------------------------------------------------------------- /frontend/JsonImage.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | 3 | type JsonImageProps = { 4 | width: number; 5 | height: number; 6 | fill: string; 7 | }; 8 | 9 | const JsonImage = (props: JsonImageProps) => ( 10 | 19 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | ); 34 | 35 | export default JsonImage; 36 | -------------------------------------------------------------------------------- /frontend/FieldMapping.tsx: -------------------------------------------------------------------------------- 1 | import {viewport} from '@airtable/blocks'; 2 | import { 3 | Tooltip, 4 | Icon, 5 | Select, 6 | useViewport, 7 | useWatchable, 8 | withHooks, 9 | Switch, 10 | } from '@airtable/blocks/ui'; 11 | import React from 'react'; 12 | 13 | import classNames from 'classnames'; 14 | 15 | type FieldMappingProps = { 16 | field: object; 17 | parsedHeaders: Array; 18 | selectedIndex: number; 19 | onChange: (newHeaderIndex: number) => void; 20 | isEnabled: (isEnabled: boolean) => void; 21 | onToggle: () => void; 22 | className: string; 23 | style: object; 24 | isFieldTypeSupported: boolean; 25 | }; 26 | 27 | class FieldMapping extends 
React.Component { 28 | render() { 29 | const { 30 | parsedHeaders, 31 | selectedIndex, 32 | isEnabled, 33 | onToggle, 34 | field, 35 | onChange, 36 | className, 37 | style, 38 | isFieldTypeSupported, 39 | } = this.props; 40 | const toggleWidth = viewport.size.width * 0.4 * 0.33; 41 | 42 | return ( 43 |
53 | 58 |
65 | 71 | {field.name} 72 |
73 | } 74 | className="p1 flex-none width-full" 75 | /> 76 |
77 | 78 | {isEnabled && } 79 | {isEnabled && ( 80 |
81 | (this._fileInput = el)} 169 | accept={`.${fileType},${mimeTypes.join(',')}`} 170 | onChange={this._onFileInputChange} 171 | /> 172 | {viewport.size.height > 150 && ( 173 | 178 | )} 179 |
180 | Drop a {fileText} to import 181 |
182 |
Or click to choose a file...
183 |
184 | {this.props.learnMoreUrl && ( 185 | e.stopPropagation()} 187 | href={this.props.learnMoreUrl} 188 | target="_blank" 189 | rel="noopener noreferrer" 190 | className="flex-none mt2 p1 rounded small text-blue darken1-hover" 191 | > 192 | Learn more 193 | 194 | )} 195 | 196 | ); 197 | } 198 | } 199 | 200 | export default withHooks(FileDropper, () => { 201 | useWatchable(viewport, 'size'); 202 | return {}; 203 | }); 204 | -------------------------------------------------------------------------------- /frontend/supportedFieldTypes.ts: -------------------------------------------------------------------------------- 1 | import {base} from '@airtable/blocks'; 2 | import {fieldTypes} from '@airtable/blocks/models'; 3 | import _ from 'lodash'; 4 | import {parseHeadersValuesStringSync} from './headersValuesParsingHelpers'; 5 | 6 | const supportedFieldTypes = [ 7 | fieldTypes.SINGLE_LINE_TEXT, 8 | fieldTypes.EMAIL, 9 | fieldTypes.URL, 10 | fieldTypes.MULTILINE_TEXT, 11 | fieldTypes.NUMBER, 12 | fieldTypes.CURRENCY, 13 | fieldTypes.PERCENT, 14 | fieldTypes.SINGLE_SELECT, 15 | fieldTypes.MULTIPLE_SELECTS, 16 | fieldTypes.SINGLE_COLLABORATOR, 17 | fieldTypes.MULTIPLE_COLLABORATORS, 18 | fieldTypes.MULTIPLE_RECORD_LINKS, 19 | fieldTypes.DATE, 20 | fieldTypes.DATE_TIME, 21 | fieldTypes.PHONE_NUMBER, 22 | fieldTypes.CHECKBOX, 23 | fieldTypes.RATING, 24 | fieldTypes.DURATION, 25 | ]; 26 | 27 | const supportedFieldTypesForLinkedTablePrimaryField = [ 28 | ...supportedFieldTypes, 29 | fieldTypes.FORMULA, 30 | fieldTypes.AUTO_NUMBER, 31 | ]; 32 | 33 | export const fieldConfigByType = { 34 | [fieldTypes.SINGLE_LINE_TEXT]: { 35 | key: fieldTypes.SINGLE_LINE_TEXT, 36 | convertParsedValueToCellValue, 37 | }, 38 | [fieldTypes.EMAIL]: { 39 | key: fieldTypes.EMAIL, 40 | convertParsedValueToCellValue, 41 | }, 42 | [fieldTypes.URL]: { 43 | key: fieldTypes.URL, 44 | convertParsedValueToCellValue, 45 | }, 46 | [fieldTypes.MULTILINE_TEXT]: { 47 | key: fieldTypes.MULTILINE_TEXT, 48 | 
convertParsedValueToCellValue, 49 | }, 50 | [fieldTypes.NUMBER]: { 51 | key: fieldTypes.NUMBER, 52 | convertParsedValueToCellValue, 53 | }, 54 | [fieldTypes.CURRENCY]: { 55 | key: fieldTypes.CURRENCY, 56 | convertParsedValueToCellValue, 57 | }, 58 | [fieldTypes.PERCENT]: { 59 | key: fieldTypes.PERCENT, 60 | convertParsedValueToCellValue, 61 | }, 62 | [fieldTypes.SINGLE_SELECT]: { 63 | key: fieldTypes.SINGLE_SELECT, 64 | convertParsedValueToCellValue, 65 | helpMessage: 'Create new select options to import these values', 66 | }, 67 | [fieldTypes.MULTIPLE_SELECTS]: { 68 | key: fieldTypes.MULTIPLE_SELECTS, 69 | convertParsedValueToCellValue, 70 | helpMessage: 'Create new select options to import these values', 71 | }, 72 | [fieldTypes.SINGLE_COLLABORATOR]: { 73 | key: fieldTypes.SINGLE_COLLABORATOR, 74 | convertParsedValueToCellValue, 75 | helpMessage: 'Invite missing collaborators to import these values', 76 | }, 77 | [fieldTypes.MULTIPLE_COLLABORATORS]: { 78 | key: fieldTypes.MULTIPLE_COLLABORATORS, 79 | convertParsedValueToCellValue, 80 | helpMessage: 'Invite missing collaborators to import these values', 81 | }, 82 | [fieldTypes.MULTIPLE_RECORD_LINKS]: { 83 | key: fieldTypes.MULTIPLE_RECORD_LINKS, 84 | convertParsedValueToCellValue: (parsedValue, field) => { 85 | // this returns the primary field value for the new linked records, since the actual records may not exist already 86 | 87 | // Replace line breaks with spaces before attempting to parse this field as a JSON row. 88 | // Allowing it to contain newlines would produce > 1 rows. 89 | if (typeof parsedValue === 'string') { 90 | parsedValue = parsedValue.replace(/(\r\n|\n|\r)/gm, ' '); 91 | } 92 | const parseResult = parseHeadersValuesStringSync(parsedValue); 93 | if (parseResult.length !== 1) { 94 | // Temporary logging 95 | console.log('parseResult', JSON.stringify(parseResult)); 96 | throw Error(`parseResult.length must be 1. 
Value is: ${parseResult.length}`); 97 | } 98 | 99 | const parsedRow = parseResult[0]; 100 | if (parsedRow === '') { 101 | return null; 102 | } else { 103 | return _.compact( 104 | parsedRow.map(cell => { 105 | return cell.trim() !== '' ? {name: cell.trim()} : null; 106 | }), 107 | ); 108 | } 109 | }, 110 | }, 111 | [fieldTypes.DATE]: { 112 | key: fieldTypes.DATE, 113 | convertParsedValueToCellValue, 114 | }, 115 | [fieldTypes.DATE_TIME]: { 116 | key: fieldTypes.DATE_TIME, 117 | convertParsedValueToCellValue, 118 | }, 119 | [fieldTypes.PHONE_NUMBER]: { 120 | key: fieldTypes.PHONE_NUMBER, 121 | convertParsedValueToCellValue, 122 | }, 123 | [fieldTypes.CHECKBOX]: { 124 | key: fieldTypes.CHECKBOX, 125 | convertParsedValueToCellValue, 126 | }, 127 | [fieldTypes.RATING]: { 128 | key: fieldTypes.RATING, 129 | convertParsedValueToCellValue: (parsedValue, field) => { 130 | // Special case zero: convertStringToCellValue will return 131 | // null which gets treated as a "failedToMap" value. 132 | if (parsedValue === '0') { 133 | return 0; 134 | } else { 135 | return convertParsedValueToCellValue(parsedValue, field); 136 | } 137 | }, 138 | }, 139 | [fieldTypes.DURATION]: { 140 | key: fieldTypes.DURATION, 141 | convertParsedValueToCellValue, 142 | }, 143 | }; 144 | 145 | function convertParsedValueToCellValue(parsedValue, field) { 146 | if (parsedValue) { 147 | return field.convertStringToCellValue(parsedValue); 148 | } else { 149 | return null; 150 | } 151 | } 152 | 153 | export function isFieldValid(field, linkedTablesPrimaryFieldTypesByTableId = null): boolean { 154 | if (field === null) { 155 | return false; 156 | } 157 | 158 | if (!linkedTablesPrimaryFieldTypesByTableId) { 159 | linkedTablesPrimaryFieldTypesByTableId = getLinkedTablesPrimaryFieldTypesByTableId( 160 | field.parentTable, 161 | ); 162 | } 163 | return ( 164 | !!field && 165 | (field.type === fieldTypes.MULTIPLE_RECORD_LINKS 166 | ? 
_.includes( 167 | supportedFieldTypesForLinkedTablePrimaryField, 168 | linkedTablesPrimaryFieldTypesByTableId[field.options.linkedTableId], 169 | ) 170 | : _.includes(supportedFieldTypes, field.type)) 171 | ); 172 | } 173 | 174 | /** 175 | * I believe: 176 | * fieldIds?: [FieldId] 177 | * table: Table 178 | * return [FieldId] 179 | */ 180 | export const filterDeletedOrUnsupportedFieldIds = (fieldIds, table) => { 181 | return (fieldIds || []).filter(fieldId => { 182 | const field = table.getFieldByIdIfExists(fieldId); 183 | return isFieldValid(field); 184 | }); 185 | }; 186 | 187 | export function getLinkedTablesPrimaryFieldTypesByTableId(table) { 188 | return _.fromPairs( 189 | table.fields 190 | .filter(field => field.type === fieldTypes.MULTIPLE_RECORD_LINKS) 191 | .map(field => [ 192 | field.options.linkedTableId, 193 | base.getTableByIdIfExists(field.options.linkedTableId).primaryField.type, 194 | ]), 195 | ); 196 | } 197 | 198 | export default supportedFieldTypes; 199 | -------------------------------------------------------------------------------- /frontend/RecordPreviewList.tsx: -------------------------------------------------------------------------------- 1 | import {spawnError} from '@airtable/blocks/unstable_private_utils'; 2 | 3 | import {Table, TableOrViewQueryResult} from '@airtable/blocks/models'; 4 | import {Icon, colorUtils, colors} from '@airtable/blocks/ui'; 5 | import React from 'react'; 6 | import {AutoSizer, List} from 'react-virtualized'; 7 | import _ from 'lodash'; 8 | 9 | import RecordPreview from './RecordPreview'; 10 | 11 | const MAX_ROWS_PER_TABLE = 50000; 12 | const RECORD_CARD_HEIGHT = 80; 13 | const RECORD_CARD_MARGIN = 16; 14 | const RECORD_LIST_HEADER_HEIGHT = 48; 15 | 16 | type RecordPreviewListProps = { 17 | table: Table; 18 | queryResult: TableOrViewQueryResult; 19 | failedToMapValuesByFieldId: object; 20 | dataDiff: object; 21 | }; 22 | 23 | export default class RecordPreviewList extends React.Component { 24 | listRef = 
React.createRef(); 25 | 26 | recomputeRowHeights() { 27 | if (this.listRef.current) { 28 | this.listRef.current.recomputeRowHeights(); 29 | } 30 | } 31 | 32 | _renderRecordPreview(cellValuesByFieldId, index, width) { 33 | const {table} = this.props; 34 | 35 | if (!table) { 36 | throw spawnError('Cannot render parsed record without a table'); 37 | } 38 | 39 | return ( 40 | 47 | ); 48 | } 49 | 50 | _renderTableRecordNumberExceededMessage(remainingRecordLimit) { 51 | return ( 52 |
53 |

Table record limit exceeded

54 |
55 | {`The JSON file has too many records to import. You can import at most 56 | ${remainingRecordLimit} record${remainingRecordLimit === 1 ? '' : 's'}.`} 57 |
58 |
59 | ); 60 | } 61 | 62 | _renderNoUpdatesOrInsertsMessage() { 63 | return ( 64 |
65 |

No new or updated records found

66 |
67 | Based on the current field mappings, all rows in this JSON file are already in 68 | the selected table 69 |
70 |
71 | ); 72 | } 73 | 74 | render() { 75 | const {queryResult, failedToMapValuesByFieldId, dataDiff: diff} = this.props; 76 | if (!queryResult.isDataLoaded) { 77 | return null; 78 | } 79 | 80 | if (!diff) { 81 | return null; 82 | } 83 | 84 | const numRecordsToCreate = diff.recordDefsToCreate.length; 85 | // TODO(ben): Use recordLimit when it's back in the sdk 86 | const remainingRecords = MAX_ROWS_PER_TABLE - queryResult.records.length; 87 | if (numRecordsToCreate > remainingRecords) { 88 | return ( 89 |
90 |
91 |
92 | {this._renderTableRecordNumberExceededMessage(remainingRecords)} 93 |
94 |
95 | ); 96 | } 97 | 98 | const items = []; 99 | 100 | if (numRecordsToCreate > 0) { 101 | items.push( 102 | `${numRecordsToCreate} record${numRecordsToCreate > 1 ? 's' : ''} will be created`, 103 | ...diff.recordDefsToCreate, 104 | ); 105 | } 106 | const numRecordsToUpdate = diff.recordDefsToUpdate.length; 107 | if (numRecordsToUpdate > 0) { 108 | items.push( 109 | `${numRecordsToUpdate} record${numRecordsToUpdate > 1 ? 's' : ''} will be updated`, 110 | ..._.map(diff.recordDefsToUpdate, recordDef => recordDef.fields), 111 | ); 112 | } 113 | const numRecordsUnchanged = _.size(diff.unchangedRecordsById); 114 | if (numRecordsUnchanged > 0) { 115 | items.push( 116 | `${numRecordsUnchanged} record${numRecordsUnchanged > 1 ? 's' : ''} didn't change`, 117 | ..._.values(diff.unchangedRecordsById), 118 | ); 119 | } 120 | 121 | const warnings = []; 122 | const ignoredCount = diff.numIgnoredParsedRowsDueToDuplicateMatch; 123 | if (ignoredCount > 0) { 124 | const text = 125 | ignoredCount === 1 126 | ? '1 row in the JSON file was ignored because it had duplicate values in the merge field.' 127 | : `${ignoredCount} rows in the JSON file were ignored because they had duplicate values in the merge field.`; 128 | warnings.push( 129 |
133 | 138 | {text} 139 |
, 140 | ); 141 | } 142 | 143 | const failedToMapValues = _.flatten(_.values(failedToMapValuesByFieldId)); 144 | if (failedToMapValues.length > 0) { 145 | warnings.push( 146 |
150 | 155 | {failedToMapValues.length} cell value 156 | {failedToMapValues.length === 1 ? ' ' : 's '} 157 | couldn 158 | {"'"}t be mapped. 159 |
, 160 | ); 161 | } 162 | 163 | const noUpdatesOrInserts = 164 | diff && diff.recordDefsToCreate.length === 0 && diff.recordDefsToUpdate.length === 0; 165 | const recordPreviews = noUpdatesOrInserts ? ( 166 | this._renderNoUpdatesOrInsertsMessage() 167 | ) : ( 168 | 169 | {({height, width}) => ( 170 | { 174 | return typeof items[index] === 'string' 175 | ? RECORD_LIST_HEADER_HEIGHT 176 | : RECORD_CARD_HEIGHT + RECORD_CARD_MARGIN; 177 | }} 178 | overscanRowCount={10} 179 | rowRenderer={({index, key, style}) => { 180 | const item = items[index]; 181 | if (typeof item === 'string') { 182 | return ( 183 |
184 | {item} 185 |
186 | ); 187 | } else { 188 | return ( 189 |
190 | {this._renderRecordPreview( 191 | item, 192 | index, 193 | width - RECORD_CARD_MARGIN * 2, 194 | )} 195 |
196 | ); 197 | } 198 | }} 199 | height={height} 200 | width={width} 201 | ref={this.listRef} 202 | /> 203 | )} 204 |
205 | ); 206 | 207 | return ( 208 |
209 |
{warnings}
210 |
{recordPreviews}
211 |
212 | ); 213 | } 214 | } 215 | -------------------------------------------------------------------------------- /frontend/Main.tsx: -------------------------------------------------------------------------------- 1 | import {invariant, spawnError} from '@airtable/blocks/unstable_private_utils'; 2 | import {base, cursor, globalConfig, session, viewport} from '@airtable/blocks'; 3 | import { 4 | globalAlert, 5 | Icon, 6 | colorUtils, 7 | colors, 8 | Loader, 9 | useGlobalConfig, 10 | useWatchable, 11 | withHooks, 12 | initializeBlock, 13 | Dialog, 14 | } from '@airtable/blocks/ui'; 15 | import React, {useRef} from 'react'; 16 | import _ from 'lodash'; 17 | import FileDropper from './FileDropper'; 18 | import SettingsStore, {ConfigKeys} from './SettingsStore'; 19 | import ParsedDataReview from './ParsedDataReview'; 20 | import {setStateAsync} from './setAsyncCommon'; 21 | import {parseJsonFileAsync} from './jsonParsingHelpers'; 22 | import {filterJsonAsync} from './jsonFilteringHelpers'; 23 | import {parseJsonToHeadersValuesAsync} from './jsonToHeadersValuesParsingHelpers'; 24 | 25 | // Add the baymax class to the root to use the global baymax styles. 26 | // This was previously the default, but is now opt-in. 
invariant(document.body, 'document.body');
document.body.classList.add('baymax');

// Top-level UI states for the import flow.
const FileParseStatuses = {
    WAITING_FOR_FILE: 'waitingForFile',
    ERROR: 'error',
    READING_FILE: 'readingFile',
    REVIEW: 'review',
};

// Single shared settings store backed by globalConfig.
const settingsStore = new SettingsStore();

class Main extends React.Component {
    state = {
        fileParseStatus: FileParseStatuses.WAITING_FOR_FILE,
    };

    constructor(props) {
        super(props);

        this._onBaseTablesChanged = this._onBaseTablesChanged.bind(this);
        this._showReloadPromptIfNeeded = this._showReloadPromptIfNeeded.bind(this);
    }

    componentDidMount() {
        this._showReloadPromptIfNeeded();
    }

    // Ask the user to reload when another collaborator has written a newer
    // settings schema version into globalConfig.
    _showReloadPromptIfNeeded() {
        if (settingsStore.isSchemaVersionOutOfDate()) {
            globalAlert.showReloadPrompt();
        }
    }

    // Watch for table deletions: if the currently selected table disappears,
    // fall back to the cursor's active table (or the first table in the base).
    _onBaseTablesChanged() {
        const tableIds = base.tables.map(table => table.id);
        if (this.state.tableId && !tableIds.includes(this.state.tableId)) {
            // if the current table was deleted, switch to a new one
            const newTableId = cursor.activeTableId || base.tables[0].id;
            this.setState({
                tableId: newTableId,
            });
        }
    }

    // Persist the review screen's settings (when a table was chosen) and reset
    // back to the file-picker state.
    _onParsedDataReviewClose = settings => {
        const {
            table,
            fieldMappings,
            isFirstLineHeaders,
            shouldMergeDuplicates,
            fieldIdsForMerging,
        } = settings;

        if (table) {
            const tableId = table.id;
            settingsStore.tableId = tableId;
            settingsStore.setFieldMappingsForTableId(tableId, fieldMappings);
            settingsStore.isFirstLineHeaders = isFirstLineHeaders;
            settingsStore.setMergeFieldIdsForTableId(tableId, fieldIdsForMerging);
            settingsStore.shouldMergeDuplicates = shouldMergeDuplicates;
        }

        // Only leave fullscreen if we were the ones who entered it.
        if (this.state.tempFullscreen && viewport.isFullscreen) {
            viewport.exitFullscreen();
        }

        this.setState({
            fileParseStatus: FileParseStatuses.WAITING_FOR_FILE,
            tempFullscreen: false,
            tableId: null,
        });
    };

    // Parse the dropped file and transition to the review state, or show an
    // error modal describing why the file couldn't be used.
    async _processFileAsync(file) {
        await setStateAsync(this, {
            fileParseStatus: FileParseStatuses.READING_FILE,
        });
        const parsedJsonFile = await parseJsonFileAsync(file);
        const parsedData = await parseJsonToHeadersValuesAsync(
            await filterJsonAsync(_.cloneDeep(parsedJsonFile)),
        );

        // check for errors while parsing
        if (!parsedData) {
            this._showErrorModal('There was an error parsing the JSON file, please try again.');
            return;
        }

        if (parsedData.errors && parsedData.errors.length > 0) {
            const firstError = parsedData.errors[0];
            // NOTE(review): 'UndetectableDelimiter' is a papaparse (CSV) error
            // code — this handling appears inherited from a CSV importer;
            // confirm it still applies to the JSON pipeline.
            // If the only error is an undetectable delimiter, process the
            // results as normal, since the parser defaults to comma-delimiting
            // and correctly parses single-column data.
            const couldBeSingleColumnData =
                parsedData.errors.length === 1 && firstError.code === 'UndetectableDelimiter';
            if (!couldBeSingleColumnData) {
                // NOTE(review): a falsy check means row 0 takes the generic
                // message branch — confirm whether rows are 0- or 1-based.
                if (firstError.row) {
                    this._showErrorModal(`
                        The JSON file you uploaded contains an error on row ${firstError.row + 1}
                        ${
                            firstError.message ? `: ${firstError.message}` : ''
                        }. Please fix and try again.
                    `);
                } else {
                    // If there was an error besides an undetectable delimiter, use that instead.
                    const errorMessage =
                        parsedData.errors.length > 1 &&
                        firstError.code === 'UndetectableDelimiter'
                            ? parsedData.errors[1].message
                            : firstError.message;
                    this._showErrorModal(`
                        The JSON file you uploaded contains an error
                        ${errorMessage ? `: ${errorMessage}` : ''}. Please fix and try again.
                    `);
                }
                return;
            }
        }

        // Fix: use Array.isArray instead of brittle constructor comparison.
        const {data} = parsedJsonFile;
        const isEmptyArray = Array.isArray(data) && data.length === 0;
        const isEmptyObject =
            !Array.isArray(data) &&
            data.constructor === Object &&
            Object.keys(data).length === 0;
        if (isEmptyArray || isEmptyObject) {
            this._showErrorModal('The JSON file you uploaded was empty.');
            return;
        }

        if (parsedData.data.length > 15000) {
            this._showErrorModal('The JSON file cannot contain more than 15,000 rows.');
            return;
        }

        // Pull settings store into locally managed state so a user can
        // mess with config without other users messing with their import.
        // We specifically pass the table in as a prop so ParsedDataReview can watch the field configs
        let tableId;
        if (settingsStore.table) {
            tableId = settingsStore.table.id;
        } else if (cursor.activeTableId) {
            tableId = cursor.activeTableId;
        } else {
            tableId = base.tables[0].id;
        }

        // Go fullscreen for the review step if we aren't already, remembering
        // that we did so _onParsedDataReviewClose can exit again.
        const tempFullscreen = !viewport.isFullscreen;
        if (tempFullscreen) {
            viewport.enterFullscreenIfPossible();
        }

        await setStateAsync(this, {
            fileParseStatus: FileParseStatuses.REVIEW,
            parsedData,
            parsedJsonFile,
            tempFullscreen,
            tableId: tableId,
        });
    }

    _onTableChanged = newTable => {
        this.setState({
            tableId: newTable ? newTable.id : null,
        });
    };

    _onPickFile = file => {
        // Fix: surface unexpected read/parse failures to the user instead of
        // leaving an unhandled promise rejection.
        this._processFileAsync(file).catch(() => {
            this._showErrorModal('There was an error parsing the JSON file, please try again.');
        });
    };

    _onPickFileError = errorMessage => {
        this._showErrorModal(errorMessage);
    };

    _onParsedDataReviewError = errorMessage => {
        this._showErrorModal(errorMessage);
    };

    _showErrorModal(errorMessage) {
        this.setState({
            fileParseStatus: FileParseStatuses.ERROR,
            errorMessage,
        });
    }

    _renderContents() {
        const {fileParseStatus} = this.state;

        switch (fileParseStatus) {
            case FileParseStatuses.WAITING_FOR_FILE:
            case FileParseStatuses.ERROR:
                return (
214 | 224 | {fileParseStatus === FileParseStatuses.ERROR && ( 225 | 227 | this.setState({ 228 | fileParseStatus: FileParseStatuses.WAITING_FOR_FILE, 229 | }) 230 | } 231 | style={{ 232 | maxWidth: 350, 233 | }} 234 | > 235 |
236 | 237 |
238 |
239 | 244 | 245 | Sorry, something is wrong 246 | 247 |
248 |
{this.state.errorMessage}
249 |
250 |
251 |
252 | )} 253 |
254 | ); 255 | 256 | case FileParseStatuses.READING_FILE: 257 | return ( 258 |
259 |
260 | Reading file... 261 | 262 |
263 |
264 | ); 265 | 266 | case FileParseStatuses.REVIEW: { 267 | const {parsedData, parsedJsonFile, tableId} = this.state; 268 | if (!parsedData) { 269 | throw spawnError('No parsed data for review state'); 270 | } 271 | 272 | const table = tableId ? base.getTableByIdIfExists(tableId) : null; 273 | if (!table) { 274 | throw spawnError('Cannot render parsed data review state without a table'); 275 | } 276 | 277 | return ( 278 | 288 | ); 289 | } 290 | 291 | default: 292 | throw spawnError('Unrecognized status: %s', fileParseStatus); 293 | } 294 | } 295 | render() { 296 | return
{this._renderContents()}
; 297 | } 298 | } 299 | 300 | const Block = withHooks(Main, () => { 301 | const instanceRef = useRef(); 302 | useWatchable(session, 'permissionLevel'); 303 | 304 | useWatchable(base, 'tables', () => { 305 | instanceRef.current._onBaseTablesChanged(); 306 | }); 307 | 308 | useGlobalConfig(); 309 | useWatchable(globalConfig, ConfigKeys.schemaVersion, () => { 310 | instanceRef.current._showReloadPromptIfNeeded(); 311 | }); 312 | 313 | useWatchable(cursor, 'activeTableId'); 314 | 315 | return { 316 | ref: instanceRef, 317 | }; 318 | }); 319 | 320 | initializeBlock(() => ); 321 | -------------------------------------------------------------------------------- /frontend/headersValuesMappingHelpers.ts: -------------------------------------------------------------------------------- 1 | import {spawnError} from '@airtable/blocks/unstable_private_utils'; 2 | import {base} from '@airtable/blocks'; 3 | import {fieldTypes} from '@airtable/blocks/models'; 4 | import nonblocking from './nonblocking'; 5 | import supportedFieldTypes, {fieldConfigByType} from './supportedFieldTypes'; 6 | import {setTimeoutAsync} from './setAsyncCommon'; 7 | import _ from 'lodash'; 8 | 9 | function normalizeCellValueForComparison(cellValue) { 10 | if (typeof cellValue === 'string') { 11 | return ( 12 | cellValue 13 | .trim() 14 | // Hyperbase seems to convert \r\n -> \n 15 | // Do that as well for incoming data. 
16 | .replace(/\r\n/g, '\n') 17 | ); 18 | } else if (Array.isArray(cellValue)) { 19 | return new Set(cellValue); 20 | } else { 21 | return cellValue; 22 | } 23 | } 24 | 25 | export function getFieldMappingsMatchingHeaders({parsedHeaders, table}) { 26 | if (!table) { 27 | return {}; 28 | } 29 | 30 | const fieldMappingsMatchingHeaders = {}; 31 | for (let index = 0; index < (parsedHeaders || []).length; index++) { 32 | const header = parsedHeaders[index]; 33 | const fieldMatchingHeader = _.find(table.fields, field => { 34 | return ( 35 | field.name.toLowerCase() === header.toLowerCase() && 36 | _.includes(supportedFieldTypes, field.type) 37 | ); 38 | }); 39 | 40 | if (fieldMatchingHeader) { 41 | if (fieldMappingsMatchingHeaders.hasOwnProperty(fieldMatchingHeader.id)) { 42 | // Skip this field if we already found a matching header for it. 43 | continue; 44 | } 45 | fieldMappingsMatchingHeaders[fieldMatchingHeader.id] = { 46 | isEnabled: true, 47 | parsedIndex: index, 48 | }; 49 | } 50 | } 51 | return fieldMappingsMatchingHeaders; 52 | } 53 | 54 | /** 55 | * returns { 56 | * values: { 57 | * [fieldId]: cellValue 58 | * } 59 | * failures: { 60 | * [fieldId]: parsedValue (I think string, whatever parsedRecord is an array of) 61 | * } 62 | * } 63 | */ 64 | export function getValuesAndFailuresByFieldIdForParsedRecord( 65 | {fieldMappings, parsedHeaders, table}, 66 | parsedRecord, 67 | ) { 68 | if (!table) { 69 | throw spawnError('Cannot get fields and cell values without a table'); 70 | } 71 | 72 | const cellValueByFieldId = {}; 73 | const failedToMapValueByFieldId = {}; 74 | // TODO: Shouldn't this iterate over fieldMapping instead? 
75 | for (const field of table.fields) { 76 | if (!_.includes(supportedFieldTypes, field.type)) { 77 | continue; 78 | } 79 | if (!fieldConfigByType[field.type]) { 80 | throw spawnError( 81 | 'Trying to get cell values using un-supported field type: %s', 82 | field.type, 83 | ); 84 | } 85 | 86 | const fieldMapping = fieldMappings[field.id]; 87 | if (fieldMapping && fieldMapping.isEnabled && fieldMapping.parsedIndex !== null) { 88 | const parsedValue = parsedRecord[fieldMapping.parsedIndex] || ''; 89 | const cellValue = fieldConfigByType[field.type].convertParsedValueToCellValue( 90 | parsedValue, 91 | field, 92 | ); 93 | cellValueByFieldId[field.id] = cellValue; 94 | if (parsedValue && (cellValue === null || cellValue === undefined)) { 95 | failedToMapValueByFieldId[field.id] = parsedValue; 96 | } 97 | } 98 | } 99 | return { 100 | values: cellValueByFieldId, 101 | failures: failedToMapValueByFieldId, 102 | }; 103 | } 104 | 105 | // predicate for detecting key collisions 106 | // supports composite keys (array) 107 | function createMatcher(fieldIdsForMerging) { 108 | return (recordDef, record) => { 109 | return fieldIdsForMerging.reduce((memo, key) => { 110 | const rdValue = recordDef[key]; 111 | if (!memo || rdValue === null || rdValue === undefined) { 112 | return false; 113 | } 114 | 115 | const compValue = normalizeCellValueForComparison(rdValue); 116 | const cellValue = normalizeCellValueForComparison(record.getCellValue(key)); 117 | return memo && _.isEqual(compValue, cellValue); 118 | }, true); 119 | }; 120 | } 121 | 122 | function isFieldRecordLink(field): boolean { 123 | return field.type === fieldTypes.MULTIPLE_RECORD_LINKS; 124 | } 125 | 126 | export async function createOrUpdateRecordsAsync({table, diff}, onProgress) { 127 | if (!table) { 128 | throw spawnError('Cannot create records without a table'); 129 | } 130 | 131 | const recordDefs = _.concat( 132 | diff.recordDefsToCreate, 133 | _.map(diff.recordDefsToUpdate, recordDef => recordDef.fields), 134 | 
); 135 | 136 | // We need to stay below the 1.9MB request payload limit. 137 | // TODO: create a batch record create helper in the sdk that actually measures the batch sizes. 138 | const RECORD_BATCH_SIZE = 50; 139 | 140 | const linkedRecordNamesSetByLinkedTableId = {}; 141 | const linkedTableIdByLinkedRecordFieldId = {}; 142 | 143 | for (const recordDef of recordDefs) { 144 | for (const [fieldId, cellValue] of _.entries(recordDef)) { 145 | const field = table.getFieldByIdIfExists(fieldId); 146 | if (!field) { 147 | throw spawnError('No field'); 148 | } 149 | 150 | if (isFieldRecordLink(field) && cellValue !== null && cellValue !== undefined) { 151 | // need to create a new record in the linked table 152 | const {linkedTableId} = field.options; 153 | if (!linkedRecordNamesSetByLinkedTableId[linkedTableId]) { 154 | linkedRecordNamesSetByLinkedTableId[linkedTableId] = new Set(); 155 | } 156 | 157 | // keep track of each cell value that is the name of a linked record 158 | for (const linkedRecordName of cellValue) { 159 | linkedRecordNamesSetByLinkedTableId[linkedTableId].add(linkedRecordName.name); 160 | } 161 | 162 | // keep track of each linked record field so their cell values can be updated once we know 163 | // the record id for each linked record 164 | linkedTableIdByLinkedRecordFieldId[field.id] = linkedTableId; 165 | } 166 | } 167 | } 168 | 169 | const numRecordsToBeTouched = 170 | recordDefs.length + 171 | _.reduce( 172 | linkedRecordNamesSetByLinkedTableId, 173 | (result, linkedRecordNamesSet) => result + linkedRecordNamesSet.size, 174 | 0, 175 | ); 176 | let numRecordsTouched = 0; 177 | 178 | // for each linked table, map each name of the linked record to its record id 179 | const linkedRecordIdByNameByTableId = {}; 180 | for (const [linkedTableId, linkedRecordNamesSet] of _.entries( 181 | linkedRecordNamesSetByLinkedTableId, 182 | )) { 183 | const linkedTable = base.getTableByIdIfExists(linkedTableId); 184 | 185 | if (!linkedTable) { 186 | continue; 187 | 
} 188 | const linkedTableQueryResult = linkedTable.selectRecords(); 189 | await linkedTableQueryResult.loadDataAsync(); 190 | 191 | // keep track of all the records that already exist in the linked table 192 | linkedRecordIdByNameByTableId[linkedTableId] = {}; 193 | for (const record of linkedTableQueryResult.records) { 194 | linkedRecordIdByNameByTableId[linkedTableId][record.primaryCellValueAsString] = 195 | record.id; 196 | } 197 | 198 | const {primaryField: linkedTablePrimaryField} = linkedTable; 199 | if (linkedTablePrimaryField.isComputed) { 200 | // if the field is formulaic/computed, we can't make new records, so don't map these cell values 201 | numRecordsTouched += linkedRecordNamesSet.size; 202 | onProgress(numRecordsTouched, numRecordsToBeTouched); 203 | linkedTableQueryResult.unloadData(); 204 | continue; 205 | } 206 | 207 | // Do a first pass to figure out how many linked records will just be no-ops (since 208 | // they already exist). 209 | // NOTE: this is necessary because calling onProgress in a tight loop makes things really 210 | // slow. So rather than figuring out which records to no-op as we're also creating records, 211 | // and calling onProgress every time we no-op, we'll just do one batch to speed things up. 
212 | for (const linkedRecordName of linkedRecordNamesSet) { 213 | if (linkedRecordIdByNameByTableId[linkedTableId][linkedRecordName]) { 214 | numRecordsTouched++; 215 | } 216 | } 217 | onProgress(numRecordsTouched, numRecordsToBeTouched); 218 | 219 | // create new records in the linked table for each name that doesn't already exist in the linked table 220 | let createdLinkedRecordCount = 0; 221 | for (const linkedRecordName of linkedRecordNamesSet) { 222 | if (!linkedRecordIdByNameByTableId[linkedTableId][linkedRecordName]) { 223 | if (!fieldConfigByType[linkedTablePrimaryField.type]) { 224 | throw spawnError( 225 | 'Trying to get linked record values using un-supported field type: %s', 226 | linkedTablePrimaryField.type, 227 | ); 228 | } 229 | 230 | const recordId = await linkedTable.createRecordAsync({ 231 | [linkedTable.primaryField.id]: fieldConfigByType[ 232 | linkedTablePrimaryField.type 233 | ].convertParsedValueToCellValue(linkedRecordName, linkedTablePrimaryField), 234 | }); 235 | 236 | linkedRecordIdByNameByTableId[linkedTableId][linkedRecordName] = recordId; // eslint-disable-line require-atomic-updates 237 | 238 | createdLinkedRecordCount++; 239 | numRecordsTouched++; 240 | 241 | if (createdLinkedRecordCount % RECORD_BATCH_SIZE === 0) { 242 | onProgress(numRecordsTouched, numRecordsToBeTouched); 243 | } 244 | } 245 | } 246 | // Be sure to call onProgress at the end, since we may have been in the middle of 247 | // a batch, in which case we wouldn't have called onProgress for the last set of 248 | // records. 
249 | onProgress(numRecordsTouched, numRecordsToBeTouched); 250 | 251 | await setTimeoutAsync(50); 252 | linkedTableQueryResult.unloadData(); 253 | } 254 | 255 | const tableQueryResult = table.selectRecords(); 256 | await tableQueryResult.loadDataAsync(); 257 | 258 | // replace the cell value for each linked record to be the linked record objects json 259 | for (const recordDef of recordDefs) { 260 | for (const [linkedRecordFieldId, linkedTableId] of _.entries( 261 | linkedTableIdByLinkedRecordFieldId, 262 | )) { 263 | const linkedRecordCellValue = recordDef[linkedRecordFieldId]; 264 | // Need to _.compact to remove possible nulls. 265 | // Happens when primary field is read-only, see comment above: 266 | // "if the field is formulaic/computed, we can't make new records, so don't map these cell values" 267 | // 268 | // Also need to remove duplicates with uniqBy, since a linked cell can't contain 269 | // references to the same record multiple times. This means if the JSON cell has 270 | // "foo,foo", we remove the second "foo" 271 | recordDef[linkedRecordFieldId] = _.uniqBy( 272 | _.compact( 273 | linkedRecordCellValue.map(cellValue => { 274 | // Check if cellValue has already been updated from {name: ... } to { id: ...} 275 | // Happens when updating, if an incoming JSON row matched multiple rows in the table. 276 | if (Object.prototype.hasOwnProperty.call(cellValue, 'id')) { 277 | return cellValue; 278 | } else { 279 | const linkedRecordId = 280 | linkedRecordIdByNameByTableId[linkedTableId][cellValue.name]; 281 | return linkedRecordId ? 
{id: linkedRecordId} : null; 282 | } 283 | }), 284 | ), 285 | cellValue => cellValue.id, 286 | ); 287 | } 288 | } 289 | 290 | // update records in batches 291 | for (let i = 0; i < diff.recordDefsToUpdate.length; i += RECORD_BATCH_SIZE) { 292 | let initialRecordDefsToUpdate = diff.recordDefsToUpdate.slice(i, i + RECORD_BATCH_SIZE); 293 | let recordDefsToUpdate = []; 294 | // get records that were deleted between starting the import process and the persistence step. 295 | // Move them to the creation list 296 | for (const recordDef of initialRecordDefsToUpdate) { 297 | if (tableQueryResult.getRecordById(recordDef.id) === null) { 298 | diff.recordDefsToCreate.push(recordDef.fields); 299 | } else { 300 | recordDefsToUpdate.push(recordDef); 301 | } 302 | } 303 | 304 | await table.updateRecordsAsync(recordDefsToUpdate); 305 | numRecordsTouched += _.size(recordDefsToUpdate); 306 | onProgress(numRecordsTouched, numRecordsToBeTouched); 307 | } 308 | 309 | // create records in batches 310 | for (let i = 0; i < diff.recordDefsToCreate.length; i += RECORD_BATCH_SIZE) { 311 | const recordDefsToCreate = diff.recordDefsToCreate.slice(i, i + RECORD_BATCH_SIZE); 312 | 313 | await table.createRecordsAsync(recordDefsToCreate); 314 | 315 | numRecordsTouched += recordDefsToCreate.length; 316 | onProgress(numRecordsTouched, numRecordsToBeTouched); 317 | } 318 | 319 | tableQueryResult.unloadData(); 320 | } 321 | 322 | /** 323 | * Given the input JSON records, field mappings and the merge keys, returns 324 | * an object with the follow shape: 325 | * 326 | * { 327 | * recordDefsToCreate: Array, 328 | * recordDefsToUpdate: Array<{fields: RecordDef, id: RecordId}>, 329 | * unchangedRecordsById: Array, 330 | * numIgnoredParsedRowsDueToDuplicateMatch: number, 331 | * } 332 | * 333 | * When multiple rows in the JSON file match a record, only the first row will 334 | * be used and subsequent rows will be ignored. 
numIgnoredParsedRowsDueToDuplicateMatch 335 | * is the number of rows that were ignored. 336 | */ 337 | export async function computeDataDiffAsync( 338 | parsedRecords = [], 339 | fieldMappings, 340 | parsedHeaders, 341 | fieldIdsForMerging, 342 | table, 343 | queryResult, 344 | ) { 345 | const tableRecords = queryResult.records; 346 | // no merge key or empty table, return early, they're all inserts 347 | if (tableRecords.length === 0 || fieldIdsForMerging.length === 0) { 348 | const recordDefsToCreate = []; 349 | const failedToMapValuesByFieldId = {}; 350 | 351 | await nonblocking.forEachAsync(parsedRecords, parsedRecord => { 352 | const valuesAndFailures = getValuesAndFailuresByFieldIdForParsedRecord( 353 | {fieldMappings, parsedHeaders, table}, 354 | parsedRecord, 355 | ); 356 | recordDefsToCreate.push(valuesAndFailures.values); 357 | if (valuesAndFailures.failures) { 358 | for (const [fieldId, failedToMapValue] of _.entries(valuesAndFailures.failures)) { 359 | if (Array.isArray(failedToMapValuesByFieldId[fieldId])) { 360 | failedToMapValuesByFieldId[fieldId].push(failedToMapValue); 361 | } else { 362 | failedToMapValuesByFieldId[fieldId] = [failedToMapValue]; 363 | } 364 | } 365 | } 366 | }); 367 | 368 | return { 369 | recordDefsToCreate, 370 | recordDefsToUpdate: [], 371 | unchangedRecords: [], 372 | numIgnoredParsedRowsDueToDuplicateMatch: 0, 373 | failedToMapValuesByFieldId, 374 | }; 375 | } 376 | 377 | const diff = { 378 | recordDefsToCreate: [], 379 | updatesByRecordId: {}, 380 | unchangedRecordsById: {}, 381 | numIgnoredParsedRowsDueToDuplicateMatch: 0, 382 | failedToMapValuesByFieldId: {}, 383 | }; 384 | 385 | const tableRecordsByFirstKeyField = _.groupBy(tableRecords, r => { 386 | // TODO: for non-primitive fields (e.g. multiple select), this will put 387 | // all records into a single group with key "[object Object]". 
388 | // We could JSON.stringify, but that would put records with differently 389 | // ordered options in different groups, which is bad. 390 | // We should add a Grouper helper to the SDK. 391 | return normalizeCellValueForComparison(r.getCellValue(fieldIdsForMerging[0])); 392 | }); 393 | 394 | const matcher = createMatcher(fieldIdsForMerging); 395 | const mappedFieldIds = _.keys(_.pickBy(fieldMappings, fieldMapping => fieldMapping.isEnabled)); 396 | 397 | const processedRecordIdsSet = new Set(); 398 | 399 | await nonblocking.forEachAsync(parsedRecords, parsedRecord => { 400 | const { 401 | values: cellValueByFieldId, 402 | failures: failedToMapValuesByFieldId, 403 | } = getValuesAndFailuresByFieldIdForParsedRecord( 404 | {fieldMappings, parsedHeaders, table}, 405 | parsedRecord, 406 | ); 407 | 408 | for (const [fieldId, failedToMapValue] of _.entries(failedToMapValuesByFieldId)) { 409 | if (Array.isArray(diff.failedToMapValuesByFieldId[fieldId])) { 410 | diff.failedToMapValuesByFieldId[fieldId].push(failedToMapValue); 411 | } else { 412 | diff.failedToMapValuesByFieldId[fieldId] = [failedToMapValue]; 413 | } 414 | } 415 | 416 | const firstKeyValue = normalizeCellValueForComparison( 417 | cellValueByFieldId[fieldIdsForMerging[0]], 418 | ); 419 | 420 | // Only process the JSON record if at least one field is non-null and not an empty array 421 | if ( 422 | _.some(cellValueByFieldId, (cellValue, fieldId) => { 423 | return cellValue !== null && !(_.isArray(cellValue) && cellValue.length === 0); 424 | }) 425 | ) { 426 | const matchedTableRecords = (tableRecordsByFirstKeyField[firstKeyValue] || []).filter( 427 | _.partial(matcher, cellValueByFieldId), 428 | ); 429 | 430 | if (matchedTableRecords.length > 0) { 431 | // Update matching records. 432 | for (const matchedRecord of matchedTableRecords) { 433 | if (processedRecordIdsSet.has(matchedRecord.id)) { 434 | // If we've already seen this record before, it means 435 | // a prior JSON row matched it. 
Don't process it again, 436 | // and add the parsedRecord to list of ignored rows so we 437 | // can tell the user about it. 438 | diff.numIgnoredParsedRowsDueToDuplicateMatch++; 439 | break; 440 | } 441 | processedRecordIdsSet.add(matchedRecord.id); 442 | 443 | const areAnyCellValuesDifferent = _.some(mappedFieldIds, fieldId => { 444 | let cellValue = matchedRecord.getCellValue(fieldId); 445 | let parsedValue = cellValueByFieldId[fieldId]; 446 | 447 | // if field is a foreign key, compare against the linked record name 448 | if ( 449 | isFieldRecordLink(table.getFieldByIdIfExists(fieldId)) && 450 | _.isArray(cellValue) 451 | ) { 452 | if ( 453 | _.isArray(cellValue) && 454 | cellValue.length >= 1 && 455 | (parsedValue === null || parsedValue === undefined) 456 | ) { 457 | // The cell value is not empty, but the parsed value is. 458 | return true; 459 | } else if (_.isEmpty(cellValue) && _.isEmpty(parsedValue)) { 460 | // both are empty 461 | return false; 462 | } else { 463 | cellValue = cellValue.map(cell => cell.name); 464 | parsedValue = parsedValue.map(cell => cell.name); 465 | } 466 | } 467 | 468 | cellValue = normalizeCellValueForComparison(cellValue); 469 | parsedValue = normalizeCellValueForComparison(parsedValue); 470 | 471 | return !_.isEqual(parsedValue, cellValue); 472 | }); 473 | 474 | if (areAnyCellValuesDifferent) { 475 | // Note: if multiple table records match this parsed row, they will share the 476 | // same cellValueByFieldId object. Careful when mutating this object. 477 | diff.updatesByRecordId[matchedRecord.id] = cellValueByFieldId; 478 | } else { 479 | diff.unchangedRecordsById[matchedRecord.id] = cellValueByFieldId; 480 | } 481 | } 482 | } else { 483 | // No matching records, create a new record. 
484 | diff.recordDefsToCreate.push(cellValueByFieldId); 485 | } 486 | } 487 | }); 488 | 489 | diff.recordDefsToUpdate = _.map(diff.updatesByRecordId, (value, key) => ({ 490 | id: key, 491 | fields: value, 492 | })); 493 | delete diff.updatesByRecordId; 494 | return diff; 495 | } 496 | -------------------------------------------------------------------------------- /frontend/ParsedDataReview.tsx: -------------------------------------------------------------------------------- 1 | import {spawnError, trackEvent} from '@airtable/blocks/unstable_private_utils'; 2 | import {viewport} from '@airtable/blocks'; 3 | import {fieldTypes, Table} from '@airtable/blocks/models'; 4 | import { 5 | TablePicker, 6 | Tooltip, 7 | Select, 8 | Icon, 9 | Input, 10 | colorUtils, 11 | colors, 12 | Button, 13 | Modal, 14 | Loader, 15 | ProgressBar, 16 | ViewportConstraint, 17 | useBase, 18 | useViewport, 19 | useWatchable, 20 | withHooks, 21 | Switch, 22 | } from '@airtable/blocks/ui'; 23 | import React, {useRef} from 'react'; 24 | import jp from 'jsonpath'; 25 | import FieldMapping from './FieldMapping'; 26 | import RecordPreviewList from './RecordPreviewList'; 27 | import supportedFieldTypes, { 28 | fieldConfigByType, 29 | filterDeletedOrUnsupportedFieldIds, 30 | getLinkedTablesPrimaryFieldTypesByTableId, 31 | isFieldValid, 32 | } from './supportedFieldTypes'; 33 | import { 34 | getFieldMappingsMatchingHeaders, 35 | createOrUpdateRecordsAsync, 36 | computeDataDiffAsync, 37 | } from './headersValuesMappingHelpers'; 38 | import {setStateAsync, setTimeoutAsync} from './setAsyncCommon'; 39 | import _ from 'lodash'; 40 | import classNames from 'classnames'; 41 | import {filterJsonAsync} from './jsonFilteringHelpers'; 42 | import {parseJsonToHeadersValuesAsync} from './jsonToHeadersValuesParsingHelpers'; 43 | 44 | const MAX_ROWS_PER_TABLE = 50000; 45 | 46 | const ReviewStatuses = { 47 | REVIEW: 'review', 48 | FAILED_TO_MAP_VALUES: 'failedToMapValues', 49 | CREATING_RECORDS: 'creatingRecords', 
50 | SUCCESS: 'success', 51 | }; 52 | 53 | type ParsedDataReviewProps = { 54 | parsedData: object; 55 | parsedJsonFile: object; 56 | settingsStore: object; 57 | onClose: (settings: object) => void; 58 | onError: (errorMessage: string) => void; 59 | table: Table; 60 | onTableChanged: (newTable: object) => void; 61 | className: string; 62 | style: object; 63 | }; 64 | 65 | type ParsedDataReviewState = { 66 | status: string; 67 | parsedData: object; 68 | fieldMappings: object; 69 | parsedRecords: Array; 70 | parsedHeaders: Array; 71 | isFirstLineHeaders: boolean; 72 | shouldMergeDuplicates: boolean; 73 | fieldIdsForMerging: Array; 74 | dataDiff: object; 75 | isDataDiffReady: boolean; 76 | }; 77 | 78 | class ParsedDataReview extends React.Component { 79 | recordPreviewListRef = React.createRef(); 80 | 81 | constructor(props: ParsedDataReviewProps) { 82 | super(props); 83 | 84 | const {parsedData, settingsStore, table} = props; 85 | this.state = this._getIcState(parsedData, settingsStore, table); 86 | 87 | // This id is used to ensure that we don't store stale loads. It is implemented as a strictly increasing counter. 88 | // Whenever we finish computing a dataDiff, we check that the counter hasn't changed. 89 | // If it hasn't, it is safe to store the result. If it has increased, we don't store the result and 90 | // continue to wait on the new computation (which caused the counter to increase). 91 | // We never reset the counter as that would break the invariant. 
92 | this._dataDiffId = 0; 93 | this._onTableFieldsChange = this._onTableFieldsChange.bind(this); 94 | this._onFieldConfigChange = this._onFieldConfigChange.bind(this); 95 | this._computeJsonPathAsync = _.debounce(this._computeJsonPathAsync, 300); 96 | } 97 | 98 | _getIcState = (parsedData, settingsStore, table) => { 99 | const {data} = parsedData; 100 | if (!data || !data.length) { 101 | return { 102 | status: ReviewStatuses.REVIEW, 103 | fieldMappings: settingsStore.getFieldMappingsForTableId(table.id), 104 | isFirstLineHeaders: true, 105 | }; 106 | } 107 | 108 | const parsedFirstLine = data[0]; 109 | 110 | const fieldMappings = settingsStore.getFieldMappingsForTableId(table.id); 111 | 112 | // Use stored config if every field mapping has an index within the bounds of this parsed file. 113 | const isUsingStoredConfig = _.every(fieldMappings, fieldMapping => { 114 | return ( 115 | fieldMapping.parsedIndex === null || 116 | (fieldMapping.parsedIndex >= 0 && fieldMapping.parsedIndex < (parsedFirstLine || []).length) 117 | ); 118 | }); 119 | 120 | const fieldIdsForMerging = settingsStore.getMergeFieldIdsForTableId(table.id); 121 | 122 | const shouldMergeDuplicates = settingsStore.shouldMergeDuplicates; 123 | const fieldMappingsMatchingHeaders = getFieldMappingsMatchingHeaders({ 124 | parsedHeaders: parsedFirstLine, 125 | table, 126 | }); 127 | 128 | // Let's determine if any field names match the parsed header names. If they do, we'll pre-populate 129 | // the appropriate field mappings and assume isFirstLineHeaders === true 130 | let isFirstLineHeaders; 131 | if (_.keys(fieldMappingsMatchingHeaders).length > 0) { 132 | _.each(fieldMappingsMatchingHeaders, (fieldMapping, fieldId) => { 133 | if (!fieldMappings.hasOwnProperty(fieldId)) { 134 | fieldMappings[fieldId] = fieldMapping; 135 | } 136 | }); 137 | isFirstLineHeaders = true; 138 | } else { 139 | isFirstLineHeaders = isUsingStoredConfig ? 
settingsStore.isFirstLineHeaders : false; 140 | } 141 | 142 | const {parsedRecords, parsedHeaders} = this._getParsedRecordsAndParsedHeaders(parsedData, isFirstLineHeaders); 143 | 144 | const state = { 145 | status: ReviewStatuses.REVIEW, 146 | parsedData, 147 | fieldMappings, 148 | parsedRecords, 149 | parsedHeaders, 150 | isFirstLineHeaders, 151 | shouldMergeDuplicates, 152 | fieldIdsForMerging, 153 | dataDiff: null, 154 | isDataDiffReady: false, 155 | }; 156 | 157 | return state; 158 | } 159 | 160 | componentDidMount() { 161 | this._loadTableDataAsync(this.props.table); 162 | } 163 | 164 | componentWillUnmount() { 165 | if (this.state.queryResult) { 166 | this.state.queryResult.unloadData(); 167 | } 168 | } 169 | 170 | _onFieldMappingChange = (fieldId, newIndex) => { 171 | const {fieldMappings} = this.state; 172 | 173 | const fieldMapping = { 174 | ...fieldMappings[fieldId], 175 | }; 176 | fieldMapping.parsedIndex = newIndex; 177 | 178 | this.setState( 179 | { 180 | fieldMappings: { 181 | ...fieldMappings, 182 | [fieldId]: fieldMapping, 183 | }, 184 | }, 185 | this._computeDataDiffAsync, 186 | ); 187 | }; 188 | 189 | _computeJsonPathAsync = async () => { 190 | const {parsedJsonFile} = this.props; 191 | const {jsonPath} = this.state; 192 | 193 | if (!jsonPath || jsonPath === '') { 194 | await this._setStateFromParsedJsonFileAsync(parsedJsonFile); 195 | return; 196 | } 197 | 198 | const extractedJsonData = {}; 199 | try { 200 | extractedJsonData.data = jp.query(parsedJsonFile.data, jsonPath); 201 | } catch (error) { 202 | await this._setStateFromParsedJsonFileAsync({data: []}); 203 | return; 204 | } 205 | extractedJsonData.data = [].concat(...extractedJsonData.data); 206 | 207 | if (!(extractedJsonData.data || {}).length) { 208 | await this._setStateFromParsedJsonFileAsync({data: []}); 209 | return; 210 | } 211 | 212 | await this._setStateFromParsedJsonFileAsync(extractedJsonData); 213 | }; 214 | 215 | _setStateFromParsedJsonFileAsync = async (parsedJsonFile) => { 
216 | const {isFirstLineHeaders} = this.state; 217 | 218 | const parsedData = await parseJsonToHeadersValuesAsync(await filterJsonAsync(_.cloneDeep(parsedJsonFile))); 219 | const {parsedRecords, parsedHeaders} = this._getParsedRecordsAndParsedHeaders(parsedData, isFirstLineHeaders); 220 | 221 | this.setState( 222 | { 223 | parsedData, 224 | parsedHeaders: parsedHeaders || [], 225 | parsedRecords, 226 | }, 227 | this._computeDataDiffAsync, 228 | ); 229 | } 230 | 231 | 232 | _computeDataDiffAsync = async () => { 233 | const { 234 | fieldMappings, 235 | fieldIdsForMerging, 236 | shouldMergeDuplicates, 237 | parsedHeaders, 238 | parsedRecords, 239 | queryResult, 240 | } = this.state; 241 | const newDataDiffId = this._dataDiffId + 1; 242 | this._dataDiffId = newDataDiffId; 243 | 244 | // TODO: cancel existing load? Debounce? Memoize? 245 | this.setState({ 246 | dataDiff: null, 247 | isDataDiffReady: false, 248 | }); 249 | const newDataDiff = await computeDataDiffAsync( 250 | parsedRecords, 251 | fieldMappings, 252 | parsedHeaders, 253 | shouldMergeDuplicates ? fieldIdsForMerging : [], 254 | this.props.table, 255 | queryResult, 256 | ); 257 | 258 | // Store the load only if another load wasn't queued up in the meantime 259 | if (newDataDiffId === this._dataDiffId) { 260 | this.setState({ 261 | isDataDiffReady: true, 262 | dataDiff: newDataDiff, 263 | }); 264 | } 265 | return newDataDiff; 266 | }; 267 | 268 | // TODO: refactor this component to not use getDerivedStateFromProps 269 | static getDerivedStateFromProps(props, state) { 270 | const {fieldMappings, fieldIdsForMerging, status, queryResult} = state; 271 | const {table} = props; 272 | 273 | // Bail out early from getDerivedStateFromProps if an import is in progress. 
274 | // We do this to avoid recalculation of dataDiff (an expensive operation) as 275 | // records get created/updated 276 | // Note: getDerivedStateFromProps gets when status === ReviewStatuses.CREATING_RECORDS 277 | // because the `progress` state property gets updated during creation/update. 278 | if (status === ReviewStatuses.CREATING_RECORDS) { 279 | return null; 280 | } 281 | 282 | // TODO: Do we really want to do this filtering here? Can we just have the right listeners 283 | // and do the filtering then? In general, handling schema changes while the block is running 284 | // is super annoying to have to think about 285 | 286 | // Filter out possibly deleted or unsupported fields from the merge key and 287 | // mapping config keys 288 | const filteredFieldIdsForMerging = filterDeletedOrUnsupportedFieldIds( 289 | fieldIdsForMerging, 290 | table, 291 | ); 292 | 293 | const filteredFieldMappings = _.pick( 294 | fieldMappings, 295 | filterDeletedOrUnsupportedFieldIds(_.keys(fieldMappings), table), 296 | ); 297 | 298 | // if table is still not loaded, empty the `dataDiff` state variable to prevent 299 | // using stale data (eg, a diff from a previously selected table). 300 | return queryResult && queryResult.isDataLoaded 301 | ? { 302 | fieldMappings: filteredFieldMappings, 303 | fieldIdsForMerging: filteredFieldIdsForMerging, 304 | } 305 | : // I believe queryResult not being loaded implies that we're in the middle of _loadTableDataAsync and will 306 | // therefore call _computeDataDiffAsync in the setState callback when we finish awaiting the queryResult 307 | // loading, so we should not get stuck with no dataDiff. 308 | { 309 | isDataDiffReady: false, 310 | dataDiff: null, 311 | }; 312 | } 313 | 314 | _onTableFieldsChange() { 315 | // Recompute the dataDiff, since our fields changed. 
316 | this._computeDataDiffAsync(); 317 | } 318 | 319 | _onFieldConfigChange(field) { 320 | const {fieldMappings} = this.state; 321 | const {table} = this.props; 322 | 323 | const fieldMapping = fieldMappings[field.id]; 324 | if (fieldMapping && fieldMapping.isEnabled) { 325 | if (isFieldValid(field, getLinkedTablesPrimaryFieldTypesByTableId(table))) { 326 | if (fieldMapping.parsedIndex !== null) { 327 | // only need to recompute dataDiff if the field is actually enabled 328 | this._computeDataDiffAsync(); 329 | } 330 | } else { 331 | // Field type has become invalid, so disable the mapping altogether. 332 | const newFieldMappings = { 333 | ...fieldMappings, 334 | [field.id]: { 335 | isEnabled: false, 336 | parsedIndex: null, 337 | }, 338 | }; 339 | 340 | this.setState( 341 | { 342 | fieldMappings: newFieldMappings, 343 | }, 344 | this._computeDataDiffAsync, 345 | ); 346 | } 347 | } 348 | } 349 | 350 | _onClose = () => { 351 | const settings = { 352 | ...this.state, 353 | table: this.props.table, 354 | }; 355 | this.props.onClose(settings); 356 | }; 357 | 358 | _onCloseModal = () => { 359 | this.setState({ 360 | status: ReviewStatuses.REVIEW, 361 | }); 362 | }; 363 | 364 | _getDefaultHeaders() { 365 | const {parsedData} = this.state; 366 | const {data} = parsedData; 367 | 368 | return _.range(data[0].length).map(index => `Column ${index + 1}`); 369 | } 370 | 371 | _validate() { 372 | const {table} = this.props; 373 | if (!table) { 374 | return { 375 | isValid: false, 376 | message: 'Pick a table', 377 | }; 378 | } 379 | 380 | const {fieldMappings, fieldIdsForMerging, shouldMergeDuplicates} = this.state; 381 | const enabledFieldMappings = _.filter(_.values(fieldMappings), _.property('isEnabled')); 382 | 383 | if ( 384 | enabledFieldMappings.length === 0 || 385 | _.every(enabledFieldMappings, fieldMapping => fieldMapping.parsedIndex === null) 386 | ) { 387 | return { 388 | isValid: false, 389 | message: 'Map at least one JSON column to a field', 390 | }; 391 | } 392 
| 393 | if (shouldMergeDuplicates && fieldIdsForMerging.length === 0) { 394 | return { 395 | isValid: false, 396 | message: 'Choose a field to match existing records for merging', 397 | }; 398 | } 399 | 400 | if ( 401 | shouldMergeDuplicates && 402 | !_.every(fieldIdsForMerging, fieldId => { 403 | const fieldMapping = fieldMappings[fieldId]; 404 | return fieldMapping && fieldMapping.isEnabled && fieldMapping.parsedIndex !== null; 405 | }) 406 | ) { 407 | return { 408 | isValid: false, 409 | message: 'Map the merge field to a JSON column', 410 | }; 411 | } 412 | 413 | for (const field of table.fields) { 414 | const fieldMapping = fieldMappings[field.id]; 415 | if (fieldMapping && fieldMapping.isEnabled && fieldMapping.parsedIndex === null) { 416 | return { 417 | isValid: false, 418 | message: `Map a JSON column to the "${field.name}" field for merging`, 419 | }; 420 | } 421 | } 422 | 423 | // This needs to be the last check in this method: we need to validate field mappings 424 | // before checking field and table locks 425 | if (!this._canUserPerformImport()) { 426 | return { 427 | isValid: false, 428 | message: "You don't have permissions to import to the selected fields", 429 | }; 430 | } 431 | 432 | return {isValid: true}; 433 | } 434 | 435 | _onCreateWithFailedToMapValues = () => { 436 | this.setState({ 437 | status: ReviewStatuses.FAILED_TO_MAP_VALUES, 438 | }); 439 | }; 440 | 441 | async _loadTableDataAsync(table) { 442 | const {queryResult} = this.state; 443 | 444 | if (queryResult) { 445 | queryResult.unloadData(); 446 | } 447 | const newQueryResult = table.selectRecords(); 448 | await newQueryResult.loadDataAsync(); 449 | this.setState( 450 | { 451 | queryResult: newQueryResult, 452 | dataDiff: null, 453 | isDataDiffReady: false, 454 | }, 455 | this._computeDataDiffAsync, 456 | ); 457 | } 458 | 459 | // All callers should check that the dataDiff is ready before calling this method. 
460 | _createOrUpdateRecordsAsync = async () => { 461 | const {dataDiff, isDataDiffReady} = this.state; 462 | if (!dataDiff || !isDataDiffReady) { 463 | throw spawnError('Data diff is not ready'); 464 | } 465 | 466 | const startTime = Date.now(); 467 | await setStateAsync(this, { 468 | status: ReviewStatuses.CREATING_RECORDS, 469 | progress: 0, 470 | }); 471 | 472 | const {table} = this.props; 473 | if (!table) { 474 | throw spawnError('Cannot create records without a table'); 475 | } 476 | 477 | const {queryResult} = this.state; 478 | if (!queryResult.isDataLoaded) { 479 | await queryResult.loadDataAsync(); 480 | } 481 | 482 | // make sure there is still a valid number of records remaining in the table to create all the records from the parsed 483 | // TODO(ben): Use recordLimit when it's back in the sdk 484 | const remainingRecords = MAX_ROWS_PER_TABLE - queryResult.records.length; 485 | if (remainingRecords < dataDiff.recordDefsToCreate.length) { 486 | this.props.onError( 487 | `The JSON file has too many records to import. You can import at most ${remainingRecords} more record${ 488 | remainingRecords === 1 ? '' : 's' 489 | }.`, 490 | ); 491 | return; 492 | } 493 | 494 | await createOrUpdateRecordsAsync( 495 | { 496 | table, 497 | diff: dataDiff, 498 | }, 499 | this._onProgress, 500 | ); 501 | 502 | await setStateAsync(this, {status: ReviewStatuses.SUCCESS}); 503 | 504 | const durationMs = Date.now() - startTime; 505 | this._trackImport(durationMs); 506 | 507 | await setTimeoutAsync(1500); 508 | 509 | if (viewport.isFullscreen) { 510 | viewport.exitFullscreen(); 511 | } 512 | 513 | this._onClose(); 514 | }; 515 | 516 | _trackImport(durationMs) { 517 | const fieldMappings = this.state.table 518 | ? 
this.props.settingsStore.getFieldMappingsForTableId(this.state.table.id) 519 | : {}; 520 | const numFieldMappings = Object.values(fieldMappings).filter( 521 | fieldMapping => fieldMapping.isEnabled, 522 | ).length; 523 | 524 | trackEvent('blockInstallation.jsonImport.import', { 525 | isMerging: this.state.shouldMergeDuplicates, 526 | durationMs, 527 | isFirstLineHeaders: this.state.isFirstLineHeaders, 528 | numFieldMappings, 529 | }); 530 | } 531 | 532 | _onProgress = (numRecordsTouched, numRecordsToBeTouched) => { 533 | const progress = numRecordsTouched / numRecordsToBeTouched; 534 | this.setState({progress}); 535 | }; 536 | 537 | _onTableChange = newTable => { 538 | this.props.onTableChanged(newTable); 539 | 540 | const {data} = this.state.parsedData; 541 | const parsedFirstLine = data[0]; 542 | 543 | const fieldMappingsMatchingHeaders = getFieldMappingsMatchingHeaders({ 544 | parsedHeaders: parsedFirstLine, 545 | table: newTable, 546 | }); 547 | const foundFieldNamesWithHeaders = _.keys(fieldMappingsMatchingHeaders).length > 0; 548 | 549 | const fieldMappings = this.props.settingsStore.getFieldMappingsForTableId(newTable.id); 550 | const fieldIdsForMerging = this.props.settingsStore.getMergeFieldIdsForTableId(newTable.id); 551 | 552 | let isFirstLineHeaders; 553 | if (foundFieldNamesWithHeaders) { 554 | _.each(fieldMappingsMatchingHeaders, (fieldMapping, fieldId) => { 555 | if (!fieldMappings.hasOwnProperty(fieldId)) { 556 | fieldMappings[fieldId] = fieldMapping; 557 | } 558 | }); 559 | isFirstLineHeaders = true; 560 | } else { 561 | isFirstLineHeaders = this.state.isFirstLineHeaders; 562 | } 563 | 564 | const {parsedRecords, parsedHeaders} = this._getParsedRecordsAndParsedHeaders(this.state.parsedData, isFirstLineHeaders); 565 | 566 | this.setState( 567 | { 568 | fieldMappings, 569 | isFirstLineHeaders, 570 | fieldIdsForMerging, 571 | parsedRecords, 572 | parsedHeaders, 573 | dataDiff: null, 574 | isDataDiffReady: false, 575 | }, 576 | () => { 577 | 
this._loadTableDataAsync(newTable); 578 | }, 579 | ); 580 | }; 581 | 582 | _getParsedRecordsAndParsedHeaders(parsedData, isFirstLineHeaders) { 583 | const {data} = parsedData; 584 | 585 | let parsedRecords; 586 | let parsedHeaders; 587 | if (isFirstLineHeaders) { 588 | parsedRecords = data.slice(); 589 | parsedHeaders = parsedRecords.shift(1); 590 | } else { 591 | parsedRecords = data; 592 | parsedHeaders = this._getDefaultHeaders(); 593 | } 594 | 595 | return {parsedRecords, parsedHeaders}; 596 | } 597 | 598 | _onMergeDuplicatesToggleChange = shouldMergeDuplicates => { 599 | this.setState( 600 | { 601 | shouldMergeDuplicates, 602 | }, 603 | this._computeDataDiffAsync, 604 | ); 605 | }; 606 | 607 | _onJsonPathChange = e => { 608 | this.setState( 609 | { 610 | jsonPath: (e.target || {}).value, 611 | }, 612 | this._computeJsonPathAsync, 613 | ); 614 | }; 615 | 616 | _onSelectedKeyForMergingChange = (keyIndex, fieldId) => { 617 | const {fieldIdsForMerging} = this.state; 618 | 619 | // TODO: don't mutate state directly 620 | fieldIdsForMerging[keyIndex] = fieldId; 621 | this.setState( 622 | { 623 | fieldIdsForMerging: fieldIdsForMerging, 624 | }, 625 | this._computeDataDiffAsync, 626 | ); 627 | if (this.recordPreviewListRef.current) { 628 | this.recordPreviewListRef.current.recomputeRowHeights(); 629 | } 630 | }; 631 | 632 | _onFieldMappingToggle = fieldId => { 633 | const {fieldMappings} = this.state; 634 | 635 | const newFieldMappings = { 636 | ...fieldMappings, 637 | }; 638 | const fieldMapping = newFieldMappings[fieldId] || { 639 | isEnabled: false, 640 | parsedIndex: null, 641 | }; 642 | const isEnabled = !fieldMapping.isEnabled; 643 | fieldMapping.isEnabled = isEnabled; 644 | newFieldMappings[fieldId] = fieldMapping; 645 | 646 | this.setState( 647 | { 648 | fieldMappings: newFieldMappings, 649 | }, 650 | this._computeDataDiffAsync, 651 | ); 652 | }; 653 | 654 | _onFieldMappingChange = (fieldId, newIndex) => { 655 | const {fieldMappings} = this.state; 656 | 657 | 
const fieldMapping = { 658 | ...fieldMappings[fieldId], 659 | }; 660 | fieldMapping.parsedIndex = newIndex; 661 | 662 | this.setState( 663 | { 664 | fieldMappings: { 665 | ...fieldMappings, 666 | [fieldId]: fieldMapping, 667 | }, 668 | }, 669 | this._computeDataDiffAsync, 670 | ); 671 | }; 672 | 673 | _canUserPerformImport() { 674 | const {dataDiff: diff, isDataDiffReady} = this.state; 675 | if (!isDataDiffReady || !diff) { 676 | return true; 677 | } 678 | const {table} = this.props; 679 | // TODO: ideally we wouldn't need to do the below map. Hopefully a new sdk will change this api 680 | const rv = 681 | table.hasPermissionToCreateRecords( 682 | _.map(diff.recordDefsToCreate, recordDef => ({fields: recordDef})), 683 | ) && table.hasPermissionToUpdateRecords(diff.recordDefsToUpdate); 684 | return rv; 685 | } 686 | 687 | _renderFieldMappings() { 688 | const {table} = this.props; 689 | if (!table) { 690 | return null; 691 | } 692 | 693 | const {fieldMappings, parsedHeaders} = this.state; 694 | 695 | const supportedFields = []; 696 | const unsupportedFields = []; 697 | for (const field of table.fields) { 698 | if (_.includes(supportedFieldTypes, field.type)) { 699 | supportedFields.push(field); 700 | } else { 701 | unsupportedFields.push(field); 702 | } 703 | } 704 | 705 | const fieldMappingElements = [...supportedFields, ...unsupportedFields].map(field => { 706 | const {isEnabled, parsedIndex} = fieldMappings[field.id] || { 707 | isEnabled: false, 708 | parsedIndex: null, 709 | }; 710 | 711 | return ( 712 | 718 | this._onFieldMappingChange(field.id, newHeaderIndex) 719 | } 720 | isEnabled={isEnabled} 721 | onToggle={() => this._onFieldMappingToggle(field.id)} 722 | isFieldTypeSupported={isFieldValid( 723 | field, 724 | getLinkedTablesPrimaryFieldTypesByTableId(table), 725 | )} 726 | /> 727 | ); 728 | }); 729 | 730 | return ( 731 |
732 |
Field mappings
733 |
{fieldMappingElements}
734 |
735 | ); 736 | } 737 | 738 | _renderJsonPathRootObjectNoArrayMessage() { 739 | return ( 740 |
741 |

Select a different file or Set up JSONPath

742 |
743 | The selected file does not contain a top-level Array. Select a different file or use JSONPath. 744 |
745 | jsonpath.com 746 |
747 | ); 748 | } 749 | 750 | _renderJsonPathNoMatchMessage() { 751 | return ( 752 |
753 |

Set up JSONPath

754 |
755 | JSONPath expression has no matches. 756 |
757 | jsonpath.com 758 |
759 | ); 760 | } 761 | 762 | _renderAllFieldMappingsDisabledMessage() { 763 | return ( 764 |
765 |

Set up field mappings

766 |
767 | Choose how you'd like to map each JSON column to the fields in the " 768 | {this.props.table.name}" table. 769 |
770 |
771 | ); 772 | } 773 | 774 | _renderRightPane() { 775 | const {table} = this.props; 776 | if (!table) { 777 | return null; 778 | } 779 | 780 | const validationResult = this._validate(); 781 | const isConfigValid = validationResult.isValid; 782 | const {fieldMappings, queryResult, jsonPath, parsedHeaders} = this.state; 783 | const areAllFieldMappingsDisabled = _.every( 784 | fieldMappings, 785 | fieldMapping => 786 | !fieldMapping || fieldMapping.parsedIndex === null || !fieldMapping.isEnabled, 787 | ); 788 | const isJsonPathNoMatch = !(!jsonPath || jsonPath === '') && !(parsedHeaders && parsedHeaders.length); 789 | const isJsonPathNoArray = (!jsonPath || jsonPath === '') && !(parsedHeaders && parsedHeaders.length); 790 | 791 | let rightPane; 792 | let containerClasses = 'flex-auto flex items-center justify-center p2 huge quiet'; 793 | 794 | if (this.state.status === ReviewStatuses.CREATING_RECORDS) { 795 | rightPane =

Saving records…

; 796 | } else if (!this.state.isDataDiffReady) { 797 | rightPane =

Loading…

; 798 | } else if (isConfigValid && queryResult && (parsedHeaders && parsedHeaders.length) && !isJsonPathNoArray) { 799 | containerClasses = 'flex-auto'; 800 | 801 | rightPane = ( 802 | 809 | ); 810 | } else if (areAllFieldMappingsDisabled) { 811 | rightPane = this._renderAllFieldMappingsDisabledMessage(); 812 | } else if (isJsonPathNoMatch) { 813 | rightPane = this._renderJsonPathNoMatchMessage(); 814 | } else if (isJsonPathNoArray) { 815 | rightPane = this._renderJsonPathRootObjectNoArrayMessage(); 816 | } else { 817 | rightPane =

{validationResult.message}

; 818 | } 819 | 820 | return
{rightPane}
; 821 | } 822 | 823 | _renderFieldMappingsAndRecordPreviewsForTable() { 824 | const {table} = this.props; 825 | const {shouldMergeDuplicates} = this.state; 826 | const sideBarWidth = viewport.size.width * 0.4; 827 | 828 | return ( 829 |
830 |
834 |
Table
835 | 840 |
841 |
842 | Options 843 |
844 | 849 | {shouldMergeDuplicates && ( 850 |
851 |
852 | JSON rows will be merged if they match the following field: 853 |
854 | 904 | 905 |
906 |
{this._renderFieldMappings()}
907 |
908 | {this._renderRightPane()} 909 |
910 | ); 911 | } 912 | 913 | _getStatusBarText() { 914 | let statusBarText = ''; 915 | 916 | const {isDataDiffReady, dataDiff, queryResult} = this.state; 917 | if (!isDataDiffReady || !dataDiff || !queryResult || !queryResult.isDataLoaded) { 918 | return null; 919 | } 920 | 921 | // TODO(ben): Use recordLimit when it's back in the sdk 922 | const remainingRecords = MAX_ROWS_PER_TABLE - queryResult.records.length; 923 | if (dataDiff.recordDefsToCreate.length > remainingRecords) { 924 | return 'Table record limit exceeded.'; 925 | } 926 | 927 | if (dataDiff.recordDefsToCreate.length > 0) { 928 | statusBarText += `${dataDiff.recordDefsToCreate.length} record${ 929 | dataDiff.recordDefsToCreate.length > 1 ? 's' : '' 930 | } will be created.`; 931 | } 932 | 933 | const updatesCount = dataDiff.recordDefsToUpdate.length; 934 | if (updatesCount > 0) { 935 | statusBarText += ` ${updatesCount} record${ 936 | updatesCount > 1 ? 's' : '' 937 | } will be updated.`; 938 | } 939 | 940 | const unchangedCount = _.size(dataDiff.unchangedRecordsById); 941 | if (unchangedCount > 0) { 942 | statusBarText += ` ${unchangedCount} record${ 943 | unchangedCount > 1 ? 's' : '' 944 | } didn't change.`; 945 | } 946 | 947 | return statusBarText; 948 | } 949 | 950 | _renderBottomBar() { 951 | const {isDataDiffReady, dataDiff, queryResult} = this.state; 952 | if (!isDataDiffReady || !queryResult || !queryResult.isDataLoaded) { 953 | return null; 954 | } 955 | 956 | const willCreateOrUpdateRecords = dataDiff 957 | ? dataDiff.recordDefsToCreate.length > 0 || dataDiff.recordDefsToUpdate.length > 0 958 | : false; 959 | // TODO(ben): Use recordLimit when it's back in the sdk 960 | const remainingRecords = MAX_ROWS_PER_TABLE - queryResult.records.length; 961 | const tableRecordLimitExceeded = dataDiff 962 | ? 
remainingRecords < dataDiff.recordDefsToCreate.length 963 | : false; 964 | const validationResult = this._validate(); 965 | const failedToMapValues = _.flatten(_.values(dataDiff.failedToMapValuesByFieldId)); 966 | 967 | return ( 968 |
969 | {validationResult.isValid ? ( 970 |
{this._getStatusBarText()}
971 | ) : ( 972 |
973 | {' '} 978 | {validationResult.message} 979 |
980 | )} 981 |
982 | 985 | 1005 |
1006 |
1007 | ); 1008 | } 1009 | 1010 | _renderModal() { 1011 | const {status} = this.state; 1012 | const {table} = this.props; 1013 | 1014 | let modalContents; 1015 | switch (status) { 1016 | case ReviewStatuses.REVIEW: 1017 | return null; 1018 | 1019 | case ReviewStatuses.FAILED_TO_MAP_VALUES: { 1020 | const {isDataDiffReady, dataDiff} = this.state; 1021 | if (!isDataDiffReady) { 1022 | modalContents = ( 1023 |
1024 |

Loading…

1025 |
1026 | 1033 |
1034 |
1035 | ); 1036 | break; 1037 | } 1038 | 1039 | const {failedToMapValuesByFieldId} = dataDiff; 1040 | const failedToMapValues = _.flatten(_.values(failedToMapValuesByFieldId)); 1041 | 1042 | if (failedToMapValues.length > 0) { 1043 | modalContents = ( 1044 |
1045 |

1046 | The following values won't be imported: 1047 |

1048 |
1049 | All other values will be imported if you continue. 1050 |
1051 | {_.map(failedToMapValuesByFieldId, (values, fieldId) => { 1052 | if (values.length === 0) { 1053 | return null; 1054 | } else { 1055 | const field = table.getFieldByIdIfExists(fieldId); 1056 | const {helpMessage} = fieldConfigByType[field.type]; 1057 | return ( 1058 |
1059 |
{field.name}
1060 | {helpMessage && ( 1061 |
1062 | {helpMessage} 1063 |
1064 | )} 1065 |
1066 | {_.map(_.uniq(values), (value, index) => ( 1067 |
1071 | {value} 1072 |
1073 | ))} 1074 |
1075 |
1076 | ); 1077 | } 1078 | })} 1079 |
1080 | 1087 | 1095 |
1096 |
1097 | ); 1098 | } else { 1099 | modalContents = ( 1100 |
1101 |
All conversion issues fixed!
1102 |
1103 | 1110 | 1118 |
1119 |
1120 | ); 1121 | } 1122 | break; 1123 | } 1124 | 1125 | case ReviewStatuses.CREATING_RECORDS: 1126 | modalContents = ( 1127 |
1128 |
Saving records
1129 |
1130 | 1131 |
1132 | 1133 |
1134 | ); 1135 | break; 1136 | 1137 | case ReviewStatuses.SUCCESS: 1138 | modalContents = ( 1139 |
1140 |
Success!
1141 | 1146 |
1147 | ); 1148 | break; 1149 | 1150 | default: 1151 | throw spawnError('Unrecognized review status: ', status); 1152 | } 1153 | 1154 | return ( 1155 | 1161 | {modalContents} 1162 | 1163 | ); 1164 | } 1165 | 1166 | render() { 1167 | const {className, style} = this.props; 1168 | 1169 | return ( 1170 | 1171 |
1172 |
1173 | {this._renderFieldMappingsAndRecordPreviewsForTable()} 1174 |
1175 | {this._renderBottomBar()} 1176 | {this._renderModal()} 1177 |
1178 |
1179 | ); 1180 | } 1181 | } 1182 | 1183 | export default withHooks(ParsedDataReview, (props: ParsedDataReviewProps) => { 1184 | const instanceRef = useRef(); 1185 | 1186 | // We need to useBase to get field lock changes, which also triggers for the specific field 1187 | // watches. Should we just get rid of them? This also triggers on linked record field changes 1188 | useBase(); 1189 | useViewport(); 1190 | 1191 | useWatchable(props.table, 'fields', () => { 1192 | instanceRef.current._onTableFieldsChange(); 1193 | }); 1194 | 1195 | useWatchable(props.table.fields, ['type', 'options'], field => { 1196 | instanceRef.current._onFieldConfigChange(field); 1197 | }); 1198 | 1199 | return { 1200 | ref: instanceRef, 1201 | }; 1202 | }); 1203 | --------------------------------------------------------------------------------