├── .gitignore ├── Dockerfile ├── README.md ├── data └── readme.md ├── docker-compose.yml ├── package.json ├── src ├── notifications.ts ├── openai.ts ├── scraper.ts └── storage.ts ├── template.database.env ├── template.env ├── template.phpmyadmin.env └── tsconfig.json /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # Because the project will be running in docker and local desktop 45 | package-lock.json 46 | 47 | # TypeScript v1 declaration files 48 | typings/ 49 | 50 | # TypeScript cache 51 | *.tsbuildinfo 52 | 53 | # Optional npm cache directory 54 | .npm 55 | 56 | # Optional eslint cache 57 | .eslintcache 58 | 59 | # Microbundle cache 60 | .rpt2_cache/ 61 | .rts2_cache_cjs/ 62 | .rts2_cache_es/ 63 | .rts2_cache_umd/ 64 | 65 | # Optional REPL history 66 | .node_repl_history 67 | 68 | # Output of 'npm pack' 69 | *.tgz 70 | 71 | # Yarn Integrity file 72 | .yarn-integrity 73 | 74 | # dotenv environment variables file 75 | .env 76 | database.env 77 | phpmyadmin.env 78 | .env.test 79 | 80 | # parcel-bundler 
cache (https://parceljs.org/) 81 | .cache 82 | 83 | # Next.js build output 84 | .next 85 | 86 | # Nuxt.js build / generate output 87 | .nuxt 88 | dist 89 | 90 | # Gatsby files 91 | .cache/ 92 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 93 | # https://nextjs.org/blog/next-9-1#public-directory-support 94 | # public 95 | 96 | # vuepress build output 97 | .vuepress/dist 98 | 99 | # Serverless directories 100 | .serverless/ 101 | 102 | # FuseBox cache 103 | .fusebox/ 104 | 105 | # DynamoDB Local files 106 | .dynamodb/ 107 | 108 | # TernJS port file 109 | .tern-port 110 | 111 | # Log 112 | log/* -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:16-slim 2 | 3 | # Install latest chrome dev package and fonts to support major charsets (Chinese, Japanese, Arabic, Hebrew, Thai and a few others) 4 | # Note: this installs the necessary libs to make the bundled version of Chromium that Puppeteer 5 | # installs, work. 
6 | RUN apt-get update \ 7 | && apt-get install -y wget gnupg \ 8 | && wget -q -O - https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - \ 9 | && sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' \ 10 | && apt-get update \ 11 | && apt-get install -y google-chrome-stable fonts-ipafont-gothic fonts-wqy-zenhei fonts-thai-tlwg fonts-kacst fonts-freefont-ttf libxss1 \ 12 | --no-install-recommends \ 13 | && rm -rf /var/lib/apt/lists/* 14 | 15 | # If running Docker >= 1.13.0 use docker run's --init arg to reap zombie processes, otherwise 16 | # uncomment the following lines to have `dumb-init` as PID 1 17 | # ADD https://github.com/Yelp/dumb-init/releases/download/v1.2.2/dumb-init_1.2.2_x86_64 /usr/local/bin/dumb-init 18 | # RUN chmod +x /usr/local/bin/dumb-init 19 | # ENTRYPOINT ["dumb-init", "--"] 20 | 21 | # Uncomment to skip the chromium download when installing puppeteer. If you do, 22 | # you'll need to launch puppeteer with: 23 | # browser.launch({executablePath: 'google-chrome-stable'}) 24 | # ENV PUPPETEER_SKIP_DOWNLOAD true 25 | 26 | # Install puppeteer so it's available in the container. 27 | # RUN npm init -y && \ 28 | # npm i puppeteer \ 29 | # # Add user so we don't need --no-sandbox. 30 | # # same layer as npm install to keep re-chowned files from using up several hundred MBs more space 31 | # && groupadd -r pptruser && useradd -r -g pptruser -G audio,video pptruser \ 32 | # && mkdir -p /home/pptruser/Downloads \ 33 | # && chown -R pptruser:pptruser /home/pptruser \ 34 | # && chown -R pptruser:pptruser /node_modules \ 35 | # && chown -R pptruser:pptruser /package.json \ 36 | # && chown -R pptruser:pptruser /package-lock.json 37 | 38 | WORKDIR /usr/src/app 39 | COPY package*.json ./ 40 | RUN npm install 41 | RUN npm ci --only=production 42 | COPY . . 43 | 44 | # Run everything after as non-privileged user. 
45 | # USER pptruser 46 | 47 | ENV NODE_ENV=production 48 | 49 | RUN npm run compile 50 | 51 | CMD [ "npm", "run", "start:prod" ] -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Advanced OpenAi TypeScript Puppeteer Web Scraper with MySQL Integration 2 | 3 | This advanced TypeScript Puppeteer web scraper template offers a comprehensive solution for web scraping tasks, integrating Puppeteer with MySQL database and incorporating various Puppeteer plugins for enhanced functionality. Tailored for both development and production environments, this template extends beyond basic web scraping by offering features like automated scheduling, headless browser operation, and advanced error handling. It is perfect for developers seeking a robust and scalable web scraping setup. 4 | 5 | ## Features 6 | 7 | - **Puppeteer Plugins Integration**: Includes plugins like `puppeteer-extra-plugin-anonymize-ua`, `puppeteer-extra-plugin-adblocker`, `puppeteer-extra-plugin-recaptcha`, and `puppeteer-extra-plugin-stealth` for enhanced scraping capabilities. 8 | - **Automated Scheduling**: Utilizes `node-cron` for scheduling scraping tasks, customizable for different intervals. 9 | - **Environment-Specific Configuration**: Leverages `.env` files for differentiating between development and production environments. 10 | - **MySQL Database Integration**: Features integration with MySQL using a connection pool for efficient data handling. 11 | - **Error Handling and Debugging**: Advanced error handling with screenshot capabilities for debugging, along with options to open devtools and slow down Puppeteer operations for detailed inspection. 12 | - **Automated Deployment**: Includes a docker-compose file for automated deployment of the scraper. This will automatically build the scraper, a MySQL database, and a phpMyAdmin instance for database management. 
13 | 14 | ## Getting Started 15 | 16 | ### Prerequisites 17 | 18 | - Node.js installed on your system 19 | - MySQL database setup 20 | - Yarn or npm for dependency management 21 | 22 | ### Installation 23 | 24 | 1. Clone the repository or use the "Use this template" button on GitHub. 25 | 2. Install the dependencies: 26 | 27 | ```sh 28 | yarn install 29 | # or 30 | npm install 31 | ``` 32 | 33 | ### Configuration 34 | 35 | 1. Create the three env files `.env`, `database.env` and `phpmyadmin.env` in the root directory. 36 | 2. Add the necessary environment variables (as declared in the `template.*.env` files) to the `.env` files or environment variables. 37 | 38 | ### Local Usage 39 | 40 | - Compile the scraper: 41 | 42 | ```sh 43 | npm run compile 44 | # or 45 | npm run dev-compile # for continuous compilation 46 | ``` 47 | 48 | - Run the scraper: 49 | 50 | ```sh 51 | yarn start 52 | # or 53 | npm start 54 | ``` 55 | 56 | ### Docker Usage 57 | 58 | - Build the scraper, MySQL database, and phpMyAdmin instance: 59 | 60 | ```sh 61 | docker-compose up 62 | ``` 63 | Make sure to add the necessary environment variables to the `database.env` and `phpmyadmin.env` files. 64 | 65 | ## TypeScript and Puppeteer Integration 66 | 67 | - **TypeScript Support**: Fully supported with TypeScript for type safety and easier code management. 68 | - **Puppeteer**: Control headless Chrome or Chromium for web page navigation, interaction, and data extraction. 69 | 70 | ## Customizing the Scraper 71 | 72 | You can modify the `scrape` function in the `scraper.ts` file to add your custom scraping logic and interact with the MySQL database. 73 | 74 | ## Contributing 75 | 76 | Contributions are welcome! If you have suggestions for improvement or encounter any issues, feel free to open an issue or submit a pull request. 
77 | 78 | --- 79 | 80 | This template provides a solid foundation for building sophisticated web scrapers with TypeScript and Puppeteer, optimized for both development and production use. Enjoy your scraping journey! -------------------------------------------------------------------------------- /data/readme.md: -------------------------------------------------------------------------------- 1 | This folder is mounted inside a volume, so it will be persisted even if the container is removed. This is useful for storing data 2 | that you want to keep between container restarts but don't want to store in the database. -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | 3 | volumes: 4 | scraper-db-data: 5 | scraper-data: 6 | 7 | services: 8 | scraper: 9 | container_name: scraper 10 | build: ./ 11 | depends_on: 12 | - db 13 | env_file: 14 | - .env 15 | volumes: 16 | - scraper-data:/usr/src/app/data 17 | 18 | phpmyadmin: 19 | container_name: phpmyadmin 20 | image: phpmyadmin/phpmyadmin 21 | depends_on: 22 | - db 23 | ports: 24 | - 9999:80 25 | expose: 26 | - 9999 27 | restart: always 28 | env_file: 29 | - database.env 30 | - phpmyadmin.env 31 | 32 | db: 33 | container_name: scraper-db 34 | image: mysql:latest 35 | command: 36 | --default-authentication-plugin=mysql_native_password 37 | env_file: 38 | - database.env 39 | volumes: 40 | - scraper-db-data:/var/lib/mysql 41 | ports: 42 | - 7706:3306 43 | expose: 44 | - 7706 -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "scraper-template", 3 | "version": "1.1.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "compile": "tsc", 8 | "dev-compile": "tsc -w", 9 | "start": "node ./dist/scraper.js", 10 | 
"start:prod": "node ./dist/scraper.js --mode production" 11 | }, 12 | "repository": { 13 | "type": "git", 14 | "url": "git+https://github.com/NoxelS/ultimate-scraper.git" 15 | }, 16 | "author": "", 17 | "license": "ISC", 18 | "bugs": { 19 | "url": "https://github.com/NoxelS/ultimate-scraper/issues" 20 | }, 21 | "homepage": "https://github.com/NoxelS/ultimate-scraper#readme", 22 | "dependencies": { 23 | "@types/mysql": "^2.15.15", 24 | "@types/node": "^14.6.0", 25 | "@types/node-cron": "^2.0.3", 26 | "@types/nodemailer": "^6.4.0", 27 | "@types/puppeteer": "^3.0.1", 28 | "dotenv": "^8.2.0", 29 | "gpt-3-encoder": "^1.1.4", 30 | "mysql": "^2.18.1", 31 | "node-cron": "^3.0.2", 32 | "nodemailer": "^6.9.1", 33 | "openai": "^3.2.1", 34 | "puppeteer": "^21.10.0", 35 | "puppeteer-extra": "^3.3.6", 36 | "puppeteer-extra-plugin-adblocker": "^2.13.6", 37 | "puppeteer-extra-plugin-anonymize-ua": "^2.4.6", 38 | "puppeteer-extra-plugin-recaptcha": "^3.6.8", 39 | "puppeteer-extra-plugin-stealth": "^2.11.2", 40 | "typescript": "^4.0.2" 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/notifications.ts: -------------------------------------------------------------------------------- 1 | import { createTransport } from 'nodemailer'; 2 | 3 | 4 | export async function sendEmail() { 5 | const transporter = createTransport({ 6 | host: process.env.MAIL_HOST, 7 | port: Number(process.env.MAIL_PORT), 8 | secure: !!process.env.MAIL_SECURE, 9 | auth: { 10 | user: process.env.MAIL_USER, 11 | pass: process.env.MAIL_PASSWORD 12 | } 13 | }); 14 | 15 | const info = await transporter.sendMail({ 16 | from: `"..." 
<${process.env.MAIL_USER}>`, 17 | to: process.env.MAIL_TARGET, 18 | subject: process.env.MAIL_SUBJECT, 19 | html: `...` 20 | }); 21 | 22 | await transporter.close(); 23 | } 24 | -------------------------------------------------------------------------------- /src/openai.ts: -------------------------------------------------------------------------------- 1 | import { config } from 'dotenv'; 2 | import { encode } from 'gpt-3-encoder'; 3 | import { Configuration, OpenAIApi } from 'openai'; 4 | 5 | 6 | /** Only use .env files when running in dev mode */ 7 | if (!process.env.production) config(); 8 | 9 | /** Openai */ 10 | export const openaiConfiguration = new Configuration({ 11 | apiKey: process.env.OPENAI_API_KEY 12 | }); 13 | export const openai = new OpenAIApi(openaiConfiguration); 14 | 15 | /** Utility to manage tokens */ 16 | export const countTokens = (text: string | string[]) => { 17 | if (Array.isArray(text)) return encode(text.join(' ')).length; 18 | return encode(text).length; 19 | }; 20 | -------------------------------------------------------------------------------- /src/scraper.ts: -------------------------------------------------------------------------------- 1 | import { config } from 'dotenv'; 2 | import { createPool, Pool, PoolConfig } from 'mysql'; 3 | import { schedule } from 'node-cron'; 4 | import { PuppeteerLaunchOptions } from 'puppeteer'; 5 | import Puppeteer from 'puppeteer-extra'; 6 | 7 | import { query } from './storage'; 8 | 9 | 10 | /** Only use .env files when running in dev mode */ 11 | const isProduction = process.env.production?.toString() === 'true' || process.env.NODE_ENV === 'production'; 12 | if (!process.env.production) config(); 13 | 14 | /** Additional Puppeteer options and plugins */ 15 | const AnonymizeUAPlugin = require('puppeteer-extra-plugin-anonymize-ua'); // Add anonymize user agent plugin (changes user agent to a random one) 16 | const AdblockerPlugin = require('puppeteer-extra-plugin-adblocker'); // Add adblocker 
plugin to block all ads and trackers (saves bandwidth) 17 | const RecaptchaPlugin = require('puppeteer-extra-plugin-recaptcha'); // Add recaptcha plugin (solves recaptchas automagically) 18 | const StealthPlugin = require('puppeteer-extra-plugin-stealth'); // Add stealth plugin and use defaults (all tricks to hide puppeteer usage) 19 | 20 | Puppeteer.use(RecaptchaPlugin({provider: {id: '2captcha',token: process.env.TWO_CAPTCHA_API_KEY }, visualFeedback: true })) 21 | Puppeteer.use(AnonymizeUAPlugin({ makeWindows: true, stripHeadless: true })) 22 | Puppeteer.use(AdblockerPlugin({ blockTrackers: true })); 23 | Puppeteer.use(StealthPlugin()); 24 | 25 | /** Launch options */ 26 | const launchOptions: PuppeteerLaunchOptions = { 27 | headless: isProduction, // Run headless in production mode 28 | args: [ 29 | '--disable-gpu', '--disable-dev-shm-usage', '--disable-setuid-sandbox', '--no-sandbox', 30 | '--window-size=1920,1080', /* '--window-position=1920,0' */ // Activate this if you want to have the browser window on a second screen 31 | ], 32 | ignoreHTTPSErrors: true, // Ignore HTTPS errors 33 | devtools: !isProduction, // Open devtools in development mode 34 | slowMo: 0, // Slow down puppeteer operations by X milliseconds (useful for debugging) 35 | timeout: 0 // Disable timeouts 36 | } 37 | 38 | 39 | /** Url to scrape */ 40 | export const url = 'https://bot.sannysoft.com/'; // Example url to test the scraper's fingerprint 41 | 42 | /** 43 | * @param pool - MySQL connection pool (could also be made global) 44 | * @description This is the main scraping function. It will be called by the scheduler. 45 | * Make sure to garabage collect the browser instance if running on some 46 | * complicated server structure. The Dockerfile for this project is a good 47 | * example of how to do this. 
48 | */ 49 | async function scrape(pool: Pool) { 50 | const browser = await Puppeteer.launch(launchOptions); 51 | const page = await browser.newPage(); 52 | await page.goto(url); 53 | 54 | /** Examples */ 55 | 56 | const documentItems: unknown[] = await page.evaluate(() => { 57 | /** Do stuff with document and retrieve some HTMLElements */ 58 | return []; 59 | }); 60 | 61 | /** 62 | * You could take screenshots when facing a document error. 63 | * This is especially useful if you scrape a lot of pages and 64 | * debugging is hard. 65 | */ 66 | try { 67 | // Do stuff ... 68 | throw new Error('Error while scraping...'); 69 | } catch (error) { 70 | if (error instanceof Error) { 71 | /** Save a screenshot if possible */ 72 | try { await page.screenshot({ path: `data/err-${new Date().getTime()}.png` }) } catch (error) { } 73 | console.error(error.message); 74 | } 75 | } 76 | 77 | await browser.close(); 78 | } 79 | 80 | /** Create MySQL connection pool so we can reuse connections */ 81 | const pool: Pool = createPool({ 82 | host: process.env.DB_HOST, 83 | user: process.env.DB_USER, 84 | password: process.env.DB_PASSWORD, 85 | database: process.env.DB_DATABASE, 86 | port: process.env.DB_PORT 87 | }); 88 | 89 | /* Test connection */ 90 | query('SHOW TABLES FROM data;', [], (e, r) => {console.log(e ? e : `You have the following tables: ${r[0]}`);}, pool); 91 | 92 | /* 93 | * Scrape every 30 minutes if production mode is enabled or once 94 | * if not. 95 | * (https://crontab.guru is your best friend) 96 | */ 97 | const interval = isProduction ? '*/30 * * * *' : '* * * * *'; 98 | console.log(`Scraping ${isProduction ? 'every 30 minutes' : 'once'} in ${isProduction ?
'production' : 'dev'} mode.`); 99 | 100 | if (isProduction) schedule(interval, () => scrape(pool)); 101 | else scrape(pool); -------------------------------------------------------------------------------- /src/storage.ts: -------------------------------------------------------------------------------- 1 | import { MysqlError, Pool } from 'mysql'; 2 | 3 | 4 | /** 5 | * @description Executes a query in a transaction 6 | * 7 | * @param query Query to execute 8 | * @param inputs Inputs to query 9 | * @param callback Callback function 10 | * @param pool Pool to use 11 | */ 12 | export function query(query: string, inputs: any[], callback: (error: MysqlError, result: any[]) => void, pool: Pool) : void { 13 | pool.getConnection(function (error, connection) { 14 | if (error) callback(error, []); 15 | connection.beginTransaction(function (error) { 16 | if (error) { 17 | // Transaction Error (Rollback and release connection) 18 | connection.rollback(function () { 19 | connection.destroy(); 20 | callback(error, []); 21 | }); 22 | } else { 23 | connection.query(query, inputs, function (error, results) { 24 | if (error) { 25 | // Query Error (Rollback and release connection) 26 | connection.rollback(function () { 27 | connection.destroy(); 28 | callback(error, []); 29 | }); 30 | } else { 31 | connection.commit(function (error) { 32 | if (error) { 33 | connection.rollback(function () { 34 | connection.destroy(); 35 | callback(error, []); 36 | }); 37 | } else { 38 | connection.destroy(); 39 | callback(error, results); 40 | } 41 | }); 42 | } 43 | }); 44 | } 45 | }); 46 | }); 47 | } -------------------------------------------------------------------------------- /template.database.env: -------------------------------------------------------------------------------- 1 | MYSQL_ROOT_PASSWORD= 2 | MYSQL_USER= 3 | MYSQL_PASSWORD= 4 | MYSQL_DATABASE= 5 | DB_PORT= -------------------------------------------------------------------------------- /template.env: 
-------------------------------------------------------------------------------- 1 | # Settings for database 2 | DB_HOST= 3 | DB_USER= 4 | DB_PASSWORD= 5 | DB_DATABASE= 6 | DB_PORT= 7 | 8 | # Settings for email notification 9 | MAIL_HOST= 10 | MAIL_PORT=465 11 | MAIL_SECURE=true 12 | MAIL_USER= 13 | MAIL_PASSWORD= 14 | MAIL_TARGET= 15 | MAIL_SUBJECT= 16 | 17 | # Openai api key (https://openai.com) 18 | OPENAI_API_KEY= 19 | 20 | # 2captcha api key (https://2captcha.com) 21 | TWO_CAPTCHA_API_KEY=<2captcha api key> 22 | 23 | -------------------------------------------------------------------------------- /template.phpmyadmin.env: -------------------------------------------------------------------------------- 1 | PMA_HOST= 2 | PMA_PORT= 3 | PMA_ARBITRARY= -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | /* Visit https://aka.ms/tsconfig.json to read more about this file */ 4 | 5 | "moduleResolution": "node", 6 | // "incremental": true, /* Enable incremental compilation */ 7 | "target": "es6" /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', or 'ESNEXT'. */, 8 | "module": "commonjs" /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. */, 9 | "lib": ["DOM", "ESNEXT"] /* Specify library files to be included in the compilation. */, 10 | "allowJs": true /* Allow javascript files to be compiled. */, 11 | // "checkJs": true, /* Report errors in .js files. */ 12 | // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */ 13 | // "declaration": true, /* Generates corresponding '.d.ts' file. */ 14 | // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. 
*/ 15 | "sourceMap": false /* Generates corresponding '.map' file. */, 16 | // "outFile": "./", /* Concatenate and emit output to single file. */ 17 | "outDir": "./dist" /* Redirect output structure to the directory. */, 18 | "rootDir": "./src" /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */, 19 | // "composite": true, /* Enable project compilation */ 20 | // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ 21 | // "removeComments": true, /* Do not emit comments to output. */ 22 | // "noEmit": true, /* Do not emit outputs. */ 23 | // "importHelpers": true, /* Import emit helpers from 'tslib'. */ 24 | // "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ 25 | // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ 26 | 27 | /* Strict Type-Checking Options */ 28 | "strict": true /* Enable all strict type-checking options. */, 29 | "noImplicitAny": false /* Raise error on expressions and declarations with an implied 'any' type. */, 30 | // "strictNullChecks": true, /* Enable strict null checks. */ 31 | "strictFunctionTypes": false, /* Enable strict checking of function types. */ 32 | // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. */ 33 | "strictPropertyInitialization": false, /* Enable strict checking of property initialization in classes. */ 34 | // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ 35 | // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ 36 | 37 | /* Additional Checks */ 38 | "noUnusedLocals": false, /* Report errors on unused locals. */ 39 | "noUnusedParameters": false, /* Report errors on unused parameters. 
*/ 40 | "noImplicitReturns": false, /* Report error when not all code paths in function return a value. */ 41 | // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ 42 | 43 | /* Module Resolution Options */ 44 | // "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ 45 | // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ 46 | // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ 47 | // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ 48 | // "typeRoots": [], /* List of folders to include type definitions from. */ 49 | // "types": [], /* Type declaration files to be included in compilation. */ 50 | // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ 51 | "esModuleInterop": true /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */, 52 | // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ 53 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ 54 | 55 | /* Source Map Options */ 56 | // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ 57 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ 58 | // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ 59 | // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. 
*/ 60 | 61 | /* Experimental Options */ 62 | // "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ 63 | // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ 64 | 65 | /* Advanced Options */ 66 | "skipLibCheck": true /* Skip type checking of declaration files. */, 67 | "forceConsistentCasingInFileNames": true /* Disallow inconsistently-cased references to the same file. */ 68 | } 69 | } 70 | --------------------------------------------------------------------------------