& RequestOptions;
201 | export type CrawlerResponse = any;
202 |
--------------------------------------------------------------------------------
/src/types/index.ts:
--------------------------------------------------------------------------------
// Ambient module declaration: "seenreq" ships no TypeScript types,
// so the compiler treats all of its exports as `any`.
declare module "seenreq";
--------------------------------------------------------------------------------
/test/binaryDataStream.test.js:
--------------------------------------------------------------------------------
import test from 'ava';
import Crawler from '../dist/index.js';
import nock from 'nock';
import { testCb } from "./lib/avaTestCb.js";

// Fixture payload served by the mocked endpoint below.
const binaryData = Buffer.from('Hello, World!', 'utf-8');

test.beforeEach(t => {
    nock('http://example.com')
        .get('/binary-data')
        .reply(200, binaryData, { 'Content-Type': 'application/octet-stream' });

    // encoding: null keeps the response body as raw bytes rather than
    // decoding it into a string.
    t.context.crawler = new Crawler({
        encoding: null,
        callback: (err, res, done) => {
            if (err) {
                console.error(err.stack);
                return done(err);
            }
            // Drain the stream chunk by chunk, then compare the
            // re-assembled buffer against the original fixture.
            const chunks = [];
            res.body.on('data', chunk => chunks.push(chunk));
            res.body.on('end', () => {
                const assembled = Buffer.concat(chunks);
                t.is(assembled.toString(), 'Hello, World!', 'The binary stream should match the expected content');
                done();
            });
        },
    });
});

testCb(test, 'should correctly handle and process a binary data stream', async t => {
    t.context.crawler.send({
        url: 'http://example.com/binary-data',
        callback: (error, res) => {
            t.is(error, null);
            t.is(res.statusCode, 200);
            t.end();
        },
    });
});
--------------------------------------------------------------------------------
/test/cacheOptions.js:
--------------------------------------------------------------------------------
import test from "ava";
import { testCb } from "./lib/avaTestCb.js";
import nock from "nock";
import Crawler from "../dist/index.js";

// Tests for skipDuplicates / retry interaction. Each test gets a fresh
// nock scope for http://target.com; the crawler instance is created per
// test so the duplicate-tracking state never leaks between cases.
test.beforeEach(t => {
    t.context.scope = nock("http://target.com");
});
test.afterEach(t => {
    t.context.c = {};
});

// Fixed typo in the title string: "Should't" -> "Shouldn't".
testCb(test, "Shouldn't skip one single url if duplicates are active.", async t => {
    t.context.scope.get("/").reply(200);
    t.context.c = new Crawler({
        // silence: true,
        skipDuplicates: true,
        callback: (error, result, done) => {
            // A first-time URL must still be fetched even with
            // duplicate-skipping enabled.
            t.is(error, null);
            t.is(result.statusCode, 200);
            t.true(t.context.scope.isDone());
            t.end();
        },
    });
    t.context.c.add("http://target.com");
});

testCb(test, "Should notify the callback when an error occurs and 'retries' is disabled.", async t => {
    t.context.scope.get("/").replyWithError("Bad request.");
    t.context.c = new Crawler({
        // silence: true,
        jQuery: false,
        skipDuplicates: true,
        retries: 0,
        callback: (error, result, done) => {
            t.truthy(error);
            t.true(t.context.scope.isDone());
            t.end();
        },
    });
    t.context.c.add("http://target.com");
});

testCb(test, "Should retry and notify the callback when an error occurs and 'retries' is enabled.", async t => {
    // persist() lets the retry hit the same failing interceptor again.
    t.context.scope.get("/").replyWithError("Bad request.").persist();
    t.context.c = new Crawler({
        jQuery: false,
        skipDuplicates: true,
        retries: 1,
        retryInterval: 10,
        callback: (error, result, done) => {
            t.truthy(error);
            t.true(t.context.scope.isDone());
            t.context.scope.persist(false);
            t.end();
        },
    });
    t.context.c.add("http://target.com");
});

testCb(test, "Should skip previously crawled urls when 'skipDuplicates' is active.", async t => {
    t.context.scope.get("/").reply(200).persist();
    // Exactly three assertions: the callback must run only once because the
    // second add() of the same URL is skipped, so drain fires without it.
    t.plan(3);
    t.context.c = new Crawler({
        jQuery: false,
        skipDuplicates: true,
        callback: (error, result, done) => {
            t.is(error, null);
            t.is(result.statusCode, 200);
            t.true(t.context.scope.isDone());
            t.context.c.add("http://target.com");
            done();
        },
    });
    t.context.c.add("http://target.com");
    t.context.c.on("drain", () => {
        t.end();
    });
});
80 |
--------------------------------------------------------------------------------
/test/callback.js:
--------------------------------------------------------------------------------
import test from "ava";
import { testCb } from "./lib/avaTestCb.js";
import nock from "nock";
import Crawler from "../dist/index.js";

// Base origin shared by every mocked endpoint in this file.
const url = "http://www.whatever.com";

test.before(t => {
    nock.cleanAll();
});
test.beforeEach(t => {
    // Fresh crawler per test: no retries and a very short timeout so the
    // timed-out case below finishes quickly.
    t.context.crawler = new Crawler({
        // silence: true,
        retryInterval: 0,
        retries: 0,
        timeout: 100,
    });
});
test.afterEach(t => {
    t.context.crawler = null;
});

testCb(test, "should end as expected without callback", async t => {
    t.context.scope = nock(url)
        .get("/get")
        .reply(200, "", { "Content-Type": "text/html" });
    // No per-task callback: the crawler should still drain cleanly.
    t.context.crawler.on("drain", () => {
        t.true(t.context.scope.isDone());
        t.end();
    });
    t.context.crawler.add(`${url}/get`);
});

testCb(test, "should end as expected without callback when timedout", async t => {
    t.context.scope = nock(url)
        .get("/delay")
        .delayBody(500)
        .reply(200, "", { "Content-Type": "text/html" });
    // The 500ms body delay exceeds the 100ms timeout; drain must fire anyway.
    t.context.crawler.on("drain", () => {
        t.true(t.context.scope.isDone());
        t.end();
    });
    t.context.crawler.add(`${url}/delay`);
});
43 |
--------------------------------------------------------------------------------
/test/cookieJar.js:
--------------------------------------------------------------------------------
import test from "ava";
import { testCb } from "./lib/avaTestCb.js";
import nock from "nock";
import Crawler from "../dist/index.js";
import { CookieJar } from "tough-cookie";

test.before(t => {
    nock.cleanAll();
    // /setCookie answers with a Set-Cookie header scoped to .crawler.com,
    // valid for one day.
    nock("http://test.crawler.com/")
        .get("/setCookie")
        .reply(function () {
            const expires = new Date(Date.now() + 86400000).toUTCString();
            return [
                200,
                "ok",
                { "Set-Cookie": `ping=pong; Domain=.crawler.com; Expires=${expires}; Path=/` },
            ];
        })
        .persist();
    // /getCookie echoes back whatever Cookie header the client sent.
    // A classic function is required: nock exposes the request as `this.req`.
    nock("http://test.crawler.com/")
        .get("/getCookie")
        .reply(200, function () {
            return this.req.headers.cookie;
        })
        .persist();
    const jar = new CookieJar();
    jar.setCookieSync("foo=bar", "http://test.crawler.com");
    t.context.jar = jar;
    t.context.crawler = new Crawler({
        // silence: true,
        jQuery: false,
        jar: t.context.jar,
    });
});

testCb(test, "should send with cookie when setting jar options", async t => {
    t.context.crawler.add({
        url: "http://test.crawler.com/getCookie",
        callback: (error, response, done) => {
            t.is(error, null);
            // The echoed Cookie header must match the jar's serialized state.
            t.is(response.body, t.context.jar.getCookieStringSync("http://test.crawler.com"));
            done();
            t.end();
        }
    });
});

testCb(test, "should set cookie when response set-cookie headers exist", async t => {
    t.context.crawler.add({
        url: "http://test.crawler.com/setCookie",
        callback: (error, response, done) => {
            t.is(error, null);
            // The server-sent ping=pong cookie must have been stored in the jar.
            t.true(t.context.jar.getCookieStringSync("http://test.crawler.com").includes("ping=pong"));
            done();
            t.end();
        }
    });
});
55 |
--------------------------------------------------------------------------------
/test/direct.js:
--------------------------------------------------------------------------------
import test from "ava";
import { testCb } from "./lib/avaTestCb.js";
import nock from "nock";
import Crawler from "../dist/index.js";
import sinon from "sinon";

test.before(t => {
    nock.cleanAll();
    nock("http://test.crawler.com").get("/").reply(200, "ok").persist();
});
test.beforeEach(t => {
    // The spy records the order in which preRequest / callback / events fire,
    // so each test can assert on the exact call sequence.
    t.context.cb = sinon.spy();
    t.context.crawler = new Crawler({
        // silence: true,
        jQuery: false,
        rateLimit: 100,
        preRequest: (options, done) => {
            t.context.cb("preRequest");
            done();
        },
        callback: (err, res, done) => {
            t.context.cb(err ? "error" : "callback");
            done();
        },
    });
    t.context.crawler.on("request", () => {
        t.context.cb("Event:request");
    });
});
test.afterEach(t => {
    t.context.crawler = null;
});

testCb(test, "should not trigger preRequest or callback of crawler instance", async t => {
    // send() bypasses the instance-level hooks entirely.
    t.context.crawler.send({
        url: "http://test.crawler.com/",
        callback: (error, res) => {
            t.is(error, null);
            t.is(res.statusCode, 200);
            t.is(res.body, "ok");
            t.false(t.context.cb.called);
            t.end();
        },
    });
});

testCb(test, "should be sent directly regardless of current queue of crawler", async t => {
    t.context.crawler.add({
        url: "http://test.crawler.com/",
        callback: (error, res, done) => {
            t.is(error, null);
            // Fire a direct send while three queued tasks are still pending;
            // it must complete before the rate-limited queue catches up.
            t.context.crawler.send({
                url: "http://test.crawler.com/",
                callback: () => {
                    t.is(t.context.cb.getCalls().length, 2);
                    t.context.cb("direct");
                },
            });
            done();
        },
    });
    t.context.crawler.add("http://test.crawler.com/");
    t.context.crawler.add("http://test.crawler.com/");
    t.context.crawler.add({
        url: "http://test.crawler.com/",
        callback: (error, res, done) => {
            t.is(error, null);
            // Expected interleaving: the direct send lands right after the
            // first queued task's request, then the queue proceeds normally.
            const seq = [
                "preRequest",
                "Event:request",
                "direct",
                "preRequest",
                "Event:request",
                "callback",
                "preRequest",
                "Event:request",
                "callback",
                "preRequest",
                "Event:request",
            ];
            const observed = t.context.cb.args.map(args => args[0]);
            t.deepEqual(observed, seq);
            t.end();
        },
    });
});

testCb(test, "should not trigger Event:request by default.", async t => {
    t.context.crawler.send({
        url: "http://test.crawler.com/",
        callback: (error, res) => {
            t.is(error, null);
            t.is(res.statusCode, 200);
            t.is(res.body, "ok");
            t.false(t.context.cb.calledWith("Event:request"));
            t.end();
        },
    });
});

testCb(test, "should trigger Event:request if set.", async t => {
    t.context.crawler.send({
        url: "http://test.crawler.com/",
        // Opting out of skipping re-enables the request event for send().
        skipEventRequest: false,
        callback: (error, res) => {
            t.is(error, null);
            t.is(res.statusCode, 200);
            t.is(res.body, "ok");
            t.true(t.context.cb.calledWith("Event:request"));
            t.end();
        },
    });
});
120 |
--------------------------------------------------------------------------------
/test/encoding.js:
--------------------------------------------------------------------------------
import test from "ava";
import { testCb } from "./lib/avaTestCb.js";
import nock from "nock";
import Crawler from "../dist/index.js";

// Latin-1 fixture served two ways: once with the charset in the
// Content-Type header, once without (charset only in the HTML meta tag).
const origin = "http://czyborra.com";
const encodingFileName = "iso8859.html";
const charsetName = "ISO-8859-1";
const path = `/charsets/${encodingFileName}`;
const url = `${origin}${path}`;
const pathWithoutCharsetHeader = `/charsets-noheader/${encodingFileName}`;
const urlWithoutCharsetHeader = `${origin}${pathWithoutCharsetHeader}`;

test.before(t => {
    nock.cleanAll();
});
test.beforeEach(t => {
    t.context.crawler = new Crawler({
        retries: 0
    });
    nock(origin)
        .get(path)
        .replyWithFile(200, `test/lib/${encodingFileName}`, { "Content-Type": `text/html;charset=${charsetName}` });
    nock(origin)
        .get(pathWithoutCharsetHeader)
        .replyWithFile(200, `test/lib/${encodingFileName}`, { "Content-Type": "text/html" });
});
test.afterEach(t => {
    t.context.crawler = null;
});

testCb(test, "should parse latin-1", async t => {
    t.context.crawler.add({
        url,
        callback: (error, result) => {
            t.is(error, null);
            t.is(result.charset, charsetName.toLowerCase());
            // "Jörg" only decodes correctly under latin-1.
            t.true(result.body.indexOf("Jörg") > 0);
            t.end();
        }
    });
});

testCb(test, "should return buffer if encoding = null", async t => {
    t.context.crawler.add({
        url,
        encoding: null,
        callback: (error, result) => {
            t.is(error, null);
            t.true(result.body instanceof Buffer);
            t.end();
        }
    });
});

testCb(test, "should parse latin-1 if encoding = ISO-8859-1", async t => {
    t.context.crawler.add({
        url,
        encoding: charsetName,
        callback: (error, result) => {
            t.is(error, null);
            t.is(result.charset, charsetName.toLowerCase());
            t.true(result.body.indexOf("Jörg") > 0);
            t.end();
        }
    });
});

testCb(test, "could not parse latin-1 if encoding = gb2312", async t => {
    t.context.crawler.add({
        url,
        encoding: "gb2312",
        callback: (error, result) => {
            t.is(error, null);
            // Forcing the wrong charset must garble the umlaut.
            t.is(result.body.indexOf("Jörg"), -1);
            t.end();
        }
    });
});

testCb(test, "should parse charset from header", async t => {
    t.context.crawler.add({
        url,
        callback: (error, result) => {
            t.is(error, null);
            t.is(result.charset, charsetName.toLowerCase());
            t.true(result.body.indexOf("Jörg") > 0);
            t.end();
        }
    });
});

testCb(test, "should parse charset from meta tag in html if header does not contain content-type key", async t => {
    t.context.crawler.add({
        url: urlWithoutCharsetHeader,
        callback: (error, result) => {
            t.is(error, null);
            t.is(result.charset, charsetName.toLowerCase());
            t.true(result.body.indexOf("Jörg") > 0);
            t.end();
        }
    });
});
--------------------------------------------------------------------------------
/test/errorHandling.js:
--------------------------------------------------------------------------------
import test from "ava";
import { testCb } from "./lib/avaTestCb.js";
import nock from "nock";
import Crawler from "../dist/index.js";

test.before(t => {
    nock.cleanAll();
    nock("http://test.crawler.com").get("/delay/1").delay(1000).reply(200, "ok").persist();
    nock("http://test.crawler.com").get("/status/400").reply(400, "Bad Request").persist();
    nock("http://test.crawler.com").get("/status/401").reply(401, "Unauthorized").persist();
    nock("http://test.crawler.com").get("/status/403").reply(403, "Forbidden").persist();
    nock("http://test.crawler.com").get("/status/404").reply(404, "Not Found").persist();
    nock("http://test.crawler.com").get("/status/500").reply(500, "Internal Error").persist();
    nock("http://test.crawler.com").get("/status/204").reply(204, "").persist();
});
test.beforeEach(t => {
    // 500ms timeout against a 1000ms delayed endpoint guarantees timeouts;
    // retries: 2 exercises the retry bookkeeping below.
    t.context.crawler = new Crawler({
        // silence: true,
        timeout: 500,
        retryInterval: 500,
        retries: 2,
        jQuery: false,
    });
});
test.afterEach(t => {
    t.context.crawler = null;
});

testCb(test, "should retry after timeout", async t => {
    let options = {
        url: "http://test.crawler.com/delay/1",
        callback: (error, response, done) => {
            t.truthy(error);
            // All retries consumed by the time the final callback fires.
            t.is(response.options.retries, 0);
            t.end();
        },
    };
    t.context.crawler.add(options);
    // The caller-owned options object must not be mutated by add().
    t.is(options.retries, 2);
});

testCb(test, "should return a timeout error after ~2sec", async t => {
    t.context.crawler.add({
        url: "http://test.crawler.com/delay/1",
        callback: (error, response, done) => {
            t.truthy(error);
            t.true(error.code === "ETIMEDOUT" || error.code === "ESOCKETTIMEDOUT");
            t.end();
        },
    });
});

// Fixed title grammar: "should not failed" -> "should not fail".
testCb(test, "should not fail on empty response", async t => {
    t.context.crawler.add({
        url: "http://test.crawler.com/status/204",
        callback: (error, response, done) => {
            t.falsy(error);
            t.is(response.statusCode, 204);
            t.end();
        },
    });
});

testCb(test, "should not fail on a malformed html if jQuery is false", async t => {
    t.context.crawler.add({
        // Deliberately malformed markup (unclosed <div> inside <p>).
        // NOTE(review): this literal was split across two lines in the
        // source (a syntax error); restored from the upstream fixture —
        // confirm against the original repository.
        html: "<p>hello<div>dude</p>",
        callback: (error, response, done) => {
            t.falsy(error);
            t.truthy(response);
            t.end();
        },
    });
});
74 |
--------------------------------------------------------------------------------
/test/examples.js:
--------------------------------------------------------------------------------
import test from "ava";
import { testCb } from "./lib/avaTestCb.js";
import nock from "nock";
import Crawler from "../dist/index.js";
import sinon from "sinon";

test.before(t => {
    nock.cleanAll();
});
test.beforeEach(t => {
    nock("http://nockhost")
        .get(url => url.indexOf("status") >= 0)
        .times(20)
        .reply(200, "Yes");
    t.context.crawler = new Crawler({
        // silence: true,
        maxConnections: 10,
        jQuery: false,
    });
});
test.afterEach(t => {
    t.context.crawler = null;
    t.context.cb = null;
});

testCb(test, "should run the first readme examples.", async t => {
    // NOTE(review): github.com is not mocked here, so this appears to hit
    // the real network — confirm whether that is intentional.
    t.context.crawler.add({
        url: "http://github.com",
        callback: (err, res, done) => {
            t.falsy(err);
            t.is(typeof res.body, "string");
            t.end();
        },
    });
});

testCb(test, "should run the readme examples.", async t => {
    t.context.crawler = new Crawler({
        // silence: true,
        maxConnections: 10,
        jQuery: false,
        callback: (err, res, done) => {
            t.falsy(err);
            done();
        },
    });
    // Spy on add() so we can count how many tasks were queued.
    t.context.cb = sinon.spy(t.context.crawler, "add");
    t.context.crawler.add("http://nockhost/status/200");
    t.context.crawler.add("http://nockhost/status/200");
    t.context.crawler.on("drain", () => {
        t.true(t.context.cb.calledTwice);
        t.end();
    });
});

testCb(test, "should run the with an array queue.", async t => {
    t.context.crawler.add([
        {
            url: "http://www.github.com",
            jQuery: true,
            callback: (err, res, done) => {
                t.falsy(err);
                t.truthy(res.$);
                t.is(typeof res.body, "string");
                done();
            },
        },
    ]);
    t.context.crawler.on("drain", () => {
        t.end();
    });
});
73 |
--------------------------------------------------------------------------------
/test/http2ErrorHanding.js:
--------------------------------------------------------------------------------
import test from "ava";
import { testCb } from "./lib/avaTestCb.js";
import Crawler from "../dist/index.js";

test.before(t => {
    // NOTE(review): these tests hit the live nghttp2.org httpbin service;
    // they require network access to pass.
    t.context.crawler = new Crawler({
        // silence: true,
        timeout: 1000,
        retryInterval: 0,
        retries: 2,
        jQuery: false,
        http2: true,
    });
});

testCb(test, "http2: should retry after timeout.", async t => {
    const options = {
        url: "https://nghttp2.org/httpbin/delay/4",
        callback: (error, response, done) => {
            t.truthy(error);
            // Every retry has been consumed when the final callback runs.
            t.is(response.options.retries, 0);
            done();
            t.end();
        },
    };
    t.context.crawler.add(options);
    // The caller-owned options object must remain unmodified.
    t.is(options.retries, 2);
});

testCb(test, "http2: should return a timeout error after ~3sec.", async t => {
    t.context.crawler.add({
        url: "https://nghttp2.org/httpbin/delay/4",
        callback: (error, response, done) => {
            t.truthy(error);
            t.true(error.code === "ETIMEDOUT" || error.code === "ESOCKETTIMEDOUT");
            done();
            t.end();
        },
    });
});
41 |
--------------------------------------------------------------------------------
/test/http2Response.js:
--------------------------------------------------------------------------------
import test from "ava";
import { testCb } from "./lib/avaTestCb.js";
import Crawler from "../dist/index.js";

// NOTE(review): these tests exercise HTTP/2 against the live nghttp2.org
// httpbin service; they require network access.
test.afterEach(t => {
    t.context.crawler = null;
});

testCb(test, "response statusCode.", async t => {
    t.context.crawler = new Crawler({
        // silence: true,
        timeout: 10000,
        retryInterval: 1000,
        retries: 2,
        jQuery: false,
        http2: true,
    });
    t.context.crawler.add({
        url: "https://nghttp2.org/httpbin/status/200",
        callback: (error, response, done) => {
            t.is(response.statusCode, 200);
            done();
            t.end();
        },
    });
});

testCb(test, "response headers.", async t => {
    t.context.crawler = new Crawler({
        // silence: true,
        retryInterval: 1000,
        retries: 2,
        jQuery: false,
        http2: true,
    });
    t.context.crawler.add({
        url: "https://nghttp2.org/httpbin/status/200",
        callback: (error, response, done) => {
            t.truthy(response.headers);
            t.is(typeof response.headers, "object");
            t.is(response.headers["content-type"], "text/html; charset=utf-8");
            done();
            t.end();
        },
    });
});

testCb(test, "html response body.", async t => {
    t.context.crawler = new Crawler({
        // silence: true,
        retryInterval: 1000,
        retries: 2,
        jQuery: true,
        http2: true,
    });
    t.context.crawler.add({
        url: "https://nghttp2.org/httpbin/html",
        callback: (error, response, done) => {
            // With jQuery enabled the body is parsed and exposed as $.
            t.truthy(response.$);
            t.is(typeof response.$, "function");
            t.is(response.$("body").length, 1);
            done();
            t.end();
        },
    });
});
67 |
--------------------------------------------------------------------------------
/test/lib/avaTestCb.js:
--------------------------------------------------------------------------------
/**
 * Adapters that let callback-style crawler tests run under AVA's
 * promise-based API. The wrapper attaches a `t.end()` function to AVA's
 * `t` object; the test resolves only once the assertion callback invokes
 * `t.end()`.
 *
 * Fixes: the eslint directives were misspelled ("eslint-disable-next-linse"),
 * so they were inert; the duplicated wrapper bodies are now produced by a
 * single factory.
 */

// Builds a runner: `register` selects how the test is registered with AVA
// (concurrent `test(...)` vs `test.serial(...)`).
const makeRunner = register => (test, description, assertions) => {
    register(test)(description, async t => {
        await new Promise(resolve => {
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            t.end = () => {
                resolve(undefined);
            };
            // NOTE: assertions may be async; its promise is intentionally not
            // awaited — completion is signalled solely via t.end().
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            assertions(t);
        });
    });
};

// Concurrent variant.
export const testCbAsync = makeRunner(test => test);
// Serial variant (tests run one at a time).
export const testCbSync = makeRunner(test => test.serial.bind(test));

// @todo: add async test version
export const testCb = testCbSync;
28 |
--------------------------------------------------------------------------------
/test/lib/iso8859.html:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bda-research/node-crawler/5f6219c02925299953918de9d39954895e17b187/test/lib/iso8859.html
--------------------------------------------------------------------------------
/test/limiter.js:
--------------------------------------------------------------------------------
import test from "ava";
import { testCb } from "./lib/avaTestCb.js";
import nock from "nock";
import Crawler from "../dist/index.js";

test.before(t => {
    nock.cleanAll();
});
test.beforeEach(t => {
    nock("http://nockHost")
        .get(url => url.indexOf("status") >= 0)
        .times(5)
        .reply(200, "Yes");
    // Default limiter spaces requests 500ms apart; each "request" event's
    // timestamp is recorded so tests can assert on the spacing.
    t.context.crawler = new Crawler({
        // silence: true,
        jQuery: false,
        rateLimit: 500,
        retries: 0,
        callback: (err, result, done) => {
            t.is(err, null);
            t.is(result.statusCode, 200);
            done();
        },
    });
    t.context.tsArrs = [];
    t.context.crawler.on("request", () => {
        t.context.tsArrs.push(Date.now());
    });
});
test.afterEach(t => {
    t.context.crawler = null;
    t.context.tsArrs = [];
});

testCb(test, "One limiter, tasks should execute one by one", async t => {
    for (let i = 0; i < 5; i++) {
        t.context.crawler.add({ url: "http://nockHost/status/200" });
    }
    t.context.crawler.on("drain", () => {
        t.is(t.context.tsArrs.length, 5);
        // setTimeout in nodejs is delayed
        // 4 rateLimit +- 50ms = 4 * 500 +- 50
        const elapsed = t.context.tsArrs[4] - t.context.tsArrs[0];
        t.true(elapsed >= 1950);
        t.true(elapsed <= 2050);
        t.end();
    });
});

testCb(test, "Multiple limiters, tasks should execute in parallel", async t => {
    // A distinct limiter per task removes any shared rate limit.
    for (let i = 0; i < 5; i++) {
        t.context.crawler.add({ url: "http://nockHost/status/200", rateLimiterId: i });
    }
    t.context.crawler.on("drain", () => {
        t.is(t.context.tsArrs.length, 5);
        // setTimeout in nodejs is delayed
        // request sent almost at same time
        t.true(t.context.tsArrs[4] - t.context.tsArrs[0] <= 50);
        t.end();
    });
});

testCb(test, "Multiple limiters are mutual independent", async t => {
    // Four tasks share "default"; the fifth runs on its own "second" limiter.
    for (let i = 0; i < 5; i++) {
        const limiter = i === 4 ? "second" : "default";
        t.context.crawler.add({ url: "http://nockHost/status/200", rateLimiterId: limiter });
    }
    t.context.crawler.on("drain", () => {
        t.is(t.context.tsArrs.length, 5);
        // setTimeout in nodejs is delayed
        // 3 rateLimit +- 50ms = 3 * 500 +- 50
        const elapsed = t.context.tsArrs[4] - t.context.tsArrs[0];
        t.true(elapsed >= 1450);
        t.true(elapsed <= 1550);
        t.end();
    });
});

testCb(test, "should modify maxConnections when rateLimit is set", async t => {
    nock.cleanAll();
    nock("http://nockHost")
        .get(url => url.indexOf("status") >= 0)
        .times(1)
        .reply(200, "Yes");
    t.context.crawler.add({
        url: "http://nockHost/status/200",
        callback: (err, result, done) => {
            t.is(err, null);
            t.is(result.statusCode, 200);
            done();
        },
    });
    t.context.crawler.on("drain", () => {
        // rateLimit forces serial execution, so maxConnections collapses to 1.
        t.is(t.context.crawler.options.maxConnections, 1);
        t.end();
    });
});
93 |
--------------------------------------------------------------------------------
/test/preRequest.js:
--------------------------------------------------------------------------------
import test from "ava";
import { testCb } from "./lib/avaTestCb.js";
import nock from "nock";
import Crawler from "../dist/index.js";
import sinon from "sinon";

test.before(t => {
    nock.cleanAll();
    nock("http://test.crawler.com").get("/").reply(200, "ok").persist();
});
test.beforeEach(t => {
    // Spy records the order of preRequest/callback invocations.
    t.context.cb = sinon.spy();
});

testCb(test, "Should do preRequest before request when preRequest defined in crawler options.", async t => {
    t.context.crawler = new Crawler({
        // silence: true,
        jQuery: false,
        preRequest: (options, done) => {
            // A small delay proves the request genuinely waits for done().
            setTimeout(() => {
                t.context.cb("preRequest");
                done();
            }, 50);
        },
    });
    t.context.crawler.add({
        url: "http://test.crawler.com/",
        callback: (error, response, done) => {
            t.is(error, null);
            t.is(t.context.cb.getCalls().length, 1);
            t.is(t.context.cb.getCalls()[0].args[0], "preRequest");
            done();
            t.end();
        },
    });
});

testCb(test, "Should do preRequest before request when preRequest defined in add options.", async t => {
    t.context.crawler = new Crawler({
        // silence: true,
        jQuery: false
    });
    t.context.crawler.add({
        url: "http://test.crawler.com/",
        preRequest: (options, done) => {
            setTimeout(() => {
                t.context.cb("preRequest");
                done();
            }, 50);
        },
        callback: (error, response, done) => {
            t.is(error, null);
            t.is(t.context.cb.getCalls().length, 1);
            t.is(t.context.cb.getCalls()[0].args[0], "preRequest");
            done();
            t.end();
        },
    });
});

testCb(test, "preRequest should be executed the same times as request.", async t => {
    t.context.crawler = new Crawler({
        // silence: true,
        jQuery: false,
        rateLimit: 50,
        preRequest: (options, done) => {
            t.context.cb("preRequest");
            done();
        },
        callback: (error, response, done) => {
            t.is(error, null);
            t.context.cb("callback");
            done();
        },
    });
    // Each queued task should contribute exactly one preRequest + callback pair.
    const expectedCalls = [];
    for (let i = 0; i < 5; i++) {
        t.context.crawler.add("http://test.crawler.com/");
        expectedCalls.push("preRequest", "callback");
    }
    // Final task overrides both hooks so it leaves no trace in the spy.
    t.context.crawler.add({
        url: "http://test.crawler.com/",
        preRequest: (options, done) => done(),
        callback: (error, response, done) => {
            t.is(error, null);
            const observed = t.context.cb.getCalls().map(call => call.args[0]);
            t.deepEqual(observed, expectedCalls);
            done();
            t.end();
        },
    });
});

testCb(test, "when preRequest fail, should retry two times by default.", async t => {
    t.context.crawler = new Crawler({
        // silence: true,
        jQuery: false,
        rateLimit: 20,
        retryInterval: 0,
        preRequest: (options, done) => {
            t.context.cb("preRequest");
            done(new Error("error"));
        },
        callback: (error, response, done) => {
            t.truthy(error instanceof Error);
            // Initial attempt + two retries = three preRequest invocations.
            t.is(t.context.cb.getCalls().length, 3);
            t.deepEqual(
                t.context.cb.getCalls().map(call => call.args[0]),
                ["preRequest", "preRequest", "preRequest"]
            );
            done();
            t.end();
        },
    });
    t.context.crawler.add("http://test.crawler.com/");
});
120 |
--------------------------------------------------------------------------------
/test/priority.js:
--------------------------------------------------------------------------------
import test from "ava";
import { testCb } from "./lib/avaTestCb.js";
import nock from "nock";
import Crawler from "../dist/index.js";

test.before(t => {
    nock.cleanAll();
    nock("http://nockHost").get(url => url.indexOf("links") >= 0).times(4).reply(200, "Yes");
    // maxConnections: 1 forces strictly serial execution so completion
    // order reflects queue priority.
    t.context.crawler = new Crawler({ jQuery: false, maxConnections: 1 });
});

testCb(test, "should execute requests in the correct order", async t => {
    t.context.spf = [];
    let cnt = 0;
    // Task i requests /links/i with priority 4-i (lower number = higher
    // priority) and records its own index on completion.
    const priorities = [4, 3, 2, 1];
    priorities.forEach((priority, index) => {
        t.context.crawler.add([{
            url: `http://nockHost/links/${index}`,
            priority,
            callback: (error, result, done) => {
                t.context.spf[cnt++] = index;
                done();
            },
        }]);
    });
    t.context.crawler.on("drain", () => {
        // Task 0 starts immediately; the rest run in ascending priority order.
        t.deepEqual(t.context.spf, [0, 3, 2, 1]);
        t.end();
    });
});
52 |
--------------------------------------------------------------------------------
/test/rateLimit.js:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { testCb } from "./lib/avaTestCb.js";
3 | import nock from "nock";
4 | import Crawler from "../dist/index.js";
5 |
6 | test.before(t => {
7 | nock.cleanAll();
8 | });
9 | test.beforeEach(t => {
10 | t.context.c = new Crawler({
11 | // silence: true,
12 | retries: 0,
13 | rateLimit: 500,
14 | callback: (err, res, done) => {
15 | t.is(err, null);
16 | t.is(res.statusCode, 200);
17 | done();
18 | }
19 | });
20 | t.context.c.on('request', () => t.context.tsArrs.push(Date.now()));
21 | t.context.tsArrs = [];
22 | });
23 | test.afterEach(t => {
24 | nock.cleanAll();
25 | t.context.c = {};
26 | t.context.tsArrs = [];
27 | });
28 |
29 | testCb(test, "Interval of two requests should be no less than 500ms", async t => {
30 | nock('http://nockHost').get(url => url.includes('status')).times(2).delay(500).reply(200, 'Yes');
31 | t.context.c.add({ url: 'http://nockHost/status/200' });
32 | t.context.c.add({
33 | url: 'http://nockHost/status/200',
34 | callback: (err, res, done) => {
35 | t.is(err, null);
36 | t.is(res.statusCode, 200);
37 | done();
38 | t.is(t.context.tsArrs.length, 2);
39 | t.true(t.context.tsArrs[1] - t.context.tsArrs[0] >= 500);
40 | done();
41 | }
42 | });
43 | t.context.c.on("drain", t.end);
44 | });
45 |
46 | testCb(test, "request speed should abide by rateLimit", async t => {
47 |     nock('http://nockHost').get(url => url.includes('status')).times(5).reply(200, 'Yes');
48 |     for (let i = 0; i < 5; i++) {
49 |         t.context.c.add('http://nockHost/status/200');
50 |     }
51 |     t.context.c.on("drain", () => {
52 |         t.is(t.context.tsArrs.length, 5);
53 |         for (let i = 1; i < 5; i++) {
54 |             const interval = t.context.tsArrs[i] - t.context.tsArrs[i - 1];
55 |             t.true(Math.abs(interval - 500) < 30); // each gap within 30ms jitter of the 500ms limit
56 |         }
57 |         t.end();
58 |     });
59 | });
60 |
61 | testCb(test, "should be able to change rateLimit", async t => {
62 |     nock('http://nockHost').get(url => url.includes('status')).times(5).reply(200, 'Yes');
63 |     t.context.c.setLimiter(0, 'rateLimit', 300); // override the beforeEach rateLimit (500ms) down to 300ms
64 |     for (let i = 0; i < 5; i++) {
65 |         t.context.c.add('http://nockHost/status/200');
66 |     }
67 |     t.context.c.on("drain", () => {
68 |         t.is(t.context.tsArrs.length, 5);
69 |         for (let i = 1; i < 5; i++) {
70 |             const interval = t.context.tsArrs[i] - t.context.tsArrs[i - 1];
71 |             t.true(Math.abs(interval - 300) < 30); // gaps now track the updated 300ms limit
72 |         }
73 |         t.end();
74 |     });
75 | });
76 |
--------------------------------------------------------------------------------
/test/requests.js:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { testCb } from "./lib/avaTestCb.js";
3 | import nock from "nock";
4 | import Crawler from "../dist/index.js";
5 |
6 | const origin = 'http://www.whatever.com';
7 | const path = '/get';
8 | const headerPath = '/header';
9 | test.before(t => {
10 |     nock.cleanAll();
11 | });
12 | test.beforeEach(t => {
13 |     t.context.crawler = new Crawler({
14 |         // silence: true,
15 |         retries: 0,
16 |         isJson: true, // parse response bodies as JSON (the /header mock replies with JSON)
17 |         callback: (err, res, done) => {
18 |             t.is(err, null);
19 |             t.is(res.statusCode, 200);
20 |             done();
21 |         }
22 |     });
23 |     t.context.scope = nock(origin).get(path).reply(200).persist(); // persisted: several tests hit /get more than once
24 |     nock(origin).get(headerPath).reply(function () {
25 |         return [200, this.req.headers, { 'Content-Type': 'application/json' }]; // echoes the request headers back as the body
26 |     });
27 | });
28 | test.afterEach(t => {
29 |     t.context.scope.persist(false); // stop the persisted /get mock leaking into other files
30 |     t.context.crawler = null;
31 | });
32 |
33 | testCb(test, "should crawl one request", async t => {
34 |     t.context.crawler.add({
35 |         url: `${origin}${path}`, callback: (error, res, done) => { // per-task callback overrides the crawler-level one
36 |             t.is(error, null);
37 |             t.is(res.statusCode, 200);
38 |             done();
39 |             t.end();
40 |         }
41 |     });
42 | });
43 |
44 | testCb(test, "should crawl two requests and emit the drain event.", async t => {
45 |     // Shared callback for both queued tasks.
46 |     const callback = function (error, res, next) {
47 |         t.is(error, null);
48 |         t.is(res.statusCode, 200);
49 |         next();
50 |     };
51 | 
52 |     t.context.crawler.on('drain', t.end); // drain fires once both requests have completed
53 | 
54 |     t.context.crawler.add({
55 |         url: `${origin}${path}`,
56 |         callback: callback
57 |     });
58 | 
59 |     t.context.crawler.add({
60 |         url: `${origin}${path}`,
61 |         callback: callback
62 |     });
63 | });
63 |
64 | testCb(test, "should use the provided user-agent", async t => {
65 |     const userAgent = 'test/1.2';
66 |     t.context.crawler.add({
67 |         url: `${origin}${path}`,
68 |         headers: { "user-agent": userAgent },
69 |         callback: (error, res, done) => {
70 |             t.is(error, null);
71 |             t.is(res.statusCode, 200);
72 |             t.is(res.options.headers['user-agent'], userAgent); // the outgoing request options carried the per-task header
73 |             done();
74 |             t.end();
75 |         }
76 |     });
77 | });
78 |
79 | testCb(test, "should replace the global default user-agent", async t => {
80 |     t.context.crawler = new Crawler({
81 |         // silence: true,
82 |         isJson: true,
83 |         headers: { "user-agent": "test/1.2" }, // crawler-level default agent
84 |         callback: (err, res, done) => {
85 |             t.is(err, null);
86 |             t.is(res.body['user-agent'], "foo/bar"); // /header echoes request headers; the per-task value must win
87 |             done();
88 |             t.end();
89 |         }
90 |     });
91 |     t.context.crawler.add({
92 |         url: `${origin}${headerPath}`,
93 |         headers: { "user-agent": "foo/bar" } // per-task header overrides the crawler default above
94 |     });
95 | });
96 |
97 | testCb(test, "should spoof the referrer", async t => {
98 |     const referer = 'http://spoofed.com';
99 |     t.context.crawler.add({
100 |         url: `${origin}${path}`,
101 |         referer: referer, // the 'referer' option should be promoted into the request headers
102 |         callback: (error, res, done) => {
103 |             t.is(error, null);
104 |             t.is(res.options.headers.referer, referer);
105 |             done();
106 |             t.end();
107 |         }
108 |     });
109 | });
110 |
--------------------------------------------------------------------------------
/test/urlOptions.js:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { testCb } from "./lib/avaTestCb.js";
3 | import nock from "nock";
4 | import Crawler from "../dist/index.js";
5 | import sinon from "sinon";
6 |
7 | test.before(t => {
8 |     nock.cleanAll();
9 |     nock('http://test.crawler.com').get('/').reply(200, 'ok').persist(); // one persisted mock shared by every test in this file
10 |     t.context.crawler = new Crawler({
11 |         // silence: true,
12 |         jQuery: false // no HTML parsing needed for these url-option tests
13 |     });
14 | });
15 |
16 | testCb(test, "should work if url is string", t => {
17 |     t.context.crawler.add({
18 |         url: 'http://test.crawler.com/', // plain string url — the baseline case
19 |         callback: (error, response, done) => {
20 |             t.is(error, null);
21 |             done();
22 |             t.end();
23 |         }
24 |     });
25 | });
26 |
27 | testCb(test, "should work if url is a function", t => {
28 |     function urlFn(onUrl) { // url may be a function that hands the url to a continuation
29 |         onUrl('http://test.crawler.com/');
30 |     }
31 |     t.context.crawler.add({
32 |         url: urlFn,
33 |         callback: (error, response, done) => {
34 |             t.is(error, null);
35 |             done();
36 |             t.end();
37 |         }
38 |     });
39 | });
40 |
41 | testCb(test, "should skip if the url is undefined or an empty string", t => {
42 | const push = sinon.spy(t.context.crawler, '_schedule');
43 | t.context.crawler.add([undefined, null, []]);
44 | t.context.crawler.add({
45 | url: 'http://test.crawler.com/',
46 | callback: (error, response, done) => {
47 | t.true(push.calledOnce);
48 | done();
49 | t.end();
50 | }
51 | });
52 | });
--------------------------------------------------------------------------------
/test/userAgent.js:
--------------------------------------------------------------------------------
1 | import test from "ava";
2 | import { testCb } from "./lib/avaTestCb.js";
3 | import nock from "nock";
4 | import Crawler from "../dist/index.js";
5 |
6 | test.before(t => {
7 |     nock.cleanAll();
8 |     nock("http://nockhost").get(url => url.indexOf("status") >= 0).times(20).reply(200, "Yes");
9 |     t.context.calledAgents = []; // collects the user-agent actually sent with each request
10 |     t.context.crawler = new Crawler({
11 |         // silence: true,
12 |         jQuery: false,
13 |         userAgents: [ // pool the crawler should cycle through, one per request
14 |             "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)",
15 |             "Googlebot/2.1 (+http://www.google.com/bot.html)",
16 |             "test/1.0",
17 |             "test/2.0"
18 |         ],
19 |         callback: (error, res, done) => {
20 |             t.context.calledAgents.push(res.request.options.headers["user-agent"]);
21 |             done();
22 |         }
23 |     });
24 | });
25 |
26 | testCb(test, "should rotate user agents if userAgents is set.", async t => {
27 |     t.context.crawler.add([
28 |         "http://nockhost/status1",
29 |         "http://nockhost/status2",
30 |         "http://nockhost/status3",
31 |         "http://nockhost/status4",
32 |         "http://nockhost/status1",
33 |     ])
34 |     t.context.crawler.on("drain", () => {
35 |         t.deepEqual(t.context.calledAgents, [ // agents cycle round-robin and wrap back to the first entry on the 5th request
36 |             "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)",
37 |             "Googlebot/2.1 (+http://www.google.com/bot.html)",
38 |             "test/1.0",
39 |             "test/2.0",
40 |             "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
41 |         ]);
42 |         t.end();
43 |     });
44 | });
45 |
46 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | /* Visit https://aka.ms/tsconfig to read more about this file */
4 |
5 | /* Projects */
6 | // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
8 | // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
12 |
13 | /* Language and Environment */
14 | "target": "es2020" /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */,
15 | // "lib": [
16 | // "ESNext",
17 | // "DOM"
18 | // ] /* Specify a set of bundled library declaration files that describe the target runtime environment. */,
19 | // "jsx": "preserve", /* Specify what JSX code is generated. */
20 | // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
21 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
22 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
23 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
24 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
25 | // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
26 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
27 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
28 | // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
29 |
30 | /* Modules */
31 | "module": "es2020" /* Specify what module code is generated. */,
32 | // "rootDir": "./", /* Specify the root folder within your source files. */
33 | "moduleResolution": "node", /* Specify how TypeScript looks up a file from a given module specifier. */
34 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
35 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
36 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
37 | // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
38 | // "types": [], /* Specify type package names to be included without being referenced in a source file. */
39 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
40 | // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
41 | // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
42 | // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
43 | // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
44 | // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
45 | // "resolveJsonModule": true, /* Enable importing .json files. */
46 | // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
47 | // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */
48 |
49 | /* JavaScript Support */
50 | // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
51 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
52 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
53 |
54 | /* Emit */
55 | "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
56 | "declarationMap": true, /* Create sourcemaps for d.ts files. */
57 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
58 | "sourceMap": true /* Create source map files for emitted JavaScript files. */,
59 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
60 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
61 | "outDir": "dist" /* Specify an output folder for all emitted files. */,
62 | // "removeComments": true, /* Disable emitting comments. */
63 | // "noEmit": true, /* Disable emitting files from a compilation. */
64 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
65 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
66 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
67 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
68 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
69 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
70 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
71 | // "newLine": "crlf", /* Set the newline character for emitting files. */
72 | // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
73 | // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
74 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
75 | // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
76 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */
77 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
78 |
79 | /* Interop Constraints */
80 | // "isolatedModules": false /* Ensure that each file can be safely transpiled without relying on other imports. */,
81 | // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
82 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
83 | "esModuleInterop": true /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */,
84 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
85 | "forceConsistentCasingInFileNames": true /* Ensure that casing is correct in imports. */,
86 |
87 | /* Type Checking */
88 | "strict": true /* Enable all strict type-checking options. */,
89 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
90 | // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
91 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
92 | // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
93 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
94 | // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
95 | // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
96 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
97 | // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
98 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
99 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
100 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
101 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
102 | // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
103 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
104 | // "noPropertyAccessFromIndexSignature": true /* Enforces using indexed accessors for keys declared using an indexed type. */,
105 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
106 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
107 |
108 | /* Completeness */
109 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
110 | "skipLibCheck": false /* Skip type checking all .d.ts files. */
111 | },
112 | "include": ["src/**/*"],
113 | "exclude": ["node_modules", "dist"]
114 | }
115 |
--------------------------------------------------------------------------------