"), "stdin")
13 | assertEqual(t, GenerateNonUniqueNameFromPath("foo/bar"), "bar")
14 | assertEqual(t, GenerateNonUniqueNameFromPath("foo/bar.js"), "bar")
15 | assertEqual(t, GenerateNonUniqueNameFromPath("foo/bar.min.js"), "bar_min")
16 | assertEqual(t, GenerateNonUniqueNameFromPath("trailing//slashes//"), "slashes")
17 | assertEqual(t, GenerateNonUniqueNameFromPath("path/with/spaces in name.js"), "spaces_in_name")
18 | assertEqual(t, GenerateNonUniqueNameFromPath("path\\on\\windows.js"), "windows")
19 | assertEqual(t, GenerateNonUniqueNameFromPath("node_modules/demo-pkg/index.js"), "demo_pkg")
20 | assertEqual(t, GenerateNonUniqueNameFromPath("node_modules\\demo-pkg\\index.js"), "demo_pkg")
21 | assertEqual(t, GenerateNonUniqueNameFromPath("123_invalid_identifier.js"), "invalid_identifier")
22 | assertEqual(t, GenerateNonUniqueNameFromPath("emoji 🍕 name.js"), "emoji_name")
23 | }
24 |
--------------------------------------------------------------------------------
/internal/js_parser/global_name_parser.go:
--------------------------------------------------------------------------------
1 | package js_parser
2 |
3 | import (
4 | "github.com/evanw/esbuild/internal/js_lexer"
5 | "github.com/evanw/esbuild/internal/logger"
6 | )
7 |
8 | func ParseGlobalName(log logger.Log, source logger.Source) (result []string, ok bool) {
9 | ok = true
10 | defer func() {
11 | r := recover()
12 | if _, isLexerPanic := r.(js_lexer.LexerPanic); isLexerPanic {
13 | ok = false
14 | } else if r != nil {
15 | panic(r)
16 | }
17 | }()
18 |
19 | lexer := js_lexer.NewLexerGlobalName(log, source)
20 |
21 | // Start off with an identifier
22 | result = append(result, lexer.Identifier)
23 | lexer.Expect(js_lexer.TIdentifier)
24 |
25 | // Follow with dot or index expressions
26 | for lexer.Token != js_lexer.TEndOfFile {
27 | switch lexer.Token {
28 | case js_lexer.TDot:
29 | lexer.Next()
30 | if !lexer.IsIdentifierOrKeyword() {
31 | lexer.Expect(js_lexer.TIdentifier)
32 | }
33 | result = append(result, lexer.Identifier)
34 | lexer.Next()
35 |
36 | case js_lexer.TOpenBracket:
37 | lexer.Next()
38 | result = append(result, js_lexer.UTF16ToString(lexer.StringLiteral()))
39 | lexer.Expect(js_lexer.TStringLiteral)
40 | lexer.Expect(js_lexer.TCloseBracket)
41 |
42 | default:
43 | lexer.Expect(js_lexer.TDot)
44 | }
45 | }
46 |
47 | return
48 | }
49 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | Website |
5 | Getting started |
6 | Documentation |
7 | Plugins |
8 | FAQ
9 |
10 |
11 | ## Why?
12 |
13 | Our current build tools for the web are 10-100x slower than they could be:
14 |
15 |
16 |
17 |
18 |
19 | The main goal of the esbuild bundler project is to bring about a new era of build tool performance, and create an easy-to-use modern bundler along the way.
20 |
21 | Major features:
22 |
23 | * Extreme speed without needing a cache
24 | * ES6 and CommonJS modules
25 | * Tree shaking of ES6 modules
26 | * An [API](https://esbuild.github.io/api/) for JavaScript and Go
27 | * [TypeScript](https://esbuild.github.io/content-types/#typescript) and [JSX](https://esbuild.github.io/content-types/#jsx) syntax
28 | * [Source maps](https://esbuild.github.io/api/#sourcemap)
29 | * [Minification](https://esbuild.github.io/api/#minify)
30 | * [Plugins](https://esbuild.github.io/plugins/)
31 |
32 | Check out the [getting started](https://esbuild.github.io/getting-started/) instructions if you want to give esbuild a try.
33 |
--------------------------------------------------------------------------------
/cmd/esbuild/main_other.go:
--------------------------------------------------------------------------------
1 | //go:build !js || !wasm
2 | // +build !js !wasm
3 |
4 | package main
5 |
6 | import (
7 | "fmt"
8 | "os"
9 | "runtime/pprof"
10 | "runtime/trace"
11 |
12 | "github.com/evanw/esbuild/internal/logger"
13 | )
14 |
15 | func createTraceFile(osArgs []string, traceFile string) func() {
16 | f, err := os.Create(traceFile)
17 | if err != nil {
18 | logger.PrintErrorToStderr(osArgs, fmt.Sprintf(
19 | "Failed to create trace file: %s", err.Error()))
20 | return nil
21 | }
22 | trace.Start(f)
23 | return func() {
24 | trace.Stop()
25 | f.Close()
26 | }
27 | }
28 |
29 | func createHeapFile(osArgs []string, heapFile string) func() {
30 | f, err := os.Create(heapFile)
31 | if err != nil {
32 | logger.PrintErrorToStderr(osArgs, fmt.Sprintf(
33 | "Failed to create heap file: %s", err.Error()))
34 | return nil
35 | }
36 | return func() {
37 | if err := pprof.WriteHeapProfile(f); err != nil {
38 | logger.PrintErrorToStderr(osArgs, fmt.Sprintf(
39 | "Failed to write heap profile: %s", err.Error()))
40 | }
41 | f.Close()
42 | }
43 | }
44 |
45 | func createCpuprofileFile(osArgs []string, cpuprofileFile string) func() {
46 | f, err := os.Create(cpuprofileFile)
47 | if err != nil {
48 | logger.PrintErrorToStderr(osArgs, fmt.Sprintf(
49 | "Failed to create cpuprofile file: %s", err.Error()))
50 | return nil
51 | }
52 | pprof.StartCPUProfile(f)
53 | return func() {
54 | pprof.StopCPUProfile()
55 | f.Close()
56 | }
57 | }
58 |
--------------------------------------------------------------------------------
/internal/cache/cache_fs.go:
--------------------------------------------------------------------------------
1 | package cache
2 |
3 | import (
4 | "sync"
5 |
6 | "github.com/evanw/esbuild/internal/fs"
7 | )
8 |
9 | // This cache uses information from the "stat" syscall to try to avoid re-
10 | // reading files from the file system during subsequent builds if the file
11 | // hasn't changed. The assumption is reading the file metadata is faster than
12 | // reading the file contents.
13 |
// FSCache memoizes file reads keyed by path. Access to "entries" is guarded
// by "mutex" so a single cache can be shared across goroutines.
type FSCache struct {
	mutex   sync.Mutex
	entries map[string]*fsEntry
}

// fsEntry is one cached file: its contents plus the modification key
// observed when it was read. "isModKeyUsable" is false when the file system
// could not provide a modification key; in that case the cached contents are
// never trusted and the file is always re-read (see ReadFile).
type fsEntry struct {
	contents       string
	modKey         fs.ModKey
	isModKeyUsable bool
}
24 |
// ReadFile returns the contents of the file at "path", serving from the
// cache when the file's modification key ("stat" data) is unchanged since
// the last read. The two error results mirror the underlying fs.FS:
// a canonical error plus the original error for detailed messages.
func (c *FSCache) ReadFile(fs fs.FS, path string) (contents string, canonicalError error, originalError error) {
	// Look up any previously-cached entry under the lock
	entry := func() *fsEntry {
		c.mutex.Lock()
		defer c.mutex.Unlock()
		return c.entries[path]
	}()

	// If the file's modification key hasn't changed since it was cached, assume
	// the contents of the file are also the same and skip reading the file.
	modKey, modKeyErr := fs.ModKey(path)
	if entry != nil && entry.isModKeyUsable && modKeyErr == nil && entry.modKey == modKey {
		return entry.contents, nil, nil
	}

	// Cache miss (or unusable key): read the file from the file system
	contents, err, originalError := fs.ReadFile(path)
	if err != nil {
		return "", err, originalError
	}

	// Store the contents along with the modification key observed above. If
	// the key could not be obtained it is marked unusable so the next call
	// does not trust it.
	c.mutex.Lock()
	defer c.mutex.Unlock()
	c.entries[path] = &fsEntry{
		contents:       contents,
		modKey:         modKey,
		isModKeyUsable: modKeyErr == nil,
	}
	return contents, nil, nil
}
53 |
--------------------------------------------------------------------------------
/lib/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "lib",
3 | "lockfileVersion": 2,
4 | "requires": true,
5 | "packages": {
6 | "": {
7 | "dependencies": {
8 | "@types/node": "14.0.13",
9 | "typescript": "4.4.2"
10 | }
11 | },
12 | "node_modules/@types/node": {
13 | "version": "14.0.13",
14 | "resolved": "https://registry.npmjs.org/@types/node/-/node-14.0.13.tgz",
15 | "integrity": "sha512-rouEWBImiRaSJsVA+ITTFM6ZxibuAlTuNOCyxVbwreu6k6+ujs7DfnU9o+PShFhET78pMBl3eH+AGSI5eOTkPA=="
16 | },
17 | "node_modules/typescript": {
18 | "version": "4.4.2",
19 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.4.2.tgz",
20 | "integrity": "sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ==",
21 | "bin": {
22 | "tsc": "bin/tsc",
23 | "tsserver": "bin/tsserver"
24 | },
25 | "engines": {
26 | "node": ">=4.2.0"
27 | }
28 | }
29 | },
30 | "dependencies": {
31 | "@types/node": {
32 | "version": "14.0.13",
33 | "resolved": "https://registry.npmjs.org/@types/node/-/node-14.0.13.tgz",
34 | "integrity": "sha512-rouEWBImiRaSJsVA+ITTFM6ZxibuAlTuNOCyxVbwreu6k6+ujs7DfnU9o+PShFhET78pMBl3eH+AGSI5eOTkPA=="
35 | },
36 | "typescript": {
37 | "version": "4.4.2",
38 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.4.2.tgz",
39 | "integrity": "sha512-gzP+t5W4hdy4c+68bfcv0t400HVJMMd2+H9B7gae1nQlBzCqvrXX+6GL/b3GAgyTH966pzrZ70/fRjwAtZksSQ=="
40 | }
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/internal/compat/css_table.go:
--------------------------------------------------------------------------------
1 | package compat
2 |
// CSSFeature is a bitmask of CSS syntax features that may need lowering for
// older target environments.
type CSSFeature uint32

const (
	HexRGBA CSSFeature = 1 << iota

	RebeccaPurple

	// This feature includes all of the following:
	// - Allow floats in rgb() and rgba()
	// - hsl() can accept alpha values
	// - rgb() can accept alpha values
	// - Space-separated functional color notations
	Modern_RGB_HSL
)

// Has reports whether any of the bits in "feature" are set in "f"
func (f CSSFeature) Has(feature CSSFeature) bool {
	return f&feature != 0
}
21 |
// cssTable records, for each CSSFeature, the minimum version of each engine
// that supports it. An engine missing from a feature's map is treated as
// never supporting that feature (see UnsupportedCSSFeatures). Versions are
// stored as integer components — e.g. {12, 2} is presumably version 12.2.
var cssTable = map[CSSFeature]map[Engine][]int{
	// Data from: https://developer.mozilla.org/en-US/docs/Web/CSS/color_value
	HexRGBA: {
		Chrome:  {62},
		Edge:    {79},
		Firefox: {49},
		IOS:     {9, 3},
		Safari:  {9, 1},
	},
	RebeccaPurple: {
		Chrome:  {38},
		Edge:    {12},
		Firefox: {33},
		IOS:     {8},
		Safari:  {9},
	},
	Modern_RGB_HSL: {
		Chrome:  {66},
		Edge:    {79},
		Firefox: {52},
		IOS:     {12, 2},
		Safari:  {12, 1},
	},
}
46 |
47 | // Return all features that are not available in at least one environment
48 | func UnsupportedCSSFeatures(constraints map[Engine][]int) (unsupported CSSFeature) {
49 | for feature, engines := range cssTable {
50 | for engine, version := range constraints {
51 | if engine == ES || engine == Node {
52 | // Specifying "--target=es2020" shouldn't affect CSS
53 | continue
54 | }
55 | if minVersion, ok := engines[engine]; !ok || isVersionLessThan(version, minVersion) {
56 | unsupported |= feature
57 | }
58 | }
59 | }
60 | return
61 | }
62 |
--------------------------------------------------------------------------------
/internal/xxhash/xxhash_other.go:
--------------------------------------------------------------------------------
1 | package xxhash
2 |
// Sum64 computes the 64-bit xxHash digest of b.
func Sum64(b []byte) uint64 {
	// A simpler version would be
	//   d := New()
	//   d.Write(b)
	//   return d.Sum64()
	// but this is faster, particularly for small inputs.

	n := len(b)
	var h uint64

	if n >= 32 {
		// Bulk phase: consume the input in 32-byte blocks using four
		// independent accumulators, then fold the four lanes into a single
		// 64-bit value. The three-index slices (s[i:j:len(b)]) cap the
		// capacity, which helps the compiler eliminate bounds checks in u64.
		v1 := prime1v + prime2
		v2 := prime2
		v3 := uint64(0)
		v4 := -prime1v
		for len(b) >= 32 {
			v1 = round(v1, u64(b[0:8:len(b)]))
			v2 = round(v2, u64(b[8:16:len(b)]))
			v3 = round(v3, u64(b[16:24:len(b)]))
			v4 = round(v4, u64(b[24:32:len(b)]))
			b = b[32:len(b):len(b)]
		}
		h = rol1(v1) + rol7(v2) + rol12(v3) + rol18(v4)
		h = mergeRound(h, v1)
		h = mergeRound(h, v2)
		h = mergeRound(h, v3)
		h = mergeRound(h, v4)
	} else {
		// Inputs shorter than one block skip the bulk phase entirely
		h = prime5
	}

	// Mix in the total input length
	h += uint64(n)

	// Tail phase: consume what remains 8 bytes at a time, then up to 4
	// bytes, then single bytes
	i, end := 0, len(b)
	for ; i+8 <= end; i += 8 {
		k1 := round(0, u64(b[i:i+8:len(b)]))
		h ^= k1
		h = rol27(h)*prime1 + prime4
	}
	if i+4 <= end {
		h ^= uint64(u32(b[i:i+4:len(b)])) * prime1
		h = rol23(h)*prime2 + prime3
		i += 4
	}
	for ; i < end; i++ {
		h ^= uint64(b[i]) * prime5
		h = rol11(h) * prime1
	}

	// Final avalanche: shift-xor/multiply mixing of the accumulated value
	h ^= h >> 33
	h *= prime2
	h ^= h >> 29
	h *= prime3
	h ^= h >> 32

	return h
}
61 |
// writeBlocks consumes as many whole 32-byte blocks from b as possible,
// updating the Digest's four accumulators, and returns the number of bytes
// consumed (a multiple of 32). Any partial trailing block is left for the
// caller to buffer.
func writeBlocks(d *Digest, b []byte) int {
	// Work on local copies of the accumulators inside the loop and store
	// them back once at the end, avoiding repeated loads/stores through d
	v1, v2, v3, v4 := d.v1, d.v2, d.v3, d.v4
	n := len(b)
	for len(b) >= 32 {
		v1 = round(v1, u64(b[0:8:len(b)]))
		v2 = round(v2, u64(b[8:16:len(b)]))
		v3 = round(v3, u64(b[16:24:len(b)]))
		v4 = round(v4, u64(b[24:32:len(b)]))
		b = b[32:len(b):len(b)]
	}
	d.v1, d.v2, d.v3, d.v4 = v1, v2, v3, v4
	return n - len(b)
}
75 |
--------------------------------------------------------------------------------
/internal/resolver/dataurl.go:
--------------------------------------------------------------------------------
1 | package resolver
2 |
3 | import (
4 | "encoding/base64"
5 | "fmt"
6 | "net/url"
7 | "strings"
8 | )
9 |
// DataURL is the parsed form of a "data:" URL: the MIME type, the raw
// payload, and whether the payload is base64-encoded.
type DataURL struct {
	mimeType string
	data     string
	isBase64 bool
}

// ParseDataURL splits a "data:" URL into its MIME type and payload. It
// returns ok=false when the URL doesn't start with "data:" or contains no
// comma separating the metadata from the payload.
func ParseDataURL(url string) (parsed DataURL, ok bool) {
	const scheme = "data:"
	if !strings.HasPrefix(url, scheme) {
		return
	}
	comma := strings.IndexByte(url, ',')
	if comma == -1 {
		return
	}
	mime := url[len(scheme):comma]
	const base64Suffix = ";base64"
	if strings.HasSuffix(mime, base64Suffix) {
		mime = mime[:len(mime)-len(base64Suffix)]
		parsed.isBase64 = true
	}
	parsed.mimeType = mime
	parsed.data = url[comma+1:]
	ok = true
	return
}

// MIMEType identifies the small set of MIME types supported for data URLs
type MIMEType uint8

const (
	MIMETypeUnsupported MIMEType = iota
	MIMETypeTextCSS
	MIMETypeTextJavaScript
	MIMETypeApplicationJSON
)

// DecodeMIMEType maps the parsed MIME type onto one of the supported
// MIMEType constants, ignoring parameters such as ";charset=utf-8"
func (parsed DataURL) DecodeMIMEType() MIMEType {
	// Remove things like ";charset=utf-8"
	mime := parsed.mimeType
	if semicolon := strings.IndexByte(mime, ';'); semicolon != -1 {
		mime = mime[:semicolon]
	}

	// Hard-code a few supported types
	result := MIMETypeUnsupported
	switch mime {
	case "text/css":
		result = MIMETypeTextCSS
	case "text/javascript":
		result = MIMETypeTextJavaScript
	case "application/json":
		result = MIMETypeApplicationJSON
	}
	return result
}

// DecodeData returns the decoded payload: base64-decoded when the URL was
// marked ";base64", percent-unescaped otherwise
func (parsed DataURL) DecodeData() (string, error) {
	// Try to read base64 data
	if parsed.isBase64 {
		decoded, err := base64.StdEncoding.DecodeString(parsed.data)
		if err != nil {
			return "", fmt.Errorf("could not decode base64 data: %s", err.Error())
		}
		return string(decoded), nil
	}

	// Try to read percent-escaped data
	content, err := url.PathUnescape(parsed.data)
	if err != nil {
		return "", fmt.Errorf("could not decode percent-escaped data: %s", err.Error())
	}
	return content, nil
}
77 |
--------------------------------------------------------------------------------
/lib/npm/worker.ts:
--------------------------------------------------------------------------------
1 | // This file is part of the web worker source code
2 |
// Provided externally (declared here, defined elsewhere — presumably
// substituted/injected by the build setup; TODO confirm)
declare const ESBUILD_VERSION: string;
declare function postMessage(message: any): void;

// The first message from the host carries the compiled WebAssembly module.
// This handler wires up stdio shims for the Go wasm runtime and then
// replaces itself (see the inner "onmessage" below), so all later messages
// are treated as stdin chunks.
onmessage = ({ data: wasm }) => {
  let decoder = new TextDecoder()
  let fs = (global as any).fs

  // Intercept writes: fd 1 (stdout) is forwarded to the host as a binary
  // message; fd 2 (stderr) is buffered and logged one complete line at a
  // time; any other fd is an error
  let stderr = ''
  fs.writeSync = (fd: number, buffer: Uint8Array) => {
    if (fd === 1) {
      postMessage(buffer)
    } else if (fd === 2) {
      stderr += decoder.decode(buffer)
      let parts = stderr.split('\n')
      if (parts.length > 1) console.log(parts.slice(0, -1).join('\n'))
      stderr = parts[parts.length - 1]
    } else {
      throw new Error('Bad write')
    }
    return buffer.length
  }

  // Queue of pending stdin chunks plus the read position within the first
  // chunk; "resumeStdin" retries a read that found the queue empty
  let stdin: Uint8Array[] = []
  let resumeStdin: () => void
  let stdinPos = 0

  // From now on, incoming messages are stdin data
  onmessage = ({ data }) => {
    if (data.length > 0) {
      stdin.push(data)
      if (resumeStdin) resumeStdin()
    }
  }

  // Implement the async read callback used for stdin (fd 0). Only the exact
  // call shape checked below is supported.
  fs.read = (
    fd: number, buffer: Uint8Array, offset: number, length: number,
    position: null, callback: (err: Error | null, count?: number) => void,
  ) => {
    if (fd !== 0 || offset !== 0 || length !== buffer.length || position !== null) {
      throw new Error('Bad read')
    }

    if (stdin.length === 0) {
      // No data yet: park this read and retry when the next chunk arrives
      resumeStdin = () => fs.read(fd, buffer, offset, length, position, callback)
      return
    }

    // Serve as many bytes as possible from the front chunk, dropping the
    // chunk once it has been fully consumed
    let first = stdin[0]
    let count = Math.max(0, Math.min(length, first.length - stdinPos))
    buffer.set(first.subarray(stdinPos, stdinPos + count), offset)
    stdinPos += count
    if (stdinPos === first.length) {
      stdin.shift()
      stdinPos = 0
    }
    callback(null, count)
  }

  // Start the Go runtime with the wasm module, running esbuild in service
  // mode tagged with the expected version
  let go = new (global as any).Go()
  go.argv = ['', `--service=${ESBUILD_VERSION}`]

  WebAssembly.instantiate(wasm, go.importObject)
    .then(({ instance }) => go.run(instance))
}
66 |
--------------------------------------------------------------------------------
/internal/helpers/timer.go:
--------------------------------------------------------------------------------
1 | package helpers
2 |
3 | import (
4 | "fmt"
5 | "strings"
6 | "sync"
7 | "time"
8 |
9 | "github.com/evanw/esbuild/internal/logger"
10 | )
11 |
// Timer accumulates named Begin/End interval records for later reporting
// via Log. A nil *Timer is valid: all of its methods become no-ops.
type Timer struct {
	mutex sync.Mutex // guards "data" during Join (Begin/End do not lock)
	data  []timerData
}

// timerData is a single event: the start (isEnd == false) or the end
// (isEnd == true) of a named interval
type timerData struct {
	name  string
	time  time.Time
	isEnd bool
}
22 |
23 | func (t *Timer) Begin(name string) {
24 | if t != nil {
25 | t.data = append(t.data, timerData{
26 | name: name,
27 | time: time.Now(),
28 | })
29 | }
30 | }
31 |
32 | func (t *Timer) End(name string) {
33 | if t != nil {
34 | t.data = append(t.data, timerData{
35 | name: name,
36 | time: time.Now(),
37 | isEnd: true,
38 | })
39 | }
40 | }
41 |
42 | func (t *Timer) Fork() *Timer {
43 | if t != nil {
44 | return &Timer{}
45 | }
46 | return nil
47 | }
48 |
49 | func (t *Timer) Join(other *Timer) {
50 | if t != nil && other != nil {
51 | t.mutex.Lock()
52 | defer t.mutex.Unlock()
53 | t.data = append(t.data, other.data...)
54 | }
55 | }
56 |
// Log emits the recorded intervals as one info message with an indented
// note per Begin/End pair. A nil *Timer is a no-op. Log panics (via the
// stack pop or the name check) if Begin/End calls were not properly paired.
func (t *Timer) Log(log logger.Log) {
	if t == nil {
		return
	}

	// Each Begin reserves a note slot immediately (so output stays in start
	// order) and is pushed on a stack; the matching End pops it and fills in
	// the reserved slot once the duration is known.
	type pair struct {
		timerData
		index uint32
	}

	var notes []logger.MsgData
	var stack []pair
	indent := 0

	for _, item := range t.data {
		if !item.isEnd {
			// Begin event: reserve a slot and push
			top := pair{timerData: item, index: uint32(len(notes))}
			notes = append(notes, logger.MsgData{})
			stack = append(stack, top)
			indent++
		} else {
			// End event: pop the matching Begin and write its note
			indent--
			last := len(stack) - 1
			top := stack[last]
			stack = stack[:last]
			if item.name != top.name {
				panic("Internal error")
			}
			notes[top.index].Text = fmt.Sprintf("%s%s: %dms",
				strings.Repeat(" ", indent),
				top.name,
				item.time.Sub(top.time).Milliseconds())
		}
	}

	log.AddMsg(logger.Msg{
		Kind:  logger.Info,
		Data:  logger.MsgData{Text: "Timing information (times may not nest hierarchically due to parallelism)"},
		Notes: notes,
	})
}
98 |
--------------------------------------------------------------------------------
/internal/helpers/joiner.go:
--------------------------------------------------------------------------------
1 | package helpers
2 |
3 | import (
4 | "bytes"
5 | "strings"
6 | )
7 |
// This provides an efficient way to join lots of big string and byte slices
// together. It avoids the cost of repeatedly reallocating as the buffer grows
// by measuring exactly how big the buffer should be and then allocating once.
// This is a measurable speedup.
type Joiner struct {
	lastByte byte           // last byte of the most recent non-empty fragment
	strings  []joinerString // queued string fragments with output offsets
	bytes    []joinerBytes  // queued byte-slice fragments with output offsets
	length   uint32         // total output length so far
}

// A string fragment plus the offset where it lands in the final output
type joinerString struct {
	data   string
	offset uint32
}

// A byte-slice fragment plus the offset where it lands in the final output
type joinerBytes struct {
	data   []byte
	offset uint32
}

// AddString queues a string fragment for the final join
func (j *Joiner) AddString(data string) {
	if n := len(data); n > 0 {
		j.lastByte = data[n-1]
	}
	j.strings = append(j.strings, joinerString{data, j.length})
	j.length += uint32(len(data))
}

// AddBytes queues a byte-slice fragment for the final join. The slice is
// retained, not copied, so the caller must not mutate it afterwards.
func (j *Joiner) AddBytes(data []byte) {
	if n := len(data); n > 0 {
		j.lastByte = data[n-1]
	}
	j.bytes = append(j.bytes, joinerBytes{data, j.length})
	j.length += uint32(len(data))
}

// LastByte returns the final byte of the most recently added non-empty
// fragment (the zero byte if nothing non-empty has been added)
func (j *Joiner) LastByte() byte {
	return j.lastByte
}

// Length returns the total number of bytes queued so far
func (j *Joiner) Length() uint32 {
	return j.length
}

// EnsureNewlineAtEnd appends a newline unless the output is empty or
// already ends with one
func (j *Joiner) EnsureNewlineAtEnd() {
	if j.length > 0 && j.lastByte != '\n' {
		j.AddString("\n")
	}
}

// Done assembles and returns the joined output in a single allocation
func (j *Joiner) Done() []byte {
	// No need to allocate if there was only a single byte array written
	if len(j.strings) == 0 && len(j.bytes) == 1 && j.bytes[0].offset == 0 {
		return j.bytes[0].data
	}
	result := make([]byte, j.length)
	for _, fragment := range j.strings {
		copy(result[fragment.offset:], fragment.data)
	}
	for _, fragment := range j.bytes {
		copy(result[fragment.offset:], fragment.data)
	}
	return result
}

// Contains reports whether any queued string fragment contains "s" or any
// queued byte fragment contains "b". It only searches within individual
// fragments, not across fragment boundaries.
func (j *Joiner) Contains(s string, b []byte) bool {
	for _, fragment := range j.strings {
		if strings.Contains(fragment.data, s) {
			return true
		}
	}
	for _, fragment := range j.bytes {
		if bytes.Contains(fragment.data, b) {
			return true
		}
	}
	return false
}
87 |
--------------------------------------------------------------------------------
/internal/test/diff.go:
--------------------------------------------------------------------------------
1 | package test
2 |
3 | import (
4 | "fmt"
5 | "strings"
6 |
7 | "github.com/evanw/esbuild/internal/logger"
8 | )
9 |
10 | func diff(old string, new string, color bool) string {
11 | return strings.Join(diffRec(nil, strings.Split(old, "\n"), strings.Split(new, "\n"), color), "\n")
12 | }
13 |
// This is a simple recursive line-by-line diff implementation
//
// diffRec appends diff lines for transforming "old" into "new" onto
// "result": removed lines are prefixed with "-", added lines with "+", and
// unchanged lines with a space. It recursively splits around the longest
// run of lines common to both sides; when no common run exists, all of
// "old" is emitted as removed followed by all of "new" as added.
func diffRec(result []string, old []string, new []string, color bool) []string {
	o, n, common := lcSubstr(old, new)

	if common == 0 {
		// Everything changed
		for _, line := range old {
			if color {
				result = append(result, fmt.Sprintf("%s-%s%s", logger.TerminalColors.Red, line, logger.TerminalColors.Reset))
			} else {
				result = append(result, "-"+line)
			}
		}
		for _, line := range new {
			if color {
				result = append(result, fmt.Sprintf("%s+%s%s", logger.TerminalColors.Green, line, logger.TerminalColors.Reset))
			} else {
				result = append(result, "+"+line)
			}
		}
	} else {
		// Something in the middle stayed the same: diff what comes before
		// it, emit the common run unchanged, then diff what comes after
		result = diffRec(result, old[:o], new[:n], color)
		for _, line := range old[o : o+common] {
			if color {
				result = append(result, fmt.Sprintf("%s %s%s", logger.TerminalColors.Dim, line, logger.TerminalColors.Reset))
			} else {
				result = append(result, " "+line)
			}
		}
		result = diffRec(result, old[o+common:], new[n+common:], color)
	}

	return result
}
49 |
// From: https://en.wikipedia.org/wiki/Longest_common_substring_problem
//
// lcSubstr returns (i, j, z) such that S[i:i+z] equals T[j:j+z] and z is
// the length of a longest run of lines common to S and T (z == 0 when
// there is no common run).
func lcSubstr(S []string, T []string) (int, int, int) {
	rows := len(S)
	cols := len(T)

	// Two rolling rows of the DP table: prev[j] holds the length of the
	// common run ending at S[i-1] and T[j]; next[j] the same for S[i]
	prev := make([]int, cols)
	next := make([]int, cols)
	best, endI, endJ := 0, 0, 0

	for i := 0; i < rows; i++ {
		for j := 0; j < cols; j++ {
			if S[i] != T[j] {
				next[j] = 0
				continue
			}
			if j > 0 {
				next[j] = prev[j-1] + 1
			} else {
				next[j] = 1
			}
			if next[j] > best {
				best = next[j]
				endI = i + 1
				endJ = j + 1
			}
		}
		prev, next = next, prev
	}

	// Convert the (exclusive) end positions into start positions
	return endI - best, endJ - best, best
}
82 |
--------------------------------------------------------------------------------
/scripts/parse-ts-files.js:
--------------------------------------------------------------------------------
1 | // This script parses all .ts and .tsx files in the current directory using
2 | // esbuild. This is useful to check for parser bugs and/or crashes in esbuild.
3 |
4 | const fs = require('fs');
5 | const os = require('os');
6 | const path = require('path');
7 | const ts = require('typescript');
8 | const child_process = require('child_process');
9 | const esbuildPath = path.join(path.dirname(__dirname), 'esbuild');
10 |
11 | function walkDir(root, cb) {
12 | for (const entry of fs.readdirSync(root)) {
13 | const absolute = path.join(root, entry);
14 | if (fs.statSync(absolute).isDirectory()) {
15 | walkDir(absolute, cb);
16 | } else if ((entry.endsWith('.ts') && !entry.endsWith('.d.ts')) || entry.endsWith('.tsx')) {
17 | cb(absolute)
18 | }
19 | }
20 | }
21 |
// Build the esbuild binary first so the checks below exercise current code
child_process.execSync('make', { cwd: path.dirname(__dirname) });

// Doing one file at a time is useful for debugging crashes
if (process.argv.includes('--individual')) {
  walkDir(process.cwd(), absolute => {
    let output = child_process.spawnSync(esbuildPath, [absolute, '--outfile=/dev/null'], { stdio: ['inherit', 'pipe', 'pipe'] });
    if (output.status) {
      // esbuild rejected this file; only report it as a failure if the
      // official TypeScript compiler parses the file cleanly (otherwise the
      // file itself is invalid and esbuild is right to reject it)
      let result;
      try {
        result = ts.transpileModule(fs.readFileSync(absolute, 'utf8'), { reportDiagnostics: true });
      } catch (e) {
        // Ignore this file if the TypeScript compiler crashes on it
        return
      }
      if (result.diagnostics.length > 0) {
        // Ignore this file if the TypeScript compiler has parse errors
        return
      }
      console.log('-'.repeat(80));
      console.log('Failure:', absolute);
      console.log('-'.repeat(20) + ' esbuild output:');
      console.log(output.stdout + output.stderr);
    }
  });
}

// Otherwise it's much faster to do everything at once
else {
  const tempDir = path.join(os.tmpdir(), 'esbuild-parse-ts-files');
  try {
    fs.mkdirSync(tempDir);
  } catch (e) {
    // The temporary directory may already exist from a previous run
  }
  const all = [];
  walkDir(process.cwd(), absolute => all.push(absolute));
  try {
    child_process.execFileSync(esbuildPath, ['--outdir=' + tempDir].concat(all), { stdio: 'inherit' });
  } catch (e) {
    // Ignore a non-zero exit here; esbuild's own output was already shown
    // via "stdio: 'inherit'"
  }
}
62 |
--------------------------------------------------------------------------------
/scripts/deno-tests.js:
--------------------------------------------------------------------------------
1 | // To run this, you must first build the Deno package with "make platform-deno"
2 | import * as esbuild from '../deno/mod.js'
3 | import * as path from 'https://deno.land/std@0.95.0/path/mod.ts'
4 | import * as asserts from 'https://deno.land/std@0.95.0/testing/asserts.ts'
5 |
const rootTestDir = path.join(path.dirname(path.fromFileUrl(import.meta.url)), '.deno-tests')
let testDidFail = false

// Start from a clean test directory (it may be left over from a failed run)
try {
  Deno.removeSync(rootTestDir, { recursive: true })
} catch {
}
Deno.mkdirSync(rootTestDir, { recursive: true })

// Register a Deno test that runs "fn" with its own scratch directory. The
// directory is removed only when the test passes, so a failure leaves its
// files behind for inspection. esbuild is stopped after every test.
function test(name, fn) {
  let testDir = path.join(rootTestDir, name)
  Deno.test(name, async () => {
    await Deno.mkdir(testDir, { recursive: true })
    try {
      await fn({ testDir })
      await Deno.remove(testDir, { recursive: true }).catch(() => null)
    } catch (e) {
      testDidFail = true
      throw e
    } finally {
      esbuild.stop()
    }
  })
}

// Print an overall pass/fail summary on process exit, and remove the root
// test directory only when everything passed
window.addEventListener("unload", (e) => {
  if (testDidFail) {
    console.error(`❌ deno tests failed`)
  } else {
    console.log(`✅ deno tests passed`)
    try {
      Deno.removeSync(rootTestDir, { recursive: true })
    } catch {
      // root test dir possibly already removed, so ignore
    }
  }
})
43 |
// Bundle a two-file project to ESM and check the output actually runs
test("basicBuild", async ({ testDir }) => {
  const input = path.join(testDir, 'in.ts')
  const dep = path.join(testDir, 'dep.ts')
  const output = path.join(testDir, 'out.ts')
  await Deno.writeTextFile(input, 'import dep from "./dep.ts"; export default dep === 123')
  await Deno.writeTextFile(dep, 'export default 123')
  await esbuild.build({
    entryPoints: [input],
    bundle: true,
    outfile: output,
    format: 'esm',
  })
  const result = await import(path.toFileUrl(output))
  asserts.assertStrictEquals(result.default, true)
})

// Transform a single TypeScript snippet to JavaScript (type stripped)
test("basicTransform", async () => {
  const ts = 'let x: number = 1+2'
  const result = await esbuild.transform(ts, { loader: 'ts' })
  asserts.assertStrictEquals(result.code, 'let x = 1 + 2;\n')
})

test("largeTransform", async () => {
  // This should be large enough to be bigger than Deno's write buffer
  let x = '0'
  for (let i = 0; i < 1000; i++)x += '+' + i
  x += ','
  let y = 'return['
  for (let i = 0; i < 1000; i++)y += x
  y += ']'
  const result = await esbuild.build({
    stdin: {
      contents: y,
    },
    write: false,
    minify: true,
  })
  // Minification drops the trailing "," before "]", hence slice(0, -2)
  asserts.assertStrictEquals(result.outputFiles[0].text, y.slice(0, -2) + '];\n')
})
83 |
84 |
--------------------------------------------------------------------------------
/images/benchmark.svg:
--------------------------------------------------------------------------------
1 |
39 |
--------------------------------------------------------------------------------
/pkg/cli/cli.go:
--------------------------------------------------------------------------------
1 | // This API exposes the command-line interface for esbuild. It can be used to
2 | // run esbuild from Go without the overhead of creating a child process.
3 | //
4 | // Example usage:
5 | //
6 | // package main
7 | //
8 | // import (
9 | // "os"
10 | //
11 | // "github.com/evanw/esbuild/pkg/cli"
12 | // )
13 | //
14 | // func main() {
15 | // os.Exit(cli.Run(os.Args[1:]))
16 | // }
17 | //
18 | package cli
19 |
20 | import (
21 | "github.com/evanw/esbuild/pkg/api"
22 | )
23 |
// This function invokes the esbuild CLI. It takes an array of command-line
// arguments (excluding the executable argument itself) and returns an exit
// code. There are some minor differences between this CLI and the actual
// "esbuild" executable such as the lack of auxiliary flags (e.g. "--help" and
// "--version") but it is otherwise exactly the same code.
func Run(osArgs []string) int {
	// Delegates to the shared implementation used by the executable
	return runImpl(osArgs)
}
32 |
// This parses an array of strings into an options object suitable for passing
// to "api.Build()". Use this if you need to reuse the same argument parsing
// logic as the esbuild CLI. Parsing starts from the package's default build
// options and returns a non-nil error on invalid flags.
//
// Example usage:
//
//	options, err := cli.ParseBuildOptions([]string{
//	    "input.js",
//	    "--bundle",
//	    "--minify",
//	})
//
//	result := api.Build(options)
//
func ParseBuildOptions(osArgs []string) (options api.BuildOptions, err error) {
	options = newBuildOptions()
	// The second result from parseOptionsImpl is internal-only and is
	// deliberately discarded from this public entry point
	err, _ = parseOptionsImpl(osArgs, &options, nil, kindExternal)
	return
}
52 |
// This parses an array of strings into an options object suitable for passing
// to "api.Transform()". Use this if you need to reuse the same argument
// parsing logic as the esbuild CLI. Parsing starts from the package's default
// transform options and returns a non-nil error on invalid flags.
//
// Example usage:
//
//	options, err := cli.ParseTransformOptions([]string{
//	    "--minify",
//	    "--loader=tsx",
//	    "--define:DEBUG=false",
//	})
//
//	result := api.Transform(input, options)
//
func ParseTransformOptions(osArgs []string) (options api.TransformOptions, err error) {
	options = newTransformOptions()
	// The second result from parseOptionsImpl is internal-only and is
	// deliberately discarded from this public entry point
	err, _ = parseOptionsImpl(osArgs, nil, &options, kindExternal)
	return
}
72 |
// This parses an array of strings into an options object suitable for passing
// to "api.Serve()". The remaining non-serve arguments are returned in another
// array to then be passed to "api.ParseBuildOptions()". Use this if you need
// to reuse the same argument parsing logic as the esbuild CLI.
//
// Example usage:
//
//	serveOptions, args, err := cli.ParseServeOptions([]string{
//	    "--serve=8000",
//	})
//
//	buildOptions, err := cli.ParseBuildOptions(args)
//
//	result := api.Serve(serveOptions, buildOptions)
//
func ParseServeOptions(osArgs []string) (options api.ServeOptions, remainingArgs []string, err error) {
	// All parsing lives in parseServeOptionsImpl so this public API and the
	// CLI stay in sync
	return parseServeOptionsImpl(osArgs)
}
91 |
--------------------------------------------------------------------------------
/internal/css_parser/css_decls_box_shadow.go:
--------------------------------------------------------------------------------
1 | package css_parser
2 |
3 | import (
4 | "github.com/evanw/esbuild/internal/css_ast"
5 | "github.com/evanw/esbuild/internal/css_lexer"
6 | )
7 |
// mangleBoxShadow minifies the tokens for one "box-shadow" layer (the tokens
// between commas). It turns zero lengths into plain "0", minifies any color
// token, and trims trailing zero-valued blur/spread radii when the layer looks
// like a valid shadow. The input slice is mutated in place and a (possibly
// shorter) prefix of it is returned.
func (p *parser) mangleBoxShadow(tokens []css_ast.Token) []css_ast.Token {
	insetCount := 0   // how many "inset" keywords were seen
	colorCount := 0   // how many color tokens were seen
	numbersBegin := 0 // index of the first number token
	numbersCount := 0 // how many number tokens were seen
	numbersDone := false
	foundUnexpectedToken := false

	for i, t := range tokens {
		if t.Kind == css_lexer.TNumber || t.Kind == css_lexer.TDimension {
			if numbersDone {
				// Track if we found a non-number in between two numbers
				foundUnexpectedToken = true
			}
			if t.TurnLengthIntoNumberIfZero() {
				// "0px" => "0"
				tokens[i] = t
			}
			if numbersCount == 0 {
				// Track the index of the first number
				numbersBegin = i
			}
			numbersCount++
		} else {
			if numbersCount != 0 {
				// Track when we find a non-number after a number
				numbersDone = true
			}
			if hex, ok := parseColor(t); ok {
				colorCount++
				tokens[i] = p.mangleColor(t, hex)
			} else if t.Kind == css_lexer.TIdent && t.Text == "inset" {
				insetCount++
			} else {
				// Track if we found a token other than a number, a color, or "inset"
				foundUnexpectedToken = true
			}
		}
	}

	// If everything looks like a valid rule, trim trailing zeros off the numbers.
	// There are three valid configurations of numbers:
	//
	//   offset-x | offset-y
	//   offset-x | offset-y | blur-radius
	//   offset-x | offset-y | blur-radius | spread-radius
	//
	// If omitted, blur-radius and spread-radius are implied to be zero.
	if insetCount <= 1 && colorCount <= 1 && numbersCount > 2 && numbersCount <= 4 && !foundUnexpectedToken {
		numbersEnd := numbersBegin + numbersCount
		// Only blur/spread may be dropped; the two offsets must always remain,
		// hence the "numbersCount > 2" guard
		for numbersCount > 2 && tokens[numbersBegin+numbersCount-1].IsZero() {
			numbersCount--
		}
		// Splice out the dropped trailing zeros, keeping any tokens after them
		tokens = append(tokens[:numbersBegin+numbersCount], tokens[numbersEnd:]...)
	}

	// Set the whitespace flags
	for i := range tokens {
		var whitespace css_ast.WhitespaceFlags
		if i > 0 || !p.options.RemoveWhitespace {
			whitespace |= css_ast.WhitespaceBefore
		}
		if i+1 < len(tokens) {
			whitespace |= css_ast.WhitespaceAfter
		}
		tokens[i].Whitespace = whitespace
	}
	return tokens
}
77 |
// mangleBoxShadows minifies a full comma-separated "box-shadow" value by
// running mangleBoxShadow on each comma-delimited layer. The token slice is
// compacted in place and the shortened prefix is returned.
func (p *parser) mangleBoxShadows(tokens []css_ast.Token) []css_ast.Token {
	n := len(tokens)
	end := 0 // length of the compacted output written so far
	i := 0   // start of the current layer

	for i < n {
		// Find the comma or the end of the token list
		comma := i
		for comma < n && tokens[comma].Kind != css_lexer.TComma {
			comma++
		}

		// Mangle this individual shadow (writing the possibly-shorter result
		// back over the earlier part of the same slice)
		end += copy(tokens[end:], p.mangleBoxShadow(tokens[i:comma]))

		// Skip over the comma
		if comma < n {
			tokens[end] = tokens[comma]
			end++
			comma++
		}
		i = comma
	}

	return tokens[:end]
}
104 |
--------------------------------------------------------------------------------
/internal/fs/fs_mock_test.go:
--------------------------------------------------------------------------------
1 | package fs
2 |
3 | import (
4 | "fmt"
5 | "testing"
6 | )
7 |
// TestMockFSBasic exercises ReadFile and ReadDirectory on a small in-memory
// file system: missing vs. existing files, nested files, missing vs. existing
// directories, and the entry kinds reported for the root directory.
func TestMockFSBasic(t *testing.T) {
	fs := MockFS(map[string]string{
		"/README.md":    "// README.md",
		"/package.json": "// package.json",
		"/src/index.js": "// src/index.js",
		"/src/util.js":  "// src/util.js",
	})

	// Test a missing file
	_, err, _ := fs.ReadFile("/missing.txt")
	if err == nil {
		t.Fatal("Unexpectedly found /missing.txt")
	}

	// Test an existing file
	readme, err, _ := fs.ReadFile("/README.md")
	if err != nil {
		t.Fatal("Expected to find /README.md")
	}
	if readme != "// README.md" {
		t.Fatalf("Incorrect contents for /README.md: %q", readme)
	}

	// Test an existing nested file
	index, err, _ := fs.ReadFile("/src/index.js")
	if err != nil {
		t.Fatal("Expected to find /src/index.js")
	}
	if index != "// src/index.js" {
		t.Fatalf("Incorrect contents for /src/index.js: %q", index)
	}

	// Test a missing directory
	_, err, _ = fs.ReadDirectory("/missing")
	if err == nil {
		t.Fatal("Unexpectedly found /missing")
	}

	// Test a nested directory
	src, err, _ := fs.ReadDirectory("/src")
	if err != nil {
		t.Fatal("Expected to find /src")
	}
	indexEntry, _ := src.Get("index.js")
	utilEntry, _ := src.Get("util.js")
	if len(src.data) != 2 ||
		indexEntry == nil || indexEntry.Kind(fs) != FileEntry ||
		utilEntry == nil || utilEntry.Kind(fs) != FileEntry {
		t.Fatalf("Incorrect contents for /src: %v", src)
	}

	// Test the top-level directory (should contain exactly "src", "README.md",
	// and "package.json", with "src" reported as a directory)
	slash, err, _ := fs.ReadDirectory("/")
	if err != nil {
		t.Fatal("Expected to find /")
	}
	srcEntry, _ := slash.Get("src")
	readmeEntry, _ := slash.Get("README.md")
	packageEntry, _ := slash.Get("package.json")
	if len(slash.data) != 3 ||
		srcEntry == nil || srcEntry.Kind(fs) != DirEntry ||
		readmeEntry == nil || readmeEntry.Kind(fs) != FileEntry ||
		packageEntry == nil || packageEntry.Kind(fs) != FileEntry {
		t.Fatalf("Incorrect contents for /: %v", slash)
	}
}
74 |
75 | func TestMockFSRel(t *testing.T) {
76 | fs := MockFS(map[string]string{})
77 |
78 | expect := func(a string, b string, c string) {
79 | t.Helper()
80 | t.Run(fmt.Sprintf("Rel(%q, %q) == %q", a, b, c), func(t *testing.T) {
81 | t.Helper()
82 | rel, ok := fs.Rel(a, b)
83 | if !ok {
84 | t.Fatalf("!ok")
85 | }
86 | if rel != c {
87 | t.Fatalf("Expected %q, got %q", c, rel)
88 | }
89 | })
90 | }
91 |
92 | expect("/a/b", "/a/b", ".")
93 | expect("/a/b", "/a/b/c", "c")
94 | expect("/a/b", "/a/b/c/d", "c/d")
95 | expect("/a/b/c", "/a/b", "..")
96 | expect("/a/b/c/d", "/a/b", "../..")
97 | expect("/a/b/c", "/a/b/x", "../x")
98 | expect("/a/b/c/d", "/a/b/x", "../../x")
99 | expect("/a/b/c", "/a/b/x/y", "../x/y")
100 | expect("/a/b/c/d", "/a/b/x/y", "../../x/y")
101 |
102 | expect("a/b", "a/c", "../c")
103 | expect("./a/b", "./a/c", "../c")
104 | expect(".", "./a/b", "a/b")
105 | expect(".", ".//a/b", "a/b")
106 | expect(".", "././a/b", "a/b")
107 | expect(".", "././/a/b", "a/b")
108 | }
109 |
--------------------------------------------------------------------------------
/internal/cache/cache.go:
--------------------------------------------------------------------------------
1 | package cache
2 |
3 | import (
4 | "sync"
5 |
6 | "github.com/evanw/esbuild/internal/logger"
7 | "github.com/evanw/esbuild/internal/runtime"
8 | )
9 |
10 | // This is a cache of the parsed contents of a set of files. The idea is to be
11 | // able to reuse the results of parsing between builds and make subsequent
12 | // builds faster by avoiding redundant parsing work. This only works if:
13 | //
14 | // * The AST information in the cache must be considered immutable. There is
15 | // no way to enforce this in Go, but please be disciplined about this. The
16 | // ASTs are shared in between builds. Any information that must be mutated
17 | // in the AST during a build must be done on a shallow clone of the data if
18 | // the mutation happens after parsing (i.e. a clone that clones everything
19 | // that will be mutated and shares only the parts that won't be mutated).
20 | //
21 | // * The information in the cache must not depend at all on the contents of
22 | // any file other than the file being cached. Invalidating an entry in the
23 | // cache does not also invalidate any entries that depend on that file, so
24 | // caching information that depends on other files can result in incorrect
25 | // results due to reusing stale data. For example, do not "bake in" some
26 | // value imported from another file.
27 | //
28 | // * Cached ASTs must only be reused if the parsing options are identical
29 | // between builds. For example, it would be bad if the AST parser depended
30 | // on options inherited from a nearby "package.json" file but those options
31 | // were not part of the cache key. Then the cached AST could incorrectly be
32 | // reused even if the contents of that "package.json" file have changed.
33 | //
// CacheSet is the full set of caches that are shared between builds. One
// CacheSet is intended to outlive a single build so that later builds can
// reuse the work done by earlier ones, subject to the invariants documented
// in the comment above.
type CacheSet struct {
	SourceIndexCache SourceIndexCache // stable source index per (path, kind)
	FSCache          FSCache          // cached file system reads
	CSSCache         CSSCache         // cached parsed CSS
	JSONCache        JSONCache        // cached parsed JSON
	JSCache          JSCache          // cached parsed JavaScript
}
41 |
42 | func MakeCacheSet() *CacheSet {
43 | return &CacheSet{
44 | SourceIndexCache: SourceIndexCache{
45 | entries: make(map[sourceIndexKey]uint32),
46 | nextSourceIndex: runtime.SourceIndex + 1,
47 | },
48 | FSCache: FSCache{
49 | entries: make(map[string]*fsEntry),
50 | },
51 | CSSCache: CSSCache{
52 | entries: make(map[logger.Path]*cssCacheEntry),
53 | },
54 | JSONCache: JSONCache{
55 | entries: make(map[logger.Path]*jsonCacheEntry),
56 | },
57 | JSCache: JSCache{
58 | entries: make(map[logger.Path]*jsCacheEntry),
59 | },
60 | }
61 | }
62 |
// SourceIndexCache hands out source indices and remembers them, so the same
// (path, kind) pair keeps the same index across builds. Safe for concurrent
// use; all fields are guarded by "mutex".
type SourceIndexCache struct {
	mutex           sync.Mutex
	entries         map[sourceIndexKey]uint32
	nextSourceIndex uint32 // the next index that has not yet been handed out
}

// SourceIndexKind distinguishes the different uses a path can have, allowing
// one path to map to more than one source index.
type SourceIndexKind uint8

const (
	// SourceIndexNormal is the source index for the file itself.
	SourceIndexNormal SourceIndexKind = iota
	// SourceIndexJSStubForCSS is the source index for the JavaScript stub
	// generated for a CSS file that is imported from JavaScript.
	SourceIndexJSStubForCSS
)

// sourceIndexKey is the cache key: a path plus how that path is being used.
type sourceIndexKey struct {
	path logger.Path
	kind SourceIndexKind
}
80 |
81 | func (c *SourceIndexCache) LenHint() uint32 {
82 | c.mutex.Lock()
83 | defer c.mutex.Unlock()
84 |
85 | // Add some extra room at the end for a new file or two without reallocating
86 | const someExtraRoom = 16
87 | return c.nextSourceIndex + someExtraRoom
88 | }
89 |
90 | func (c *SourceIndexCache) Get(path logger.Path, kind SourceIndexKind) uint32 {
91 | key := sourceIndexKey{path: path, kind: kind}
92 | c.mutex.Lock()
93 | defer c.mutex.Unlock()
94 | if sourceIndex, ok := c.entries[key]; ok {
95 | return sourceIndex
96 | }
97 | sourceIndex := c.nextSourceIndex
98 | c.nextSourceIndex++
99 | c.entries[key] = sourceIndex
100 | return sourceIndex
101 | }
102 |
--------------------------------------------------------------------------------
/internal/css_parser/css_decls_box.go:
--------------------------------------------------------------------------------
1 | package css_parser
2 |
3 | import (
4 | "github.com/evanw/esbuild/internal/css_ast"
5 | "github.com/evanw/esbuild/internal/css_lexer"
6 | "github.com/evanw/esbuild/internal/logger"
7 | )
8 |
// The four sides of a box, in the order they appear in a CSS four-value
// shorthand such as "margin: top right bottom left".
const (
	boxTop = iota
	boxRight
	boxBottom
	boxLeft
)

// boxSide remembers one side's value while scanning a rule list.
type boxSide struct {
	// The token holding this side's value
	token css_ast.Token
	// Index into the rule list of the declaration this value came from
	index uint32
	// True when the value came from a single-side declaration (e.g.
	// "margin-top") rather than from the four-value shorthand
	single bool
}

// boxTracker accumulates the four sides of "margin"/"padding" declarations so
// that adjacent declarations can be collapsed into a single shorthand.
type boxTracker struct {
	// One entry per side, indexed by boxTop..boxLeft
	sides [4]boxSide
	// The "!important" flag of the declarations currently being tracked
	important bool
}
26 |
// updateSide records a new value for one side. If an earlier declaration for
// this side is now completely superseded, its rule is cleared from the rule
// list so it won't be emitted. A shorthand supersedes anything, while a
// single-side declaration only supersedes an earlier single-side declaration
// (an earlier shorthand still supplies the other three sides).
func (box *boxTracker) updateSide(rules []css_ast.Rule, side int, new boxSide) {
	// A token kind of TEndOfFile means no value has been recorded for this side yet
	if old := box.sides[side]; old.token.Kind != css_lexer.TEndOfFile && (!new.single || old.single) {
		rules[old.index] = css_ast.Rule{}
	}
	box.sides[side] = new
}
33 |
// mangleSides processes a four-value shorthand declaration ("margin" or
// "padding") at rules[index], records all four sides, and then attempts to
// merge with previously-seen declarations via compactRules.
func (box *boxTracker) mangleSides(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, removeWhitespace bool) {
	// Reset if we see a change in the "!important" flag
	if box.important != decl.Important {
		box.sides = [4]boxSide{}
		box.important = decl.Important
	}

	if quad, ok := expandTokenQuad(decl.Value); ok {
		isMargin := decl.Key == css_ast.DMargin
		for side, t := range quad {
			t.TurnLengthIntoNumberIfZero() // "0px" => "0"
			box.updateSide(rules, side, boxSide{token: t, index: uint32(index)})
		}
		box.compactRules(rules, decl.KeyRange, removeWhitespace, isMargin)
	} else {
		// A value we can't expand invalidates any tracked state
		box.sides = [4]boxSide{}
	}
}
52 |
// mangleSide processes a single-side declaration (e.g. "margin-top") at
// rules[index], records that one side, and then attempts to merge with
// previously-seen declarations via compactRules.
func (box *boxTracker) mangleSide(rules []css_ast.Rule, decl *css_ast.RDeclaration, index int, removeWhitespace bool, side int) {
	// Reset if we see a change in the "!important" flag
	if box.important != decl.Important {
		box.sides = [4]boxSide{}
		box.important = decl.Important
	}

	if tokens := decl.Value; len(tokens) == 1 && tokens[0].Kind.IsNumericOrIdent() {
		isMargin := false
		switch decl.Key {
		case css_ast.DMarginTop, css_ast.DMarginRight, css_ast.DMarginBottom, css_ast.DMarginLeft:
			isMargin = true
		}
		t := tokens[0]
		if t.TurnLengthIntoNumberIfZero() {
			// "0px" => "0"
			tokens[0] = t
		}
		box.updateSide(rules, side, boxSide{token: t, index: uint32(index), single: true})
		box.compactRules(rules, decl.KeyRange, removeWhitespace, isMargin)
	} else {
		// A value we don't recognize invalidates any tracked state
		box.sides = [4]boxSide{}
	}
}
76 |
// compactRules merges the four tracked sides into a single "margin" or
// "padding" shorthand declaration once all four sides have values. The four
// original rules are cleared from the rule list and the combined declaration
// takes the slot of the rule recorded for the left side.
func (box *boxTracker) compactRules(rules []css_ast.Rule, keyRange logger.Range, removeWhitespace bool, isMargin bool) {
	// All tokens must be present (TEndOfFile marks a side with no value yet)
	if eof := css_lexer.TEndOfFile; box.sides[0].token.Kind == eof || box.sides[1].token.Kind == eof ||
		box.sides[2].token.Kind == eof || box.sides[3].token.Kind == eof {
		return
	}

	// Generate the most minimal representation
	tokens := compactTokenQuad(
		box.sides[0].token,
		box.sides[1].token,
		box.sides[2].token,
		box.sides[3].token,
		removeWhitespace,
	)

	// Remove all of the existing declarations
	rules[box.sides[0].index] = css_ast.Rule{}
	rules[box.sides[1].index] = css_ast.Rule{}
	rules[box.sides[2].index] = css_ast.Rule{}
	rules[box.sides[3].index] = css_ast.Rule{}

	// Insert the combined declaration where the last rule was
	var key css_ast.D
	var keyText string
	if isMargin {
		key = css_ast.DMargin
		keyText = "margin"
	} else {
		key = css_ast.DPadding
		keyText = "padding"
	}
	rules[box.sides[3].index].Data = &css_ast.RDeclaration{
		Key:       key,
		KeyText:   keyText,
		Value:     tokens,
		KeyRange:  keyRange,
		Important: box.important,
	}
}
117 |
--------------------------------------------------------------------------------
/npm/esbuild-wasm/bin/esbuild:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | // Forward to the automatically-generated WebAssembly loader from the Go compiler
4 |
5 | const crypto = require('crypto');
6 | const path = require('path');
7 | const zlib = require('zlib');
8 | const fs = require('fs');
9 | const os = require('os');
10 |
11 | const wasm_exec = path.join(__dirname, '..', 'wasm_exec.js');
12 | const esbuild_wasm = path.join(__dirname, '..', 'esbuild.wasm');
13 |
14 | const code = fs.readFileSync(wasm_exec, 'utf8');
15 | const wrapper = new Function('require', 'module', 'process', 'WebAssembly', code);
16 |
function instantiate(bytes, importObject) {
  // Compiling synchronously with "new WebAssembly.Module" causes
  // "./esbuild --version" to run around 1 second faster than using the
  // "WebAssembly.instantiate()" API when run in node (v12.16.2)
  const compiledModule = new WebAssembly.Module(bytes);
  const compiledInstance = new WebAssembly.Instance(compiledModule, importObject);
  return Promise.resolve({ instance: compiledInstance, module: compiledModule });
}
24 |
25 | // Node has an unfortunate bug where the node process is unnecessarily kept open while a
26 | // WebAssembly module is being optimized: https://github.com/nodejs/node/issues/36616.
27 | // This means cases where running "esbuild" should take a few milliseconds can end up
28 | // taking many seconds instead. To work around this bug, it is possible to force node to
29 | // exit by calling the operating system's exit function. That's what this code does.
process.on('exit', code => {
  // If it's a non-zero exit code, we can just kill our own process to stop. This will
  // preserve the fact that there is a non-zero exit code although the exit code will
  // be different. We cannot use this if the exit code is supposed to be zero.
  if (code !== 0) {
    try {
      process.kill(process.pid, 'SIGINT');
    } catch (e) {
      // Ignore failures here since we are exiting anyway
    }
    return;
  }

  // Otherwise if the exit code is zero, try to fall back to a binary N-API module that
  // calls the operating system's "exit(0)" function.
  const nativeModule = `${process.platform}-${os.arch()}-${os.endianness()}.node`;
  const base64 = require('../exit0')[nativeModule];
  if (base64) {
    try {
      const data = zlib.inflateRawSync(Buffer.from(base64, 'base64'));
      // Cache the native module in the OS temporary directory under a name
      // derived from a hash of its contents so stale versions don't collide
      const hash = crypto.createHash('sha256').update(base64).digest().toString('hex').slice(0, 16);
      const tempFile = path.join(os.tmpdir(), `${hash}-${nativeModule}`);
      try {
        // Reuse the cached copy when its contents match. Requiring the module
        // is expected to terminate the process via exit(0).
        if (fs.readFileSync(tempFile).equals(data)) {
          require(tempFile);
        }
      } finally {
        // NOTE(review): this "finally" runs when the cached file is missing or
        // stale (readFileSync threw or the contents differed); "catch (e) {}"
        // followed by these two statements may have been the intent — confirm
        fs.writeFileSync(tempFile, data);
        require(tempFile);
      }
    } catch (e) {
      // Best-effort only: fall back to a normal (slower) exit on any failure
    }
  }
});
63 |
64 | // Node has another bug where using "fs.read" to read from stdin reads
65 | // everything successfully and then throws an error, but only on Windows. Go's
66 | // WebAssembly support uses "fs.read" so it hits this problem. This is a patch
67 | // to try to work around the bug in node. This bug has been reported to node
68 | // at least twice in https://github.com/nodejs/node/issues/35997 and in
69 | // https://github.com/nodejs/node/issues/19831. This issue has also been
70 | // reported to the Go project: https://github.com/golang/go/issues/43913.
const read = fs.read;
fs.read = function () {
  // Intercept the completion callback (argument index 5 of "fs.read")
  const callback = arguments[5];
  arguments[5] = function (err, count) {
    // Treat a zero-byte "EOF" error as a successful empty read so Go's
    // WebAssembly runtime sees end-of-stream instead of an error
    if (count === 0 && err && err.code === 'EOF') {
      arguments[0] = null;
    }
    return callback.apply(this, arguments);
  };
  return read.apply(this, arguments);
};
82 |
// Run the Go-generated WebAssembly loader with an argv that makes it load
// "esbuild.wasm", forwarding the real CLI arguments and overriding
// "WebAssembly.instantiate" with the faster synchronous version above
const argv = ['node', wasm_exec, esbuild_wasm].concat(process.argv.slice(2));
wrapper(require, require.main, Object.assign(Object.create(process), { argv }), Object.assign(Object.create(WebAssembly), { instantiate }));
85 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
# Continuous integration: build and test esbuild on every push and pull
# request across Linux, macOS, and Windows. Steps guarded with "if:" either
# don't work on some platform yet or only need to run once (on ubuntu).
name: CI

on:
  push:
    branches: [ '*' ]
  pull_request:
    branches: [ '*' ]

jobs:

  esbuild:
    name: esbuild CI
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
        os: [ubuntu-latest, macos-latest, windows-latest]

    steps:

    - name: Set up Go 1.x
      uses: actions/setup-go@v2
      with:
        go-version: 1.17.1
      id: go

    - name: Setup Node.js environment
      uses: actions/setup-node@v1.4.4
      with:
        node-version: 14

    - name: Setup Deno 1.x
      uses: denoland/setup-deno@main
      with:
        deno-version: v1.x

    - name: Check out code into the Go module directory
      uses: actions/checkout@v2

    - name: go test
      run: go test ./internal/...

    - name: go vet
      run: go vet ./cmd/... ./internal/... ./pkg/...

    - name: Deno Tests
      # Deno tests currently don't run on Windows because of "esbuild" vs.
      # "esbuild.exe" in the test harness. This should be fixed...
      if: matrix.os != 'windows-latest'
      run: make test-deno

    - name: Test for path/filepath
      if: matrix.os == 'ubuntu-latest'
      run: make no-filepath

    - name: Make sure "check-go-version" works
      if: matrix.os != 'windows-latest'
      run: make check-go-version

    - name: go fmt
      if: matrix.os == 'macos-latest'
      run: make fmt-go

    - name: npm ci
      run: cd scripts && npm ci

    - name: Register Test (ESBUILD_WORKER_THREADS=0)
      if: matrix.os != 'windows-latest'
      run: ESBUILD_WORKER_THREADS=0 node scripts/register-test.js

    - name: Register Test
      run: node scripts/register-test.js

    - name: Verify Source Map
      run: node scripts/verify-source-map.js

    - name: E2E Tests
      run: node scripts/end-to-end-tests.js

    - name: JS API Tests (ESBUILD_WORKER_THREADS=0)
      if: matrix.os != 'windows-latest'
      run: ESBUILD_WORKER_THREADS=0 node scripts/js-api-tests.js

    - name: JS API Tests
      run: node scripts/js-api-tests.js

    - name: NodeJS Unref Tests
      run: node scripts/node-unref-tests.js

    - name: Plugin Tests
      run: node scripts/plugin-tests.js

    - name: TypeScript Type Definition Tests
      if: matrix.os == 'ubuntu-latest'
      run: node scripts/ts-type-tests.js

    - name: JS API Type Check
      if: matrix.os == 'ubuntu-latest'
      run: make lib-typecheck

    - name: WebAssembly API Tests (browser)
      if: matrix.os == 'ubuntu-latest'
      run: make test-wasm-browser

    # The next two steps share a name on purpose-or-not: the same logical test
    # runs via make on ubuntu and via a node script elsewhere
    - name: WebAssembly API Tests (node)
      if: matrix.os == 'ubuntu-latest'
      run: make test-wasm-node

    - name: WebAssembly API Tests (node)
      if: matrix.os != 'ubuntu-latest'
      run: node scripts/wasm-tests.js

    - name: Sucrase Tests
      if: matrix.os == 'ubuntu-latest'
      run: make test-sucrase

    - name: Esprima Tests
      if: matrix.os == 'ubuntu-latest'
      run: make test-esprima

    - name: Preact Splitting Tests
      if: matrix.os == 'ubuntu-latest'
      run: make test-preact-splitting

    - name: Uglify Tests
      if: matrix.os == 'ubuntu-latest'
      run: make uglify

    - name: Check the unicode table generator
      if: matrix.os == 'ubuntu-latest'
      run: cd scripts && node gen-unicode-table.js
--------------------------------------------------------------------------------
/internal/graph/input.go:
--------------------------------------------------------------------------------
1 | package graph
2 |
3 | // The code in this file mainly represents data that passes from the scan phase
4 | // to the compile phase of the bundler. There is currently one exception: the
5 | // "meta" member of the JavaScript file representation. That could have been
6 | // stored separately but is stored together for convenience and to avoid an
7 | // extra level of indirection. Instead it's kept in a separate type to keep
8 | // things organized.
9 |
10 | import (
11 | "github.com/evanw/esbuild/internal/ast"
12 | "github.com/evanw/esbuild/internal/config"
13 | "github.com/evanw/esbuild/internal/css_ast"
14 | "github.com/evanw/esbuild/internal/js_ast"
15 | "github.com/evanw/esbuild/internal/logger"
16 | "github.com/evanw/esbuild/internal/resolver"
17 | "github.com/evanw/esbuild/internal/sourcemap"
18 | )
19 |
// InputFile is one file discovered during the scan phase, together with
// everything the compile phase needs to know about it.
type InputFile struct {
	Source         logger.Source
	Repr           InputFileRepr // either a *JSRepr or a *CSSRepr
	InputSourceMap *sourcemap.SourceMap

	// If this file ends up being used in the bundle, these are additional files
	// that must be written to the output directory. It's used by the "file"
	// loader.
	AdditionalFiles        []OutputFile
	UniqueKeyForFileLoader string

	SideEffects SideEffects
	Loader      config.Loader // the loader that was used to load this file
}
34 |
// OutputFile is one file that will be written to the output directory.
type OutputFile struct {
	AbsPath  string
	Contents []byte

	// If "AbsMetadataFile" is present, this will be filled out with information
	// about this file in JSON format. This is a partial JSON file that will be
	// fully assembled later.
	JSONMetadataChunk string

	IsExecutable bool
}

// SideEffects describes whether importing a file can have side effects. This
// is consulted when deciding how to treat unused imports of the file.
type SideEffects struct {
	// This is optional additional information for use in error messages
	Data *resolver.SideEffectsData

	Kind SideEffectsKind
}

type SideEffectsKind uint8

const (
	// The default value conservatively considers all files to have side effects.
	HasSideEffects SideEffectsKind = iota

	// This file was listed as not having side effects by a "package.json"
	// file in one of our containing directories with a "sideEffects" field.
	NoSideEffects_PackageJSON

	// This file was loaded using a data-oriented loader (e.g. "text") that is
	// known to not have side effects.
	NoSideEffects_PureData

	// Same as above but it came from a plugin. We don't want to warn about
	// unused imports to these files since running the plugin is a side effect.
	// Removing the import would not call the plugin which is observable.
	NoSideEffects_PureData_FromPlugin
)
73 |
// InputFileRepr is the language-specific representation of an input file:
// either a JSRepr or a CSSRepr.
type InputFileRepr interface {
	ImportRecords() *[]ast.ImportRecord
}

// JSRepr is the JavaScript representation of an input file.
type JSRepr struct {
	AST  js_ast.AST
	Meta JSReprMeta

	// If present, this is the CSS file that this JavaScript stub corresponds to.
	// A JavaScript stub is automatically generated for a CSS file when it's
	// imported from a JavaScript file.
	CSSSourceIndex ast.Index32
}

// ImportRecords returns a pointer to the import records in this file's AST.
func (repr *JSRepr) ImportRecords() *[]ast.ImportRecord {
	return &repr.AST.ImportRecords
}
91 |
92 | func (repr *JSRepr) TopLevelSymbolToParts(ref js_ast.Ref) []uint32 {
93 | // Overlay the mutable map from the linker
94 | if parts, ok := repr.Meta.TopLevelSymbolToPartsOverlay[ref]; ok {
95 | return parts
96 | }
97 |
98 | // Fall back to the immutable map from the parser
99 | return repr.AST.TopLevelSymbolToPartsFromParser[ref]
100 | }
101 |
// CSSRepr is the CSS representation of an input file.
type CSSRepr struct {
	AST css_ast.AST

	// If present, this is the JavaScript stub corresponding to this CSS file.
	// A JavaScript stub is automatically generated for a CSS file when it's
	// imported from a JavaScript file.
	JSSourceIndex ast.Index32
}

// ImportRecords returns a pointer to the import records in this file's AST.
func (repr *CSSRepr) ImportRecords() *[]ast.ImportRecord {
	return &repr.AST.ImportRecords
}
114 |
--------------------------------------------------------------------------------
/internal/logger/logger_windows.go:
--------------------------------------------------------------------------------
1 | //go:build windows
2 | // +build windows
3 |
4 | package logger
5 |
6 | import (
7 | "os"
8 | "strings"
9 | "syscall"
10 | "unsafe"
11 | )
12 |
const SupportsColorEscapes = true

// Lazily-loaded bindings into the Windows console API in kernel32.dll
var kernel32 = syscall.NewLazyDLL("kernel32.dll")
var getConsoleMode = kernel32.NewProc("GetConsoleMode")
var setConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute")
var getConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo")

// consoleScreenBufferInfo mirrors the memory layout of the Win32
// CONSOLE_SCREEN_BUFFER_INFO struct so "GetConsoleScreenBufferInfo" can fill
// it in through a raw pointer. Field order and sizes must not change.
type consoleScreenBufferInfo struct {
	dwSizeX              int16
	dwSizeY              int16
	dwCursorPositionX    int16
	dwCursorPositionY    int16
	wAttributes          uint16
	srWindowLeft         int16
	srWindowTop          int16
	srWindowRight        int16
	srWindowBottom       int16
	dwMaximumWindowSizeX int16
	dwMaximumWindowSizeY int16
}
33 |
// GetTerminalInfo reports whether the given file refers to a Windows console,
// the console's size, and whether color escapes should be used with it.
func GetTerminalInfo(file *os.File) TerminalInfo {
	fd := file.Fd()

	// Is this file descriptor a terminal?
	var unused uint32
	isTTY, _, _ := syscall.Syscall(getConsoleMode.Addr(), 2, fd, uintptr(unsafe.Pointer(&unused)), 0)

	// Get the width of the window
	var info consoleScreenBufferInfo
	syscall.Syscall(getConsoleScreenBufferInfo.Addr(), 2, fd, uintptr(unsafe.Pointer(&info)), 0)

	return TerminalInfo{
		IsTTY: isTTY != 0,
		// NOTE(review): the -1 below presumably keeps output one column/row
		// short of the console edge to avoid auto-wrap — confirm
		Width:           int(info.dwSizeX) - 1,
		Height:          int(info.dwSizeY) - 1,
		UseColorEscapes: !hasNoColorEnvironmentVariable(),
	}
}
52 |
// writeStringWithColor writes text containing ANSI color escape sequences to
// a Windows console, translating each recognized escape into a call to
// "SetConsoleTextAttribute". Escape sequences that aren't recognized are
// written through unchanged.
func writeStringWithColor(file *os.File, text string) {
	const FOREGROUND_BLUE = 1
	const FOREGROUND_GREEN = 2
	const FOREGROUND_RED = 4
	const FOREGROUND_INTENSITY = 8

	fd := file.Fd()
	i := 0

	for i < len(text) {
		var attributes uintptr
		end := i // everything before "end" contains no escapes and is written as-is

		switch {
		// Scan forward over ordinary characters (033 is the ESC byte)
		case text[i] != 033:
			i++
			continue

		case strings.HasPrefix(text[i:], TerminalColors.Reset):
			i += len(TerminalColors.Reset)
			attributes = FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE

		case strings.HasPrefix(text[i:], TerminalColors.Red):
			i += len(TerminalColors.Red)
			attributes = FOREGROUND_RED

		case strings.HasPrefix(text[i:], TerminalColors.Green):
			i += len(TerminalColors.Green)
			attributes = FOREGROUND_GREEN

		case strings.HasPrefix(text[i:], TerminalColors.Blue):
			i += len(TerminalColors.Blue)
			attributes = FOREGROUND_BLUE

		case strings.HasPrefix(text[i:], TerminalColors.Cyan):
			i += len(TerminalColors.Cyan)
			attributes = FOREGROUND_GREEN | FOREGROUND_BLUE

		case strings.HasPrefix(text[i:], TerminalColors.Magenta):
			i += len(TerminalColors.Magenta)
			attributes = FOREGROUND_RED | FOREGROUND_BLUE

		case strings.HasPrefix(text[i:], TerminalColors.Yellow):
			i += len(TerminalColors.Yellow)
			attributes = FOREGROUND_RED | FOREGROUND_GREEN

		case strings.HasPrefix(text[i:], TerminalColors.Dim):
			i += len(TerminalColors.Dim)
			attributes = FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE

		case strings.HasPrefix(text[i:], TerminalColors.Bold):
			i += len(TerminalColors.Bold)
			attributes = FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY

		// Apparently underlines only work with the CJK locale on Windows :(
		case strings.HasPrefix(text[i:], TerminalColors.Underline):
			i += len(TerminalColors.Underline)
			attributes = FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE

		// An unrecognized escape sequence: leave it in the output unchanged
		default:
			i++
			continue
		}

		// Flush the text before the escape, drop the escape itself, and then
		// switch the console to the new attribute before continuing
		file.WriteString(text[:end])
		text = text[i:]
		i = 0
		setConsoleTextAttribute.Call(fd, attributes)
	}

	file.WriteString(text)
}
125 |
--------------------------------------------------------------------------------
/scripts/node-unref-tests.js:
--------------------------------------------------------------------------------
1 | // This test verifies that:
2 | // - a running service will not prevent NodeJS to exit if there is no compilation in progress.
3 | // - the NodeJS process will continue running if there is a serve() active or a transform or build in progress.
4 |
5 | const assert = require('assert')
6 | const { fork } = require('child_process');
7 |
// The tests to run in the child process. Each helper awaits a complete
// service round-trip (transform/serve/build/watch) and cleans up after
// itself so that nothing keeps the child's event loop alive afterwards.
async function tests() {
  const esbuild = require('./esbuild').installForTests()

  async function testTransform() {
    const t1 = await esbuild.transform(`1+2`)
    const t2 = await esbuild.transform(`1+3`)
    assert.strictEqual(t1.code, `1 + 2;\n`)
    assert.strictEqual(t2.code, `1 + 3;\n`)
  }

  async function testServe() {
    const server = await esbuild.serve({}, {})
    assert.strictEqual(server.host, '0.0.0.0')
    assert.strictEqual(typeof server.port, 'number')
    server.stop()
    await server.wait
  }

  async function testBuild() {
    const result = await esbuild.build({
      stdin: { contents: '1+2' },
      write: false,
      incremental: true,
    })
    assert.deepStrictEqual(result.outputFiles.length, 1);
    assert.deepStrictEqual(result.outputFiles[0].text, '1 + 2;\n');
    assert.deepStrictEqual(result.stop, void 0);

    const result2 = await result.rebuild()
    assert.deepStrictEqual(result2.outputFiles.length, 1);
    assert.deepStrictEqual(result2.outputFiles[0].text, '1 + 2;\n');

    const result3 = await result2.rebuild()
    assert.deepStrictEqual(result3.outputFiles.length, 1);
    assert.deepStrictEqual(result3.outputFiles[0].text, '1 + 2;\n');

    // Dispose the shared rebuild state so nothing keeps the process alive
    result2.rebuild.dispose()
  }

  async function testWatch() {
    const result = await esbuild.build({
      stdin: { contents: '1+2' },
      write: false,
      watch: true,
    })

    assert.deepStrictEqual(result.rebuild, void 0);
    assert.deepStrictEqual(result.outputFiles.length, 1);
    assert.deepStrictEqual(result.outputFiles[0].text, '1 + 2;\n');

    result.stop()
  }

  async function testWatchAndIncremental() {
    const result = await esbuild.build({
      stdin: { contents: '1+2' },
      write: false,
      incremental: true,
      watch: true,
    })

    assert.deepStrictEqual(result.outputFiles.length, 1);
    assert.deepStrictEqual(result.outputFiles[0].text, '1 + 2;\n');

    result.stop()
    result.rebuild.dispose()
  }

  await testTransform()
  await testServe()
  await testBuild()
  await testWatch()
  await testWatchAndIncremental() // was previously defined but never invoked
}
82 |
// Called when this is the child process to run the tests. The exit code
// starts out as failure (1) and is only cleared once every test resolves.
function runTests() {
  process.exitCode = 1;
  tests().then(
    () => { process.exitCode = 0; },
    (error) => { console.error('❌', error) },
  );
}
92 |
// A child process needs to be started to verify that a running service is not hanging node.
function startChildProcess() {
  const child = fork(__filename, ['__forked__'], { stdio: 'inherit', env: process.env });

  // If the esbuild service keeps the child's event loop referenced, the
  // child never exits on its own and this watchdog fires instead
  const timeout = setTimeout(() => {
    console.error('❌ node unref test timeout - child_process.unref() broken?')
    process.exit(1);
  }, 30 * 1000);

  child.on('error', (error) => {
    console.error('❌', error);
    process.exit(1);
  })

  // A zero exit code means the child ran every test and then exited cleanly
  child.on('exit', (code) => {
    clearTimeout(timeout);
    if (code) {
      console.error(`❌ node unref tests failed: child exited with code ${code}`)
      process.exit(1);
    } else {
      console.log(`✅ node unref tests passed`)
    }
  })
}
117 |
// The parent re-runs this same script with "__forked__" to mark the child
if (process.argv[2] === '__forked__') {
  runTests();
} else {
  startChildProcess();
}
123 |
--------------------------------------------------------------------------------
/scripts/register-test.js:
--------------------------------------------------------------------------------
const { installForTests, removeRecursiveSync } = require('./esbuild')
const child_process = require('child_process')
const path = require('path')
const fs = require('fs')
const assert = require('assert')
// Build and install a test copy of esbuild to run the register hook against
const esbuild = installForTests()

// Create a fresh test directory
const rootTestDir = path.join(__dirname, '.register-test')
removeRecursiveSync(rootTestDir)
fs.mkdirSync(rootTestDir)

// Two TypeScript files: the entry requires the other. The "as string" casts
// are TypeScript-only syntax, so these files can only run if the loader hook
// is actually applied.
const entry = path.join(rootTestDir, 'entry.ts')
fs.writeFileSync(entry, `
  console.log('in entry.ts' as string)
  require('./other.ts')
`)

const other = path.join(rootTestDir, 'other.ts')
fs.writeFileSync(other, `
  console.log('in other.ts' as string)
`)

// A "node -r" preload script that registers a require extension compiling
// ".ts" files through esbuild.transformSync
const register = path.join(rootTestDir, 'register.js')
fs.writeFileSync(register, `
  const esbuild = require(${JSON.stringify(esbuild.ESBUILD_PACKAGE_PATH)});
  const fs = require('fs');
  require.extensions['.ts'] = (mod, filename) => {
    const ts = fs.readFileSync(filename, 'utf8');
    const { code } = esbuild.transformSync(ts, { loader: 'ts' });
    mod._compile(code, filename);
  };
`)
34 |
// Each test receives { esbuild, testDir } and throws (rejects) on failure
let tests = {
  // The register hook passed via "node -r" should compile both TypeScript
  // files on the main thread
  async fromMainThread() {
    let result = await new Promise((resolve, reject) => child_process.execFile('node', ['-r', register, entry], (err, stdout) => {
      if (err) reject(err)
      else resolve(stdout)
    }))
    assert.strictEqual(result, `in entry.ts\nin other.ts\n`)
  },

  // The "-r" hook should also apply inside worker threads, so requiring
  // TypeScript from a worker must work too. The worker captures its console
  // output and posts it back so the parent can print it in order.
  async fromChildThread({ testDir }) {
    const startThread = path.join(testDir, 'startThread.js')
    fs.writeFileSync(startThread, `
      const worker_threads = require('worker_threads')
      if (worker_threads.isMainThread) {
        console.log('in startThread.js')
        const worker = new worker_threads.Worker(__filename)
        worker.postMessage(null)
        worker.on('message', logs => {
          for (const log of logs) console.log(log)
          worker.terminate()
        })
      } else {
        worker_threads.parentPort.on('message', () => {
          console.log('in worker')
          let logs = []
          console.log = x => logs.push(x)
          require('../entry.ts')
          worker_threads.parentPort.postMessage(logs)
        })
      }
    `)

    let result = await new Promise((resolve, reject) => child_process.execFile('node', ['-r', register, startThread], (err, stdout) => {
      if (err) reject(err)
      else resolve(stdout)
    }))
    assert.strictEqual(result, `in startThread.js\nin worker\nin entry.ts\nin other.ts\n`)
  },
}
74 |
// Drive every registered test serially, guarded by a global 5-minute
// watchdog that surfaces CI hangs, and report an aggregate result.
async function main() {
  // Time out these tests after 5 minutes. This exists to help debug test hangs in CI.
  const minutes = 5
  const watchdog = setTimeout(() => {
    console.error(`❌ register tests timed out after ${minutes} minutes, exiting...`)
    process.exit(1)
  }, minutes * 60 * 1000)

  // Each test gets its own scratch directory, which is removed on success
  // and kept on failure for debugging
  const runTest = async ([name, fn]) => {
    const testDir = path.join(rootTestDir, name)
    try {
      fs.mkdirSync(testDir)
      await fn({ esbuild, testDir })
      removeRecursiveSync(testDir)
      return true
    } catch (e) {
      console.error(`❌ ${name}: ${e && e.message || e}`)
      return false
    }
  }

  // Run all tests in serial
  let allTestsPassed = true
  for (const pair of Object.entries(tests)) {
    const passed = await runTest(pair)
    if (!passed) {
      allTestsPassed = false
    }
  }

  if (allTestsPassed) {
    console.log(`✅ register tests passed`)
    removeRecursiveSync(rootTestDir)
  } else {
    console.error(`❌ register tests failed`)
    process.exit(1)
  }

  clearTimeout(watchdog);
}

main().catch(e => setTimeout(() => { throw e }))
116 |
--------------------------------------------------------------------------------
/internal/fs/fs_mock.go:
--------------------------------------------------------------------------------
1 | // This is a mock implementation of the "fs" module for use with tests. It does
2 | // not actually read from the file system. Instead, it reads from a pre-specified
3 | // map of file paths to files.
4 |
5 | package fs
6 |
7 | import (
8 | "errors"
9 | "path"
10 | "strings"
11 | "syscall"
12 | )
13 |
// mockFS is an in-memory FS implementation backed by two pre-computed maps:
// directory path -> entries and file path -> contents.
type mockFS struct {
	dirs map[string]DirEntries
	files map[string]string
}
18 |
// MockFS builds an FS from a map of file path -> file contents. Directory
// entries for every ancestor of every file are synthesized up front so that
// ReadDirectory can answer with a single map lookup.
func MockFS(input map[string]string) FS {
	dirs := make(map[string]DirEntries)
	files := make(map[string]string)

	for k, v := range input {
		files[k] = v
		original := k

		// Build the directory map
		for {
			kDir := path.Dir(k)
			dir, ok := dirs[kDir]
			if !ok {
				dir = DirEntries{kDir, make(map[string]*Entry)}
				dirs[kDir] = dir
			}
			// path.Dir reaches a fixed point at the root, which ends the
			// walk up the ancestor chain
			if kDir == k {
				break
			}
			base := path.Base(k)
			// Only the deepest component is a file; every ancestor is a
			// directory. Keys are lowercased — presumably to model
			// case-insensitive lookups; confirm against DirEntries users.
			if k == original {
				dir.data[strings.ToLower(base)] = &Entry{kind: FileEntry, base: base}
			} else {
				dir.data[strings.ToLower(base)] = &Entry{kind: DirEntry, base: base}
			}
			k = kDir
		}
	}

	return &mockFS{dirs, files}
}
50 |
51 | func (fs *mockFS) ReadDirectory(path string) (DirEntries, error, error) {
52 | if dir, ok := fs.dirs[path]; ok {
53 | return dir, nil, nil
54 | }
55 | return DirEntries{}, syscall.ENOENT, syscall.ENOENT
56 | }
57 |
58 | func (fs *mockFS) ReadFile(path string) (string, error, error) {
59 | if contents, ok := fs.files[path]; ok {
60 | return contents, nil, nil
61 | }
62 | return "", syscall.ENOENT, syscall.ENOENT
63 | }
64 |
65 | func (fs *mockFS) OpenFile(path string) (OpenedFile, error, error) {
66 | if contents, ok := fs.files[path]; ok {
67 | return &InMemoryOpenedFile{Contents: []byte(contents)}, nil, nil
68 | }
69 | return nil, syscall.ENOENT, syscall.ENOENT
70 | }
71 |
72 | func (fs *mockFS) ModKey(path string) (ModKey, error) {
73 | return ModKey{}, errors.New("This is not available during tests")
74 | }
75 |
// The mock file system always uses Unix-style slash-separated paths, so the
// following operations delegate to the slash-only "path" package rather
// than "path/filepath".

func (*mockFS) IsAbs(p string) bool {
	return path.IsAbs(p)
}

// Abs resolves every path against the root "/".
func (*mockFS) Abs(p string) (string, bool) {
	return path.Clean(path.Join("/", p)), true
}

func (*mockFS) Dir(p string) string {
	return path.Dir(p)
}

func (*mockFS) Base(p string) string {
	return path.Base(p)
}

func (*mockFS) Ext(p string) string {
	return path.Ext(p)
}

func (*mockFS) Join(parts ...string) string {
	return path.Clean(path.Join(parts...))
}

// Cwd pretends the working directory is the root.
func (*mockFS) Cwd() string {
	return "/"
}
103 |
// splitOnSlash splits "path" at its first slash into (head, tail). A path
// containing no slash yields the whole string as head and an empty tail.
func splitOnSlash(path string) (string, string) {
	slash := strings.IndexByte(path, '/')
	if slash == -1 {
		return path, ""
	}
	return path[:slash], path[slash+1:]
}
110 |
// Rel returns "target" expressed relative to "base", like filepath.Rel but
// for slash-only paths. The boolean result is always true for the mock FS.
func (*mockFS) Rel(base string, target string) (string, bool) {
	base = path.Clean(base)
	target = path.Clean(target)

	// Base cases
	if base == "" || base == "." {
		return target, true
	}
	if base == target {
		return ".", true
	}

	// Find the common parent directory by stripping matching leading path
	// components from both paths one component at a time
	for {
		bHead, bTail := splitOnSlash(base)
		tHead, tTail := splitOnSlash(target)
		if bHead != tHead {
			break
		}
		base = bTail
		target = tTail
	}

	// Stop now if base is a subpath of target
	if base == "" {
		return target, true
	}

	// Traverse up to the common parent: one ".." per remaining component of
	// base (its slash count plus one)
	commonParent := strings.Repeat("../", strings.Count(base, "/")+1)

	// Stop now if target is a subpath of base (trim the trailing slash)
	if target == "" {
		return commonParent[:len(commonParent)-1], true
	}

	// Otherwise, down to the parent
	return commonParent + target, true
}
150 |
// kind is part of the FS interface but the mock pre-computes all entry
// kinds in MockFS, so this must never be reached during tests.
func (fs *mockFS) kind(dir string, base string) (symlink string, kind EntryKind) {
	panic("This should never be called")
}

// WatchData is watch-mode functionality, which tests never exercise.
func (fs *mockFS) WatchData() WatchData {
	panic("This should never be called")
}
158 |
--------------------------------------------------------------------------------
/internal/bundler/debug.go:
--------------------------------------------------------------------------------
1 | package bundler
2 |
3 | import (
4 | "fmt"
5 | "strings"
6 |
7 | "github.com/evanw/esbuild/internal/ast"
8 | "github.com/evanw/esbuild/internal/graph"
9 | "github.com/evanw/esbuild/internal/js_ast"
10 | "github.com/evanw/esbuild/internal/js_printer"
11 | )
12 |
// Set this to true and then load the resulting metafile in "graph-debugger.html"
// to debug graph information.
//
// This is deliberately not exposed in the final binary. It is *very* internal
// and only exists to help debug esbuild itself. Make sure this is always set
// back to false before committing.
//
// (Within this file, the flag only gates generateExtraDataForFileJS below.)
const debugVerboseMetafile = false
20 |
// generateExtraDataForFileJS renders extra per-part debug JSON for one JS
// file when debugVerboseMetafile is enabled. The returned string is a JSON
// fragment starting with a comma, spliced into the file's metafile object;
// it is empty in normal builds.
func (c *linkerContext) generateExtraDataForFileJS(sourceIndex uint32) string {
	if !debugVerboseMetafile {
		return ""
	}

	file := &c.graph.Files[sourceIndex]
	repr := file.InputFile.Repr.(*graph.JSRepr)
	sb := strings.Builder{}

	// Format a symbol ref as "source:inner [originalName]", JSON-quoted
	quoteSym := func(ref js_ast.Ref) string {
		name := fmt.Sprintf("%d:%d [%s]", ref.SourceIndex, ref.InnerIndex, c.graph.Symbols.Get(ref).OriginalName)
		return string(js_printer.QuoteForJSON(name, c.options.ASCIIOnly))
	}

	sb.WriteString(`,"parts":[`)
	for partIndex, part := range repr.AST.Parts {
		if partIndex > 0 {
			sb.WriteByte(',')
		}
		var isFirst bool
		code := ""

		sb.WriteString(fmt.Sprintf(`{"isLive":%v`, part.IsLive))
		sb.WriteString(fmt.Sprintf(`,"canBeRemovedIfUnused":%v`, part.CanBeRemovedIfUnused))

		// Synthetic parts are labeled; for real parts, slice the original
		// source text from this part's first statement up to the next
		// part's first statement (or end of file)
		if partIndex == int(js_ast.NSExportPartIndex) {
			sb.WriteString(`,"nsExportPartIndex":true`)
		} else if ast.MakeIndex32(uint32(partIndex)) == repr.Meta.WrapperPartIndex {
			sb.WriteString(`,"wrapperPartIndex":true`)
		} else if len(part.Stmts) > 0 {
			start := part.Stmts[0].Loc.Start
			end := len(file.InputFile.Source.Contents)
			if partIndex+1 < len(repr.AST.Parts) {
				if nextStmts := repr.AST.Parts[partIndex+1].Stmts; len(nextStmts) > 0 {
					if nextStart := nextStmts[0].Loc.Start; nextStart >= start {
						end = int(nextStart)
					}
				}
			}
			code = file.InputFile.Source.Contents[start:end]
		}

		// importRecords: only records resolved to files within the bundle
		sb.WriteString(`,"importRecords":[`)
		isFirst = true
		for _, importRecordIndex := range part.ImportRecordIndices {
			record := repr.AST.ImportRecords[importRecordIndex]
			if !record.SourceIndex.IsValid() {
				continue
			}
			if isFirst {
				isFirst = false
			} else {
				sb.WriteByte(',')
			}
			path := c.graph.Files[record.SourceIndex.GetIndex()].InputFile.Source.PrettyPath
			sb.WriteString(fmt.Sprintf(`{"source":%s}`, js_printer.QuoteForJSON(path, c.options.ASCIIOnly)))
		}
		sb.WriteByte(']')

		// declaredSymbols: only top-level declarations
		sb.WriteString(`,"declaredSymbols":[`)
		isFirst = true
		for _, declSym := range part.DeclaredSymbols {
			if !declSym.IsTopLevel {
				continue
			}
			if isFirst {
				isFirst = false
			} else {
				sb.WriteByte(',')
			}
			sb.WriteString(fmt.Sprintf(`{"name":%s}`, quoteSym(declSym.Ref)))
		}
		sb.WriteByte(']')

		// symbolUses (map iteration, so this array's order is not stable)
		sb.WriteString(`,"symbolUses":[`)
		isFirst = true
		for ref, uses := range part.SymbolUses {
			if isFirst {
				isFirst = false
			} else {
				sb.WriteByte(',')
			}
			sb.WriteString(fmt.Sprintf(`{"name":%s,"countEstimate":%d}`, quoteSym(ref), uses.CountEstimate))
		}
		sb.WriteByte(']')

		// dependencies
		sb.WriteString(`,"dependencies":[`)
		for i, dep := range part.Dependencies {
			if i > 0 {
				sb.WriteByte(',')
			}
			sb.WriteString(fmt.Sprintf(`{"source":%s,"partIndex":%d}`,
				js_printer.QuoteForJSON(c.graph.Files[dep.SourceIndex].InputFile.Source.PrettyPath, c.options.ASCIIOnly),
				dep.PartIndex,
			))
		}
		sb.WriteByte(']')

		// code
		sb.WriteString(`,"code":`)
		sb.Write(js_printer.QuoteForJSON(strings.TrimRight(code, "\n"), c.options.ASCIIOnly))

		sb.WriteByte('}')
	}
	sb.WriteString(`]`)

	return sb.String()
}
133 |
--------------------------------------------------------------------------------
/internal/ast/ast.go:
--------------------------------------------------------------------------------
1 | package ast
2 |
3 | import "github.com/evanw/esbuild/internal/logger"
4 |
5 | // This file contains data structures that are used with the AST packages for
6 | // both JavaScript and CSS. This helps the bundler treat both AST formats in
7 | // a somewhat format-agnostic manner.
8 |
// ImportKind classifies the syntax that introduced an import path.
type ImportKind uint8

const (
	// An entry point provided by the user
	ImportEntryPoint ImportKind = iota

	// An ES6 import or re-export statement
	ImportStmt

	// A call to "require()"
	ImportRequire

	// An "import()" expression with a string argument
	ImportDynamic

	// A call to "require.resolve()"
	ImportRequireResolve

	// A CSS "@import" rule
	ImportAt

	// A CSS "@import" rule with import conditions
	ImportAtConditional

	// A CSS "url(...)" token
	ImportURL
)
36 |
// StringForMetafile returns the stable JSON name used for this import kind
// in the metafile output.
func (kind ImportKind) StringForMetafile() string {
	switch kind {
	case ImportStmt:
		return "import-statement"
	case ImportRequire:
		return "require-call"
	case ImportDynamic:
		return "dynamic-import"
	case ImportRequireResolve:
		return "require-resolve"
	// Conditional and unconditional "@import" rules share one label
	case ImportAt, ImportAtConditional:
		return "import-rule"
	case ImportURL:
		return "url-token"
	case ImportEntryPoint:
		return "entry-point"
	default:
		// All ImportKind values are covered above, so this is unreachable
		panic("Internal error")
	}
}
57 |
58 | func (kind ImportKind) IsFromCSS() bool {
59 | return kind == ImportAt || kind == ImportURL
60 | }
61 |
// ImportRecord describes one import path discovered while parsing a file,
// plus the flags the bundler needs to know about how it was used.
type ImportRecord struct {
	// Source span of the import path, the path itself, and any import
	// assertion entries (see AssertEntry)
	Range logger.Range
	Path logger.Path
	Assertions *[]AssertEntry

	// The resolved source index for an internal import (within the bundle) or
	// nil for an external import (not included in the bundle)
	SourceIndex Index32

	// Sometimes the parser creates an import record and decides it isn't needed.
	// For example, TypeScript code may have import statements that later turn
	// out to be type-only imports after analyzing the whole file.
	IsUnused bool

	// If this is true, the import contains syntax like "* as ns". This is used
	// to determine whether modules that have no exports need to be wrapped in a
	// CommonJS wrapper or not.
	ContainsImportStar bool

	// If this is true, the import contains an import for the alias "default",
	// either via the "import x from" or "import {default as x} from" syntax.
	ContainsDefaultAlias bool

	// If true, this "export * from 'path'" statement is evaluated at run-time by
	// calling the "__reExport()" helper function
	CallsRunTimeReExportFn bool

	// Tell the printer to wrap this call to "require()" in "__toModule(...)"
	WrapWithToModule bool

	// Tell the printer to use the runtime "__require()" instead of "require()"
	CallRuntimeRequire bool

	// True for the following cases:
	//
	//   try { require('x') } catch { handle }
	//   try { await import('x') } catch { handle }
	//   try { require.resolve('x') } catch { handle }
	//   import('x').catch(handle)
	//   import('x').then(_, handle)
	//
	// In these cases we shouldn't generate an error if the path could not be
	// resolved.
	HandlesImportErrors bool

	// If true, this was originally written as a bare "import 'file'" statement
	WasOriginallyBareImport bool

	Kind ImportKind
}
112 |
// AssertEntry is one key/value pair from an import assertion clause. Both
// sides are stored as []uint16 (UTF-16 code units, the lexer's string
// representation).
type AssertEntry struct {
	Key []uint16 // An identifier or a string
	Value []uint16 // Always a string
	KeyLoc logger.Loc
	ValueLoc logger.Loc
	// If true, the printer keeps the key quoted even when it's an identifier
	PreferQuotedKey bool
}
120 |
// Index32 holds an optional 32-bit index in 4 bytes. The bits are stored
// inverted so that the all-zero value — which Go gives uninitialized fields
// for free — means "no index". This beats storing *uint32, which has the
// same semantics but costs more space plus a pointer traversal.
type Index32 struct {
	flippedBits uint32
}

// MakeIndex32 wraps a valid index value.
func MakeIndex32(index uint32) Index32 {
	return Index32{flippedBits: ^index}
}

// IsValid reports whether an index is present (i.e. not the zero value).
func (i Index32) IsValid() bool {
	return i.flippedBits != 0
}

// GetIndex unwraps the stored index; only meaningful when IsValid is true.
func (i Index32) GetIndex() uint32 {
	return ^i.flippedBits
}
139 |
--------------------------------------------------------------------------------
/scripts/es6-fuzzer.js:
--------------------------------------------------------------------------------
1 | // This fuzzer attempts to find issues with the "scope hoisting" optimizations
2 | // for ES6-style imports and exports. It compares esbuild's behavior with the
3 | // behavior of node's experimental module support.
4 |
(async () => {
  // Make sure this script runs as an ES6 module so we can import both ES6 modules and CommonJS modules.
  // When loaded under CommonJS ("require" exists), re-spawn node in module
  // mode, pipe this file's own source to its stdin, and mirror the exit code.
  if (typeof require !== 'undefined') {
    const childProcess = require('child_process')
    const child = childProcess.spawn('node', ['--experimental-modules', '--input-type=module'], {
      cwd: __dirname,
      stdio: ['pipe', 'inherit', 'inherit'],
    })
    child.stdin.write(require('fs').readFileSync(__filename))
    child.stdin.end()
    child.on('close', code => process.exit(code))
    return
  }

  const { default: { buildBinary, dirname, removeRecursiveSync } } = await import('./esbuild.js');
  const childProcess = await import('child_process');
  const util = await import('util');
  const path = await import('path');
  const fs = await import('fs');
  // Build the esbuild binary once up front; each fuzz case shells out to it
  const esbuildPath = buildBinary();
  let failureCount = 0;
  let nextTest = 0;
27 |
28 | function reportFailure(testDir, files, kind, error) {
29 | failureCount++;
30 | console.log(`❌ FAILURE ${kind}: ${error}\n DIR: ${testDir}` +
31 | Object.keys(files).map(x => `\n ${x} => ${files[x]}`).join(''));
32 | }
33 |
34 | function circularObjectToString(root) {
35 | let map = new Map();
36 | let counter = 0;
37 | let visit = obj => {
38 | if (typeof obj !== 'object') return JSON.stringify(obj);
39 | if (map.has(obj)) return `$${map.get(obj)}`;
40 | map.set(obj, counter++);
41 | const keys = Object.keys(obj).sort();
42 | return `$${map.get(obj)} = {${keys.map(key =>
43 | `${JSON.stringify(key)}: ${visit(obj[key])}`).join(', ')}}`;
44 | };
45 | return visit(root);
46 | }
47 |
48 | function checkSameExportObject(a, b) {
49 | a = circularObjectToString(a);
50 | b = circularObjectToString(b);
51 | if (a !== b) throw new Error(`Different exports:\n ${a}\n ${b}`);
52 | }
53 |
  // Generate one random five-module graph, write it to its own test
  // directory, evaluate the entry point natively with node, bundle it with
  // esbuild, evaluate the bundle, and compare the two export objects.
  async function fuzzOnce(parentDir) {
    // Roughly 10% of files are CommonJS, the rest are ES modules
    const mjs_or_cjs = () => Math.random() < 0.1 ? 'cjs' : 'mjs';
    const names = [
      'a.' + mjs_or_cjs(),
      'b.' + mjs_or_cjs(),
      'c.' + mjs_or_cjs(),
      'd.' + mjs_or_cjs(),
      'e.' + mjs_or_cjs(),
    ];
    const randomName = () => names[Math.random() * names.length | 0];
    const files = {};

    // Each ES module gets one random export/re-export form, possibly
    // pointing at another random module (self-references included)
    for (const name of names) {
      if (name.endsWith('.cjs')) {
        files[name] = `module.exports = 123`;
      } else {
        switch (Math.random() * 5 | 0) {
          case 0:
            files[name] = `export const foo = 123`;
            break;
          case 1:
            files[name] = `export default 123`;
            break;
          case 2:
            files[name] = `export * from "./${randomName()}"`;
            break;
          case 3:
            files[name] = `export * as foo from "./${randomName()}"`;
            break;
          case 4:
            files[name] = `import * as foo from "./${randomName()}"; export {foo}`;
            break;
        }
      }
    }

    // Write the files to the file system
    const testDir = path.join(parentDir, (nextTest++).toString());
    fs.mkdirSync(testDir);
    for (const name in files) {
      fs.writeFileSync(path.join(testDir, name), files[name]);
    }
    if (nextTest % 100 === 0) console.log(`Checked ${nextTest} test cases`);

    // Load the raw module using node
    const entryPoint = path.join(testDir, names[0]);
    let realExports = await import(entryPoint);
    if (entryPoint.endsWith('.cjs')) realExports = realExports.default;

    // Bundle to a CommonJS module using esbuild
    const cjsFile = path.join(testDir, 'out.cjs');
    await util.promisify(childProcess.execFile)(esbuildPath, [
      '--bundle',
      '--outfile=' + cjsFile,
      '--format=cjs',
      entryPoint,
    ], { stdio: 'pipe' });

    // Validate the CommonJS module bundle; a mismatch or crash keeps the
    // test directory around for debugging
    try {
      let { default: cjsExports } = await import(cjsFile);
      checkSameExportObject(realExports, cjsExports);
    } catch (e) {
      reportFailure(testDir, files, 'cjs', e + '');
      return;
    }

    // Remove data for successful tests
    removeRecursiveSync(testDir);
  }
124 |
  const parentDir = path.join(dirname, '.es6-fuzzer');
  removeRecursiveSync(parentDir);
  fs.mkdirSync(parentDir);

  // Run a set number of tests in parallel: 10 chains, each running 101
  // fuzz cases sequentially (1010 cases total)
  let promises = [];
  for (let i = 0; i < 10; i++) {
    let promise = fuzzOnce(parentDir);
    for (let j = 0; j < 100; j++) {
      promise = promise.then(() => fuzzOnce(parentDir));
    }
    promises.push(promise);
  }
  await Promise.all(promises);

  // Remove everything if all tests passed
  if (failureCount === 0) {
    removeRecursiveSync(parentDir);
  }
})().catch(e => setTimeout(() => { throw e }));
145 |
--------------------------------------------------------------------------------
/cmd/esbuild/stdio_protocol.go:
--------------------------------------------------------------------------------
1 | // The JavaScript API communicates with the Go child process over stdin/stdout
2 | // using this protocol. It's a very simple binary protocol that uses primitives
3 | // and nested arrays and maps. It's basically JSON with UTF-8 encoding and an
4 | // additional byte array primitive. You must send a response after receiving a
5 | // request because the other end is blocking on the response coming back.
6 |
7 | package main
8 |
9 | import (
10 | "encoding/binary"
11 | "sort"
12 | )
13 |
// readUint32 decodes a little-endian uint32 from the front of "bytes" and
// returns the remaining slice. On short input it returns the input
// unchanged with ok=false.
func readUint32(bytes []byte) (value uint32, leftOver []byte, ok bool) {
	if len(bytes) < 4 {
		return 0, bytes, false
	}
	return binary.LittleEndian.Uint32(bytes), bytes[4:], true
}
21 |
// writeUint32 appends "value" to "bytes" in little-endian order and returns
// the extended slice.
func writeUint32(bytes []byte, value uint32) []byte {
	var scratch [4]byte
	binary.LittleEndian.PutUint32(scratch[:], value)
	return append(bytes, scratch[:]...)
}
27 |
28 | func readLengthPrefixedSlice(bytes []byte) (slice []byte, leftOver []byte, ok bool) {
29 | if length, afterLength, ok := readUint32(bytes); ok && uint(len(afterLength)) >= uint(length) {
30 | return afterLength[:length], afterLength[length:], true
31 | }
32 |
33 | return []byte{}, bytes, false
34 | }
35 |
// packet is one message in the stdio protocol: an id, a direction flag
// (request vs. response), and an arbitrary JSON-like value payload.
type packet struct {
	id uint32
	isRequest bool
	value interface{}
}
41 |
// encodePacket serializes a packet to the wire format: a uint32 total byte
// length, a uint32 id whose low bit distinguishes requests (0) from
// responses (1), then the value encoded as a kind byte (0-6) followed by
// kind-specific data. Unsupported value types are a programmer error and
// panic.
func encodePacket(p packet) []byte {
	var visit func(interface{})
	var bytes []byte

	visit = func(value interface{}) {
		switch v := value.(type) {
		case nil:
			bytes = append(bytes, 0)

		case bool:
			n := uint8(0)
			if v {
				n = 1
			}
			bytes = append(bytes, 1, n)

		case int:
			bytes = append(bytes, 2)
			bytes = writeUint32(bytes, uint32(v))

		case string:
			bytes = append(bytes, 3)
			bytes = writeUint32(bytes, uint32(len(v)))
			bytes = append(bytes, v...)

		case []byte:
			bytes = append(bytes, 4)
			bytes = writeUint32(bytes, uint32(len(v)))
			bytes = append(bytes, v...)

		case []interface{}:
			bytes = append(bytes, 5)
			bytes = writeUint32(bytes, uint32(len(v)))
			for _, item := range v {
				visit(item)
			}

		case map[string]interface{}:
			// Sort keys for determinism
			keys := make([]string, 0, len(v))
			for k := range v {
				keys = append(keys, k)
			}
			sort.Strings(keys)
			bytes = append(bytes, 6)
			bytes = writeUint32(bytes, uint32(len(keys)))
			for _, k := range keys {
				bytes = writeUint32(bytes, uint32(len(k)))
				bytes = append(bytes, k...)
				visit(v[k])
			}

		default:
			panic("Invalid packet")
		}
	}

	bytes = writeUint32(bytes, 0) // Reserve space for the length
	if p.isRequest {
		bytes = writeUint32(bytes, p.id<<1)
	} else {
		bytes = writeUint32(bytes, (p.id<<1)|1)
	}
	visit(p.value)
	// Appending into bytes[:0] reuses the same backing array, so this
	// overwrites the 4 reserved bytes in place without reallocating
	writeUint32(bytes[:0], uint32(len(bytes)-4)) // Patch the length in
	return bytes
}
109 |
110 | func decodePacket(bytes []byte) (packet, bool) {
111 | var visit func() (interface{}, bool)
112 |
113 | visit = func() (interface{}, bool) {
114 | kind := bytes[0]
115 | bytes = bytes[1:]
116 | switch kind {
117 | case 0: // nil
118 | return nil, true
119 |
120 | case 1: // bool
121 | value := bytes[0]
122 | bytes = bytes[1:]
123 | return value != 0, true
124 |
125 | case 2: // int
126 | value, next, ok := readUint32(bytes)
127 | if !ok {
128 | return nil, false
129 | }
130 | bytes = next
131 | return int(value), true
132 |
133 | case 3: // string
134 | value, next, ok := readLengthPrefixedSlice(bytes)
135 | if !ok {
136 | return nil, false
137 | }
138 | bytes = next
139 | return string(value), true
140 |
141 | case 4: // []byte
142 | value, next, ok := readLengthPrefixedSlice(bytes)
143 | if !ok {
144 | return nil, false
145 | }
146 | bytes = next
147 | return value, true
148 |
149 | case 5: // []interface{}
150 | count, next, ok := readUint32(bytes)
151 | if !ok {
152 | return nil, false
153 | }
154 | bytes = next
155 | value := make([]interface{}, count)
156 | for i := 0; i < int(count); i++ {
157 | item, ok := visit()
158 | if !ok {
159 | return nil, false
160 | }
161 | value[i] = item
162 | }
163 | return value, true
164 |
165 | case 6: // map[string]interface{}
166 | count, next, ok := readUint32(bytes)
167 | if !ok {
168 | return nil, false
169 | }
170 | bytes = next
171 | value := make(map[string]interface{}, count)
172 | for i := 0; i < int(count); i++ {
173 | key, next, ok := readLengthPrefixedSlice(bytes)
174 | if !ok {
175 | return nil, false
176 | }
177 | bytes = next
178 | item, ok := visit()
179 | if !ok {
180 | return nil, false
181 | }
182 | value[string(key)] = item
183 | }
184 | return value, true
185 |
186 | default:
187 | panic("Invalid packet")
188 | }
189 | }
190 |
191 | id, bytes, ok := readUint32(bytes)
192 | if !ok {
193 | return packet{}, false
194 | }
195 | isRequest := (id & 1) == 0
196 | id >>= 1
197 | value, ok := visit()
198 | if !ok {
199 | return packet{}, false
200 | }
201 | if len(bytes) != 0 {
202 | return packet{}, false
203 | }
204 | return packet{id: id, isRequest: isRequest, value: value}, true
205 | }
206 |
--------------------------------------------------------------------------------
/internal/cache/cache_ast.go:
--------------------------------------------------------------------------------
1 | package cache
2 |
3 | import (
4 | "sync"
5 |
6 | "github.com/evanw/esbuild/internal/css_ast"
7 | "github.com/evanw/esbuild/internal/css_parser"
8 | "github.com/evanw/esbuild/internal/js_ast"
9 | "github.com/evanw/esbuild/internal/js_parser"
10 | "github.com/evanw/esbuild/internal/logger"
11 | )
12 |
13 | // This cache intends to avoid unnecessarily re-parsing files in subsequent
14 | // builds. For a given path, parsing can be avoided if the contents of the file
15 | // and the options for the parser are the same as last time. Even if the
16 | // contents of the file are the same, the options for the parser may have
17 | // changed if they depend on some other file ("package.json" for example).
18 | //
19 | // This cache checks if the file contents have changed even though we have
20 | // the ability to detect if a file has changed on the file system by reading
21 | // its metadata. First of all, if the file contents are cached then they should
22 | // be the same pointer, which makes the comparison trivial. Also we want to
23 | // cache the AST for plugins in the common case that the plugin output stays
24 | // the same.
25 |
26 | ////////////////////////////////////////////////////////////////////////////////
27 | // CSS
28 |
// CSSCache memoizes parsed CSS ASTs keyed by source path, guarded by a
// mutex so multiple build goroutines can share it.
type CSSCache struct {
	mutex sync.Mutex
	entries map[logger.Path]*cssCacheEntry
}

// cssCacheEntry remembers everything needed to replay a parse: the exact
// source and options that produced the AST, plus the log messages emitted.
type cssCacheEntry struct {
	source logger.Source
	options css_parser.Options
	ast css_ast.AST
	msgs []logger.Msg
}
40 |
41 | func (c *CSSCache) Parse(log logger.Log, source logger.Source, options css_parser.Options) css_ast.AST {
42 | // Check the cache
43 | entry := func() *cssCacheEntry {
44 | c.mutex.Lock()
45 | defer c.mutex.Unlock()
46 | return c.entries[source.KeyPath]
47 | }()
48 |
49 | // Cache hit
50 | if entry != nil && entry.source == source && entry.options == options {
51 | for _, msg := range entry.msgs {
52 | log.AddMsg(msg)
53 | }
54 | return entry.ast
55 | }
56 |
57 | // Cache miss
58 | tempLog := logger.NewDeferLog(logger.DeferLogAll)
59 | ast := css_parser.Parse(tempLog, source, options)
60 | msgs := tempLog.Done()
61 | for _, msg := range msgs {
62 | log.AddMsg(msg)
63 | }
64 |
65 | // Create the cache entry
66 | entry = &cssCacheEntry{
67 | source: source,
68 | options: options,
69 | ast: ast,
70 | msgs: msgs,
71 | }
72 |
73 | // Save for next time
74 | c.mutex.Lock()
75 | defer c.mutex.Unlock()
76 | c.entries[source.KeyPath] = entry
77 | return ast
78 | }
79 |
80 | ////////////////////////////////////////////////////////////////////////////////
81 | // JSON
82 |
// JSONCache memoizes parsed JSON expressions keyed by source path, guarded
// by a mutex for concurrent use.
type JSONCache struct {
	mutex sync.Mutex
	entries map[logger.Path]*jsonCacheEntry
}

// jsonCacheEntry also records whether the parse succeeded ("ok") so that a
// failed parse is replayed from the cache too, along with its messages.
type jsonCacheEntry struct {
	source logger.Source
	options js_parser.JSONOptions
	expr js_ast.Expr
	ok bool
	msgs []logger.Msg
}
95 |
96 | func (c *JSONCache) Parse(log logger.Log, source logger.Source, options js_parser.JSONOptions) (js_ast.Expr, bool) {
97 | // Check the cache
98 | entry := func() *jsonCacheEntry {
99 | c.mutex.Lock()
100 | defer c.mutex.Unlock()
101 | return c.entries[source.KeyPath]
102 | }()
103 |
104 | // Cache hit
105 | if entry != nil && entry.source == source && entry.options == options {
106 | for _, msg := range entry.msgs {
107 | log.AddMsg(msg)
108 | }
109 | return entry.expr, entry.ok
110 | }
111 |
112 | // Cache miss
113 | tempLog := logger.NewDeferLog(logger.DeferLogAll)
114 | expr, ok := js_parser.ParseJSON(tempLog, source, options)
115 | msgs := tempLog.Done()
116 | for _, msg := range msgs {
117 | log.AddMsg(msg)
118 | }
119 |
120 | // Create the cache entry
121 | entry = &jsonCacheEntry{
122 | source: source,
123 | options: options,
124 | expr: expr,
125 | ok: ok,
126 | msgs: msgs,
127 | }
128 |
129 | // Save for next time
130 | c.mutex.Lock()
131 | defer c.mutex.Unlock()
132 | c.entries[source.KeyPath] = entry
133 | return expr, ok
134 | }
135 |
136 | ////////////////////////////////////////////////////////////////////////////////
137 | // JS
138 |
// JSCache memoizes JavaScript parse results keyed by source path so repeated
// builds can skip re-parsing unchanged files. The mutex makes it safe for
// concurrent use by multiple parser goroutines.
type JSCache struct {
	mutex   sync.Mutex                    // guards entries
	entries map[logger.Path]*jsCacheEntry // keyed by source.KeyPath
}
143 |
// jsCacheEntry is a single memoized JavaScript parse result. The source and
// the options are stored so a cache hit can verify that both still match
// before the entry is reused (see JSCache.Parse, which uses options.Equal).
type jsCacheEntry struct {
	source  logger.Source     // the input that was parsed
	options js_parser.Options // the options used for this parse
	ast     js_ast.AST        // the resulting AST
	ok      bool              // whether parsing succeeded
	msgs    []logger.Msg      // log messages captured during the parse
}
151 |
152 | func (c *JSCache) Parse(log logger.Log, source logger.Source, options js_parser.Options) (js_ast.AST, bool) {
153 | // Check the cache
154 | entry := func() *jsCacheEntry {
155 | c.mutex.Lock()
156 | defer c.mutex.Unlock()
157 | return c.entries[source.KeyPath]
158 | }()
159 |
160 | // Cache hit
161 | if entry != nil && entry.source == source && entry.options.Equal(&options) {
162 | for _, msg := range entry.msgs {
163 | log.AddMsg(msg)
164 | }
165 | return entry.ast, entry.ok
166 | }
167 |
168 | // Cache miss
169 | tempLog := logger.NewDeferLog(logger.DeferLogAll)
170 | ast, ok := js_parser.Parse(tempLog, source, options)
171 | msgs := tempLog.Done()
172 | for _, msg := range msgs {
173 | log.AddMsg(msg)
174 | }
175 |
176 | // Create the cache entry
177 | entry = &jsCacheEntry{
178 | source: source,
179 | options: options,
180 | ast: ast,
181 | ok: ok,
182 | msgs: msgs,
183 | }
184 |
185 | // Save for next time
186 | c.mutex.Lock()
187 | defer c.mutex.Unlock()
188 | c.entries[source.KeyPath] = entry
189 | return ast, ok
190 | }
191 |
--------------------------------------------------------------------------------
/internal/js_parser/json_parser.go:
--------------------------------------------------------------------------------
1 | package js_parser
2 |
3 | import (
4 | "fmt"
5 |
6 | "github.com/evanw/esbuild/internal/helpers"
7 | "github.com/evanw/esbuild/internal/js_ast"
8 | "github.com/evanw/esbuild/internal/js_lexer"
9 | "github.com/evanw/esbuild/internal/logger"
10 | )
11 |
// jsonParser holds the state for parsing one JSON source file. It wraps a
// lexer configured for JSON plus the options controlling which JSON
// extensions (comments, trailing commas) are accepted.
type jsonParser struct {
	log     logger.Log               // where errors and warnings are reported
	source  logger.Source            // the file being parsed
	tracker logger.LineColumnTracker // converts ranges to line/column for messages
	lexer   js_lexer.Lexer           // JSON-mode lexer over source
	options JSONOptions              // accepted JSON extensions
	// When true (the file is inside "node_modules" — see ParseJSON),
	// warnings such as duplicate object keys are suppressed
	suppressWarningsAboutWeirdCode bool
}
20 |
21 | func (p *jsonParser) parseMaybeTrailingComma(closeToken js_lexer.T) bool {
22 | commaRange := p.lexer.Range()
23 | p.lexer.Expect(js_lexer.TComma)
24 |
25 | if p.lexer.Token == closeToken {
26 | if !p.options.AllowTrailingCommas {
27 | p.log.AddRangeError(&p.tracker, commaRange, "JSON does not support trailing commas")
28 | }
29 | return false
30 | }
31 |
32 | return true
33 | }
34 |
// parseExpr parses a single JSON value starting at the current lexer
// position and returns it as a js_ast.Expr. It handles the JSON literals
// (false, true, null, strings, numbers with an optional leading minus) as
// well as arrays and objects, recursing for nested elements. Syntax errors
// are reported through the lexer, which panics with js_lexer.LexerPanic
// (recovered in ParseJSON).
func (p *jsonParser) parseExpr() js_ast.Expr {
	loc := p.lexer.Loc()

	switch p.lexer.Token {
	case js_lexer.TFalse:
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.EBoolean{Value: false}}

	case js_lexer.TTrue:
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.EBoolean{Value: true}}

	case js_lexer.TNull:
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: js_ast.ENullShared}

	case js_lexer.TStringLiteral:
		// Capture the decoded string before Next advances past the token
		value := p.lexer.StringLiteral()
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.EString{Value: value}}

	case js_lexer.TNumericLiteral:
		value := p.lexer.Number
		p.lexer.Next()
		return js_ast.Expr{Loc: loc, Data: &js_ast.ENumber{Value: value}}

	case js_lexer.TMinus:
		// Negative number: consume the minus, then read the numeric value
		// before Expect advances the lexer past the literal
		p.lexer.Next()
		value := p.lexer.Number
		p.lexer.Expect(js_lexer.TNumericLiteral)
		return js_ast.Expr{Loc: loc, Data: &js_ast.ENumber{Value: -value}}

	case js_lexer.TOpenBracket:
		p.lexer.Next()
		// Track whether the whole array stays on one line (used for printing)
		isSingleLine := !p.lexer.HasNewlineBefore
		items := []js_ast.Expr{}

		for p.lexer.Token != js_lexer.TCloseBracket {
			// Every element after the first must be preceded by a comma;
			// a comma directly before "]" ends the array early
			if len(items) > 0 {
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
				if !p.parseMaybeTrailingComma(js_lexer.TCloseBracket) {
					break
				}
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
			}

			item := p.parseExpr()
			items = append(items, item)
		}

		if p.lexer.HasNewlineBefore {
			isSingleLine = false
		}
		p.lexer.Expect(js_lexer.TCloseBracket)
		return js_ast.Expr{Loc: loc, Data: &js_ast.EArray{
			Items:        items,
			IsSingleLine: isSingleLine,
		}}

	case js_lexer.TOpenBrace:
		p.lexer.Next()
		isSingleLine := !p.lexer.HasNewlineBefore
		properties := []js_ast.Property{}
		// Maps each key's text to the range where it first appeared, for
		// duplicate-key warnings
		duplicates := make(map[string]logger.Range)

		for p.lexer.Token != js_lexer.TCloseBrace {
			// Same comma handling as the array case above
			if len(properties) > 0 {
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
				if !p.parseMaybeTrailingComma(js_lexer.TCloseBrace) {
					break
				}
				if p.lexer.HasNewlineBefore {
					isSingleLine = false
				}
			}

			// Capture the key's text and range before Expect advances past it
			keyString := p.lexer.StringLiteral()
			keyRange := p.lexer.Range()
			key := js_ast.Expr{Loc: keyRange.Loc, Data: &js_ast.EString{Value: keyString}}
			p.lexer.Expect(js_lexer.TStringLiteral)

			// Warn about duplicate keys (suppressed inside "node_modules")
			if !p.suppressWarningsAboutWeirdCode {
				keyText := js_lexer.UTF16ToString(keyString)
				if prevRange, ok := duplicates[keyText]; ok {
					p.log.AddRangeWarningWithNotes(&p.tracker, keyRange, fmt.Sprintf("Duplicate key %q in object literal", keyText),
						[]logger.MsgData{logger.RangeData(&p.tracker, prevRange, fmt.Sprintf("The original %q is here", keyText))})
				} else {
					duplicates[keyText] = keyRange
				}
			}

			p.lexer.Expect(js_lexer.TColon)
			value := p.parseExpr()

			property := js_ast.Property{
				Kind:       js_ast.PropertyNormal,
				Key:        key,
				ValueOrNil: value,
			}
			properties = append(properties, property)
		}

		if p.lexer.HasNewlineBefore {
			isSingleLine = false
		}
		p.lexer.Expect(js_lexer.TCloseBrace)
		return js_ast.Expr{Loc: loc, Data: &js_ast.EObject{
			Properties:   properties,
			IsSingleLine: isSingleLine,
		}}

	default:
		// Not the start of any JSON value; Unexpected panics via the lexer
		p.lexer.Unexpected()
		return js_ast.Expr{}
	}
}
158 |
// JSONOptions controls which extensions to plain JSON the parser accepts.
type JSONOptions struct {
	AllowComments       bool // permit comments in the input (forwarded to the lexer)
	AllowTrailingCommas bool // permit a trailing comma in arrays and objects
}
163 |
164 | func ParseJSON(log logger.Log, source logger.Source, options JSONOptions) (result js_ast.Expr, ok bool) {
165 | ok = true
166 | defer func() {
167 | r := recover()
168 | if _, isLexerPanic := r.(js_lexer.LexerPanic); isLexerPanic {
169 | ok = false
170 | } else if r != nil {
171 | panic(r)
172 | }
173 | }()
174 |
175 | p := &jsonParser{
176 | log: log,
177 | source: source,
178 | tracker: logger.MakeLineColumnTracker(&source),
179 | options: options,
180 | lexer: js_lexer.NewLexerJSON(log, source, options.AllowComments),
181 | suppressWarningsAboutWeirdCode: helpers.IsInsideNodeModules(source.KeyPath.Text),
182 | }
183 |
184 | result = p.parseExpr()
185 | p.lexer.Expect(js_lexer.TEndOfFile)
186 | return
187 | }
188 |
--------------------------------------------------------------------------------
/scripts/gen-unicode-table.js:
--------------------------------------------------------------------------------
const fs = require('fs');
const path = require('path');

// ES5 reference: https://es5.github.io/
//
// A conforming implementation of this International standard shall interpret
// characters in conformance with the Unicode Standard, Version 3.0 or later
// and ISO/IEC 10646-1 with either UCS-2 or UTF-16 as the adopted encoding
// form, implementation level 3. If the adopted ISO/IEC 10646-1 subset is not
// otherwise specified, it is presumed to be the BMP subset, collection 300.
//
// UnicodeLetter: any character in the Unicode categories “Uppercase letter (Lu)”,
// “Lowercase letter (Ll)”, “Titlecase letter (Lt)”, “Modifier letter (Lm)”,
// “Other letter (Lo)”, or “Letter number (Nl)”.
const idStartES5 = [].concat(
  require('@unicode/unicode-3.0.0/General_Category/Uppercase_Letter/code-points'),
  require('@unicode/unicode-3.0.0/General_Category/Lowercase_Letter/code-points'),
  require('@unicode/unicode-3.0.0/General_Category/Titlecase_Letter/code-points'),
  require('@unicode/unicode-3.0.0/General_Category/Modifier_Letter/code-points'),
  require('@unicode/unicode-3.0.0/General_Category/Other_Letter/code-points'),

  // The "letter number" category is not included because old versions of Safari
  // had a bug where they didn't include it. This means it does not match ES5.
  // We need to make sure we escape these characters so Safari can read them.
  // See https://github.com/evanw/esbuild/issues/1349 for more information.
  // require('@unicode/unicode-3.0.0/General_Category/Letter_Number/code-points'),
).sort((a, b) => a - b)

// UnicodeCombiningMark: any character in the Unicode categories “Non-spacing mark (Mn)”
// or “Combining spacing mark (Mc)”
// UnicodeDigit: any character in the Unicode category “Decimal number (Nd)”
// UnicodeConnectorPunctuation: any character in the Unicode category “Connector punctuation (Pc)”
// Note: continue characters are a superset of start characters
const idContinueES5 = idStartES5.concat(
  require('@unicode/unicode-3.0.0/General_Category/Nonspacing_Mark/code-points'),
  require('@unicode/unicode-3.0.0/General_Category/Spacing_Mark/code-points'),
  require('@unicode/unicode-3.0.0/General_Category/Decimal_Number/code-points'),
  require('@unicode/unicode-3.0.0/General_Category/Connector_Punctuation/code-points'),
).sort((a, b) => a - b)

// ESNext reference: https://tc39.es/ecma262/
//
// A conforming implementation of ECMAScript must interpret source text input
// in conformance with the Unicode Standard, Version 5.1.0 or later and ISO/IEC
// 10646. If the adopted ISO/IEC 10646-1 subset is not otherwise specified, it
// is presumed to be the Unicode set, collection 10646.
//
// UnicodeIDStart: any Unicode code point with the Unicode property “ID_Start”
const idStartESNext = require('@unicode/unicode-13.0.0/Binary_Property/ID_Start/code-points');
const idStartESNextSet = new Set(idStartESNext);

// UnicodeIDContinue: any Unicode code point with the Unicode property “ID_Continue”
const idContinueESNext = require('@unicode/unicode-13.0.0/Binary_Property/ID_Continue/code-points');
const idContinueESNextSet = new Set(idContinueESNext);

// These identifiers are valid in both ES5 and ES6+ (i.e. an intersection of both)
const idStartES5AndESNext = idStartES5.filter(n => idStartESNextSet.has(n));
const idContinueES5AndESNext = idContinueES5.filter(n => idContinueESNextSet.has(n));

// These identifiers are valid in either ES5 or ES6+ (i.e. a union of both)
const idStartES5OrESNext = [...new Set(idStartES5.concat(idStartESNext))].sort((a, b) => a - b);
const idContinueES5OrESNext = [...new Set(idContinueES5.concat(idContinueESNext))].sort((a, b) => a - b);
62 |
// generateRangeTable renders a sorted array of Unicode code points as the Go
// source text of a "unicode.RangeTable" composite literal. Runs of
// consecutive code points are merged into single {Lo, Hi, Stride: 1} ranges.
// Code points that fit in 16 bits go in the "R16" table and the rest go in
// "R32", matching Go's "unicode" package convention that R32 should only
// contain values >= 0x10000 (Range16's Lo/Hi fields are uint16).
//
// NOTE(review): "LatinOffset" here counts the leading code points <= 0xFF,
// while Go documents it as the number of R16 *entries* with Hi <= MaxLatin1.
// These differ when Latin-1 code points merge into ranges — confirm against
// the consumers of these tables.
function generateRangeTable(codePoints) {
  let lines = [];
  let index = 0;
  let latinOffset = 0;

  while (latinOffset < codePoints.length && codePoints[latinOffset] <= 0xFF) {
    latinOffset++;
  }

  lines.push(
    `&unicode.RangeTable{`,
    `\tLatinOffset: ${latinOffset},`,
    `\tR16: []unicode.Range16{`,
  );

  // 16-bit code points. The cutoff is 0x10000 (65536), not 0x1000: every
  // value representable in a uint16 belongs in the R16 table, and Go's
  // "unicode" package expects R32 to hold only values >= 0x10000.
  while (index < codePoints.length && codePoints[index] < 0x10000) {
    let start = codePoints[index];
    index++;
    while (index < codePoints.length && codePoints[index] < 0x10000 && codePoints[index] === codePoints[index - 1] + 1) {
      index++;
    }
    let end = codePoints[index - 1];
    lines.push(`\t\t{Lo: 0x${start.toString(16)}, Hi: 0x${end.toString(16)}, Stride: 1},`);
  }

  lines.push(
    `\t},`,
    `\tR32: []unicode.Range32{`,
  );

  // 32-bit code points (everything >= 0x10000)
  while (index < codePoints.length) {
    let start = codePoints[index];
    index++;
    while (index < codePoints.length && codePoints[index] === codePoints[index - 1] + 1) {
      index++;
    }
    let end = codePoints[index - 1];
    lines.push(`\t\t{Lo: 0x${start.toString(16)}, Hi: 0x${end.toString(16)}, Stride: 1},`);
  }

  lines.push(
    `\t},`,
    `}`,
  );
  return lines.join('\n');
}
111 |
112 | fs.writeFileSync(path.join(__dirname, '..', 'internal', 'js_lexer', 'unicode.go'),
113 | `// This file was automatically generated by ${path.basename(__filename)}. Do not edit.
114 | package js_lexer
115 |
116 | import "unicode"
117 |
118 | var idStartES5AndESNext = ${generateRangeTable(idStartES5AndESNext)}
119 |
120 | var idContinueES5AndESNext = ${generateRangeTable(idContinueES5AndESNext)}
121 |
122 | var idStartES5OrESNext = ${generateRangeTable(idStartES5OrESNext)}
123 |
124 | var idContinueES5OrESNext = ${generateRangeTable(idContinueES5OrESNext)}
125 | `);
126 |
--------------------------------------------------------------------------------
/internal/css_lexer/css_lexer_test.go:
--------------------------------------------------------------------------------
1 | package css_lexer
2 |
3 | import (
4 | "testing"
5 |
6 | "github.com/evanw/esbuild/internal/logger"
7 | "github.com/evanw/esbuild/internal/test"
8 | )
9 |
10 | func lexToken(contents string) (T, string) {
11 | log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug)
12 | result := Tokenize(log, test.SourceForTest(contents))
13 | if len(result.Tokens) > 0 {
14 | t := result.Tokens[0]
15 | return t.Kind, t.DecodedText(contents)
16 | }
17 | return TEndOfFile, ""
18 | }
19 |
20 | func lexerError(contents string) string {
21 | log := logger.NewDeferLog(logger.DeferLogNoVerboseOrDebug)
22 | Tokenize(log, test.SourceForTest(contents))
23 | text := ""
24 | for _, msg := range log.Done() {
25 | text += msg.String(logger.OutputOptions{}, logger.TerminalInfo{})
26 | }
27 | return text
28 | }
29 |
30 | func TestTokens(t *testing.T) {
31 | expected := []struct {
32 | contents string
33 | token T
34 | text string
35 | }{
36 | {"", TEndOfFile, "end of file"},
37 | {"@media", TAtKeyword, "@-keyword"},
38 | {"url(x y", TBadURL, "bad URL token"},
39 | {"-->", TCDC, "\"-->\""},
40 | {"