├── .github └── workflows │ └── go.yml ├── LICENSE.md ├── README.md ├── api.go ├── api_test.go ├── cli └── main.go ├── esbuildplugin └── plugin.go ├── go.mod ├── go.sum ├── imports_test.go ├── internal ├── ast │ ├── ast.go │ ├── atrule.go │ ├── keyframe.go │ ├── media.go │ ├── rule.go │ ├── selector.go │ ├── value.go │ └── walk.go ├── integration_test.go ├── lexer │ ├── lexer.go │ ├── lexer_harness_test.go │ ├── lexer_test.go │ ├── string_bench_test.go │ └── tokens.go ├── logging │ └── logging.go ├── parser │ ├── bench_test.go │ ├── parser.go │ ├── selectors.go │ ├── span_test.go │ └── testdata │ │ └── spans.txt ├── printer │ ├── bench_test.go │ ├── manualtest │ │ ├── .gitignore │ │ └── main.go │ ├── math_test.go │ ├── printer.go │ ├── printer_test.go │ ├── sourcemaps.go │ ├── sourcemaps_test.go │ └── strings_bench_test.go ├── sources │ └── source.go ├── testdata │ ├── attributes.css │ ├── bem.css │ ├── bootstrap.css │ ├── comments.css │ ├── font-face.css │ └── grid.css └── transformer │ ├── any_link_test.go │ ├── bench_test.go │ ├── custom_media_test.go │ ├── custom_properties_test.go │ ├── math_test.go │ ├── media_feature_range_test.go │ ├── transformer.go │ └── transformer_test.go ├── resolver.go ├── resolver ├── node_resolver.go ├── node_resolver_test.go └── testdata │ ├── case-1.css │ ├── case-2 │ └── index.css │ ├── case-3 │ ├── package.json │ └── whatever.css │ ├── case-7.css │ └── .gitkeep │ ├── case-8.css │ └── index.css │ └── nested │ ├── 1 │ ├── 2 │ │ └── node_modules │ │ │ └── case-4.css │ └── node_modules │ │ └── case-5 │ │ └── index.css │ └── node_modules │ └── case-6 │ ├── dist │ ├── unreferenced.css │ └── whatever.css │ └── package.json ├── testdata ├── brokenimports │ └── index.css ├── crlf │ └── monaco.css ├── imports │ ├── another.css │ ├── index.css │ └── other.css └── simple │ └── index.css └── transforms └── transforms.go /.github/workflows/go.yml: -------------------------------------------------------------------------------- 1 | name: Go 2 | 3 | on: 4 | push: 5 | branches: [ master ] 6 | pull_request: 7 | branches: [ master ] 8 | 9 | jobs: 10 | 11 | build: 12 | name: Build 13 | runs-on: ubuntu-latest 14 | steps: 15 | 16 | - name: Set up Go 1.x 17 | uses: actions/setup-go@v2 18 | with: 19 | go-version: ^1.14 20 | id: go 21 | 22 | - name: Check out code into the Go module directory 23 | uses: actions/checkout@v2 24 | 25 | - name: Get dependencies 26 | run: | 27 | go get -v -t -d ./... 28 | if [ -f Gopkg.toml ]; then 29 | curl https://raw.githubusercontent.com/golang/dep/master/install.sh | sh 30 | dep ensure 31 | fi 32 | 33 | - name: Build 34 | run: go build -v ./... 35 | 36 | - name: Test 37 | run: go test -v ./... 38 | 39 | - name: Benchmark 40 | run: go test -bench=. ./internal/... 
41 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Stephen Wan 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # cssc 2 | [![PkgGoDev](https://pkg.go.dev/badge/github.com/stephen/cssc?tab=doc)](https://pkg.go.dev/github.com/stephen/cssc?tab=doc) 3 | 4 | A fast, friendly CSS compiler in Go. 5 | 6 | This repo is the start of a CSS compiler (parser, AST, and printer) and a set of transforms to support new CSS syntax in current browsers. To 7 | start with, it aims to be able to replace projects like [postcss-preset-env](https://github.com/csstools/postcss-preset-env) and [cssnext](https://github.com/MoOx/postcss-cssnext). 8 | 9 | Its approach is inspired by experimenting with [esbuild](https://github.com/evanw/esbuild) (see [here](https://github.com/evanw/esbuild/issues/111#issuecomment-673115702)). 10 | 11 | ## Status 12 | The package can currently parse and print most standard CSS. There are likely bugs in both. 13 | 14 | Some transforms are supported: 15 | 16 | | Transform | Support | Notes | 17 | | ------------- | ------------- | ------------- | 18 | | [`@import` rules](https://www.w3.org/TR/css-cascade-4) | Partial | Only non-conditional imports can be inlined. Import conditions will be ignored. | 19 | | [Custom Properties](https://www.w3.org/TR/css-variables-1/) | Partial | Only variables defined on `:root` will be substituted. The compiler will ignore any non-`:root` variables. [See #3](https://github.com/stephen/cssc/issues/3). | 20 | | [Custom Media Queries](https://www.w3.org/TR/mediaqueries-5/#custom-mq) | Complete | | 21 | | [Media Feature Ranges](https://www.w3.org/TR/mediaqueries-4/#mq-min-max) | Complete | | 22 | | [`:any-link`](https://www.w3.org/TR/selectors-4/#the-any-link-pseudo) | Complete | | 23 | 24 | ## API 25 | For now, there is only a Go API. 26 | 27 | ```golang 28 | package main 29 | 30 | import ( 31 | "log" 32 | 33 | "github.com/stephen/cssc" 34 | ) 35 | 36 | func main() { 37 | result := cssc.Compile(cssc.Options{ 38 | Entry: []string{"css/index.css"}, 39 | }) 40 | 41 | // result.Files is a map of all output files.
42 | for path, content := range result.Files { 43 | log.Println(path, content) 44 | } 45 | } 46 | ``` 47 | 48 | ### Transforms 49 | Transforms can be specified via options: 50 | ```golang 51 | package main 52 | 53 | import ( 54 | "github.com/stephen/cssc" 55 | "github.com/stephen/cssc/transforms" 56 | ) 57 | 58 | func main() { 59 | result := cssc.Compile(cssc.Options{ 60 | Entry: []string{"css/index.css"}, 61 | Transforms: transforms.Options{ 62 | // Transform :any-link into :link and :visited equivalents. 63 | AnyLink: transforms.AnyLinkTransform, 64 | // Keep @import rules without transforming them or inlining their content. 65 | ImportRules: transforms.ImportRulesPassthrough, 66 | }, 67 | }) 68 | 69 | // result.Files... 70 | } 71 | ``` 72 | 73 | By default, all features are in passthrough mode and will not get transformed. 74 | 75 | ### Error reporting 76 | By default, errors and warnings are printed to stderr. You can control this behavior by providing a [Reporter](https://pkg.go.dev/github.com/stephen/cssc?tab=doc#Reporter): 77 | ```golang 78 | package main 79 | 80 | import ( 81 | "log" 82 | 83 | "github.com/stephen/cssc" 84 | ) 85 | 86 | type TestReporter []error 87 | 88 | func (r *TestReporter) AddError(err error) { 89 | *r = append(*r, err) 90 | } 91 | 92 | func main() { 93 | var errors TestReporter 94 | result := cssc.Compile(cssc.Options{ 95 | Entry: []string{"css/index.css"}, 96 | Reporter: &errors, 97 | }) 98 | 99 | for _, err := range errors { 100 | log.Println(err) 101 | } 102 | } 103 | ``` 104 | 105 | 106 | ## Benchmarks 107 | To keep track of performance, I've been benchmarking performance on (partially) [parsing bootstrap.css](https://github.com/postcss/benchmark). 108 | 109 | ```bash 110 | $ go test -bench=. internal/parser/*.go 111 | goos: darwin 112 | goarch: amd64 113 | BenchmarkParser-12 296 3934884 ns/op 1548281 B/op 45916 allocs/op 114 | PASS 115 | ``` 116 | 117 | I expect this to be a moving target as I complete the parser implementation. 118 | -------------------------------------------------------------------------------- /api.go: -------------------------------------------------------------------------------- 1 | package cssc 2 | 3 | import ( 4 | "io/ioutil" 5 | "path/filepath" 6 | "sync" 7 | 8 | "github.com/samsarahq/go/oops" 9 | "github.com/stephen/cssc/internal/ast" 10 | "github.com/stephen/cssc/internal/logging" 11 | "github.com/stephen/cssc/internal/parser" 12 | "github.com/stephen/cssc/internal/printer" 13 | "github.com/stephen/cssc/internal/sources" 14 | "github.com/stephen/cssc/internal/transformer" 15 | "github.com/stephen/cssc/resolver" 16 | "github.com/stephen/cssc/transforms" 17 | "golang.org/x/sync/errgroup" 18 | ) 19 | 20 | // Options is the set of options to pass to Compile. 21 | type Options struct { 22 | // Entry is the set of files to start parsing. 23 | Entry []string 24 | 25 | // Reporter is the error and warning reporter. If not specified, the default 26 | // reporter prints to stderr. 27 | Reporter Reporter 28 | 29 | Transforms transforms.Options 30 | 31 | // Resolver is a path resolver. If not specified, the default node-style 32 | // resolver will be used. 
33 | Resolver Resolver 34 | } 35 | 36 | func newCompilation(opts Options) *compilation { 37 | c := &compilation{ 38 | sources: make(map[string]int), 39 | sourcesByIndex: make(map[int]*sources.Source), 40 | outputsByIndex: make(map[int]struct{}), 41 | astsByIndex: make(map[int]*ast.Stylesheet), 42 | result: newResult(), 43 | reporter: logging.DefaultReporter, 44 | transforms: opts.Transforms, 45 | resolver: &resolver.NodeResolver{}, 46 | } 47 | 48 | if opts.Reporter != nil { 49 | c.reporter = opts.Reporter 50 | } 51 | 52 | if opts.Resolver != nil { 53 | c.resolver = opts.Resolver 54 | } 55 | 56 | return c 57 | } 58 | 59 | type compilation struct { 60 | // mu synchronizes other globals like the error reporter. 61 | mu sync.Mutex 62 | 63 | // sourcesMu synchronizes assignment for new source indices. 64 | sourcesMu sync.RWMutex 65 | sources map[string]int 66 | nextIndex int 67 | 68 | sourcesByIndexMu sync.RWMutex 69 | sourcesByIndex map[int]*sources.Source 70 | 71 | astsByIndexMu sync.RWMutex 72 | astsByIndex map[int]*ast.Stylesheet 73 | 74 | // outputsByIndex is the set of sources to write outputs for. 75 | outputsByIndex map[int]struct{} 76 | 77 | result *Result 78 | 79 | reporter Reporter 80 | 81 | transforms transforms.Options 82 | 83 | resolver Resolver 84 | } 85 | 86 | // addSource will read in a path and assign it a source index. If 87 | // it's already been loaded, the cached source is returned. 88 | func (c *compilation) addSource(path string) (int, error) { 89 | abs, err := filepath.Abs(path) 90 | if err != nil { 91 | return 0, oops.Wrapf(err, "failed to make path absolute: %s", path) 92 | } 93 | 94 | c.sourcesMu.RLock() 95 | if _, ok := c.sources[abs]; ok { 96 | defer c.sourcesMu.RUnlock() 97 | return c.sources[abs], nil 98 | } 99 | c.sourcesMu.RUnlock() 100 | 101 | in, err := ioutil.ReadFile(abs) 102 | if err != nil { 103 | return 0, oops.Wrapf(err, "failed to read file: %s", path) 104 | } 105 | 106 | source := &sources.Source{ 107 | Content: string(in), 108 | Path: abs, 109 | } 110 | 111 | c.sourcesMu.Lock() 112 | i := c.nextIndex 113 | c.sources[abs] = i 114 | c.sourcesMu.Unlock() 115 | 116 | c.sourcesByIndexMu.Lock() 117 | c.sourcesByIndex[i] = source 118 | c.sourcesByIndexMu.Unlock() 119 | 120 | c.nextIndex++ 121 | return i, nil 122 | } 123 | 124 | func newResult() *Result { 125 | return &Result{ 126 | Files: make(map[string]string), 127 | } 128 | } 129 | 130 | // Result is the results of a compilation. 131 | type Result struct { 132 | mu sync.Mutex 133 | Files map[string]string 134 | } 135 | 136 | func (c *compilation) addError(err error) { 137 | c.mu.Lock() 138 | defer c.mu.Unlock() 139 | c.reporter.AddError(err) 140 | } 141 | 142 | // parseFile assigns the file a source index and parses the source. It also 143 | // looks at imported files and adds them to the compilation. hasOutput should 144 | // be called if the file should be included in compilation output. 145 | // 146 | // parseFile also runs the last transformation pass on the output. Note that we 147 | // don't make this function print the output as well so that we can make the current 148 | // file available to any callers as a dependency. 149 | func (c *compilation) parseFile(file string, hasOutput bool) *ast.Stylesheet { 150 | // Assign the file a source index. 
151 | idx, err := c.addSource(file) 152 | if err != nil { 153 | c.addError(err) 154 | return nil 155 | } 156 | 157 | if hasOutput { 158 | c.outputsByIndex[idx] = struct{}{} 159 | } 160 | 161 | c.astsByIndexMu.RLock() 162 | if ss, ok := c.astsByIndex[idx]; ok { 163 | c.astsByIndexMu.RUnlock() 164 | return ss 165 | } 166 | c.astsByIndexMu.RUnlock() 167 | 168 | c.sourcesByIndexMu.RLock() 169 | source := c.sourcesByIndex[idx] 170 | c.sourcesByIndexMu.RUnlock() 171 | ss, err := parser.Parse(source) 172 | if err != nil { 173 | c.addError(err) 174 | return nil 175 | } 176 | 177 | // Immediately look at the imports from the file and feed those dependencies 178 | // into parseFile as well. If we're set to inline imports, then we'll use 179 | // collect those dependency ASTs to let the transformer replace them. 180 | var mu sync.Mutex 181 | replacements := make(map[*ast.AtRule]*ast.Stylesheet) 182 | var wg errgroup.Group 183 | for _, imp := range ss.Imports { 184 | wg.Go(func() error { 185 | rel, err := c.resolver.Resolve(imp.Value, filepath.Dir(source.Path)) 186 | if err != nil { 187 | c.addError(err) 188 | return nil 189 | } 190 | 191 | // If import follow is on, then every referenced file makes it to the output. 192 | imported := c.parseFile(rel, c.transforms.ImportRules == transforms.ImportRulesFollow) 193 | 194 | mu.Lock() 195 | defer mu.Unlock() 196 | if imported != nil { 197 | replacements[imp.AtRule] = imported 198 | } 199 | return nil 200 | }) 201 | } 202 | wg.Wait() 203 | 204 | opts := transformer.Options{ 205 | Options: c.transforms, 206 | OriginalSource: source, 207 | Reporter: c.reporter, 208 | } 209 | 210 | if c.transforms.ImportRules == transforms.ImportRulesInline { 211 | opts.ImportReplacements = replacements 212 | } 213 | 214 | ss = transformer.Transform(ss, opts) 215 | c.astsByIndexMu.Lock() 216 | c.astsByIndex[idx] = ss 217 | c.astsByIndexMu.Unlock() 218 | return ss 219 | } 220 | 221 | // Compile runs a compilation with the specified Options. 222 | func Compile(opts Options) *Result { 223 | c := newCompilation(opts) 224 | 225 | var wg errgroup.Group 226 | 227 | for _, e := range opts.Entry { 228 | wg.Go(func() error { 229 | c.parseFile(e, true) 230 | return nil 231 | }) 232 | } 233 | wg.Wait() 234 | 235 | wg = errgroup.Group{} 236 | for i := range c.outputsByIndex { 237 | idx := i 238 | wg.Go(func() error { 239 | // XXX: this is the wrong file name 240 | source := c.sourcesByIndex[idx] 241 | ast := c.astsByIndex[idx] 242 | if source == nil || ast == nil { 243 | // Skip attempting to print if there was a problem reading/parsing this file 244 | // in the first place. 245 | return nil 246 | } 247 | 248 | out, err := printer.Print(ast, printer.Options{ 249 | OriginalSource: source, 250 | }) 251 | if err != nil { 252 | c.addError(err) 253 | return nil 254 | } 255 | 256 | c.result.mu.Lock() 257 | defer c.result.mu.Unlock() 258 | c.result.Files[source.Path] = out 259 | return nil 260 | }) 261 | } 262 | wg.Wait() 263 | 264 | return c.result 265 | } 266 | 267 | // Reporter is an error and warning reporter. 268 | // 269 | // Note that it is the same type as logging.Reporter, which is 270 | // an internal-only interface. 
271 | type Reporter interface { 272 | AddError(err error) 273 | } 274 | -------------------------------------------------------------------------------- /api_test.go: -------------------------------------------------------------------------------- 1 | package cssc_test 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stephen/cssc" 7 | "github.com/stephen/cssc/transforms" 8 | "github.com/stretchr/testify/assert" 9 | ) 10 | 11 | type TestReporter []error 12 | 13 | func (r *TestReporter) AddError(err error) { 14 | *r = append(*r, err) 15 | } 16 | 17 | func TestApi_Error(t *testing.T) { 18 | var errors TestReporter 19 | result := cssc.Compile(cssc.Options{ 20 | Entry: []string{ 21 | "testdata/nonexistent/index.css", 22 | }, 23 | Reporter: &errors, 24 | }) 25 | 26 | assert.Len(t, result.Files, 0) 27 | assert.Len(t, errors, 1) 28 | } 29 | 30 | func TestApi_Simple(t *testing.T) { 31 | var errors TestReporter 32 | result := cssc.Compile(cssc.Options{ 33 | Entry: []string{ 34 | "testdata/simple/index.css", 35 | }, 36 | Reporter: &errors, 37 | }) 38 | 39 | assert.Len(t, result.Files, 1) 40 | assert.Len(t, errors, 0) 41 | } 42 | 43 | func TestApi_BrokenImport(t *testing.T) { 44 | var errors TestReporter 45 | result := cssc.Compile(cssc.Options{ 46 | Entry: []string{ 47 | "testdata/brokenimports/index.css", 48 | }, 49 | Transforms: transforms.Options{ 50 | ImportRules: transforms.ImportRulesInline, 51 | }, 52 | Reporter: &errors, 53 | }) 54 | 55 | assert.Len(t, result.Files, 1) 56 | assert.Len(t, errors, 1) 57 | } 58 | 59 | func TestApi_Crlf(t *testing.T) { 60 | var errors TestReporter 61 | result := cssc.Compile(cssc.Options{ 62 | Entry: []string{ 63 | "testdata/crlf/monaco.css", 64 | }, 65 | Transforms: transforms.Options{ 66 | ImportRules: transforms.ImportRulesInline, 67 | }, 68 | Reporter: &errors, 69 | }) 70 | 71 | assert.Len(t, result.Files, 1) 72 | assert.Len(t, errors, 0) 73 | } 74 | -------------------------------------------------------------------------------- /cli/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "log" 5 | 6 | "github.com/davecgh/go-spew/spew" 7 | "github.com/stephen/cssc/internal/parser" 8 | "github.com/stephen/cssc/internal/printer" 9 | "github.com/stephen/cssc/internal/sources" 10 | ) 11 | 12 | func main() { 13 | source := &sources.Source{ 14 | Content: `@import "test.css"; 15 | @import url("./testing.css"); 16 | @import url(tester.css); 17 | /* some notes about the next line 18 | are here */ 19 | 20 | .class { 21 | width: 2rem; 22 | margin: 2em 1px; 23 | height: 20%; 24 | padding: 0; 25 | color: rgb(255, 255, calc(2 + 2)); 26 | } 27 | 28 | section { 29 | float: left; 30 | margin: 1em; border: solid 1px; 31 | width: calc(100%/3 - 2*1em - 2*1px); 32 | } 33 | 34 | /* 35 | here we are: 36 | */ 37 | section .child {} 38 | section.self {} 39 | 40 | [test="hello"] {} 41 | [test=hello] {} 42 | [test*=hello] {} 43 | [test^=2.5] {} 44 | [test] {} 45 | 46 | @media (width: 600px), (200px < width < 600px), (200px < width), (width < 600px) { 47 | .c {height: 100%;} 48 | } 49 | 50 | @media not screen { 51 | .c {height: 100%;} 52 | } 53 | 54 | @media screen and (color), projection and (color) { 55 | .c {height: 100%;} 56 | } 57 | 58 | @media not (width <= -100px) { 59 | body { background: green; } 60 | } 61 | @media (min-width: 30em) and (orientation: landscape) { 62 | body { background: green; } 63 | } 64 | 65 | @import url('landscape.css') screen and (orientation: landscape); 66 | `, 67 | } 68 | 69 | 
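// Parse the example stylesheet into an AST, dump the tree for inspection, and print it back out, using the original source for span information.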
sheet, err := parser.Parse(source) 70 | if err != nil { 71 | panic(err) 72 | } 73 | 74 | log.Println(spew.Sdump(sheet)) 75 | log.Println(printer.Print(sheet, printer.Options{ 76 | OriginalSource: source, 77 | })) 78 | } 79 | -------------------------------------------------------------------------------- /esbuildplugin/plugin.go: -------------------------------------------------------------------------------- 1 | package esbuildplugin 2 | 3 | import ( 4 | "context" 5 | "runtime/pprof" 6 | 7 | "github.com/evanw/esbuild/pkg/api" 8 | "github.com/samsarahq/go/oops" 9 | "github.com/stephen/cssc" 10 | "github.com/stephen/cssc/internal/ast" 11 | "github.com/stephen/cssc/internal/sources" 12 | "github.com/stephen/cssc/transforms" 13 | ) 14 | 15 | type csscReporter []error 16 | 17 | func (r *csscReporter) AddError(err error) { 18 | *r = append(*r, err) 19 | } 20 | 21 | type locationError interface { 22 | Location() (*sources.Source, ast.Span) 23 | } 24 | 25 | func (r *csscReporter) toEsbuild() []api.Message { 26 | var errs []api.Message 27 | for _, err := range *r { 28 | 29 | if lErr, ok := err.(locationError); ok { 30 | source, span := lErr.Location() 31 | _, colNumber := source.LineAndCol(span) 32 | lineSpan := source.FullLine(span) 33 | 34 | errs = append(errs, api.Message{ 35 | Text: err.Error(), 36 | Location: &api.Location{ 37 | File: source.Path, 38 | Column: int(colNumber), 39 | Line: lineSpan.Start, 40 | Length: lineSpan.End - lineSpan.Start, 41 | LineText: source.Content[lineSpan.Start:lineSpan.End], 42 | }, 43 | }) 44 | } else { 45 | errs = append(errs, api.Message{ 46 | Text: err.Error(), 47 | }) 48 | } 49 | } 50 | 51 | return errs 52 | } 53 | 54 | // Option is an optional argument to the plugin. 55 | type Option func(cssc.Options) cssc.Options 56 | 57 | // WithTransforms sets the transform options for the plugin. 58 | func WithTransforms(transforms transforms.Options) Option { 59 | return func(opts cssc.Options) cssc.Options { 60 | opts.Transforms = transforms 61 | return opts 62 | } 63 | } 64 | 65 | // WithResolver sets the import resolver for the plugin. 66 | func WithResolver(resolver cssc.Resolver) Option { 67 | return func(opts cssc.Options) cssc.Options { 68 | opts.Resolver = resolver 69 | return opts 70 | } 71 | } 72 | 73 | // Plugin is an esbuild plugin for importing .css files.
74 | func Plugin(opts ...Option) api.Plugin { 75 | return api.Plugin{ 76 | Name: "cssc", 77 | Setup: func(build api.PluginBuild) { 78 | build.OnLoad( 79 | api.OnLoadOptions{Filter: `\.css$`}, 80 | func(args api.OnLoadArgs) (res api.OnLoadResult, err error) { 81 | res.Loader = api.LoaderCSS 82 | 83 | var errors csscReporter 84 | options := cssc.Options{ 85 | Entry: []string{args.Path}, 86 | Reporter: &errors, 87 | } 88 | for _, opt := range opts { 89 | options = opt(options) 90 | } 91 | 92 | pprof.SetGoroutineLabels(pprof.WithLabels(context.TODO(), pprof.Labels("cssc-path", args.Path))) 93 | result := cssc.Compile(options) 94 | 95 | if len(errors) > 0 { 96 | res.Errors = errors.toEsbuild() 97 | return 98 | } 99 | 100 | f, ok := result.Files[args.Path] 101 | if !ok { 102 | err = oops.Errorf("cssc output did not contain %s", args.Path) 103 | return 104 | } 105 | 106 | res.Contents = &f 107 | return res, nil 108 | }, 109 | ) 110 | }, 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/stephen/cssc 2 | 3 | go 1.14 4 | 5 | require ( 6 | github.com/davecgh/go-spew v1.1.1 7 | github.com/evanw/esbuild v0.8.6 8 | github.com/kr/pretty v0.1.0 // indirect 9 | github.com/samsarahq/go v0.0.0-20191220233105-8077c9fbaed5 10 | github.com/stretchr/testify v1.6.1 11 | golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208 12 | gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect 13 | ) 14 | -------------------------------------------------------------------------------- /go.sum: -------------------------------------------------------------------------------- 1 | github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 2 | github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= 3 | github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 4 | github.com/evanw/esbuild v0.8.6 h1:AgNzZzldj5GeIefqzzR3aPt/FHD10VP/KWRsTYMVxsM= 5 | github.com/evanw/esbuild v0.8.6/go.mod h1:mptxmSXIzBIKKCe4jo9A5SToEd1G+AKZ9JmY85dYRJ0= 6 | github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= 7 | github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= 8 | github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= 9 | github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= 10 | github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= 11 | github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= 12 | github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= 13 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 14 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 15 | github.com/samsarahq/go v0.0.0-20191220233105-8077c9fbaed5 h1:x45emkhsiiRJQxqtI1tMxxqDDHVpz30YjQhl+WTozRE= 16 | github.com/samsarahq/go v0.0.0-20191220233105-8077c9fbaed5/go.mod h1:J7RmrHmcZ0rfq31ocAbPajAg/xxWSXOXy1ruhdpDL5Y= 17 | github.com/stretchr/objx v0.1.0 h1:4G4v2dO3VZwixGIRoQ5Lfboy6nUhCyYzaqnIAPPhYs4= 18 | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 19 | github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= 20 | github.com/stretchr/testify v1.6.1/go.mod 
h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= 21 | golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208 h1:qwRHBd0NqMbJxfbotnDhm2ByMI1Shq4Y6oRJo21SGJA= 22 | golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= 23 | golang.org/x/sys v0.0.0-20200501145240-bc7a7d42d5c3 h1:5B6i6EAiSYyejWfvc5Rc9BbI3rzIsrrXfAQBWnYfn+w= 24 | golang.org/x/sys v0.0.0-20200501145240-bc7a7d42d5c3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 25 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 26 | gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo= 27 | gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 28 | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= 29 | gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= 30 | -------------------------------------------------------------------------------- /imports_test.go: -------------------------------------------------------------------------------- 1 | package cssc_test 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stephen/cssc" 7 | "github.com/stephen/cssc/transforms" 8 | "github.com/stretchr/testify/assert" 9 | ) 10 | 11 | func TestImports(t *testing.T) { 12 | t.Run("inline", func(t *testing.T) { 13 | var errors TestReporter 14 | result := cssc.Compile(cssc.Options{ 15 | Entry: []string{ 16 | "testdata/imports/index.css", 17 | }, 18 | Reporter: &errors, 19 | Transforms: transforms.Options{ 20 | ImportRules: transforms.ImportRulesInline, 21 | }, 22 | }) 23 | 24 | assert.Len(t, result.Files, 1) 25 | assert.Len(t, errors, 0) 26 | }) 27 | 28 | t.Run("passthrough", func(t *testing.T) { 29 | var errors TestReporter 30 | result := cssc.Compile(cssc.Options{ 31 | Entry: []string{ 32 | "testdata/imports/index.css", 33 | }, 34 | Reporter: &errors, 35 | Transforms: transforms.Options{ 36 | ImportRules: transforms.ImportRulesPassthrough, 37 | }, 38 | }) 39 | 40 | assert.Len(t, result.Files, 1) 41 | assert.Len(t, errors, 0) 42 | }) 43 | 44 | t.Run("follow", func(t *testing.T) { 45 | var errors TestReporter 46 | result := cssc.Compile(cssc.Options{ 47 | Entry: []string{ 48 | "testdata/imports/index.css", 49 | }, 50 | Reporter: &errors, 51 | Transforms: transforms.Options{ 52 | ImportRules: transforms.ImportRulesFollow, 53 | }, 54 | }) 55 | 56 | assert.Len(t, result.Files, 3) 57 | assert.Len(t, errors, 0) 58 | }) 59 | } 60 | -------------------------------------------------------------------------------- /internal/ast/ast.go: -------------------------------------------------------------------------------- 1 | package ast 2 | 3 | // Node is any top-level stylesheet rule. 4 | type Node interface { 5 | Location() Span 6 | } 7 | 8 | // Span is a range of text in the source. 9 | type Span struct { 10 | // Start is the start of the range, inclusive. 11 | Start int 12 | 13 | // End is the end of the range, exclusive. 14 | End int 15 | } 16 | 17 | // Location implements Node. 18 | func (l Span) Location() Span { return l } 19 | 20 | // Stylesheet is a CSS stylesheet. 21 | type Stylesheet struct { 22 | Nodes []Node 23 | 24 | Imports []ImportSpecifier 25 | } 26 | 27 | // ImportSpecifier is a pointer to an import at rule. 28 | type ImportSpecifier struct { 29 | Value string 30 | 31 | // AtRule is a pointer to the at rule that specified this import. 
32 | AtRule *AtRule 33 | } 34 | 35 | // Location implements Node. 36 | func (l Stylesheet) Location() Span { 37 | if len(l.Nodes) == 0 { 38 | return Span{} 39 | } 40 | 41 | return Span{l.Nodes[0].Location().Start, l.Nodes[len(l.Nodes)-1].Location().End} 42 | } 43 | 44 | // Block can either be a block of rules or declarations. 45 | // See https://www.w3.org/TR/css-syntax-3/#declaration-rule-list. 46 | type Block interface { 47 | Node 48 | 49 | isBlock() 50 | } 51 | 52 | // DeclarationBlock is a block containing a set of declarations. 53 | type DeclarationBlock struct { 54 | Span 55 | 56 | Declarations []Declarationish 57 | } 58 | 59 | // Declarationish is a Declaration or a Raw value. 60 | type Declarationish interface { 61 | Node 62 | isDeclaration() 63 | } 64 | 65 | // QualifiedRuleBlock is a block containing a set of rules. 66 | type QualifiedRuleBlock struct { 67 | Span 68 | 69 | Rules []*QualifiedRule 70 | } 71 | 72 | func (DeclarationBlock) isBlock() {} 73 | func (QualifiedRuleBlock) isBlock() {} 74 | 75 | var _ Block = DeclarationBlock{} 76 | var _ Block = QualifiedRuleBlock{} 77 | 78 | // Declaration is a property assignment, e.g. width: 2px. 79 | type Declaration struct { 80 | Span 81 | 82 | // Property is the property being assigned. 83 | Property string 84 | 85 | // Values is the list of values assigned to the declaration. 86 | Values []Value 87 | 88 | // Important is whether or not the declaration was marked !important. 89 | Important bool 90 | } 91 | 92 | func (Declaration) isDeclaration() {} 93 | func (Raw) isDeclaration() {} 94 | 95 | var _ Declarationish = Declaration{} 96 | var _ Declarationish = Raw{} 97 | -------------------------------------------------------------------------------- /internal/ast/atrule.go: -------------------------------------------------------------------------------- 1 | package ast 2 | 3 | // AtRule represents an at-rule, e.g. @import or @media. 4 | type AtRule struct { 5 | Span 6 | 7 | Name string 8 | 9 | Preludes []AtPrelude 10 | 11 | Block Block 12 | } 13 | 14 | func (String) isAtPrelude() {} 15 | func (Identifier) isAtPrelude() {} 16 | 17 | var _ AtPrelude = String{} 18 | var _ AtPrelude = Identifier{} 19 | 20 | // AtPrelude is the set of arguments for an at-rule. 21 | // The interface is only used for type discrimination. 22 | type AtPrelude interface { 23 | Node 24 | 25 | isAtPrelude() 26 | } 27 | 28 | var _ Node = AtRule{} 29 | -------------------------------------------------------------------------------- /internal/ast/keyframe.go: -------------------------------------------------------------------------------- 1 | package ast 2 | 3 | // KeyframeSelectorList is a list of selectors used by @keyframes blocks. 4 | type KeyframeSelectorList struct { 5 | Span 6 | 7 | Selectors []KeyframeSelector 8 | } 9 | 10 | func (KeyframeSelectorList) isPrelude() {} 11 | 12 | var _ Prelude = KeyframeSelectorList{} 13 | 14 | // KeyframeSelector is a selector for rules in a @keyframes block. 15 | // Valid values are a Percentage or to/from.
16 | type KeyframeSelector interface { 17 | Node 18 | 19 | isKeyframeSelector() 20 | } 21 | 22 | func (Percentage) isKeyframeSelector() {} 23 | func (Identifier) isKeyframeSelector() {} 24 | 25 | var _ KeyframeSelector = Percentage{} 26 | var _ KeyframeSelector = Identifier{} 27 | -------------------------------------------------------------------------------- /internal/ast/media.go: -------------------------------------------------------------------------------- 1 | package ast 2 | 3 | // MediaQueryList is a comma-separated list of media queries. 4 | type MediaQueryList struct { 5 | Span 6 | 7 | Queries []*MediaQuery 8 | } 9 | 10 | func (MediaQueryList) isAtPrelude() {} 11 | 12 | var _ AtPrelude = MediaQueryList{} 13 | 14 | // MediaQuery is a single media query. 15 | type MediaQuery struct { 16 | Span 17 | 18 | Parts []MediaQueryPart 19 | } 20 | 21 | // isAtPrelude implements AtPrelude for @custom-media rules. 22 | func (MediaQuery) isAtPrelude() {} 23 | 24 | var _ AtPrelude = MediaQuery{} 25 | 26 | // MediaQueryPart is a part of a media query, e.g. a MediaFeature, 27 | // MediaType, or MediaCombinator. 28 | type MediaQueryPart interface { 29 | Node 30 | 31 | isMediaQueryPart() 32 | } 33 | 34 | func (Identifier) isMediaQueryPart() {} 35 | func (MediaFeaturePlain) isMediaQueryPart() {} 36 | func (MediaFeatureRange) isMediaQueryPart() {} 37 | func (MediaInParens) isMediaQueryPart() {} 38 | 39 | var _ MediaQueryPart = Identifier{} 40 | var _ MediaQueryPart = MediaFeaturePlain{} 41 | var _ MediaQueryPart = MediaFeatureRange{} 42 | var _ MediaQueryPart = MediaInParens{} 43 | 44 | // MediaInParens is a media expression in parenthesis. It is 45 | // different from MediaQuery in that it implements MediaQueryPart. 46 | type MediaInParens struct { 47 | Span 48 | 49 | Parts []MediaQueryPart 50 | } 51 | 52 | // MediaType is a specific media type. 53 | type MediaType struct { 54 | Span 55 | 56 | Value string 57 | } 58 | 59 | // MediaFeature is fine-grained test for a media feature, 60 | // enclosed in parenthesis. 61 | type MediaFeature interface { 62 | Node 63 | MediaQueryPart 64 | 65 | isMediaFeature() 66 | } 67 | 68 | func (MediaFeaturePlain) isMediaFeature() {} 69 | func (MediaFeatureRange) isMediaFeature() {} 70 | 71 | var _ MediaFeature = MediaFeaturePlain{} 72 | var _ MediaFeature = MediaFeatureRange{} 73 | 74 | // MediaFeaturePlain is a equivalence check. 75 | // e.g. (width: 500px) or (color). 76 | type MediaFeaturePlain struct { 77 | Span 78 | 79 | Property *Identifier 80 | Value Value 81 | } 82 | 83 | // MediaFeatureRange is a type of media feature that looks 84 | // like value < name < value or value > name > value. 85 | type MediaFeatureRange struct { 86 | Span 87 | 88 | Property *Identifier 89 | 90 | LeftValue Value 91 | Operator string 92 | RightValue Value 93 | } 94 | -------------------------------------------------------------------------------- /internal/ast/rule.go: -------------------------------------------------------------------------------- 1 | package ast 2 | 3 | // QualifiedRule is a prelude (selectors) and set of declarations. 4 | type QualifiedRule struct { 5 | Span 6 | 7 | Prelude Prelude 8 | 9 | Block Block 10 | } 11 | 12 | // Prelude is the prelude for QualifiedRules. 
13 | type Prelude interface { 14 | Node 15 | 16 | isPrelude() 17 | } 18 | 19 | var _ Node = QualifiedRule{} 20 | -------------------------------------------------------------------------------- /internal/ast/selector.go: -------------------------------------------------------------------------------- 1 | package ast 2 | 3 | // SelectorList is a type of prelude for QualifiedRule, 4 | // containing a list of selectors separated by commas. 5 | type SelectorList struct { 6 | Span 7 | 8 | Selectors []*Selector 9 | } 10 | 11 | func (SelectorList) isPrelude() {} 12 | func (SelectorList) isPseudoClassArguments() {} 13 | 14 | var _ Prelude = SelectorList{} 15 | var _ PseudoClassArguments = SelectorList{} 16 | 17 | // Selector represents a single selector. From the selectors level 4 18 | // spec, a selector is a flat representation of complex-selector, 19 | // compound-selector, type-selector, combinator, etc., since we mostly 20 | // just want tokens to work with. 21 | type Selector struct { 22 | Span 23 | 24 | Parts []SelectorPart 25 | } 26 | 27 | // SelectorPart is a part of a complex selector. It may be, e.g., 28 | // a class or id selector, a + or > combinator, or a pseudoselector. 29 | // 30 | // The interface is only used for type discrimination. 31 | type SelectorPart interface { 32 | Node 33 | 34 | isSelector() 35 | } 36 | 37 | // TypeSelector selects a single type, e.g. div, body, or html. 38 | type TypeSelector struct { 39 | Span 40 | 41 | Name string 42 | } 43 | 44 | // ClassSelector selects a single class, e.g. .test or .Thing. 45 | type ClassSelector struct { 46 | Span 47 | 48 | Name string 49 | } 50 | 51 | // IDSelector selects a single ID, e.g. #container. 52 | type IDSelector struct { 53 | Span 54 | 55 | Name string 56 | } 57 | 58 | // CombinatorSelector operates between two selectors. 59 | type CombinatorSelector struct { 60 | Span 61 | 62 | // The combinator operation, i.e. >, +, ~, or |. 63 | Operator string 64 | } 65 | 66 | // PseudoClassSelector selects a pseudo class, e.g. :not() or :hover. 67 | type PseudoClassSelector struct { 68 | Span 69 | 70 | // Name is the name of the pseudo selector. 71 | Name string 72 | 73 | // Arguments holds any arguments to the selector, if specified. 74 | Arguments PseudoClassArguments 75 | } 76 | 77 | // PseudoClassArguments is the arguments for a functional pseudo class. 78 | type PseudoClassArguments interface { 79 | Node 80 | 81 | isPseudoClassArguments() 82 | } 83 | 84 | // isPseudoClassArguments implements PseudoClassArguments so that 85 | // even/odd can be represented for nth-* pseudo classes. 86 | func (Identifier) isPseudoClassArguments() {} 87 | 88 | var _ PseudoClassArguments = Identifier{} 89 | 90 | // ANPlusB is an an+b value type for nth-* pseudo classes. 91 | type ANPlusB struct { 92 | Span 93 | 94 | A string 95 | Operator string 96 | B string 97 | } 98 | 99 | func (ANPlusB) isPseudoClassArguments() {} 100 | 101 | var _ PseudoClassArguments = ANPlusB{} 102 | 103 | // PseudoElementSelector selects a pseudo element, e.g. ::before or ::after. 104 | type PseudoElementSelector struct { 105 | Span 106 | 107 | Inner *PseudoClassSelector 108 | } 109 | 110 | // Whitespace represents any whitespace sequence. Whitespace is 111 | // only kept in the AST when necessary for disambiguating syntax, 112 | // e.g. in selectors. 113 | type Whitespace struct { 114 | Span 115 | } 116 | 117 | // AttributeSelector selects elements with the specified attributes matching. 118 | // Note that the = token is implied if Value is non-zero.
119 | type AttributeSelector struct { 120 | Span 121 | 122 | // Property is the attribute to check. 123 | Property string 124 | 125 | // PreOperator can be ~, ^, $, *. 126 | // See: https://www.w3.org/TR/selectors-4/#attribute-representation. 127 | PreOperator string 128 | 129 | // Value is the value to match against. 130 | Value Value 131 | 132 | // Modifier can be either i (insensitive) or s (sensitive). In practice, 133 | // s is not supported. 134 | Modifier string 135 | } 136 | 137 | var _ SelectorPart = TypeSelector{} 138 | var _ SelectorPart = ClassSelector{} 139 | var _ SelectorPart = IDSelector{} 140 | var _ SelectorPart = CombinatorSelector{} 141 | var _ SelectorPart = PseudoClassSelector{} 142 | var _ SelectorPart = PseudoElementSelector{} 143 | var _ SelectorPart = Whitespace{} 144 | var _ SelectorPart = AttributeSelector{} 145 | 146 | func (TypeSelector) isSelector() {} 147 | func (ClassSelector) isSelector() {} 148 | func (IDSelector) isSelector() {} 149 | func (CombinatorSelector) isSelector() {} 150 | func (PseudoClassSelector) isSelector() {} 151 | func (PseudoElementSelector) isSelector() {} 152 | func (Whitespace) isSelector() {} 153 | func (AttributeSelector) isSelector() {} 154 | -------------------------------------------------------------------------------- /internal/ast/value.go: -------------------------------------------------------------------------------- 1 | package ast 2 | 3 | // Value is a css value, e.g. dimension, percentage, or number. 4 | type Value interface { 5 | Node 6 | 7 | // isValue is only used for type discrimination. 8 | isValue() 9 | } 10 | 11 | // String is a string literal. 12 | type String struct { 13 | Span 14 | 15 | // Value is the string. 16 | Value string 17 | } 18 | 19 | // Dimension is a numeric value and a unit. Dimension can 20 | // also represent Percentages (% unit) or Numbers (empty string unit). 21 | type Dimension struct { 22 | Span 23 | 24 | // Value is the string representation for the value. 25 | Value string 26 | 27 | // Unit is the unit (e.g. rem, px) for the dimension. If Unit 28 | // is empty, then it's a CSS number type. 29 | Unit string 30 | } 31 | 32 | // Percentage is a numeric percentage. 33 | type Percentage struct { 34 | Span 35 | 36 | // Value is the string representation for the value. 37 | Value string 38 | } 39 | 40 | // Identifier is any string identifier value, e.g. inherit or left. 41 | type Identifier struct { 42 | Span 43 | 44 | // Value is the identifier. 45 | Value string 46 | } 47 | 48 | // HexColor is a hex color (e.g. #aabbccdd) defined by https://www.w3.org/TR/css-color-3/. 49 | type HexColor struct { 50 | Span 51 | 52 | // RGBA is the literal rgba value. 53 | RGBA string 54 | } 55 | 56 | // Brackets is a bracketized value. 57 | type Brackets struct { 58 | Span 59 | 60 | // Values is the inner values, space-separated. 61 | Values []Value 62 | } 63 | 64 | // Function is a css function. 65 | type Function struct { 66 | Span 67 | 68 | // Name is the name of the function. 69 | Name string 70 | 71 | // Arguments is the set of values passed into the function. 72 | Arguments []Value 73 | } 74 | 75 | // IsMath returns whether or not this function supports math expressions 76 | // as values. 77 | func (f Function) IsMath() bool { 78 | _, ok := mathFunctions[f.Name] 79 | return ok 80 | } 81 | 82 | var mathFunctions = map[string]struct{}{ 83 | "calc": struct{}{}, 84 | "min": struct{}{}, 85 | "max": struct{}{}, 86 | "clamp": struct{}{}, 87 | } 88 | 89 | // MathExpression is a binary expression for math functions. 
90 | type MathExpression struct { 91 | Span 92 | 93 | // Operator +, -, *, or /. 94 | Operator string 95 | 96 | Left Value 97 | Right Value 98 | } 99 | 100 | // MathParenthesizedExpression is a parenthesized math expression. 101 | type MathParenthesizedExpression struct { 102 | Span 103 | 104 | Value Value 105 | } 106 | 107 | // Raw is an otherwise non-structured but valid value. 108 | type Raw struct { 109 | Span 110 | 111 | Value string 112 | } 113 | 114 | // Comma is a single comma. Some declarations require commas, 115 | // e.g. font-family fallbacks or transitions. 116 | type Comma struct { 117 | Span 118 | } 119 | 120 | func (String) isValue() {} 121 | func (Dimension) isValue() {} 122 | func (Brackets) isValue() {} 123 | func (Function) isValue() {} 124 | func (MathExpression) isValue() {} 125 | func (MathParenthesizedExpression) isValue() {} 126 | func (Comma) isValue() {} 127 | func (Identifier) isValue() {} 128 | func (HexColor) isValue() {} 129 | func (Raw) isValue() {} 130 | 131 | var _ Value = String{} 132 | var _ Value = Dimension{} 133 | var _ Value = Brackets{} 134 | var _ Value = Function{} 135 | var _ Value = MathExpression{} 136 | var _ Value = MathParenthesizedExpression{} 137 | var _ Value = Comma{} 138 | var _ Value = Identifier{} 139 | var _ Value = HexColor{} 140 | var _ Value = Raw{} 141 | -------------------------------------------------------------------------------- /internal/ast/walk.go: -------------------------------------------------------------------------------- 1 | package ast 2 | 3 | import ( 4 | "fmt" 5 | "reflect" 6 | ) 7 | 8 | // Walk walks the AST starting from the input node. 9 | func Walk(start Node, visit func(n Node)) { 10 | if start == nil { 11 | return 12 | } 13 | 14 | visit(start) 15 | switch s := start.(type) { 16 | case *Stylesheet: 17 | for _, n := range s.Nodes { 18 | Walk(n, visit) 19 | } 20 | 21 | case *QualifiedRule: 22 | Walk(s.Prelude, visit) 23 | Walk(s.Block, visit) 24 | 25 | case *SelectorList: 26 | for _, sel := range s.Selectors { 27 | Walk(sel, visit) 28 | } 29 | 30 | case *Selector: 31 | for _, part := range s.Parts { 32 | Walk(part, visit) 33 | } 34 | 35 | case *AtRule: 36 | for _, p := range s.Preludes { 37 | Walk(p, visit) 38 | } 39 | Walk(s.Block, visit) 40 | 41 | case *MediaQueryList: 42 | for _, mq := range s.Queries { 43 | Walk(mq, visit) 44 | } 45 | 46 | case *MediaQuery: 47 | for _, part := range s.Parts { 48 | Walk(part, visit) 49 | } 50 | 51 | case *MediaFeaturePlain: 52 | Walk(s.Property, visit) 53 | Walk(s.Value, visit) 54 | 55 | case *QualifiedRuleBlock: 56 | for _, r := range s.Rules { 57 | Walk(r, visit) 58 | } 59 | 60 | case *Declaration: 61 | for _, v := range s.Values { 62 | Walk(v, visit) 63 | } 64 | 65 | case *DeclarationBlock: 66 | for _, decl := range s.Declarations { 67 | Walk(decl, visit) 68 | } 69 | 70 | case *AttributeSelector: 71 | Walk(s.Value, visit) 72 | 73 | case *MediaFeatureRange: 74 | Walk(s.LeftValue, visit) 75 | Walk(s.RightValue, visit) 76 | 77 | case *MathParenthesizedExpression: 78 | Walk(s.Value, visit) 79 | 80 | case *MathExpression: 81 | Walk(s.Left, visit) 82 | Walk(s.Right, visit) 83 | 84 | case *KeyframeSelectorList: 85 | for _, k := range s.Selectors { 86 | Walk(k, visit) 87 | } 88 | 89 | case *Function: 90 | for _, arg := range s.Arguments { 91 | Walk(arg, visit) 92 | } 93 | 94 | case *PseudoElementSelector: 95 | Walk(s.Inner, visit) 96 | 97 | case *ClassSelector: 98 | case *Comma: 99 | case *IDSelector: 100 | case *String: 101 | case *TypeSelector: 102 | case *CombinatorSelector: 
103 | case *PseudoClassSelector: 104 | case *ANPlusB: 105 | case *HexColor: 106 | case *Percentage: 107 | case *Dimension: 108 | case *Whitespace: 109 | case *Identifier: 110 | case *Raw: 111 | 112 | default: 113 | panic(fmt.Errorf("unknown node type: %s", reflect.TypeOf(s).String())) 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /internal/integration_test.go: -------------------------------------------------------------------------------- 1 | package cssc 2 | 3 | import ( 4 | "io/ioutil" 5 | "testing" 6 | 7 | "github.com/stephen/cssc/internal/parser" 8 | "github.com/stephen/cssc/internal/printer" 9 | "github.com/stephen/cssc/internal/sources" 10 | "github.com/stretchr/testify/require" 11 | ) 12 | 13 | func TestIntegration(t *testing.T) { 14 | for _, c := range []string{ 15 | "testdata/bootstrap.css", 16 | "testdata/comments.css", 17 | "testdata/bem.css", 18 | "testdata/font-face.css", 19 | "testdata/grid.css", 20 | "testdata/attributes.css", 21 | } { 22 | t.Run(c, func(t *testing.T) { 23 | 24 | by, err := ioutil.ReadFile(c) 25 | require.NoError(t, err) 26 | source := &sources.Source{ 27 | Path: c, 28 | Content: string(by), 29 | } 30 | 31 | ast, err := parser.Parse(source) 32 | require.NoError(t, err) 33 | 34 | printer.Print(ast, printer.Options{}) 35 | }) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /internal/lexer/lexer.go: -------------------------------------------------------------------------------- 1 | // Package lexer implements a css lexer that is meant to be 2 | // used in conjuction with its sibling parser package. 3 | package lexer 4 | 5 | import ( 6 | "unicode" 7 | "unicode/utf8" 8 | 9 | "github.com/stephen/cssc/internal/ast" 10 | "github.com/stephen/cssc/internal/logging" 11 | "github.com/stephen/cssc/internal/sources" 12 | ) 13 | 14 | // Lexer lexes the input source. Callers push the lexer 15 | // along with calls to Next(), which populate the current 16 | // token and literals. 17 | type Lexer struct { 18 | // ch is the last rune consumed with step(). If there are 19 | // no more runes, ch is -1. 20 | ch rune 21 | 22 | // pos is the current byte offset within source. 23 | pos int 24 | 25 | // lastPos is the most recently read byte offset within source, i.e. 26 | // one codepoint before pos. We need to keep track of this separate from 27 | // pos because it's possible that the previous position was multiple 28 | // bytes away (due to ut8 codepoints being bigger than a byte). 29 | lastPos int 30 | 31 | // start is bookkeeping for the value of lastPos when Next() was most recently called. 32 | // It's only used for providing locations to the lexer's caller, not for stepping 33 | // through the source within the lexer. 34 | start int 35 | 36 | // source is the current source code being lexed. 37 | source *sources.Source 38 | 39 | // Current is the last token lexed by Next(). 40 | Current Token 41 | 42 | // CurrentString is the last literal string lexed by Next(). It 43 | // is not cleared between valid literals. 44 | CurrentString string 45 | 46 | // CurrentNumeral is the last literal numeral lexed by Next(). It 47 | // is not cleared between valid literals. 48 | CurrentNumeral string 49 | 50 | // RetainWhitespace is settable by the caller of the lexer. When set, 51 | // it will keep whitespace tokens around. This is useful for parsing 52 | // some CSS that must be space disambiguated. 
53 | RetainWhitespace bool 54 | 55 | // RetainComments is settable by the caller of the lexer. When set, 56 | // the lexer will emit comment tokens. Otherwise, they are skipped 57 | // and ignored. 58 | RetainComments bool 59 | } 60 | 61 | // NewLexer creates a new lexer for the source. 62 | func NewLexer(source *sources.Source) *Lexer { 63 | l := &Lexer{ 64 | source: source, 65 | } 66 | l.step() 67 | l.Next() 68 | return l 69 | } 70 | 71 | // step consumes the next unicode rune and stores it. 72 | func (l *Lexer) step() { 73 | if l.pos == 0 { 74 | l.source.Lines = append(l.source.Lines, l.pos) 75 | } 76 | 77 | cp, size := utf8.DecodeRuneInString(l.source.Content[l.pos:]) 78 | 79 | if size == 0 { 80 | l.ch = -1 81 | l.lastPos = l.pos 82 | return 83 | } 84 | 85 | // "Convert" to newline. https://www.w3.org/TR/css-syntax-3/#newline 86 | if cp == '\r' { 87 | next, nextSize := utf8.DecodeRuneInString(l.source.Content[l.pos+size:]) 88 | if next == '\n' { 89 | l.source.Lines = append(l.source.Lines, l.pos+size+nextSize) 90 | } 91 | 92 | l.ch = next 93 | l.lastPos = l.pos 94 | l.pos += size + nextSize 95 | return 96 | } 97 | 98 | if cp == '\n' { 99 | l.source.Lines = append(l.source.Lines, l.pos+size) 100 | } 101 | 102 | l.ch = cp 103 | l.lastPos = l.pos 104 | l.pos += size 105 | } 106 | 107 | // peek returns the next ith unconsumed rune but does not consume it. 108 | // i is 0-indexed (0 is one ahead, 1 is two ahead, etc.) 109 | func (l *Lexer) peek(i int) rune { 110 | if l.pos+i > len(l.source.Content) { 111 | return -1 112 | } 113 | 114 | cp, size := utf8.DecodeRuneInString(l.source.Content[l.pos+i:]) 115 | if size == 0 { 116 | return -1 117 | } 118 | return cp 119 | } 120 | 121 | // TokenEnd returns the end location of the current token. 122 | func (l *Lexer) TokenEnd() int { 123 | return l.lastPos 124 | } 125 | 126 | // TokenSpan creates a span that starts and ends with the current token. This is useful 127 | // for creating single-token AST nodes, e.g. commas. 128 | func (l *Lexer) TokenSpan() ast.Span { 129 | return ast.Span{Start: l.start, End: l.lastPos} 130 | } 131 | 132 | // Expect is like Next, except it asserts the current token before moving on. Callers should 133 | // pull CurrentLiteral / CurrentNumeral before calling this function. 134 | func (l *Lexer) Expect(token Token) { 135 | if l.Current != token { 136 | l.Errorf("expected %s, but got %s instead", token, l.Current) 137 | } 138 | l.Next() 139 | } 140 | 141 | // Next consumes the most recent r. 142 | func (l *Lexer) Next() { 143 | // Run in a for-loop so that some types (e.g. whitespace) can use continue to 144 | // move on to the next token. Other codepaths will end in a return statement 145 | // at the end of a single iteration. 146 | for { 147 | // Mark the start after all whitespace has been skipped. 
148 | l.start = l.lastPos 149 | switch l.ch { 150 | case -1: 151 | l.Current = EOF 152 | return 153 | 154 | case ';': 155 | l.Current = Semicolon 156 | l.step() 157 | 158 | case ':': 159 | l.Current = Colon 160 | l.step() 161 | 162 | case '+': 163 | if startsNumber(l.ch, l.peek(0), l.peek(1)) { 164 | l.nextNumericToken() 165 | return 166 | } 167 | 168 | l.nextDelimToken() 169 | 170 | case '-': 171 | if startsNumber(l.ch, l.peek(0), l.peek(1)) { 172 | l.nextNumericToken() 173 | return 174 | } 175 | 176 | if p0, p1 := l.peek(0), l.peek(1); p0 == '-' && p1 == '>' { 177 | l.Current = CDC 178 | return 179 | } 180 | 181 | if startsIdentifier(l.peek(0), l.peek(1), l.peek(2)) { 182 | l.nextIdentLikeToken() 183 | return 184 | } 185 | 186 | l.nextDelimToken() 187 | 188 | case '<': 189 | if p0, p1, p2 := l.peek(0), l.peek(1), l.peek(2); p0 == '!' && p1 == '-' && p2 == '-' { 190 | l.Current = CDO 191 | return 192 | } 193 | 194 | // Otherwise save it as a delimiter. 195 | l.nextDelimToken() 196 | 197 | case '@': 198 | if startsIdentifier(l.peek(0), l.peek(1), l.peek(2)) { 199 | l.step() // Consume @. 200 | l.Current = At 201 | 202 | start := l.lastPos 203 | l.nextName() 204 | l.CurrentString = l.source.Content[start:l.lastPos] 205 | return 206 | } 207 | 208 | l.nextDelimToken() 209 | 210 | case '#': 211 | if isNameCodePoint(l.peek(0)) || startsEscape(l.peek(0), l.peek(1)) { 212 | l.Current = Hash 213 | 214 | l.step() 215 | start := l.lastPos 216 | l.nextName() 217 | l.CurrentString = l.source.Content[start:l.lastPos] 218 | return 219 | } 220 | 221 | l.nextDelimToken() 222 | 223 | case ',': 224 | l.Current = Comma 225 | l.step() 226 | 227 | case '(': 228 | l.Current = LParen 229 | l.step() 230 | 231 | case ')': 232 | l.Current = RParen 233 | l.step() 234 | 235 | case '[': 236 | l.Current = LBracket 237 | l.step() 238 | 239 | case ']': 240 | l.Current = RBracket 241 | l.step() 242 | 243 | case '{': 244 | l.Current = LCurly 245 | l.step() 246 | 247 | case '}': 248 | l.Current = RCurly 249 | l.step() 250 | 251 | case '.': 252 | if startsNumber(l.peek(0), l.peek(1), l.peek(2)) { 253 | l.nextNumericToken() 254 | return 255 | } 256 | 257 | l.nextDelimToken() 258 | 259 | case '\\': 260 | if !startsEscape(l.ch, l.peek(0)) { 261 | l.Errorf("parse error") 262 | } 263 | 264 | l.nextIdentLikeToken() 265 | 266 | case '/': 267 | if l.peek(0) != '*' { 268 | l.nextDelimToken() 269 | return 270 | } 271 | l.step() 272 | l.step() 273 | start, end := l.lastPos, -1 274 | 275 | commentToken: 276 | for { 277 | switch l.ch { 278 | case '*': 279 | maybeEnd := l.lastPos 280 | l.step() 281 | if l.ch == '/' { 282 | l.step() 283 | end = maybeEnd 284 | break commentToken 285 | } 286 | case -1: 287 | l.Errorf("unexpected EOF") 288 | default: 289 | l.step() 290 | } 291 | } 292 | l.Current = Comment 293 | l.CurrentString = l.source.Content[start:end] 294 | 295 | if !l.RetainComments { 296 | continue 297 | } 298 | 299 | case '"', '\'': 300 | mark := l.ch 301 | 302 | l.step() 303 | start, end := l.lastPos, -1 304 | 305 | stringToken: 306 | for { 307 | switch l.ch { 308 | case mark: 309 | end = l.lastPos 310 | l.step() 311 | break stringToken 312 | case '\n': 313 | l.Errorf("unclosed string: unexpected newline") 314 | case '\\': 315 | l.step() 316 | 317 | switch l.ch { 318 | case '\n': 319 | l.step() 320 | case -1: 321 | l.Errorf("unexpected EOF") 322 | default: 323 | if startsEscape(l.ch, l.peek(0)) { 324 | l.nextEscaped() 325 | } 326 | } 327 | case -1: 328 | l.Errorf("unexpected EOF") 329 | default: 330 | l.step() 331 | } 332 | } 333 | 
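// Record the string token. CurrentString holds only the contents between the quotes; the quote marks themselves are excluded.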
334 | l.Current = String 335 | l.CurrentString = l.source.Content[start:end] 336 | 337 | default: 338 | if isWhitespace(l.ch) { 339 | if !l.RetainWhitespace { 340 | l.step() 341 | 342 | // Don't return out because we only processed whitespace and 343 | // there's nothing interesting for the caller yet. We don't emit 344 | // whitespace-token. 345 | continue 346 | } 347 | 348 | for isWhitespace(l.ch) { 349 | l.step() 350 | } 351 | l.Current = Whitespace 352 | return 353 | } 354 | 355 | if unicode.IsDigit(l.ch) { 356 | l.nextNumericToken() 357 | return 358 | } 359 | 360 | // https://www.w3.org/TR/css-syntax-3/#consume-ident-like-token 361 | if isNameStartCodePoint(l.ch) { 362 | l.nextIdentLikeToken() 363 | return 364 | } 365 | 366 | l.nextDelimToken() 367 | } 368 | 369 | return 370 | } 371 | } 372 | 373 | // startsIdentifier implements https://www.w3.org/TR/css-syntax-3/#would-start-an-identifier. 374 | func startsIdentifier(p0, p1, p2 rune) bool { 375 | switch p0 { 376 | case '-': 377 | return p1 == '-' || isNameCodePoint(p1) || startsEscape(p1, p2) 378 | case '\n': 379 | return false 380 | default: 381 | return isNameCodePoint(p0) 382 | } 383 | } 384 | 385 | // startsEscape implements https://www.w3.org/TR/css-syntax-3/#starts-with-a-valid-escape 386 | func startsEscape(p0, p1 rune) bool { 387 | if p0 != '\\' { 388 | return false 389 | } 390 | 391 | if p1 == '\n' { 392 | return false 393 | } 394 | 395 | return true 396 | } 397 | 398 | // startsNumber implements https://www.w3.org/TR/css-syntax-3/#starts-with-a-number. 399 | func startsNumber(p0, p1, p2 rune) bool { 400 | if p0 == '+' || p0 == '-' { 401 | if unicode.IsDigit(p1) { 402 | return true 403 | } 404 | 405 | if p1 == '.' && unicode.IsDigit(p2) { 406 | return true 407 | } 408 | 409 | return false 410 | } 411 | 412 | if p0 == '.' && unicode.IsDigit(p1) { 413 | return true 414 | } 415 | 416 | return unicode.IsDigit(p0) 417 | } 418 | 419 | // nextNumericToken implements https://www.w3.org/TR/css-syntax-3/#consume-a-numeric-token 420 | // and sets the lexer state. 421 | func (l *Lexer) nextNumericToken() { 422 | start := l.lastPos 423 | l.nextNumber() 424 | l.CurrentNumeral = l.source.Content[start:l.lastPos] 425 | 426 | if startsIdentifier(l.ch, l.peek(0), l.peek(1)) { 427 | dimenStart := l.lastPos 428 | l.nextName() 429 | l.CurrentString = l.source.Content[dimenStart:l.lastPos] 430 | l.Current = Dimension 431 | } else if l.ch == '%' { 432 | l.Current = Percentage 433 | l.step() 434 | } else { 435 | l.Current = Number 436 | } 437 | } 438 | 439 | // isURLString is a case-insensitive comparison to see if a string is url. 440 | func isURLString(in string) bool { 441 | return len(in) == 3 && (in[0] == 'u' || in[0] == 'U') && (in[1] == 'r' || in[1] == 'R') && (in[2] == 'l' || in[2] == 'L') 442 | } 443 | 444 | // nextIdentLikeToken implements https://www.w3.org/TR/css-syntax-3/#consume-an-ident-like-token. 445 | // The spec tells us to return a bad-url-token, but we 446 | // are uninterested in best-effort interpretation for compilation. 447 | func (l *Lexer) nextIdentLikeToken() { 448 | start := l.lastPos 449 | l.nextName() 450 | l.CurrentString = l.source.Content[start:l.lastPos] 451 | 452 | // Here, we need to special case the url function because it supports unquoted string content. 
453 | if isURLString(l.CurrentString) && l.ch == '(' { 454 | l.step() 455 | for i := l.lastPos; i < len(l.source.Content) && isWhitespace(l.ch); i++ { 456 | l.step() 457 | } 458 | 459 | if l.ch == '\'' || l.ch == '"' { 460 | l.Current = FunctionStart 461 | return 462 | } 463 | 464 | l.Current = URL 465 | urlStart := l.lastPos 466 | for { 467 | switch l.ch { 468 | case ')': 469 | l.CurrentString = l.source.Content[urlStart:l.lastPos] 470 | l.step() 471 | return 472 | case -1: 473 | l.Errorf("unexpected EOF") 474 | case '"', '\'', '(': 475 | l.Errorf("unexpected token: %c", l.ch) 476 | case '\\': 477 | if startsEscape(l.ch, l.peek(0)) { 478 | l.nextEscaped() 479 | continue 480 | } 481 | 482 | l.Errorf("unexpected token: %c", l.ch) 483 | default: 484 | if isWhitespace(l.ch) { 485 | l.step() 486 | continue 487 | } 488 | 489 | if isNonPrintable(l.ch) { 490 | l.Errorf("unexpected token: %c", l.ch) 491 | } 492 | 493 | l.step() 494 | } 495 | } 496 | } 497 | 498 | // Otherwise, it's probably a normal function. 499 | if l.ch == '(' { 500 | l.step() 501 | l.Current = FunctionStart 502 | return 503 | } 504 | 505 | // Otherwise, it's an identifier. 506 | l.Current = Ident 507 | } 508 | 509 | // nextNumber implements https://www.w3.org/TR/css-syntax-3/#consume-a-number 510 | // and consumes a number. We don't distinguish between number and integer because 511 | // it doesn't matter for us. 512 | func (l *Lexer) nextNumber() { 513 | if l.ch == '+' || l.ch == '-' { 514 | l.step() 515 | } 516 | 517 | for unicode.IsDigit(l.ch) { 518 | l.step() 519 | } 520 | 521 | if l.ch == '.' && unicode.IsDigit(l.peek(0)) { 522 | l.step() 523 | l.step() 524 | 525 | for unicode.IsDigit(l.ch) { 526 | l.step() 527 | } 528 | } 529 | 530 | if p0, p1 := l.peek(0), l.peek(1); (l.ch == 'e' || l.ch == 'E') && (unicode.IsDigit(p0) || 531 | ((p0 == '+' || p0 == '-') && unicode.IsDigit(p1))) { 532 | l.step() 533 | if l.ch == '+' || l.ch == '-' { 534 | l.step() 535 | } 536 | 537 | for unicode.IsDigit(l.ch) { 538 | l.step() 539 | } 540 | } 541 | } 542 | 543 | // nextDelim consumes a codepoint and saves it as a delimiter token. 544 | func (l *Lexer) nextDelimToken() { 545 | start := l.lastPos 546 | l.step() 547 | l.Current = Delim 548 | l.CurrentString = l.source.Content[start:l.lastPos] 549 | } 550 | 551 | // nextName implements https://www.w3.org/TR/css-syntax-3/#consume-a-name. 552 | // It consumes and returns a name, stepping the lexer forward. 553 | func (l *Lexer) nextName() { 554 | for { 555 | if isNameCodePoint(l.ch) { 556 | l.step() 557 | } else if startsEscape(l.ch, l.peek(0)) { 558 | l.nextEscaped() 559 | } else { 560 | return 561 | } 562 | } 563 | } 564 | 565 | // nextEscaped consumes and returns an escaped codepoint, stepping the lexer forward. 566 | // It implements https://www.w3.org/TR/css-syntax-3/#consume-escaped-code-point. 567 | // 568 | // Note that we do not need to interpret the codepoint for our purposes - we can record 569 | // the byte offsets as-is for transformation. 570 | func (l *Lexer) nextEscaped() { 571 | l.step() 572 | for i := 0; i < 5 && isHexDigit(l.ch); i++ { 573 | l.step() 574 | if isWhitespace(l.ch) { 575 | l.step() 576 | } 577 | } 578 | } 579 | 580 | // LocationErrorf sends up a lexer panic with a custom location. 581 | func (l *Lexer) LocationErrorf(span ast.Span, f string, args ...interface{}) { 582 | panic(&Error{logging.LocationErrorf(l.source, span, f, args...)}) 583 | } 584 | 585 | // Errorf sends up a lexer panic at the range from start to lastPos. 
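// For example, lexing an unterminated string panics with a message that includes the path, line, and column ("main.css:3:7 ... unclosed string: unexpected newline") plus an annotated source excerpt; see TestLexer_Errorf in lexer_test.go.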
586 | func (l *Lexer) Errorf(f string, args ...interface{}) { 587 | l.LocationErrorf(l.TokenSpan(), f, args...) 588 | } 589 | 590 | // Error is an error that the lexer ran into. 591 | type Error struct { 592 | inner error 593 | } 594 | 595 | // Unwrap satisfies errors.Unwrap. 596 | func (l *Error) Unwrap() error { 597 | return l.inner 598 | } 599 | 600 | // Error satisfies error. 601 | func (l *Error) Error() string { 602 | return l.inner.Error() 603 | } 604 | -------------------------------------------------------------------------------- /internal/lexer/lexer_harness_test.go: -------------------------------------------------------------------------------- 1 | package lexer_test 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stephen/cssc/internal/lexer" 7 | "github.com/stephen/cssc/internal/sources" 8 | "github.com/stretchr/testify/assert" 9 | ) 10 | 11 | func NewHarness(t testing.TB, content string) *Harness { 12 | return &Harness{t, lexer.NewLexer(&sources.Source{Path: "main.css", Content: content})} 13 | } 14 | 15 | // Harness is a simple test harness to expect on the lexer. 16 | type Harness struct { 17 | testing.TB 18 | *lexer.Lexer 19 | } 20 | 21 | // ExpectAndNext asserts against the current token and drives the lexer forward. 22 | // It resets CurrentString and CurrentNumeral before calling Next(). 23 | func (h *Harness) ExpectAndNext(token lexer.Token, stringLiteral, numericLiteral string) { 24 | h.TB.Helper() 25 | assert.Equal(h.TB, token, h.Current, "expected %s, but got %s", token.String(), h.Current.String()) 26 | assert.Equal(h.TB, stringLiteral, h.CurrentString) 27 | assert.Equal(h.TB, numericLiteral, h.CurrentNumeral) 28 | 29 | h.CurrentNumeral, h.CurrentString = "", "" 30 | 31 | h.Next() 32 | } 33 | 34 | func (h *Harness) RunUntil(token lexer.Token) { 35 | for h.Current != token { 36 | h.Next() 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /internal/lexer/lexer_test.go: -------------------------------------------------------------------------------- 1 | package lexer_test 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stephen/cssc/internal/lexer" 7 | "github.com/stretchr/testify/assert" 8 | ) 9 | 10 | func TestLexer_URL(t *testing.T) { 11 | h := NewHarness(t, "url(http://test.com/image.jpg)") 12 | 13 | h.ExpectAndNext(lexer.URL, "http://test.com/image.jpg", "") 14 | h.ExpectAndNext(lexer.EOF, "", "") 15 | 16 | assert.Panics(t, func() { 17 | NewHarness(t, "url(http://test.com/image.jpg").Next() 18 | }) 19 | 20 | assert.Panics(t, func() { 21 | NewHarness(t, "url(())").Next() 22 | }) 23 | } 24 | 25 | func TestLexer_Function(t *testing.T) { 26 | h := NewHarness(t, `url("http://test.com/image.jpg")`) 27 | 28 | h.ExpectAndNext(lexer.FunctionStart, "url", "") 29 | h.ExpectAndNext(lexer.String, "http://test.com/image.jpg", "") 30 | h.ExpectAndNext(lexer.RParen, "", "") 31 | h.ExpectAndNext(lexer.EOF, "", "") 32 | } 33 | 34 | func TestLexer_Function_RGB(t *testing.T) { 35 | h := NewHarness(t, `rgb(255, 254, 253)`) 36 | 37 | h.ExpectAndNext(lexer.FunctionStart, "rgb", "") 38 | h.ExpectAndNext(lexer.Number, "", "255") 39 | h.ExpectAndNext(lexer.Comma, "", "") 40 | h.ExpectAndNext(lexer.Number, "", "254") 41 | h.ExpectAndNext(lexer.Comma, "", "") 42 | h.ExpectAndNext(lexer.Number, "", "253") 43 | h.ExpectAndNext(lexer.RParen, "", "") 44 | h.ExpectAndNext(lexer.EOF, "", "") 45 | } 46 | 47 | func TestLexer_Number(t *testing.T) { 48 | h := NewHarness(t, `20%`) 49 | 50 | h.ExpectAndNext(lexer.Percentage, "", "20") 51 | 
h.ExpectAndNext(lexer.EOF, "", "") 52 | } 53 | 54 | func TestLexer_Dimension(t *testing.T) { 55 | h := NewHarness(t, `0.15s`) 56 | 57 | h.ExpectAndNext(lexer.Dimension, "s", "0.15") 58 | h.ExpectAndNext(lexer.EOF, "", "") 59 | 60 | h = NewHarness(t, `2rem`) 61 | 62 | h.ExpectAndNext(lexer.Dimension, "rem", "2") 63 | h.ExpectAndNext(lexer.EOF, "", "") 64 | } 65 | 66 | func TestLexer_AtRule(t *testing.T) { 67 | h := NewHarness(t, `@import "test.css"`) 68 | 69 | h.ExpectAndNext(lexer.At, "import", "") 70 | h.ExpectAndNext(lexer.String, "test.css", "") 71 | h.ExpectAndNext(lexer.EOF, "", "") 72 | } 73 | 74 | func TestLexer_SimpleBlocks(t *testing.T) { 75 | h := NewHarness(t, `.class { 76 | width: 5px; 77 | } 78 | 79 | /** this is the root 80 | container id */ 81 | #id { 82 | margin: -2.75rem; 83 | content: "text"; 84 | }`) 85 | 86 | h.ExpectAndNext(lexer.Delim, ".", "") 87 | h.ExpectAndNext(lexer.Ident, "class", "") 88 | h.ExpectAndNext(lexer.LCurly, "", "") 89 | h.ExpectAndNext(lexer.Ident, "width", "") 90 | h.ExpectAndNext(lexer.Colon, "", "") 91 | h.ExpectAndNext(lexer.Dimension, "px", "5") 92 | h.ExpectAndNext(lexer.Semicolon, "", "") 93 | h.ExpectAndNext(lexer.RCurly, "", "") 94 | 95 | h.ExpectAndNext(lexer.Hash, "id", "") 96 | h.ExpectAndNext(lexer.LCurly, "", "") 97 | h.ExpectAndNext(lexer.Ident, "margin", "") 98 | h.ExpectAndNext(lexer.Colon, "", "") 99 | h.ExpectAndNext(lexer.Dimension, "rem", "-2.75") 100 | h.ExpectAndNext(lexer.Semicolon, "", "") 101 | h.ExpectAndNext(lexer.Ident, "content", "") 102 | h.ExpectAndNext(lexer.Colon, "", "") 103 | h.ExpectAndNext(lexer.String, "text", "") 104 | h.ExpectAndNext(lexer.Semicolon, "", "") 105 | h.ExpectAndNext(lexer.RCurly, "", "") 106 | } 107 | 108 | func TestLexer_BrowserPrefix(t *testing.T) { 109 | h := NewHarness(t, `[list]::-webkit-calendar-picker-indicator`) 110 | 111 | h.ExpectAndNext(lexer.LBracket, "", "") 112 | h.ExpectAndNext(lexer.Ident, "list", "") 113 | h.ExpectAndNext(lexer.RBracket, "", "") 114 | h.ExpectAndNext(lexer.Colon, "", "") 115 | h.ExpectAndNext(lexer.Colon, "", "") 116 | h.ExpectAndNext(lexer.Ident, "-webkit-calendar-picker-indicator", "") 117 | } 118 | 119 | func TestLexer_Errorf(t *testing.T) { 120 | assert.PanicsWithError(t, "main.css:3:7\nunclosed string: unexpected newline:\n\t bad: \"no good;\n\t ~~~~~~~~~", func() { 121 | NewHarness(t, `.class { 122 | something: "ok"; 123 | bad: "no good; 124 | }`).RunUntil(lexer.EOF) 125 | }) 126 | } 127 | -------------------------------------------------------------------------------- /internal/lexer/string_bench_test.go: -------------------------------------------------------------------------------- 1 | package lexer 2 | 3 | import ( 4 | "strings" 5 | "testing" 6 | ) 7 | 8 | func BenchmarkStringComparison(b *testing.B) { 9 | in := "URL" 10 | b.Run("strings.ToLower", func(b *testing.B) { 11 | b.ReportAllocs() 12 | for i := 0; i < b.N; i++ { 13 | _ = strings.ToLower(in) == "url" 14 | } 15 | }) 16 | 17 | b.Run("isURLString", func(b *testing.B) { 18 | b.ReportAllocs() 19 | for i := 0; i < b.N; i++ { 20 | isURLString(in) 21 | } 22 | }) 23 | } 24 | -------------------------------------------------------------------------------- /internal/lexer/tokens.go: -------------------------------------------------------------------------------- 1 | package lexer 2 | 3 | import "unicode" 4 | 5 | // Token is the set of lexical tokens in CSS. 
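// For example, lexing ".class { width: 5px; }" yields Delim("."), Ident("class"), LCurly, Ident("width"), Colon, Dimension("5px"), Semicolon, RCurly; see TestLexer_SimpleBlocks in lexer_test.go.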
6 | type Token int 7 | 8 | // https://www.w3.org/TR/css-syntax-3/#consume-token 9 | const ( 10 | Illegal Token = iota 11 | 12 | EOF 13 | 14 | Whitespace 15 | 16 | Comma // , 17 | Colon // : 18 | Semicolon // ; 19 | LParen // ( 20 | RParen // ) 21 | CDO // <!-- 22 | CDC // --> 23 | LBracket // [ 24 | RBracket // ] 25 | LCurly // { 26 | RCurly // } 27 | 28 | Comment // /* comment */ 29 | URL // url(...) 30 | FunctionStart // something( 31 | At // @keyword 32 | Hash // #hash 33 | Number // Number literal 34 | Percentage // Percentage literal 35 | Dimension // Dimension literal 36 | String // String literal 37 | Ident // Identifier 38 | Delim // Delimiter (used for preserving tokens for subprocessors) 39 | ) 40 | 41 | func (t Token) String() string { 42 | return tokens[t] 43 | } 44 | 45 | var tokens = [...]string{ 46 | Illegal: "Illegal", 47 | 48 | EOF: "EOF", 49 | 50 | Whitespace: "WHITESPACE", 51 | 52 | Comment: "COMMENT", 53 | Delim: "DELIMITER", 54 | 55 | Hash: "HASH", 56 | Number: "NUMBER", 57 | Percentage: "PERCENTAGE", 58 | Dimension: "DIMENSION", 59 | String: "STRING", 60 | Ident: "IDENT", 61 | URL: "URL", 62 | 63 | Comma: ",", 64 | Colon: ":", 65 | Semicolon: ";", 66 | At: "@", 67 | FunctionStart: "FUNCTION", 68 | 69 | CDO: "<!--", 70 | CDC: "-->", 71 | 72 | LParen: "(", 73 | RParen: ")", 74 | 75 | LBracket: "[", 76 | RBracket: "]", 77 | 78 | LCurly: "{", 79 | RCurly: "}", 80 | } 81 | 82 | // isHexDigit implements https://www.w3.org/TR/css-syntax-3/#hex-digit. 83 | func isHexDigit(r rune) bool { 84 | return unicode.IsDigit(r) || (r >= 'A' && r <= 'F') || (r >= 'a' && r <= 'f') 85 | } 86 | 87 | // isWhitespace implements https://www.w3.org/TR/css-syntax-3/#whitespace. 88 | func isWhitespace(r rune) bool { 89 | return r == '\n' || r == '\u0009' || r == ' ' 90 | } 91 | 92 | // isNameStartCodePoint implements https://www.w3.org/TR/css-syntax-3/#name-start-code-point. 93 | func isNameStartCodePoint(r rune) bool { 94 | return (r >= 'A' && r <= 'Z') || (r >= 'a' && r <= 'z') || r >= 0x80 || r == '_' 95 | } 96 | 97 | // isNonPrintable implements https://www.w3.org/TR/css-syntax-3/#non-printable-code-point. 98 | func isNonPrintable(r rune) bool { 99 | return (r >= 0 && r <= 0x008) || (r == 0x0b) || (r >= 0x0e && r <= 0x1f) || r == 0x7f 100 | } 101 | 102 | // isNameCodePoint implements https://www.w3.org/TR/css-syntax-3/#name-code-point. 103 | func isNameCodePoint(r rune) bool { 104 | return isNameStartCodePoint(r) || (r <= '9' && r >= '0') || r == '-' 105 | } 106 | -------------------------------------------------------------------------------- /internal/logging/logging.go: -------------------------------------------------------------------------------- 1 | package logging 2 | 3 | import ( 4 | "fmt" 5 | "io" 6 | "os" 7 | "strings" 8 | 9 | "github.com/stephen/cssc/internal/ast" 10 | "github.com/stephen/cssc/internal/sources" 11 | ) 12 | 13 | // Reporter is an interface for reporting errors and warnings. 14 | type Reporter interface { 15 | AddError(error) 16 | } 17 | 18 | // DefaultReporter is the default reporter, which writes to stderr. 19 | var DefaultReporter = WriterReporter{os.Stderr} 20 | 21 | // WriterReporter is a simple adapter for writing logs to an io.Writer. 22 | // The default reporter is a WriterReporter(os.Stderr). 23 | type WriterReporter struct { 24 | io.Writer 25 | } 26 | 27 | // AddError implements Reporter. 28 | func (w WriterReporter) AddError(err error) { 29 | fmt.Fprintln(w, err.Error()) 30 | } 31 | 32 | // LocationErrorf adds an error from a specific location.
33 | func LocationErrorf(source *sources.Source, span ast.Span, f string, args ...interface{}) error { 34 | return &locationError{fmt.Errorf(f, args...), false, source, span} 35 | } 36 | 37 | // LocationWarnf adds a warning from a specific location. 38 | func LocationWarnf(source *sources.Source, span ast.Span, f string, args ...interface{}) error { 39 | return &locationError{fmt.Errorf(f, args...), true, source, span} 40 | } 41 | 42 | // locationError is an error that happened at a specific location 43 | // in the source. 44 | type locationError struct { 45 | inner error 46 | 47 | warning bool 48 | 49 | Source *sources.Source 50 | ast.Span 51 | } 52 | 53 | // Unwrap satisfies errors.Unwrap. 54 | func (l *locationError) Unwrap() error { 55 | return l.inner 56 | } 57 | 58 | // Error implements error. It's relatively slow because it needs to 59 | // rescan the source to figure out line and column numbers. The output 60 | // looks like: 61 | // file.css:1:4 62 | // there's a problem here: 63 | // (contents) or (other thing) 64 | // ~~~~~~~~ 65 | func (l *locationError) Error() string { 66 | // Unfortunately, AnnotateSourceSpan will also call LineAndCol, so we'll 67 | // end up duplicating that effort. It's probably okay since we're unlikely to be 68 | // generating high-throughput errors. 69 | lineNumber, col := l.Source.LineAndCol(l.Span) 70 | 71 | return fmt.Sprintf("%s:%d:%d\n%s:\n%s", l.Source.Path, lineNumber, col, l.inner.Error(), AnnotateSourceSpan(l.Source, l.Span)) 72 | } 73 | 74 | // Location returns the source and span for the location error. 75 | func (l *locationError) Location() (source *sources.Source, span ast.Span) { 76 | return l.Source, l.Span 77 | } 78 | 79 | // AnnotateSourceSpan annotates a span from a single line in the source code. 
80 | // The output looks like: 81 | // (contents) or (other thing) 82 | // ~~~~~~~~ 83 | func AnnotateSourceSpan(source *sources.Source, span ast.Span) string { 84 | _, col := source.LineAndCol(span) 85 | lineSpan := source.FullLine(span) 86 | line := source.Content[lineSpan.Start:lineSpan.End] 87 | 88 | tabCount := strings.Count(line, "\t") 89 | withoutTabs := strings.ReplaceAll(line, "\t", " ") 90 | 91 | indent := strings.Repeat(" ", int(col)+tabCount-1) 92 | underline := strings.Repeat("~", span.End-span.Start)[:] 93 | excessMarker := "" 94 | if excess := span.End - lineSpan.End; excess > 0 { 95 | underline = underline[:len(underline)-excess] 96 | excessMarker = ">" 97 | } 98 | 99 | return fmt.Sprintf("\t%s\n\t%s%s%s", withoutTabs, indent, underline, excessMarker) 100 | } 101 | -------------------------------------------------------------------------------- /internal/parser/bench_test.go: -------------------------------------------------------------------------------- 1 | package parser 2 | 3 | import ( 4 | "io/ioutil" 5 | "testing" 6 | 7 | "github.com/stephen/cssc/internal/sources" 8 | "github.com/stretchr/testify/require" 9 | ) 10 | 11 | func BenchmarkParser(b *testing.B) { 12 | b.ReportAllocs() 13 | 14 | by, err := ioutil.ReadFile("../testdata/bootstrap.css") 15 | require.NoError(b, err) 16 | source := &sources.Source{ 17 | Path: "bootstrap.css", 18 | Content: string(by), 19 | } 20 | b.ResetTimer() 21 | 22 | for i := 0; i < b.N; i++ { 23 | Parse(source) 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /internal/parser/parser.go: -------------------------------------------------------------------------------- 1 | package parser 2 | 3 | import ( 4 | "github.com/stephen/cssc/internal/ast" 5 | "github.com/stephen/cssc/internal/lexer" 6 | "github.com/stephen/cssc/internal/logging" 7 | "github.com/stephen/cssc/internal/sources" 8 | ) 9 | 10 | // Parse parses an input stylesheet. 11 | func Parse(source *sources.Source) (ss *ast.Stylesheet, err error) { 12 | p := newParser(source) 13 | defer func() { 14 | if rErr := recover(); rErr != nil { 15 | if errI, ok := rErr.(*lexer.Error); ok { 16 | ss, err = nil, errI 17 | return 18 | } 19 | 20 | if errI, ok := rErr.(error); ok { 21 | panic(logging.LocationErrorf(source, p.lexer.TokenSpan(), "%v", errI)) 22 | } 23 | 24 | // Re-panic unknown issues. 25 | panic(rErr) 26 | } 27 | }() 28 | 29 | p.parse() 30 | return p.ss, nil 31 | } 32 | 33 | func newParser(source *sources.Source) *parser { 34 | return &parser{ 35 | source: source, 36 | lexer: lexer.NewLexer(source), 37 | ss: &ast.Stylesheet{}, 38 | } 39 | } 40 | 41 | type parser struct { 42 | source *sources.Source 43 | lexer *lexer.Lexer 44 | ss *ast.Stylesheet 45 | } 46 | 47 | func (p *parser) parse() { 48 | for p.lexer.Current != lexer.EOF { 49 | switch p.lexer.Current { 50 | case lexer.At: 51 | p.parseAtRule() 52 | 53 | case lexer.Semicolon: 54 | p.lexer.Next() 55 | 56 | case lexer.CDO, lexer.CDC: 57 | // From https://www.w3.org/TR/css-syntax-3/#parser-entry-points, 58 | // we'll always assume we're parsing from the top-level, so we can discard CDO/CDC.
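// For example, a stylesheet wrapped in "<!--" and "-->" (historically used to hide CSS from pre-CSS browsers) parses identically to one without the markers.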
59 | p.lexer.Next() 60 | 61 | default: 62 | p.ss.Nodes = append(p.ss.Nodes, p.parseQualifiedRule(false)) 63 | } 64 | 65 | } 66 | } 67 | 68 | func isImportantString(in string) bool { 69 | return len(in) == 9 && 70 | (in[0] == 'i' || in[0] == 'I') && 71 | (in[1] == 'm' || in[1] == 'M') && 72 | (in[2] == 'p' || in[2] == 'P') && 73 | (in[3] == 'o' || in[3] == 'O') && 74 | (in[4] == 'r' || in[4] == 'R') && 75 | (in[5] == 't' || in[5] == 'T') && 76 | (in[6] == 'a' || in[6] == 'A') && 77 | (in[7] == 'n' || in[7] == 'N') && 78 | (in[8] == 't' || in[8] == 'T') 79 | } 80 | 81 | // parseQualifiedRule parses a rule. If isKeyframes is set, the parser will assume 82 | // all preludes are keyframes percentage selectors. Otherwise, it will assume 83 | // the preludes are selector lists. 84 | func (p *parser) parseQualifiedRule(isKeyframes bool) *ast.QualifiedRule { 85 | r := &ast.QualifiedRule{ 86 | Span: p.lexer.TokenSpan(), 87 | } 88 | 89 | for { 90 | switch p.lexer.Current { 91 | case lexer.EOF: 92 | p.lexer.Errorf("unexpected EOF") 93 | 94 | case lexer.LCurly: 95 | r.Block = p.parseDeclarationBlock() 96 | r.End = r.Block.Location().End 97 | return r 98 | 99 | default: 100 | if isKeyframes { 101 | r.Prelude = p.parseKeyframeSelectorList() 102 | continue 103 | } 104 | 105 | r.Prelude = p.parseSelectorList() 106 | } 107 | } 108 | } 109 | 110 | // parseDeclarationBlock parses a {} block with declarations, e.g. 111 | // { width: 1px; }. 112 | func (p *parser) parseDeclarationBlock() *ast.DeclarationBlock { 113 | block := &ast.DeclarationBlock{ 114 | Span: p.lexer.TokenSpan(), 115 | } 116 | p.lexer.Next() 117 | 118 | for p.lexer.Current != lexer.RCurly { 119 | block.Declarations = append(block.Declarations, p.parseDeclarationOrFallback()) 120 | if p.lexer.Current == lexer.Semicolon { 121 | p.lexer.Next() 122 | } 123 | } 124 | block.End = p.lexer.TokenEnd() 125 | p.lexer.Next() 126 | return block 127 | } 128 | 129 | func (p *parser) parseDeclarationOrFallback() (rv ast.Declarationish) { 130 | lexerState := *p.lexer 131 | defer func() { 132 | if err := recover(); err != nil { 133 | // Restore the lexer and try again to read the value. 134 | p.lexer = &lexerState 135 | rv = p.parseRaw() 136 | } 137 | }() 138 | 139 | return p.parseDeclaration() 140 | } 141 | 142 | func (p *parser) parseRaw() *ast.Raw { 143 | raw := &ast.Raw{ 144 | Span: p.lexer.TokenSpan(), 145 | } 146 | 147 | for p.lexer.Current != lexer.Semicolon { 148 | p.lexer.Next() 149 | } 150 | raw.End = p.lexer.TokenEnd() 151 | p.lexer.Next() 152 | 153 | raw.Value = p.source.Content[raw.Start:raw.End] 154 | 155 | return raw 156 | } 157 | 158 | func (p *parser) parseDeclaration() *ast.Declaration { 159 | var prefix string 160 | if p.lexer.Current == lexer.Delim { 161 | // Hack: support old browser declarations like *letter-spacing. 162 | if p.lexer.CurrentString == "*" { 163 | prefix = p.lexer.CurrentString 164 | p.lexer.Next() 165 | } 166 | } 167 | 168 | property := p.lexer.CurrentString 169 | if prefix != "" { 170 | // XXX: we can slice into the source instead of doing an allocation here. 
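// e.g. the legacy declaration "*letter-spacing: 1px" keeps the leading "*" as part of the property name.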
171 | property = prefix + property 172 | } 173 | 174 | decl := &ast.Declaration{ 175 | Span: p.lexer.TokenSpan(), 176 | Property: property, 177 | } 178 | p.lexer.Expect(lexer.Ident) 179 | p.lexer.Expect(lexer.Colon) 180 | 181 | for { 182 | switch p.lexer.Current { 183 | case lexer.EOF: 184 | p.lexer.Errorf("unexpected EOF") 185 | 186 | case lexer.Comma: 187 | decl.Values = append(decl.Values, &ast.Comma{Span: p.lexer.TokenSpan()}) 188 | p.lexer.Next() 189 | 190 | case lexer.Delim: 191 | if p.lexer.CurrentString != "!" { 192 | decl.Values = append(decl.Values, &ast.Raw{Span: p.lexer.TokenSpan(), Value: p.lexer.CurrentString}) 193 | p.lexer.Next() 194 | continue 195 | } 196 | p.lexer.Next() 197 | 198 | if !isImportantString(p.lexer.CurrentString) { 199 | p.lexer.Errorf("expected !important, unexpected token: %s", p.lexer.CurrentString) 200 | } 201 | decl.End = p.lexer.TokenEnd() 202 | p.lexer.Next() 203 | decl.Important = true 204 | 205 | default: 206 | val := p.parseValue() 207 | if val == nil { 208 | if len(decl.Values) == 0 { 209 | p.lexer.Errorf("declaration must have a value") 210 | } 211 | if lastValueEnd := decl.Values[len(decl.Values)-1].Location().End; lastValueEnd > decl.End { 212 | decl.End = lastValueEnd 213 | } 214 | 215 | return decl 216 | } 217 | 218 | decl.Values = append(decl.Values, val) 219 | } 220 | } 221 | } 222 | 223 | func (p *parser) parseKeyframeSelectorList() *ast.KeyframeSelectorList { 224 | l := &ast.KeyframeSelectorList{ 225 | Span: p.lexer.TokenSpan(), 226 | } 227 | 228 | for { 229 | if p.lexer.Current == lexer.EOF { 230 | p.lexer.Errorf("unexpected EOF") 231 | } 232 | 233 | switch p.lexer.Current { 234 | case lexer.Percentage: 235 | l.Selectors = append(l.Selectors, &ast.Percentage{ 236 | Span: p.lexer.TokenSpan(), 237 | Value: p.lexer.CurrentNumeral, 238 | }) 239 | 240 | case lexer.Ident: 241 | if p.lexer.CurrentString != "from" && p.lexer.CurrentString != "to" { 242 | p.lexer.Errorf("unexpected string: %s. keyframe selector can only be from, to, or a percentage", p.lexer.CurrentString) 243 | } 244 | l.Selectors = append(l.Selectors, &ast.Identifier{ 245 | Span: p.lexer.TokenSpan(), 246 | Value: p.lexer.CurrentString, 247 | }) 248 | 249 | default: 250 | p.lexer.Errorf("unexpected token: %s. keyframe selector can only be from, to, or a percentage", p.lexer.Current.String()) 251 | } 252 | p.lexer.Next() 253 | 254 | if p.lexer.Current == lexer.Comma { 255 | p.lexer.Next() 256 | continue 257 | } 258 | 259 | break 260 | } 261 | 262 | if len(l.Selectors) == 0 { 263 | p.lexer.Errorf("keyframes rule must have at least one selector (from, to, or a percentage)") 264 | } 265 | 266 | l.End = l.Selectors[len(l.Selectors)-1].Location().End 267 | return l 268 | } 269 | 270 | // parseMathExpression does recursive-descent parsing for sums, 271 | // products, then individual values. See https://www.w3.org/TR/css-values-3/#calc-syntax.
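// For example (illustrative), "calc(1px + 2 * 3rem)" parses as 1px + (2 * 3rem): parseMathSum takes whole products as its operands, parseMathProduct binds tighter, and parentheses are handled by parseMathParenthesizedExpression below.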
272 | func (p *parser) parseMathExpression() ast.Value { 273 | return p.parseMathSum() 274 | } 275 | 276 | func (p *parser) parseMathSum() ast.Value { 277 | left := p.parseMathProduct() 278 | 279 | for p.lexer.Current == lexer.Delim && (p.lexer.CurrentString == "+" || p.lexer.CurrentString == "-") { 280 | op := p.lexer.CurrentString 281 | p.lexer.Expect(lexer.Delim) 282 | 283 | right := p.parseMathProduct() 284 | 285 | span := left.Location() 286 | span.End = right.Location().End 287 | 288 | left = &ast.MathExpression{ 289 | Span: span, 290 | Left: left, 291 | Operator: op, 292 | Right: right, 293 | } 294 | } 295 | 296 | return left 297 | } 298 | 299 | func (p *parser) parseMathProduct() ast.Value { 300 | left := p.parseMathParenthesizedExpression() 301 | 302 | for p.lexer.Current == lexer.Delim && (p.lexer.CurrentString == "*" || p.lexer.CurrentString == "/") { 303 | op := p.lexer.CurrentString 304 | p.lexer.Expect(lexer.Delim) 305 | 306 | right := p.parseMathParenthesizedExpression() 307 | 308 | span := left.Location() 309 | span.End = right.Location().End 310 | 311 | left = &ast.MathExpression{ 312 | Span: span, 313 | Left: left, 314 | Operator: op, 315 | Right: right, 316 | } 317 | } 318 | 319 | return left 320 | } 321 | 322 | func (p *parser) parseMathParenthesizedExpression() ast.Value { 323 | if p.lexer.Current != lexer.LParen { 324 | return p.parseValue() 325 | } 326 | 327 | loc := p.lexer.TokenSpan() 328 | p.lexer.Expect(lexer.LParen) 329 | expr := &ast.MathParenthesizedExpression{ 330 | Span: loc, 331 | Value: p.parseMathSum(), 332 | } 333 | expr.End = p.lexer.TokenSpan().End 334 | p.lexer.Expect(lexer.RParen) 335 | return expr 336 | } 337 | 338 | // parseValue parses a possible ast value at the current position. Callers 339 | // can set allowMathOperators if the enclosing context allows math expressions. 340 | // See: https://www.w3.org/TR/css-values-4/#math-function. 
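// For example, in "margin: 1px auto" parseValue returns a Dimension for "1px" and then an Identifier for "auto"; at the terminating semicolon it returns nil so the caller knows the value list has ended.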
341 | func (p *parser) parseValue() ast.Value { 342 | switch p.lexer.Current { 343 | case lexer.Dimension: 344 | defer p.lexer.Next() 345 | return &ast.Dimension{ 346 | Span: p.lexer.TokenSpan(), 347 | 348 | Unit: p.lexer.CurrentString, 349 | Value: p.lexer.CurrentNumeral, 350 | } 351 | 352 | case lexer.LBracket: 353 | b := &ast.Brackets{ 354 | Span: p.lexer.TokenSpan(), 355 | } 356 | p.lexer.Next() 357 | 358 | for { 359 | switch p.lexer.Current { 360 | case lexer.RBracket: 361 | b.End = p.lexer.TokenEnd() 362 | p.lexer.Next() 363 | return b 364 | 365 | default: 366 | v := p.parseValue() 367 | if v == nil { 368 | p.lexer.Errorf("expected value") 369 | } 370 | 371 | b.Values = append(b.Values, v) 372 | } 373 | } 374 | 375 | case lexer.Percentage: 376 | defer p.lexer.Next() 377 | return &ast.Dimension{ 378 | Span: p.lexer.TokenSpan(), 379 | Unit: "%", 380 | Value: p.lexer.CurrentNumeral, 381 | } 382 | 383 | case lexer.Number: 384 | defer p.lexer.Next() 385 | return &ast.Dimension{ 386 | Span: p.lexer.TokenSpan(), 387 | Value: p.lexer.CurrentNumeral, 388 | } 389 | 390 | case lexer.Ident: 391 | defer p.lexer.Next() 392 | return &ast.Identifier{ 393 | Span: p.lexer.TokenSpan(), 394 | Value: p.lexer.CurrentString, 395 | } 396 | 397 | case lexer.Hash: 398 | defer p.lexer.Next() 399 | return &ast.HexColor{ 400 | Span: p.lexer.TokenSpan(), 401 | RGBA: p.lexer.CurrentString, 402 | } 403 | 404 | case lexer.String: 405 | defer p.lexer.Next() 406 | return &ast.String{ 407 | Span: p.lexer.TokenSpan(), 408 | Value: p.lexer.CurrentString, 409 | } 410 | 411 | case lexer.URL: 412 | defer p.lexer.Next() 413 | return &ast.Function{ 414 | Span: p.lexer.TokenSpan(), 415 | Name: "url", 416 | Arguments: []ast.Value{&ast.Identifier{Value: p.lexer.CurrentString}}, 417 | } 418 | 419 | case lexer.FunctionStart: 420 | fn := &ast.Function{ 421 | Span: p.lexer.TokenSpan(), 422 | Name: p.lexer.CurrentString, 423 | } 424 | p.lexer.Next() 425 | 426 | arguments: 427 | for { 428 | switch p.lexer.Current { 429 | case lexer.RParen: 430 | fn.End = p.lexer.TokenEnd() 431 | p.lexer.Next() 432 | break arguments 433 | case lexer.Comma: 434 | fn.Arguments = append(fn.Arguments, &ast.Comma{ 435 | Span: p.lexer.TokenSpan(), 436 | }) 437 | p.lexer.Next() 438 | default: 439 | if fn.IsMath() { 440 | fn.Arguments = append(fn.Arguments, p.parseMathExpression()) 441 | continue 442 | } 443 | val := p.parseValue() 444 | if val == nil { 445 | // XXX: there's probably some backtracking to do here? 446 | break arguments 447 | } 448 | 449 | fn.Arguments = append(fn.Arguments, val) 450 | } 451 | } 452 | 453 | return fn 454 | default: 455 | return nil 456 | } 457 | } 458 | 459 | func (p *parser) parseAtRule() { 460 | switch p.lexer.CurrentString { 461 | case "import": 462 | p.parseImportAtRule() 463 | 464 | case "media": 465 | p.parseMediaAtRule() 466 | 467 | case "keyframes", "-webkit-keyframes", "-o-keyframes": 468 | p.parseKeyframes() 469 | 470 | case "custom-media": 471 | p.parseCustomMediaAtRule() 472 | 473 | default: 474 | p.parseGenericAtRule() 475 | } 476 | } 477 | 478 | // parseImportAtRule parses an import at rule. It roughly implements 479 | // https://www.w3.org/TR/css-cascade-4/#at-import. 
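// All three specifier forms are accepted, optionally followed by a media query list, e.g. (illustrative): @import url(foo.css); @import url("foo.css"); @import "foo.css" screen;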
480 | func (p *parser) parseImportAtRule() { 481 | prelude := &ast.String{} 482 | 483 | imp := &ast.AtRule{ 484 | Span: p.lexer.TokenSpan(), 485 | Name: p.lexer.CurrentString, 486 | Preludes: []ast.AtPrelude{prelude}, 487 | } 488 | p.lexer.Next() 489 | 490 | switch p.lexer.Current { 491 | case lexer.URL: 492 | prelude.Span = p.lexer.TokenSpan() 493 | prelude.Value = p.lexer.CurrentString 494 | p.ss.Imports = append(p.ss.Imports, ast.ImportSpecifier{ 495 | Value: prelude.Value, 496 | AtRule: imp, 497 | }) 498 | p.lexer.Next() 499 | 500 | case lexer.FunctionStart: 501 | prelude.Span = p.lexer.TokenSpan() 502 | if p.lexer.CurrentString != "url" { 503 | p.lexer.Errorf("@import target must be a url or string") 504 | } 505 | p.lexer.Next() 506 | 507 | prelude.Value = p.lexer.CurrentString 508 | p.ss.Imports = append(p.ss.Imports, ast.ImportSpecifier{ 509 | Value: prelude.Value, 510 | AtRule: imp, 511 | }) 512 | p.lexer.Expect(lexer.String) 513 | prelude.Span.End = p.lexer.TokenEnd() 514 | p.lexer.Expect(lexer.RParen) 515 | 516 | case lexer.String: 517 | prelude.Span = p.lexer.TokenSpan() 518 | prelude.Value = p.lexer.CurrentString 519 | p.ss.Imports = append(p.ss.Imports, ast.ImportSpecifier{ 520 | Value: prelude.Value, 521 | AtRule: imp, 522 | }) 523 | p.lexer.Expect(lexer.String) 524 | 525 | default: 526 | p.lexer.Errorf("unexpected import specifier") 527 | } 528 | 529 | // XXX: also support @supports. 530 | mq := p.parseMediaQueryList() 531 | if mq != nil { 532 | imp.Preludes = append(imp.Preludes, mq) 533 | } 534 | 535 | imp.End = imp.Preludes[len(imp.Preludes)-1].Location().End 536 | p.ss.Nodes = append(p.ss.Nodes, imp) 537 | } 538 | 539 | // parseKeyframes parses a keyframes at rule. It roughly implements 540 | // https://www.w3.org/TR/css-animations-1/#keyframes 541 | func (p *parser) parseKeyframes() { 542 | r := &ast.AtRule{ 543 | Span: p.lexer.TokenSpan(), 544 | Name: p.lexer.CurrentString, 545 | } 546 | p.lexer.Next() 547 | 548 | switch p.lexer.Current { 549 | case lexer.String: 550 | r.Preludes = append(r.Preludes, &ast.String{ 551 | Span: p.lexer.TokenSpan(), 552 | Value: p.lexer.CurrentString, 553 | }) 554 | 555 | case lexer.Ident: 556 | r.Preludes = append(r.Preludes, &ast.Identifier{ 557 | Span: p.lexer.TokenSpan(), 558 | Value: p.lexer.CurrentString, 559 | }) 560 | 561 | default: 562 | p.lexer.Errorf("unexpected token %s, expected string or identifier for keyframes", p.lexer.Current.String()) 563 | } 564 | p.lexer.Next() 565 | 566 | block := &ast.QualifiedRuleBlock{ 567 | Span: p.lexer.TokenSpan(), 568 | } 569 | r.Block = block 570 | p.lexer.Expect(lexer.LCurly) 571 | for { 572 | switch p.lexer.Current { 573 | case lexer.EOF: 574 | p.lexer.Errorf("unexpected EOF") 575 | 576 | case lexer.RCurly: 577 | p.ss.Nodes = append(p.ss.Nodes, r) 578 | block.End = p.lexer.TokenEnd() 579 | r.End = block.End 580 | p.lexer.Next() 581 | return 582 | 583 | default: 584 | block.Rules = append(block.Rules, p.parseQualifiedRule(true)) 585 | } 586 | } 587 | } 588 | 589 | // parseMediaAtRule parses a media at rule. It roughly implements 590 | // https://www.w3.org/TR/mediaqueries-4/#media. 
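// For example, "@media screen and (min-width: 600px) { ... }" becomes an AtRule whose single prelude is a MediaQueryList and whose block is a QualifiedRuleBlock containing the nested rules.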
591 | func (p *parser) parseMediaAtRule() { 592 | r := &ast.AtRule{ 593 | Span: p.lexer.TokenSpan(), 594 | Name: p.lexer.CurrentString, 595 | } 596 | p.lexer.Next() 597 | 598 | r.Preludes = []ast.AtPrelude{p.parseMediaQueryList()} 599 | 600 | block := &ast.QualifiedRuleBlock{ 601 | Span: p.lexer.TokenSpan(), 602 | } 603 | r.Block = block 604 | p.lexer.Expect(lexer.LCurly) 605 | for { 606 | switch p.lexer.Current { 607 | case lexer.EOF: 608 | p.lexer.Errorf("unexpected EOF") 609 | 610 | case lexer.RCurly: 611 | p.ss.Nodes = append(p.ss.Nodes, r) 612 | block.End = p.lexer.TokenEnd() 613 | r.End = block.End 614 | p.lexer.Next() 615 | return 616 | 617 | default: 618 | block.Rules = append(block.Rules, p.parseQualifiedRule(false)) 619 | } 620 | } 621 | } 622 | 623 | func (p *parser) parseMediaQueryList() *ast.MediaQueryList { 624 | l := &ast.MediaQueryList{ 625 | Span: p.lexer.TokenSpan(), 626 | } 627 | 628 | for { 629 | if p.lexer.Current == lexer.EOF { 630 | p.lexer.Errorf("unexpected EOF") 631 | } 632 | 633 | q := p.parseMediaQuery() 634 | if q != nil { 635 | l.Queries = append(l.Queries, q) 636 | } 637 | 638 | if p.lexer.Current == lexer.Comma { 639 | p.lexer.Next() 640 | continue 641 | } 642 | 643 | break 644 | } 645 | 646 | if len(l.Queries) == 0 { 647 | return nil 648 | } 649 | 650 | l.End = l.Queries[len(l.Queries)-1].End 651 | return l 652 | } 653 | 654 | func (p *parser) parseMediaQuery() *ast.MediaQuery { 655 | q := &ast.MediaQuery{ 656 | Span: p.lexer.TokenSpan(), 657 | } 658 | 659 | for { 660 | switch p.lexer.Current { 661 | case lexer.EOF: 662 | p.lexer.Errorf("unexpected EOF") 663 | 664 | case lexer.LParen: 665 | q.Parts = append(q.Parts, p.parseMediaFeature()) 666 | 667 | case lexer.Ident: 668 | q.Parts = append(q.Parts, p.parseValue().(*ast.Identifier)) 669 | 670 | default: 671 | if len(q.Parts) > 0 { 672 | q.End = q.Parts[len(q.Parts)-1].Location().End 673 | return q 674 | } 675 | 676 | return nil 677 | } 678 | } 679 | } 680 | 681 | func (p *parser) parseMediaFeature() ast.MediaFeature { 682 | startLoc := p.lexer.TokenSpan() 683 | p.lexer.Expect(lexer.LParen) 684 | 685 | firstValue := p.parseValue() 686 | 687 | switch p.lexer.Current { 688 | case lexer.RParen: 689 | startLoc.End = p.lexer.TokenEnd() 690 | p.lexer.Next() 691 | ident, ok := firstValue.(*ast.Identifier) 692 | if !ok { 693 | // XXX: this location is wrong. also, can't figure out type since we lost the lexer value. 694 | p.lexer.Errorf("expected identifier in media feature with no value") 695 | } 696 | 697 | return &ast.MediaFeaturePlain{ 698 | Span: startLoc, 699 | Property: ident, 700 | } 701 | 702 | case lexer.Colon: 703 | p.lexer.Next() 704 | ident, ok := firstValue.(*ast.Identifier) 705 | if !ok { 706 | // XXX: this location is wrong. also, can't figure out type since we lost the lexer value. 707 | p.lexer.Errorf("expected identifier in non-range media feature") 708 | } 709 | 710 | secondValue := p.parseValue() 711 | 712 | plain := &ast.MediaFeaturePlain{ 713 | Span: startLoc, 714 | Property: ident, 715 | Value: secondValue, 716 | } 717 | // XXX: this one excludes the right paren even though the left paren is included. 
718 | plain.End = p.lexer.TokenEnd() 719 | p.lexer.Expect(lexer.RParen) 720 | return plain 721 | 722 | case lexer.Delim: 723 | r := &ast.MediaFeatureRange{ 724 | Span: startLoc, 725 | LeftValue: firstValue, 726 | } 727 | r.Operator = p.parseMediaRangeOperator() 728 | 729 | secondValue := p.parseValue() 730 | 731 | maybeIdent, ok := secondValue.(*ast.Identifier) 732 | if !ok { 733 | // Since the second value isn't an identifier, we expect something like 734 | // width < 600px, so the first value must have been an identifier. 735 | maybeIdent, ok := firstValue.(*ast.Identifier) 736 | if !ok { 737 | p.lexer.Errorf("expected identifier") 738 | } 739 | 740 | r.LeftValue = nil 741 | r.Property = maybeIdent 742 | r.RightValue = secondValue 743 | 744 | r.End = p.lexer.TokenEnd() 745 | p.lexer.Expect(lexer.RParen) 746 | return r 747 | } 748 | 749 | // Otherwise, the second value is an identifier and we are looking at something like 750 | // 600px < width < 800px. 751 | r.Property = maybeIdent 752 | 753 | if p.lexer.Current == lexer.Delim { 754 | op := p.parseMediaRangeOperator() 755 | if op != r.Operator { 756 | p.lexer.Errorf("operators in a range must be the same") 757 | } 758 | r.RightValue = p.parseValue() 759 | } 760 | 761 | r.End = p.lexer.TokenEnd() 762 | p.lexer.Expect(lexer.RParen) 763 | return r 764 | } 765 | 766 | p.lexer.Errorf("unexpected token: %s", p.lexer.Current.String()) 767 | return nil 768 | } 769 | 770 | var ( 771 | mediaOperatorLT = "<" 772 | mediaOperatorLTE = "<=" 773 | mediaOperatorGT = ">" 774 | mediaOperatorGTE = ">=" 775 | ) 776 | 777 | func (p *parser) parseMediaRangeOperator() string { 778 | operator := p.lexer.CurrentString 779 | p.lexer.Next() 780 | 781 | if p.lexer.Current == lexer.Delim { 782 | if p.lexer.CurrentString != "=" || (operator != "<" && operator != ">") { 783 | p.lexer.Errorf("unexpected token: %s", p.lexer.Current.String()) 784 | } 785 | 786 | p.lexer.Next() 787 | 788 | switch operator { 789 | case "<": 790 | return mediaOperatorLTE 791 | case ">": 792 | return mediaOperatorGTE 793 | default: 794 | p.lexer.Errorf("unknown operator: %s", operator) 795 | } 796 | } 797 | 798 | switch operator { 799 | case "<": 800 | return mediaOperatorLT 801 | case ">": 802 | return mediaOperatorGT 803 | default: 804 | p.lexer.Errorf("unknown operator: %s", operator) 805 | return "" 806 | } 807 | } 808 | 809 | // parseCustomMediaAtRule parses a @custom-media rule. 810 | // See: https://www.w3.org/TR/mediaqueries-5/#custom-mq. 811 | func (p *parser) parseCustomMediaAtRule() { 812 | r := &ast.AtRule{ 813 | Span: p.lexer.TokenSpan(), 814 | Name: p.lexer.CurrentString, 815 | } 816 | p.lexer.Next() 817 | 818 | maybeName := p.parseValue() 819 | name, ok := maybeName.(*ast.Identifier) 820 | if !ok { 821 | // XXX: show received type 822 | p.lexer.Errorf("expected identifier") 823 | } 824 | 825 | r.Preludes = append(r.Preludes, name) 826 | queries := p.parseMediaQueryList() 827 | if len(queries.Queries) != 1 { 828 | p.lexer.Errorf("@custom-media rule requires a single media query argument") 829 | } 830 | r.Preludes = append(r.Preludes, queries.Queries[0]) 831 | r.End = queries.Queries[0].End 832 | 833 | p.ss.Nodes = append(p.ss.Nodes, r) 834 | } 835 | 836 | // parseGenericAtRule parses a generic atrule like @font-face. 
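// For example, an "@font-face { ... }" rule becomes an AtRule with no preludes whose block is a plain DeclarationBlock.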
837 | func (p *parser) parseGenericAtRule() { 838 | r := &ast.AtRule{ 839 | Span: p.lexer.TokenSpan(), 840 | Name: p.lexer.CurrentString, 841 | } 842 | p.lexer.Next() 843 | 844 | r.Block = p.parseDeclarationBlock() 845 | r.End = r.Block.Location().End 846 | 847 | p.ss.Nodes = append(p.ss.Nodes, r) 848 | } 849 | -------------------------------------------------------------------------------- /internal/parser/selectors.go: -------------------------------------------------------------------------------- 1 | package parser 2 | 3 | import ( 4 | "strings" 5 | 6 | "github.com/stephen/cssc/internal/ast" 7 | "github.com/stephen/cssc/internal/lexer" 8 | ) 9 | 10 | func (p *parser) parseSelectorList() *ast.SelectorList { 11 | l := &ast.SelectorList{ 12 | Span: p.lexer.TokenSpan(), 13 | } 14 | 15 | for { 16 | if p.lexer.Current == lexer.EOF { 17 | p.lexer.Errorf("unexpected EOF") 18 | } 19 | 20 | l.Selectors = append(l.Selectors, p.parseSelector()) 21 | 22 | if p.lexer.Current == lexer.Comma { 23 | p.lexer.Next() 24 | continue 25 | } 26 | 27 | break 28 | } 29 | 30 | // parseSelector will always return a selector or error, so we should 31 | // have at least one Selector. 32 | l.End = l.Selectors[len(l.Selectors)-1].End 33 | 34 | return l 35 | } 36 | 37 | func (p *parser) parseSelector() *ast.Selector { 38 | s := &ast.Selector{ 39 | Span: p.lexer.TokenSpan(), 40 | } 41 | 42 | prevRetainWhitespace := p.lexer.RetainWhitespace 43 | p.lexer.RetainWhitespace = true 44 | defer func() { 45 | p.lexer.RetainWhitespace = prevRetainWhitespace 46 | }() 47 | 48 | for { 49 | switch p.lexer.Current { 50 | case lexer.EOF: 51 | p.lexer.Errorf("unexpected EOF") 52 | 53 | case lexer.Whitespace: 54 | s.Parts = append(s.Parts, &ast.Whitespace{Span: p.lexer.TokenSpan()}) 55 | p.lexer.Next() 56 | 57 | case lexer.Ident: 58 | s.Parts = append(s.Parts, &ast.TypeSelector{ 59 | Span: p.lexer.TokenSpan(), 60 | Name: p.lexer.CurrentString, 61 | }) 62 | p.lexer.Next() 63 | 64 | case lexer.Hash: 65 | s.Parts = append(s.Parts, &ast.IDSelector{ 66 | Span: p.lexer.TokenSpan(), 67 | Name: p.lexer.CurrentString, 68 | }) 69 | p.lexer.Next() 70 | 71 | case lexer.Delim: 72 | switch p.lexer.CurrentString { 73 | case ".": 74 | span := p.lexer.TokenSpan() 75 | p.lexer.Next() 76 | cls := &ast.ClassSelector{ 77 | Span: span, 78 | Name: p.lexer.CurrentString, 79 | } 80 | s.Parts = append(s.Parts, cls) 81 | cls.End = p.lexer.TokenEnd() 82 | p.lexer.Expect(lexer.Ident) 83 | 84 | case "+", ">", "~", "|": 85 | s.Parts = append(s.Parts, &ast.CombinatorSelector{ 86 | Span: p.lexer.TokenSpan(), 87 | Operator: p.lexer.CurrentString, 88 | }) 89 | p.lexer.Next() 90 | 91 | case "*": 92 | s.Parts = append(s.Parts, &ast.TypeSelector{ 93 | Span: p.lexer.TokenSpan(), 94 | Name: p.lexer.CurrentString, 95 | }) 96 | p.lexer.Next() 97 | 98 | default: 99 | p.lexer.Errorf("unexpected delimiter: %s", p.lexer.CurrentString) 100 | } 101 | 102 | case lexer.Colon: 103 | span := p.lexer.TokenSpan() 104 | p.lexer.Next() 105 | 106 | // Wrap it in a PseudoElementSelector if there are two colons.
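// For example, "span::after" becomes a PseudoElementSelector wrapping the "after" pseudo-class node, while "a:visited" stays a bare PseudoClassSelector.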
107 | var wrapper bool 108 | var wrapperLocation ast.Span 109 | if p.lexer.Current == lexer.Colon { 110 | wrapper = true 111 | wrapperLocation = span 112 | span = p.lexer.TokenSpan() 113 | p.lexer.Next() 114 | } 115 | 116 | pc := &ast.PseudoClassSelector{ 117 | Span: span, 118 | Name: p.lexer.CurrentString, 119 | } 120 | 121 | switch p.lexer.Current { 122 | case lexer.Ident: 123 | pc.End = p.lexer.TokenEnd() 124 | p.lexer.Next() 125 | 126 | case lexer.FunctionStart: 127 | p.lexer.Next() 128 | 129 | if pc.Name == "nth-child" || pc.Name == "nth-last-child" || pc.Name == "nth-of-type" || pc.Name == "nth-last-of-type" { 130 | switch p.lexer.Current { 131 | case lexer.Number, lexer.Dimension: 132 | pc.Arguments = p.parseANPlusB() 133 | case lexer.Ident: 134 | if p.lexer.CurrentString == "n" || p.lexer.CurrentString == "-n" { 135 | pc.Arguments = p.parseANPlusB() 136 | break 137 | } 138 | 139 | if p.lexer.CurrentString != "even" && p.lexer.CurrentString != "odd" { 140 | p.lexer.Errorf("expected even, odd, or an+b syntax") 141 | } 142 | pc.Arguments = &ast.Identifier{ 143 | Span: p.lexer.TokenSpan(), 144 | Value: p.lexer.CurrentString, 145 | } 146 | p.lexer.Next() 147 | } 148 | } else { 149 | pc.Arguments = p.parseSelectorList() 150 | } 151 | pc.End = p.lexer.TokenEnd() 152 | p.lexer.Expect(lexer.RParen) 153 | 154 | default: 155 | p.lexer.Errorf("unexpected token: %s", p.lexer.Current.String()) 156 | } 157 | 158 | if wrapper { 159 | wrapped := &ast.PseudoElementSelector{ 160 | Span: wrapperLocation, 161 | Inner: pc, 162 | } 163 | wrapped.End = pc.End 164 | s.Parts = append(s.Parts, wrapped) 165 | break 166 | } 167 | 168 | s.Parts = append(s.Parts, pc) 169 | 170 | case lexer.LBracket: 171 | s.Parts = append(s.Parts, p.parseAttributeSelector()) 172 | 173 | default: 174 | if len(s.Parts) == 0 { 175 | p.lexer.Errorf("expected selector") 176 | } 177 | s.End = s.Parts[len(s.Parts)-1].Location().End 178 | return s 179 | } 180 | } 181 | } 182 | 183 | func (p *parser) parseAttributeSelector() *ast.AttributeSelector { 184 | prev := p.lexer.RetainWhitespace 185 | p.lexer.RetainWhitespace = false 186 | defer func() { 187 | p.lexer.RetainWhitespace = prev 188 | }() 189 | 190 | startLoc := p.lexer.TokenSpan() 191 | p.lexer.Next() 192 | attr := &ast.AttributeSelector{ 193 | Span: startLoc, 194 | Property: p.lexer.CurrentString, 195 | } 196 | 197 | p.lexer.Expect(lexer.Ident) 198 | if p.lexer.Current == lexer.RBracket { 199 | attr.End = p.lexer.TokenEnd() 200 | p.lexer.Next() 201 | return attr 202 | } 203 | 204 | switch p.lexer.CurrentString { 205 | case "^", "~", "$", "*", "|": 206 | attr.PreOperator = p.lexer.CurrentString 207 | p.lexer.Next() 208 | 209 | if p.lexer.CurrentString != "=" { 210 | p.lexer.Errorf("expected =, got %s: ", p.lexer.CurrentString) 211 | } 212 | p.lexer.Expect(lexer.Delim) 213 | default: 214 | p.lexer.Expect(lexer.Delim) 215 | } 216 | 217 | attr.Value = p.parseValue() 218 | if attr.Value == nil { 219 | p.lexer.Errorf("value must be specified") 220 | } 221 | 222 | attr.End = p.lexer.TokenEnd() 223 | 224 | if p.lexer.Current == lexer.Ident && (p.lexer.CurrentString == "s" || p.lexer.CurrentString == "i") { 225 | attr.Modifier = p.lexer.CurrentString 226 | attr.End = p.lexer.TokenEnd() 227 | p.lexer.Next() 228 | } 229 | 230 | p.lexer.Expect(lexer.RBracket) 231 | return attr 232 | } 233 | 234 | func (p *parser) parseANPlusB() *ast.ANPlusB { 235 | prev := p.lexer.RetainWhitespace 236 | p.lexer.RetainWhitespace = false 237 | defer func() { 238 | p.lexer.RetainWhitespace = prev 239 | }() 240 | 
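// Examples of accepted forms (illustrative): "2n" and "2n-1" arrive as Dimension tokens, bare "n" and "-n" as Ident tokens, and a lone offset like "3" as a Number; "even" and "odd" are handled by the caller before this function is reached.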
241 | v := &ast.ANPlusB{Span: p.lexer.TokenSpan()} 242 | 243 | if p.lexer.Current == lexer.Dimension && p.lexer.CurrentString == "n" { 244 | v.A = p.lexer.CurrentNumeral 245 | v.End = p.lexer.TokenEnd() 246 | p.lexer.Next() 247 | } else if p.lexer.Current == lexer.Dimension && strings.HasPrefix(p.lexer.CurrentString, "n") { 248 | v.A = p.lexer.CurrentNumeral 249 | v.End = p.lexer.TokenEnd() 250 | 251 | numeral := p.lexer.CurrentString[1:] 252 | 253 | if strings.HasPrefix(numeral, "-") { 254 | v.Operator = "-" 255 | } else if strings.HasPrefix(numeral, "+") { 256 | v.Operator = "+" 257 | } else { 258 | p.lexer.Errorf("expected +/- as part of An+B") 259 | } 260 | 261 | v.B = numeral[1:] 262 | if len(v.B) == 0 { 263 | p.lexer.Errorf("expected number after operator") 264 | } 265 | 266 | v.End = p.lexer.TokenEnd() 267 | p.lexer.Next() 268 | } else if p.lexer.Current == lexer.Ident && p.lexer.CurrentString == "n" { 269 | v.End = p.lexer.TokenEnd() 270 | p.lexer.Expect(lexer.Ident) 271 | } else if p.lexer.Current == lexer.Ident && p.lexer.CurrentString == "-n" { 272 | v.A = "-1" 273 | v.End = p.lexer.TokenEnd() 274 | p.lexer.Expect(lexer.Ident) 275 | } else if p.lexer.Current == lexer.Number { 276 | v.A = "0" 277 | v.B = p.lexer.CurrentNumeral 278 | if strings.HasPrefix(p.lexer.CurrentNumeral, "-") { 279 | v.Operator = "-" 280 | v.B = v.B[1:] 281 | } else if strings.HasPrefix(p.lexer.CurrentNumeral, "+") { 282 | v.Operator = "+" 283 | v.B = v.B[1:] 284 | } 285 | v.End = p.lexer.TokenEnd() 286 | p.lexer.Expect(lexer.Number) 287 | } 288 | 289 | // If there was no whitespace, e.g. n+3, then the lexer will have given 290 | // us a number. Otherwise, it'll be n + 3 with a delimiter. 291 | if p.lexer.Current == lexer.Number { 292 | if strings.HasPrefix(p.lexer.CurrentNumeral, "-") { 293 | v.Operator = "-" 294 | } else if strings.HasPrefix(p.lexer.CurrentNumeral, "+") { 295 | v.Operator = "+" 296 | } else { 297 | p.lexer.Errorf("expected +/- as part of An+B") 298 | } 299 | v.B = p.lexer.CurrentNumeral[1:] 300 | v.End = p.lexer.TokenEnd() 301 | p.lexer.Expect(lexer.Number) 302 | } else if p.lexer.Current == lexer.Delim && (p.lexer.CurrentString == "+" || p.lexer.CurrentString == "-") { 303 | v.Operator = p.lexer.CurrentString 304 | p.lexer.Next() 305 | 306 | v.B = p.lexer.CurrentNumeral 307 | v.End = p.lexer.TokenEnd() 308 | p.lexer.Expect(lexer.Number) 309 | } 310 | return v 311 | } 312 | -------------------------------------------------------------------------------- /internal/parser/span_test.go: -------------------------------------------------------------------------------- 1 | package parser_test 2 | 3 | import ( 4 | "bytes" 5 | "fmt" 6 | "io/ioutil" 7 | "os" 8 | "reflect" 9 | "testing" 10 | 11 | "github.com/stephen/cssc/internal/ast" 12 | "github.com/stephen/cssc/internal/logging" 13 | "github.com/stephen/cssc/internal/parser" 14 | "github.com/stephen/cssc/internal/sources" 15 | "github.com/stretchr/testify/assert" 16 | "github.com/stretchr/testify/require" 17 | ) 18 | 19 | func Parse(t testing.TB, source *sources.Source) *ast.Stylesheet { 20 | ss, err := parser.Parse(source) 21 | require.NoError(t, err) 22 | return ss 23 | } 24 | 25 | func TestSpans(t *testing.T) { 26 | source := &sources.Source{ 27 | Path: "main.css", 28 | Content: ` 29 | .simple { margin: 1px 2px; } 30 | .more, .complex { margin: 1px 2px; } 31 | 32 | .multiline { 33 | width: 50%; 34 | } 35 | 36 | @media (screen) or (print) { 37 | #id { 38 | line-height: 2rem; 39 | space-indented:1rem; 40 | tab-in-values: 1rem; 41 | } 42 | } 43 
| 44 | /* what is this even targetting?? */ 45 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 46 | color: purple; 47 | background-color: rgba(calc(0 + 1), 2, 3); 48 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 49 | width: calc(2px / 2 + 1rem * 8) 50 | } 51 | 52 | @custom-media test (800px < width < 1000px) or (print); 53 | 54 | @keyframes custom { 55 | 0% { opacity: 0%; } 56 | 100% { opacity: 100%; } 57 | } 58 | 59 | @font-face { 60 | font-family: "whatever"; 61 | src: url("/what.eot?") format("eot"), 62 | url("./what.woff") format("woff"), 63 | url("./what.ttf") format("truetype"); 64 | } 65 | 66 | .broken { 67 | border-radius: 2px;; 68 | width: 200px; 69 | } 70 | `, 71 | } 72 | 73 | var b bytes.Buffer 74 | ast.Walk(Parse(t, source), func(n ast.Node) { 75 | line, col := source.LineAndCol(n.Location()) 76 | fmt.Fprintln(&b, fmt.Sprintf("%s:%d:%d", reflect.TypeOf(n).String(), line, col)) 77 | fmt.Fprintln(&b, logging.AnnotateSourceSpan(source, n.Location())) 78 | fmt.Fprintln(&b) 79 | }) 80 | 81 | if os.Getenv("WRITE_SNAPSHOTS") != "" { 82 | require.NoError(t, os.MkdirAll("testdata/", 0644)) 83 | require.NoError(t, ioutil.WriteFile("testdata/spans.txt", b.Bytes(), 0644)) 84 | return 85 | } 86 | 87 | expected, err := ioutil.ReadFile("testdata/spans.txt") 88 | require.NoError(t, err, "if you are trying to generate the snapshot, use WRITE_SNAPSHOTS=1") 89 | 90 | assert.Equal(t, string(expected), b.String()) 91 | } 92 | -------------------------------------------------------------------------------- /internal/parser/testdata/spans.txt: -------------------------------------------------------------------------------- 1 | *ast.Stylesheet:2:1 2 | .simple { margin: 1px 2px; } 3 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~> 4 | 5 | *ast.QualifiedRule:2:1 6 | .simple { margin: 1px 2px; } 7 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 8 | 9 | *ast.SelectorList:2:1 10 | .simple { margin: 1px 2px; } 11 | ~~~~~~~~ 12 | 13 | *ast.Selector:2:1 14 | .simple { margin: 1px 2px; } 15 | ~~~~~~~~ 16 | 17 | *ast.ClassSelector:2:1 18 | .simple { margin: 1px 2px; } 19 | ~~~~~~~ 20 | 21 | *ast.Whitespace:2:8 22 | .simple { margin: 1px 2px; } 23 | ~ 24 | 25 | *ast.DeclarationBlock:2:9 26 | .simple { margin: 1px 2px; } 27 | ~~~~~~~~~~~~~~~~~~~~ 28 | 29 | *ast.Declaration:2:11 30 | .simple { margin: 1px 2px; } 31 | ~~~~~~~~~~~~~~~ 32 | 33 | *ast.Dimension:2:19 34 | .simple { margin: 1px 2px; } 35 | ~~~ 36 | 37 | *ast.Dimension:2:23 38 | .simple { margin: 1px 2px; } 39 | ~~~ 40 | 41 | *ast.QualifiedRule:3:1 42 | .more, .complex { margin: 1px 2px; } 43 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 44 | 45 | *ast.SelectorList:3:1 46 | .more, .complex { margin: 1px 2px; } 47 | ~~~~~~~~~~~~~~~~ 48 | 49 | *ast.Selector:3:1 50 | .more, .complex { margin: 1px 2px; } 51 | ~~~~~ 52 | 53 | *ast.ClassSelector:3:1 54 | .more, .complex { margin: 1px 2px; } 55 | ~~~~~ 56 | 57 | *ast.Selector:3:8 58 | .more, .complex { margin: 1px 2px; } 59 | ~~~~~~~~~ 60 | 61 | *ast.ClassSelector:3:8 62 | .more, .complex { margin: 1px 2px; } 63 | ~~~~~~~~ 64 | 65 | *ast.Whitespace:3:16 66 | .more, .complex { margin: 1px 2px; } 67 | ~ 68 | 69 | *ast.DeclarationBlock:3:17 70 | .more, .complex { margin: 1px 2px; } 71 | ~~~~~~~~~~~~~~~~~~~~ 72 | 73 | *ast.Declaration:3:19 74 | .more, .complex { margin: 1px 2px; } 75 | ~~~~~~~~~~~~~~~ 76 | 77 | *ast.Dimension:3:27 78 | .more, .complex { margin: 1px 2px; } 79 | ~~~ 80 | 81 | *ast.Dimension:3:31 82 | .more, .complex { margin: 1px 2px; } 83 | ~~~ 84 | 85 | *ast.QualifiedRule:5:1 86 | 
.multiline { 87 | ~~~~~~~~~~~~> 88 | 89 | *ast.SelectorList:5:1 90 | .multiline { 91 | ~~~~~~~~~~~ 92 | 93 | *ast.Selector:5:1 94 | .multiline { 95 | ~~~~~~~~~~~ 96 | 97 | *ast.ClassSelector:5:1 98 | .multiline { 99 | ~~~~~~~~~~ 100 | 101 | *ast.Whitespace:5:11 102 | .multiline { 103 | ~ 104 | 105 | *ast.DeclarationBlock:5:12 106 | .multiline { 107 | ~> 108 | 109 | *ast.Declaration:6:2 110 | width: 50%; 111 | ~~~~~~~~~~ 112 | 113 | *ast.Dimension:6:9 114 | width: 50%; 115 | ~~~ 116 | 117 | *ast.AtRule:9:1 118 | @media (screen) or (print) { 119 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~> 120 | 121 | *ast.MediaQueryList:9:8 122 | @media (screen) or (print) { 123 | ~~~~~~~~~~~~~~~~~~~ 124 | 125 | *ast.MediaQuery:9:8 126 | @media (screen) or (print) { 127 | ~~~~~~~~~~~~~~~~~~~ 128 | 129 | *ast.MediaFeaturePlain:9:8 130 | @media (screen) or (print) { 131 | ~~~~~~~~ 132 | 133 | *ast.Identifier:9:9 134 | @media (screen) or (print) { 135 | ~~~~~~ 136 | 137 | *ast.Identifier:9:17 138 | @media (screen) or (print) { 139 | ~~ 140 | 141 | *ast.MediaFeaturePlain:9:20 142 | @media (screen) or (print) { 143 | ~~~~~~~ 144 | 145 | *ast.Identifier:9:21 146 | @media (screen) or (print) { 147 | ~~~~~ 148 | 149 | *ast.QualifiedRuleBlock:9:28 150 | @media (screen) or (print) { 151 | ~> 152 | 153 | *ast.QualifiedRule:10:2 154 | #id { 155 | ~~~~~> 156 | 157 | *ast.SelectorList:10:2 158 | #id { 159 | ~~~~ 160 | 161 | *ast.Selector:10:2 162 | #id { 163 | ~~~~ 164 | 165 | *ast.IDSelector:10:2 166 | #id { 167 | ~~~ 168 | 169 | *ast.Whitespace:10:5 170 | #id { 171 | ~ 172 | 173 | *ast.DeclarationBlock:10:6 174 | #id { 175 | ~> 176 | 177 | *ast.Declaration:11:3 178 | line-height: 2rem; 179 | ~~~~~~~~~~~~~~~~~ 180 | 181 | *ast.Dimension:11:16 182 | line-height: 2rem; 183 | ~~~~ 184 | 185 | *ast.Declaration:12:5 186 | space-indented:1rem; 187 | ~~~~~~~~~~~~~~~~~~~ 188 | 189 | *ast.Dimension:12:20 190 | space-indented:1rem; 191 | ~~~~ 192 | 193 | *ast.Declaration:13:5 194 | tab-in-values: 1rem; 195 | ~~~~~~~~~~~~~~~~~~~ 196 | 197 | *ast.Dimension:13:20 198 | tab-in-values: 1rem; 199 | ~~~~ 200 | 201 | *ast.QualifiedRule:18:1 202 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 203 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~> 204 | 205 | *ast.SelectorList:18:1 206 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 207 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 208 | 209 | *ast.Selector:18:1 210 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 211 | ~~~~~~~~~~~~~~~~~ 212 | 213 | *ast.ClassSelector:18:1 214 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 215 | ~~~~ 216 | 217 | *ast.IDSelector:18:5 218 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 219 | ~~~~~~~ 220 | 221 | *ast.Whitespace:18:12 222 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 223 | ~ 224 | 225 | *ast.CombinatorSelector:18:13 226 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 227 | ~ 228 | 229 | *ast.Whitespace:18:14 230 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 231 | ~ 232 | 233 | *ast.TypeSelector:18:15 234 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + 
a[link="thing"], a[other] { 235 | ~~~ 236 | 237 | *ast.Selector:18:20 238 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 239 | ~~~~~~~~~~~ 240 | 241 | *ast.TypeSelector:18:20 242 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 243 | ~~~~ 244 | 245 | *ast.PseudoElementSelector:18:24 246 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 247 | ~~~~~~~ 248 | 249 | *ast.PseudoClassSelector:18:25 250 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 251 | ~~~~~~ 252 | 253 | *ast.Selector:18:33 254 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 255 | ~~~~~~ 256 | 257 | *ast.TypeSelector:18:33 258 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 259 | ~ 260 | 261 | *ast.PseudoClassSelector:18:34 262 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 263 | ~~~~~ 264 | 265 | *ast.Selector:18:41 266 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 267 | ~~~~~~~~~ 268 | 269 | *ast.TypeSelector:18:41 270 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 271 | ~ 272 | 273 | *ast.PseudoClassSelector:18:42 274 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 275 | ~~~~~~~~ 276 | 277 | *ast.Selector:18:52 278 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 279 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 280 | 281 | *ast.TypeSelector:18:52 282 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 283 | ~~~ 284 | 285 | *ast.ClassSelector:18:55 286 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 287 | ~~ 288 | 289 | *ast.PseudoClassSelector:18:57 290 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 291 | ~~~~~~~~ 292 | 293 | *ast.Whitespace:18:65 294 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 295 | ~ 296 | 297 | *ast.CombinatorSelector:18:66 298 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 299 | ~ 300 | 301 | *ast.Whitespace:18:67 302 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 303 | ~ 304 | 305 | *ast.TypeSelector:18:68 306 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 307 | ~ 308 | 309 | *ast.AttributeSelector:18:69 310 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 311 | ~~~~~~~~~~~~~~ 312 | 313 | *ast.String:18:75 314 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 315 | ~~~~~~~ 316 | 317 | *ast.Selector:18:85 318 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 319 | ~~~~~~~~ 320 | 321 | *ast.TypeSelector:18:85 322 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 323 | ~ 324 | 325 | *ast.AttributeSelector:18:86 326 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 327 | ~~~~~~~ 328 | 329 | 
*ast.DeclarationBlock:18:94 330 | .big#THINGS + div, span::after, a:href, a:visited, not.a:problem + a[link="thing"], a[other] { 331 | ~> 332 | 333 | *ast.Declaration:19:2 334 | color: purple; 335 | ~~~~~~~~~~~~~ 336 | 337 | *ast.Identifier:19:9 338 | color: purple; 339 | ~~~~~~ 340 | 341 | *ast.Declaration:20:2 342 | background-color: rgba(calc(0 + 1), 2, 3); 343 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 344 | 345 | *ast.Function:20:20 346 | background-color: rgba(calc(0 + 1), 2, 3); 347 | ~~~~~~~~~~~~~~~~~~~~~~~ 348 | 349 | *ast.Function:20:25 350 | background-color: rgba(calc(0 + 1), 2, 3); 351 | ~~~~~~~~~~~ 352 | 353 | *ast.MathExpression:20:30 354 | background-color: rgba(calc(0 + 1), 2, 3); 355 | ~~~~~ 356 | 357 | *ast.Dimension:20:30 358 | background-color: rgba(calc(0 + 1), 2, 3); 359 | ~ 360 | 361 | *ast.Dimension:20:34 362 | background-color: rgba(calc(0 + 1), 2, 3); 363 | ~ 364 | 365 | *ast.Comma:20:36 366 | background-color: rgba(calc(0 + 1), 2, 3); 367 | ~ 368 | 369 | *ast.Dimension:20:38 370 | background-color: rgba(calc(0 + 1), 2, 3); 371 | ~ 372 | 373 | *ast.Comma:20:39 374 | background-color: rgba(calc(0 + 1), 2, 3); 375 | ~ 376 | 377 | *ast.Dimension:20:41 378 | background-color: rgba(calc(0 + 1), 2, 3); 379 | ~ 380 | 381 | *ast.Declaration:21:2 382 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 383 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 384 | 385 | *ast.Function:21:10 386 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 387 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 388 | 389 | *ast.MathExpression:21:15 390 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 391 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 392 | 393 | *ast.MathExpression:21:15 394 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 395 | ~~~~~~~ 396 | 397 | *ast.Dimension:21:15 398 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 399 | ~~~ 400 | 401 | *ast.Dimension:21:21 402 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 403 | ~ 404 | 405 | *ast.MathExpression:21:25 406 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 407 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 408 | 409 | *ast.MathParenthesizedExpression:21:25 410 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 411 | ~~~~~~~~~~~~~~~~~~~~~ 412 | 413 | *ast.MathExpression:21:27 414 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 415 | ~~~~~~~~~~~~~~~~~~ 416 | 417 | *ast.MathExpression:21:27 418 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 419 | ~~~~~~~~~~~ 420 | 421 | *ast.Dimension:21:27 422 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 423 | ~~~~ 424 | 425 | *ast.Dimension:21:34 426 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 427 | ~~~~ 428 | 429 | *ast.Dimension:21:41 430 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 431 | ~~~~ 432 | 433 | *ast.Dimension:21:49 434 | height: calc(2px / 2 + ( 1rem + 3rem + 6rem) * 8); 435 | ~ 436 | 437 | *ast.Declaration:22:2 438 | width: calc(2px / 2 + 1rem * 8) 439 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 440 | 441 | *ast.Function:22:9 442 | width: calc(2px / 2 + 1rem * 8) 443 | ~~~~~~~~~~~~~~~~~~~~~~~~ 444 | 445 | *ast.MathExpression:22:14 446 | width: calc(2px / 2 + 1rem * 8) 447 | ~~~~~~~~~~~~~~~~~~ 448 | 449 | *ast.MathExpression:22:14 450 | width: calc(2px / 2 + 1rem * 8) 451 | ~~~~~~~ 452 | 453 | *ast.Dimension:22:14 454 | width: calc(2px / 2 + 1rem * 8) 455 | ~~~ 456 | 457 | *ast.Dimension:22:20 458 | width: calc(2px / 2 + 1rem * 8) 459 | ~ 460 | 461 | *ast.MathExpression:22:24 462 | width: calc(2px / 2 + 1rem * 8) 463 | ~~~~~~~~ 464 | 465 | 
*ast.Dimension:22:24 466 | width: calc(2px / 2 + 1rem * 8) 467 | ~~~~ 468 | 469 | *ast.Dimension:22:31 470 | width: calc(2px / 2 + 1rem * 8) 471 | ~ 472 | 473 | *ast.AtRule:25:1 474 | @custom-media test (800px < width < 1000px) or (print); 475 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 476 | 477 | *ast.Identifier:25:15 478 | @custom-media test (800px < width < 1000px) or (print); 479 | ~~~~ 480 | 481 | *ast.MediaQuery:25:20 482 | @custom-media test (800px < width < 1000px) or (print); 483 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ 484 | 485 | *ast.MediaFeatureRange:25:20 486 | @custom-media test (800px < width < 1000px) or (print); 487 | ~~~~~~~~~~~~~~~~~~~~~~~~ 488 | 489 | *ast.Dimension:25:21 490 | @custom-media test (800px < width < 1000px) or (print); 491 | ~~~~~ 492 | 493 | *ast.Dimension:25:37 494 | @custom-media test (800px < width < 1000px) or (print); 495 | ~~~~~~ 496 | 497 | *ast.Identifier:25:45 498 | @custom-media test (800px < width < 1000px) or (print); 499 | ~~ 500 | 501 | *ast.MediaFeaturePlain:25:48 502 | @custom-media test (800px < width < 1000px) or (print); 503 | ~~~~~~~ 504 | 505 | *ast.Identifier:25:49 506 | @custom-media test (800px < width < 1000px) or (print); 507 | ~~~~~ 508 | 509 | *ast.AtRule:27:1 510 | @keyframes custom { 511 | ~~~~~~~~~~~~~~~~~~~> 512 | 513 | *ast.Identifier:27:12 514 | @keyframes custom { 515 | ~~~~~~ 516 | 517 | *ast.QualifiedRuleBlock:27:19 518 | @keyframes custom { 519 | ~> 520 | 521 | *ast.QualifiedRule:28:2 522 | 0% { opacity: 0%; } 523 | ~~~~~~~~~~~~~~~~~~~ 524 | 525 | *ast.KeyframeSelectorList:28:2 526 | 0% { opacity: 0%; } 527 | ~~ 528 | 529 | *ast.Percentage:28:2 530 | 0% { opacity: 0%; } 531 | ~~ 532 | 533 | *ast.DeclarationBlock:28:5 534 | 0% { opacity: 0%; } 535 | ~~~~~~~~~~~~~~~~ 536 | 537 | *ast.Declaration:28:7 538 | 0% { opacity: 0%; } 539 | ~~~~~~~~~~~ 540 | 541 | *ast.Dimension:28:16 542 | 0% { opacity: 0%; } 543 | ~~ 544 | 545 | *ast.QualifiedRule:29:2 546 | 100% { opacity: 100%; } 547 | ~~~~~~~~~~~~~~~~~~~~~~~ 548 | 549 | *ast.KeyframeSelectorList:29:2 550 | 100% { opacity: 100%; } 551 | ~~~~ 552 | 553 | *ast.Percentage:29:2 554 | 100% { opacity: 100%; } 555 | ~~~~ 556 | 557 | *ast.DeclarationBlock:29:7 558 | 100% { opacity: 100%; } 559 | ~~~~~~~~~~~~~~~~~~ 560 | 561 | *ast.Declaration:29:9 562 | 100% { opacity: 100%; } 563 | ~~~~~~~~~~~~~ 564 | 565 | *ast.Dimension:29:18 566 | 100% { opacity: 100%; } 567 | ~~~~ 568 | 569 | *ast.AtRule:32:1 570 | @font-face { 571 | ~~~~~~~~~~~~> 572 | 573 | *ast.DeclarationBlock:32:12 574 | @font-face { 575 | ~> 576 | 577 | *ast.Declaration:33:2 578 | font-family: "whatever"; 579 | ~~~~~~~~~~~~~~~~~~~~~~~ 580 | 581 | *ast.String:33:15 582 | font-family: "whatever"; 583 | ~~~~~~~~~~ 584 | 585 | *ast.Declaration:34:3 586 | src: url("/what.eot?") format("eot"), 587 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~> 588 | 589 | *ast.Function:34:8 590 | src: url("/what.eot?") format("eot"), 591 | ~~~~~~~~~~~~~~~~~ 592 | 593 | *ast.String:34:12 594 | src: url("/what.eot?") format("eot"), 595 | ~~~~~~~~~~~~ 596 | 597 | *ast.Function:34:26 598 | src: url("/what.eot?") format("eot"), 599 | ~~~~~~~~~~~~~ 600 | 601 | *ast.String:34:33 602 | src: url("/what.eot?") format("eot"), 603 | ~~~~~ 604 | 605 | *ast.Comma:34:39 606 | src: url("/what.eot?") format("eot"), 607 | ~ 608 | 609 | *ast.Function:35:5 610 | url("./what.woff") format("woff"), 611 | ~~~~~~~~~~~~~~~~~~ 612 | 613 | *ast.String:35:9 614 | url("./what.woff") format("woff"), 615 | ~~~~~~~~~~~~~ 616 | 617 | *ast.Function:35:24 618 | 
url("./what.woff") format("woff"), 619 | ~~~~~~~~~~~~~~ 620 | 621 | *ast.String:35:31 622 | url("./what.woff") format("woff"), 623 | ~~~~~~ 624 | 625 | *ast.Comma:35:38 626 | url("./what.woff") format("woff"), 627 | ~ 628 | 629 | *ast.Function:36:3 630 | url("./what.ttf") format("truetype"); 631 | ~~~~~~~~~~~~~~~~~ 632 | 633 | *ast.String:36:7 634 | url("./what.ttf") format("truetype"); 635 | ~~~~~~~~~~~~ 636 | 637 | *ast.Function:36:21 638 | url("./what.ttf") format("truetype"); 639 | ~~~~~~~~~~~~~~~~~~ 640 | 641 | *ast.String:36:28 642 | url("./what.ttf") format("truetype"); 643 | ~~~~~~~~~~ 644 | 645 | *ast.QualifiedRule:39:1 646 | .broken { 647 | ~~~~~~~~~> 648 | 649 | *ast.SelectorList:39:1 650 | .broken { 651 | ~~~~~~~~ 652 | 653 | *ast.Selector:39:1 654 | .broken { 655 | ~~~~~~~~ 656 | 657 | *ast.ClassSelector:39:1 658 | .broken { 659 | ~~~~~~~ 660 | 661 | *ast.Whitespace:39:8 662 | .broken { 663 | ~ 664 | 665 | *ast.DeclarationBlock:39:9 666 | .broken { 667 | ~> 668 | 669 | *ast.Declaration:40:2 670 | border-radius: 2px;; 671 | ~~~~~~~~~~~~~~~~~~ 672 | 673 | *ast.Dimension:40:17 674 | border-radius: 2px;; 675 | ~~~ 676 | 677 | *ast.Raw:40:21 678 | border-radius: 2px;; 679 | ~ 680 | 681 | *ast.Declaration:41:2 682 | width: 200px; 683 | ~~~~~~~~~~~~ 684 | 685 | *ast.Dimension:41:9 686 | width: 200px; 687 | ~~~~~ 688 | 689 | -------------------------------------------------------------------------------- /internal/printer/bench_test.go: -------------------------------------------------------------------------------- 1 | package printer 2 | 3 | import ( 4 | "io/ioutil" 5 | "testing" 6 | 7 | "github.com/stephen/cssc/internal/parser" 8 | "github.com/stephen/cssc/internal/sources" 9 | "github.com/stretchr/testify/assert" 10 | "github.com/stretchr/testify/require" 11 | ) 12 | 13 | func BenchmarkPrinter(b *testing.B) { 14 | b.ReportAllocs() 15 | 16 | by, err := ioutil.ReadFile("../testdata/bootstrap.css") 17 | require.NoError(b, err) 18 | source := &sources.Source{ 19 | Path: "bootstrap.css", 20 | Content: string(by), 21 | } 22 | ast, err := parser.Parse(source) 23 | assert.NoError(b, err) 24 | 25 | b.Run("no sourcemap", func(b *testing.B) { 26 | b.ReportAllocs() 27 | for i := 0; i < b.N; i++ { 28 | _, _ = Print(ast, Options{}) 29 | } 30 | }) 31 | 32 | b.Run("with sourcemap", func(b *testing.B) { 33 | b.ReportAllocs() 34 | for i := 0; i < b.N; i++ { 35 | _, _ = Print(ast, Options{OriginalSource: source}) 36 | } 37 | }) 38 | } 39 | -------------------------------------------------------------------------------- /internal/printer/manualtest/.gitignore: -------------------------------------------------------------------------------- 1 | index.html 2 | index.css 3 | -------------------------------------------------------------------------------- /internal/printer/manualtest/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "io/ioutil" 5 | "log" 6 | "path/filepath" 7 | 8 | "github.com/davecgh/go-spew/spew" 9 | "github.com/stephen/cssc/internal/parser" 10 | "github.com/stephen/cssc/internal/printer" 11 | "github.com/stephen/cssc/internal/sources" 12 | ) 13 | 14 | func main() { 15 | source := &sources.Source{ 16 | Path: "index.css", 17 | Content: ` 18 | body { 19 | background-color: purple; 20 | } 21 | 22 | @media not (width <= -100px) { 23 | body { 24 | background: red; 25 | } 26 | } 27 | @media (min-width: 30em) and (orientation: landscape) { 28 | body { 29 | 30 | background: green;} 31 | } 32 | 33 | @import 
url('landscape.css') screen and (orientation: landscape); 34 | 35 | .a { 36 | color: white; 37 | } 38 | 39 | .b { 40 | color: red; 41 | } 42 | `, 43 | } 44 | 45 | sheet, err := parser.Parse(source) 46 | if err != nil { 47 | panic(err) 48 | } 49 | log.Println(spew.Sdump(sheet)) 50 | 51 | out, err := printer.Print(sheet, printer.Options{ 52 | OriginalSource: source, 53 | }) 54 | if err != nil { 55 | panic(err) 56 | } 57 | log.Println(out) 58 | 59 | fp := filepath.Join("internal/printer/manualtest/", "index.css") 60 | if err := ioutil.WriteFile(fp, []byte(out), 0644); err != nil { 61 | panic(err) 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /internal/printer/math_test.go: -------------------------------------------------------------------------------- 1 | package printer_test 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stretchr/testify/assert" 7 | ) 8 | 9 | func TestMath(t *testing.T) { 10 | assert.Equal(t, `.class{width:calc(1px+2px)}`, Print(t, `.class { width: calc(1px + 2px) }`)) 11 | assert.Equal(t, `.class{width:calc(1px+2px/2)}`, Print(t, `.class { width: calc(1px + 2px / 2) }`)) 12 | assert.Equal(t, `.class{width:calc(22%+1rem)}`, Print(t, `.class { width: calc(22% + 1rem) }`)) 13 | assert.Equal(t, `.class{width:calc(22%-5%)}`, Print(t, `.class { width: calc(22% - 5%) }`)) 14 | } 15 | -------------------------------------------------------------------------------- /internal/printer/printer.go: -------------------------------------------------------------------------------- 1 | package printer 2 | 3 | import ( 4 | "encoding/base64" 5 | "encoding/json" 6 | "fmt" 7 | "reflect" 8 | "strings" 9 | 10 | "github.com/stephen/cssc/internal/ast" 11 | "github.com/stephen/cssc/internal/sources" 12 | ) 13 | 14 | type printer struct { 15 | options Options 16 | s strings.Builder 17 | 18 | sourceMappings strings.Builder 19 | lastWritten int 20 | lastMappingState mappingState 21 | } 22 | 23 | type mappingState struct { 24 | generatedColumn int32 25 | originalLine int32 26 | originalColumn int32 27 | } 28 | 29 | // Options is a set of options for printing. 30 | type Options struct { 31 | OriginalSource *sources.Source 32 | } 33 | 34 | // Print prints the input AST node into CSS. It should have deterministic 35 | // output. 36 | func Print(in ast.Node, opts Options) (output string, err error) { 37 | defer func() { 38 | if rErr := recover(); rErr != nil { 39 | if errI, ok := rErr.(error); ok { 40 | output, err = "", errI 41 | return 42 | } 43 | 44 | // Re-panic unknown issues. 45 | panic(rErr) 46 | } 47 | }() 48 | 49 | p := printer{ 50 | options: opts, 51 | } 52 | 53 | p.print(in) 54 | p.printMapping() 55 | 56 | return p.s.String(), nil 57 | } 58 | 59 | func (p *printer) printMapping() { 60 | if p.options.OriginalSource == nil { 61 | return 62 | } 63 | 64 | b := strings.Builder{} 65 | b.WriteString(`{"version": 3,"file":"`) 66 | b.WriteString(p.options.OriginalSource.Path) 67 | // XXX: sources content and names. 68 | b.WriteString(`","sourceRoot":"", "sources": ["source.css"], "sourcesContent":[`) 69 | out, err := json.Marshal(p.options.OriginalSource.Content) 70 | if err != nil { 71 | panic(err) 72 | } 73 | b.Write(out) 74 | b.WriteString(`],"names":[],"mappings":"`) 75 | b.WriteString(p.sourceMappings.String()) 76 | b.WriteString(`"}`) 77 | p.s.WriteString("\n/*# sourceMappingURL=data:application/json;base64,") 78 | // XXX: allocation. 
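	// The trailer appended below has the form:
	//   /*# sourceMappingURL=data:application/json;base64,<encoded map JSON> */
	// where the JSON payload is the map assembled in b above.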
79 | p.s.WriteString(base64.StdEncoding.EncodeToString([]byte(b.String()))) 80 | p.s.WriteString(" */\n") 81 | } 82 | 83 | // addMapping should be called from the printer 84 | // when a new symbol needs to be added to the sourcemap. 85 | func (p *printer) addMapping(loc ast.Span) { 86 | if p.options.OriginalSource == nil { 87 | return 88 | } 89 | 90 | newState := p.lastMappingState 91 | 92 | line, col := p.options.OriginalSource.LineAndCol(loc) 93 | newState.originalLine, newState.originalColumn = line-1, col-1 94 | 95 | // Note that String() here does not reallocate the string. 96 | for _, ch := range p.s.String()[p.lastWritten:] { 97 | if ch == '\n' { 98 | p.lastMappingState.generatedColumn = 0 99 | newState.generatedColumn = 0 100 | p.sourceMappings.WriteRune(';') 101 | continue 102 | } 103 | 104 | newState.generatedColumn++ 105 | } 106 | 107 | if p.s.Len() > 0 { 108 | lastByte := p.sourceMappings.String()[p.sourceMappings.Len()-1] 109 | if lastByte != ';' { 110 | p.sourceMappings.WriteRune(',') 111 | } 112 | } 113 | 114 | p.sourceMappings.Write(VLQEncode(newState.generatedColumn - p.lastMappingState.generatedColumn)) 115 | // XXX: figure out what to do for multiple sources. 116 | p.sourceMappings.Write(VLQEncode(0)) 117 | 118 | p.sourceMappings.Write(VLQEncode(newState.originalLine - p.lastMappingState.originalLine)) 119 | p.sourceMappings.Write(VLQEncode(newState.originalColumn - p.lastMappingState.originalColumn)) 120 | // XXX: 5th item for "names" mapping 121 | 122 | p.lastMappingState = newState 123 | p.lastWritten = p.s.Len() 124 | } 125 | 126 | // print prints the current ast node to the printer output. 127 | func (p *printer) print(in ast.Node) { 128 | switch node := in.(type) { 129 | case *ast.Stylesheet: 130 | for _, n := range node.Nodes { 131 | p.print(n) 132 | } 133 | 134 | case *ast.AtRule: 135 | p.addMapping(node.Span) 136 | p.s.WriteRune('@') 137 | p.s.WriteString(node.Name) 138 | if len(node.Preludes) > 0 { 139 | p.s.WriteRune(' ') 140 | for i, prelude := range node.Preludes { 141 | p.print(prelude) 142 | 143 | if i+1 < len(node.Preludes) { 144 | p.s.WriteRune(' ') 145 | } 146 | } 147 | } 148 | 149 | if node.Block != nil { 150 | p.s.WriteRune('{') 151 | p.print(node.Block) 152 | p.s.WriteRune('}') 153 | } else { 154 | p.s.WriteRune(';') 155 | } 156 | 157 | case *ast.SelectorList: 158 | for i, s := range node.Selectors { 159 | p.print(s) 160 | 161 | if i+1 < len(node.Selectors) { 162 | p.s.WriteRune(',') 163 | } 164 | } 165 | 166 | case *ast.KeyframeSelectorList: 167 | for _, s := range node.Selectors { 168 | p.print(s) 169 | } 170 | 171 | case *ast.QualifiedRule: 172 | p.addMapping(node.Location()) 173 | p.print(node.Prelude) 174 | 175 | p.s.WriteRune('{') 176 | p.print(node.Block) 177 | p.s.WriteRune('}') 178 | 179 | case *ast.QualifiedRuleBlock: 180 | for _, r := range node.Rules { 181 | p.print(r) 182 | } 183 | 184 | case *ast.DeclarationBlock: 185 | for i, d := range node.Declarations { 186 | p.print(d) 187 | 188 | if i+1 < len(node.Declarations) { 189 | p.s.WriteRune(';') 190 | } 191 | } 192 | 193 | case *ast.Declaration: 194 | p.s.WriteString(node.Property) 195 | p.s.WriteRune(':') 196 | for i, val := range node.Values { 197 | p.print(val) 198 | 199 | // Print space if we're not the last value and the previous or current 200 | // value was not a comma. 
201 | if i+1 < len(node.Values) { 202 | if _, nextIsComma := node.Values[i+1].(*ast.Comma); !nextIsComma { 203 | if _, isComma := val.(*ast.Comma); !isComma { 204 | p.s.WriteRune(' ') 205 | } 206 | } 207 | } 208 | } 209 | 210 | if node.Important { 211 | p.s.WriteString("!important") 212 | } 213 | 214 | case *ast.Comma: 215 | p.s.WriteRune(',') 216 | 217 | case *ast.Dimension: 218 | p.s.WriteString(node.Value) 219 | p.s.WriteString(node.Unit) 220 | 221 | case *ast.Percentage: 222 | p.s.WriteString(node.Value) 223 | p.s.WriteRune('%') 224 | 225 | case *ast.String: 226 | p.s.WriteRune('"') 227 | p.s.WriteString(node.Value) 228 | p.s.WriteRune('"') 229 | 230 | case *ast.Identifier: 231 | p.s.WriteString(node.Value) 232 | 233 | case *ast.Function: 234 | p.s.WriteString(node.Name) 235 | p.s.WriteRune('(') 236 | for _, arg := range node.Arguments { 237 | p.print(arg) 238 | } 239 | p.s.WriteRune(')') 240 | 241 | case *ast.Brackets: 242 | p.s.WriteRune('[') 243 | for i, val := range node.Values { 244 | p.print(val) 245 | 246 | if i+1 < len(node.Values) { 247 | p.s.WriteRune(' ') 248 | } 249 | } 250 | p.s.WriteRune(']') 251 | 252 | case *ast.MathParenthesizedExpression: 253 | p.s.WriteRune('(') 254 | p.print(node.Value) 255 | p.s.WriteRune(')') 256 | 257 | case *ast.MathExpression: 258 | p.print(node.Left) 259 | p.s.WriteString(node.Operator) 260 | p.print(node.Right) 261 | 262 | case *ast.Whitespace: 263 | p.s.WriteRune(' ') 264 | 265 | case *ast.Selector: 266 | for i, part := range node.Parts { 267 | if _, isWhitespace := part.(*ast.Whitespace); i+1 >= len(node.Parts) && isWhitespace { 268 | continue 269 | } 270 | 271 | p.print(part) 272 | } 273 | 274 | case *ast.AttributeSelector: 275 | p.s.WriteRune('[') 276 | p.s.WriteString(node.Property) 277 | if node.Value != nil { 278 | p.s.WriteRune('=') 279 | p.print(node.Value) 280 | } 281 | p.s.WriteRune(']') 282 | 283 | case *ast.TypeSelector: 284 | p.s.WriteString(node.Name) 285 | 286 | case *ast.ClassSelector: 287 | p.s.WriteRune('.') 288 | p.s.WriteString(node.Name) 289 | 290 | case *ast.IDSelector: 291 | p.s.WriteRune('#') 292 | p.s.WriteString(node.Name) 293 | 294 | case *ast.CombinatorSelector: 295 | p.s.WriteString(node.Operator) 296 | 297 | case *ast.PseudoElementSelector: 298 | p.s.WriteRune(':') 299 | p.print(node.Inner) 300 | 301 | case *ast.HexColor: 302 | p.s.WriteRune('#') 303 | p.s.WriteString(node.RGBA) 304 | 305 | case *ast.PseudoClassSelector: 306 | p.s.WriteRune(':') 307 | p.s.WriteString(node.Name) 308 | if node.Arguments != nil { 309 | p.s.WriteRune('(') 310 | p.print(node.Arguments) 311 | p.s.WriteRune(')') 312 | } 313 | 314 | case *ast.ANPlusB: 315 | wrote := false 316 | if node.A != "" && node.A != "1" && node.A != "0" { 317 | if node.A == "-1" { 318 | p.s.WriteString("-") 319 | } else if strings.HasPrefix(node.A, "+") { 320 | p.s.WriteString(node.A[1:]) 321 | } else { 322 | p.s.WriteString(node.A) 323 | } 324 | wrote = true 325 | } 326 | if node.A != "0" { 327 | p.s.WriteRune('n') 328 | wrote = true 329 | } 330 | if node.B != "" && node.B != "0" { 331 | if wrote { 332 | p.s.WriteString(node.Operator) 333 | } 334 | p.s.WriteString(node.B) 335 | } 336 | 337 | case *ast.MediaQueryList: 338 | for i, q := range node.Queries { 339 | p.print(q) 340 | 341 | if i+1 < len(node.Queries) { 342 | p.s.WriteRune(',') 343 | } 344 | } 345 | 346 | case *ast.MediaQuery: 347 | for i, part := range node.Parts { 348 | p.print(part) 349 | 350 | if i+1 < len(node.Parts) { 351 | p.s.WriteRune(' ') 352 | } 353 | } 354 | 355 | case 
*ast.MediaFeaturePlain: 356 | p.s.WriteRune('(') 357 | p.print(node.Property) 358 | if node.Value != nil { 359 | p.s.WriteRune(':') 360 | p.print(node.Value) 361 | } 362 | p.s.WriteRune(')') 363 | 364 | case *ast.MediaFeatureRange: 365 | p.s.WriteRune('(') 366 | if node.LeftValue != nil { 367 | p.print(node.LeftValue) 368 | p.s.WriteString(node.Operator) 369 | } 370 | p.print(node.Property) 371 | if node.RightValue != nil { 372 | p.s.WriteString(node.Operator) 373 | p.print(node.RightValue) 374 | } 375 | p.s.WriteRune(')') 376 | 377 | case *ast.Raw: 378 | p.s.WriteString(node.Value) 379 | 380 | default: 381 | panic(fmt.Sprintf("unknown ast node: %s", reflect.TypeOf(in).String())) 382 | } 383 | 384 | } 385 | -------------------------------------------------------------------------------- /internal/printer/printer_test.go: -------------------------------------------------------------------------------- 1 | package printer_test 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stephen/cssc/internal/parser" 7 | "github.com/stephen/cssc/internal/printer" 8 | "github.com/stephen/cssc/internal/sources" 9 | "github.com/stretchr/testify/assert" 10 | "github.com/stretchr/testify/require" 11 | ) 12 | 13 | func Print(t testing.TB, s string) string { 14 | ss, err := parser.Parse(&sources.Source{ 15 | Path: "main.css", 16 | Content: s, 17 | }) 18 | require.NoError(t, err) 19 | 20 | out, err := printer.Print(ss, printer.Options{}) 21 | require.NoError(t, err) 22 | 23 | return out 24 | } 25 | 26 | func TestClass(t *testing.T) { 27 | assert.Equal(t, `.class{font-family:"Helvetica",sans-serif}`, 28 | Print(t, `.class { 29 | font-family: "Helvetica", sans-serif; 30 | }`)) 31 | } 32 | 33 | func TestClass_MultipleDeclarations(t *testing.T) { 34 | assert.Equal(t, `.class{font-family:"Helvetica",sans-serif;width:2rem}`, 35 | Print(t, `.class { 36 | font-family: "Helvetica", sans-serif; 37 | width: 2rem; 38 | }`)) 39 | } 40 | 41 | func TestClass_ComplexSelector(t *testing.T) { 42 | assert.Equal(t, `div.test #thing,div.test#thing,div .test#thing{}`, 43 | Print(t, `div.test #thing, div.test#thing, div .test#thing { }`)) 44 | } 45 | 46 | func TestMediaQueryRanges(t *testing.T) { 47 | assert.Equal(t, `@media (200px>5 == 0 { 90 | offset := in & 0b11111 91 | return []byte(base64Forward[offset : offset+1]) 92 | } 93 | 94 | var rv []byte 95 | for { 96 | // Content bits. 97 | cur := in & 0b11111 98 | 99 | // Note: signed shift is ok because we've already switch over 100 | // to positive representation. 101 | in >>= 5 102 | 103 | if in != 0 { 104 | // Continuation bit. 105 | cur |= 0b100000 106 | } 107 | 108 | rv = append(rv, base64Forward[cur]) 109 | 110 | if in <= 0 { 111 | break 112 | } 113 | } 114 | 115 | return rv 116 | } 117 | 118 | // VLQDecode decodes the input slice into a signed bit. 119 | // It is complementary to VLQEncode. It returns the read 120 | // length because the ending is unknown to the caller 121 | // before decoding. 
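// For example (mirroring TestVLQEncode/TestVLQDecode below), "2H" decodes as:
// '2' maps to 54, whose low five bits give 22 with the continuation bit set;
// 'H' maps to 7 with no continuation, contributing 7<<5 = 224 for a raw value
// of 246. The low bit is the sign (0 here), so the result is 246>>1 = 123,
// the inverse of VLQEncode(123) == "2H".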
122 | func VLQDecode(in []byte) (value int32, len int32) { 123 | var rv int32 124 | var shift int32 125 | var read int32 126 | 127 | for _, b := range in { 128 | read++ 129 | 130 | cur := int32(base64Reverse[b]) 131 | 132 | rv += (cur & 0b11111) << shift 133 | 134 | shift += 5 135 | 136 | if cur&0b100000 == 0 { 137 | break 138 | } 139 | } 140 | 141 | if rv&1 == 1 { 142 | return -((rv - 1) >> 1), read 143 | } 144 | 145 | return rv >> 1, read 146 | } 147 | -------------------------------------------------------------------------------- /internal/printer/sourcemaps_test.go: -------------------------------------------------------------------------------- 1 | package printer 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stretchr/testify/assert" 7 | ) 8 | 9 | func TestVLQEncode(t *testing.T) { 10 | assert.Equal(t, []byte("A"), VLQEncode(0)) 11 | assert.Equal(t, []byte("2H"), VLQEncode(123)) 12 | assert.Equal(t, []byte("gkxH"), VLQEncode(123456)) 13 | assert.Equal(t, []byte("qxmvrH"), VLQEncode(123456789)) 14 | } 15 | 16 | func TestVLQDecode(t *testing.T) { 17 | val, len := VLQDecode([]byte("A")) 18 | assert.Equal(t, int32(0), val) 19 | assert.Equal(t, int32(1), len) 20 | 21 | val, len = VLQDecode([]byte("2H")) 22 | assert.Equal(t, int32(123), val) 23 | assert.Equal(t, int32(2), len) 24 | 25 | val, len = VLQDecode([]byte("gkxH")) 26 | assert.Equal(t, int32(123456), val) 27 | assert.Equal(t, int32(4), len) 28 | 29 | val, len = VLQDecode([]byte("qxmvrH")) 30 | assert.Equal(t, int32(123456789), val) 31 | assert.Equal(t, int32(6), len) 32 | 33 | } 34 | 35 | func TestVLQDecode_Continuation(t *testing.T) { 36 | var values []int32 37 | in := []byte("A2HgkxHqxmvrH") 38 | 39 | for { 40 | val, len := VLQDecode(in) 41 | if len == 0 { 42 | break 43 | } 44 | in = in[len:] 45 | 46 | values = append(values, val) 47 | } 48 | 49 | assert.Equal(t, []int32{0, 123, 123456, 123456789}, values) 50 | } 51 | 52 | func BenchmarkVLQEncode(b *testing.B) { 53 | b.Run("short encode", func(b *testing.B) { 54 | b.ReportAllocs() 55 | for i := 0; i < b.N; i++ { 56 | VLQEncode(1) 57 | } 58 | }) 59 | 60 | b.Run("long encode", func(b *testing.B) { 61 | b.ReportAllocs() 62 | for i := 0; i < b.N; i++ { 63 | VLQEncode(123456789) 64 | } 65 | }) 66 | 67 | b.Run("short decode", func(b *testing.B) { 68 | b.ReportAllocs() 69 | for i := 0; i < b.N; i++ { 70 | VLQDecode([]byte("B")) 71 | } 72 | }) 73 | 74 | b.Run("long decode", func(b *testing.B) { 75 | b.ReportAllocs() 76 | for i := 0; i < b.N; i++ { 77 | VLQDecode([]byte("A2HgkxHqxmvrH")) 78 | } 79 | }) 80 | } 81 | -------------------------------------------------------------------------------- /internal/printer/strings_bench_test.go: -------------------------------------------------------------------------------- 1 | package printer 2 | 3 | import ( 4 | "bytes" 5 | "fmt" 6 | "strings" 7 | "testing" 8 | "unsafe" 9 | ) 10 | 11 | var out []byte 12 | 13 | const ( 14 | iters = 100 15 | str = "test" 16 | size = iters * len(str) 17 | ) 18 | 19 | func BenchmarkStringBuilding(b *testing.B) { 20 | b.Run("strings.Builder", func(b *testing.B) { 21 | b.ReportAllocs() 22 | for i := 0; i < b.N; i++ { 23 | s := strings.Builder{} 24 | for j := 0; j < iters; j++ { 25 | s.WriteString(str) 26 | } 27 | } 28 | }) 29 | 30 | b.Run("strings.Builder with cap", func(b *testing.B) { 31 | b.ReportAllocs() 32 | for i := 0; i < b.N; i++ { 33 | s := strings.Builder{} 34 | s.Grow(size) 35 | for j := 0; j < iters; j++ { 36 | s.WriteString(str) 37 | } 38 | } 39 | }) 40 | 41 | b.Run("[]byte", func(b *testing.B) { 42 | 
b.ReportAllocs() 43 | for i := 0; i < b.N; i++ { 44 | var s []byte 45 | for j := 0; j < iters; j++ { 46 | s = append(s, str...) 47 | } 48 | } 49 | }) 50 | 51 | b.Run("[]byte with cap", func(b *testing.B) { 52 | b.ReportAllocs() 53 | for i := 0; i < b.N; i++ { 54 | s := make([]byte, 0, size) 55 | for j := 0; j < iters; j++ { 56 | s = append(s, str...) 57 | } 58 | 59 | // Force value to escape to heap. 60 | out = s 61 | } 62 | }) 63 | 64 | b.Run("bytes.Buffer", func(b *testing.B) { 65 | b.ReportAllocs() 66 | for i := 0; i < b.N; i++ { 67 | s := bytes.Buffer{} 68 | for j := 0; j < iters; j++ { 69 | s.WriteString(str) 70 | } 71 | } 72 | }) 73 | 74 | b.Run("bytes.Buffer with cap", func(b *testing.B) { 75 | b.ReportAllocs() 76 | for i := 0; i < b.N; i++ { 77 | s := bytes.Buffer{} 78 | s.Grow(size) 79 | for j := 0; j < iters; j++ { 80 | s.WriteString(str) 81 | } 82 | } 83 | }) 84 | } 85 | 86 | var strOut string 87 | 88 | func BenchmarkString_Writes(b *testing.B) { 89 | b.Run("strings.Builder", func(b *testing.B) { 90 | b.ReportAllocs() 91 | s := strings.Builder{} 92 | for j := 0; j < iters; j++ { 93 | s.WriteString(str) 94 | } 95 | b.ResetTimer() 96 | 97 | for i := 0; i < b.N; i++ { 98 | strOut = s.String() 99 | } 100 | }) 101 | 102 | b.Run("[]byte", func(b *testing.B) { 103 | b.ReportAllocs() 104 | 105 | var s []byte 106 | for j := 0; j < iters; j++ { 107 | s = append(s, str...) 108 | } 109 | b.ResetTimer() 110 | 111 | for i := 0; i < b.N; i++ { 112 | strOut = string(s) 113 | } 114 | }) 115 | 116 | b.Run("[]byte with unsafe", func(b *testing.B) { 117 | b.ReportAllocs() 118 | 119 | var s []byte 120 | for j := 0; j < iters; j++ { 121 | s = append(s, str...) 122 | } 123 | b.ResetTimer() 124 | 125 | for i := 0; i < b.N; i++ { 126 | // Taken from strings.Builder.String(). 127 | strOut = *(*string)(unsafe.Pointer(&s)) 128 | } 129 | }) 130 | 131 | b.Run("bytes.Buffer", func(b *testing.B) { 132 | b.ReportAllocs() 133 | 134 | s := bytes.Buffer{} 135 | for j := 0; j < iters; j++ { 136 | s.WriteString(str) 137 | } 138 | b.ResetTimer() 139 | 140 | for i := 0; i < b.N; i++ { 141 | strOut = s.String() 142 | } 143 | }) 144 | } 145 | 146 | func BenchmarkStringBuilder_CapacityPlanning(b *testing.B) { 147 | for _, cap := range []int{ 148 | 0, 1, 10, 50, 100, 200, 500, 149 | } { 150 | b.Run(fmt.Sprint(cap), func(b *testing.B) { 151 | b.ReportAllocs() 152 | for i := 0; i < b.N; i++ { 153 | s := strings.Builder{} 154 | s.Grow(cap * len(str)) 155 | for j := 0; j < iters; j++ { 156 | s.WriteString(str) 157 | } 158 | } 159 | }) 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /internal/sources/source.go: -------------------------------------------------------------------------------- 1 | package sources 2 | 3 | import ( 4 | "sort" 5 | 6 | "github.com/stephen/cssc/internal/ast" 7 | ) 8 | 9 | // Source is a container for a file and its contents. 10 | type Source struct { 11 | // Path is the path of the source file. 12 | Path string 13 | 14 | // Content is the content of the file. 15 | Content string 16 | 17 | // Lines is the offset of the beginning of every line. This 18 | // is useful for quickly finding the line and column for a 19 | // given byte offset (ast.Loc) and is filled in by the lexer. 20 | Lines []int 21 | } 22 | 23 | // LineAndCol computes the 1-index line and column for a given 24 | // ast.Loc (byte offset in the file). 
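// For example, with Lines = []int{0, 10, 25} and a span starting at byte
// offset 12, sort.Search yields line 2 (the first line offset greater than
// 12 is Lines[2]) and the column is 12 - Lines[1] + 1 = 3.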
25 | func (s *Source) LineAndCol(loc ast.Span) (int32, int32) { 26 | line := sort.Search(len(s.Lines), func(i int) bool { 27 | return loc.Start < s.Lines[i] 28 | }) 29 | 30 | return int32(line), int32(loc.Start - s.Lines[line-1] + 1) 31 | } 32 | 33 | // FullLine returns the full line that the span is on. If the 34 | // span spans multiple lines, then only the span of the first line is returned. 35 | func (s *Source) FullLine(loc ast.Span) ast.Span { 36 | lineNumber, _ := s.LineAndCol(loc) 37 | lineStart := s.Lines[lineNumber-1] 38 | 39 | lineEnd := len(s.Content) 40 | for i, ch := range s.Content[loc.Start:] { 41 | if ch == '\n' { 42 | lineEnd = i + loc.Start 43 | break 44 | } 45 | } 46 | 47 | return ast.Span{Start: lineStart, End: lineEnd} 48 | } 49 | -------------------------------------------------------------------------------- /internal/testdata/attributes.css: -------------------------------------------------------------------------------- 1 | /* 2 | Content from https://developer.mozilla.org/en-US/docs/Web/CSS/Attribute_selectors 3 | used under CC-BY-SA 2.5 licensing. 4 | 5 | See: https://developer.mozilla.org/en-US/docs/MDN/About 6 | */ 7 | 8 | /* Internal links, beginning with "#" */ 9 | a[href^="#"] { 10 | background-color: gold; 11 | } 12 | 13 | /* Links with "example" anywhere in the URL */ 14 | a[href*="example"] { 15 | background-color: silver; 16 | } 17 | 18 | /* Links with "insensitive" anywhere in the URL, 19 | regardless of capitalization */ 20 | a[href*="insensitive" i] { 21 | color: cyan; 22 | } 23 | 24 | /* Links with "cAsE" anywhere in the URL, 25 | with matching capitalization */ 26 | a[href*="cAsE" s] { 27 | color: pink; 28 | } 29 | 30 | /* Links that end in ".org" */ 31 | a[href$=".org"] { 32 | color: red; 33 | } 34 | 35 | /* Links that start with "https" and end in ".org" */ 36 | a[href^="https"][href$=".org"] { 37 | color: green; 38 | } -------------------------------------------------------------------------------- /internal/testdata/bem.css: -------------------------------------------------------------------------------- 1 | /** @define SomeComponent **/ 2 | 3 | .SomeComponent { 4 | position: relative; 5 | z-index: 10; 6 | width: 10rem; 7 | } 8 | -------------------------------------------------------------------------------- /internal/testdata/comments.css: -------------------------------------------------------------------------------- 1 | /* a comment */ 2 | 3 | /* stylelint: ignore-or-something */ 4 | .class { 5 | /* a comment */ 6 | width: 50rem /* here too! */; 7 | height: /* explanation */ 10rem; 8 | } 9 | 10 | /* hello */ @media /* lol */( /* haha */ print /* good luck */) /* dealing with this */ { 11 | .what { 12 | /* please */ 13 | margin: 5rem /* give me a break */ 10rem; 14 | } /* also! 15 | 16 | multiple lines with weird indent 17 | 18 | what could go wrong? 
19 | */ 20 | } 21 | -------------------------------------------------------------------------------- /internal/testdata/font-face.css: -------------------------------------------------------------------------------- 1 | @font-face { 2 | font-family: "what"; 3 | src: url("/what.eot?") format("eot"), 4 | url("./what.woff") format("woff"), 5 | url("./what.ttf") format("truetype"); 6 | 7 | font-weight: normal; 8 | font-style: normal; 9 | } 10 | 11 | @font-face { 12 | font-family: "what"; 13 | src: url("./what-bold.eot?") format("eot"), 14 | url("./what-bold.woff") format("woff"), 15 | url("./what-bold.ttf") format("truetype"); 16 | 17 | font-weight: bold; 18 | font-style: normal; 19 | } 20 | -------------------------------------------------------------------------------- /internal/testdata/grid.css: -------------------------------------------------------------------------------- 1 | .a { 2 | grid-column: 1 / span 2; 3 | grid-template: "text" auto "meter" 1fr / 1fr; 4 | } 5 | 6 | .b { 7 | grid-column: 3 ; 8 | grid-row: 1 / span 2; 9 | } 10 | 11 | .c { 12 | grid-column: 1 ; 13 | grid-row: 2 ; 14 | } 15 | 16 | .d { 17 | grid-column: 2 ; 18 | grid-row: 2 ; 19 | } -------------------------------------------------------------------------------- /internal/transformer/any_link_test.go: -------------------------------------------------------------------------------- 1 | package transformer_test 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stephen/cssc/internal/transformer" 7 | "github.com/stephen/cssc/transforms" 8 | "github.com/stretchr/testify/assert" 9 | ) 10 | 11 | func compileAnyLink(o *transformer.Options) { 12 | o.AnyLink = transforms.AnyLinkTransform 13 | } 14 | 15 | func TestAnyLink(t *testing.T) { 16 | assert.Equal(t, ".test:visited,.test:link{color:red}", Transform(t, compileAnyLink, ` 17 | .test:any-link { 18 | color: red; 19 | }`)) 20 | 21 | assert.Equal(t, "complex .test:visited:not(.thing),complex .test:link:not(.thing){color:red}", Transform(t, compileAnyLink, ` 22 | complex .test:any-link:not(.thing) { 23 | color: red; 24 | }`)) 25 | 26 | assert.Equal(t, "a:visited,a:link,section,.Something{color:red}", Transform(t, compileAnyLink, ` 27 | a:any-link, section, .Something { 28 | color: red; 29 | }`)) 30 | 31 | assert.Equal(t, ".test:any-link{color:red}", Transform(t, nil, ` 32 | .test:any-link { 33 | color: red; 34 | }`)) 35 | } 36 | -------------------------------------------------------------------------------- /internal/transformer/bench_test.go: -------------------------------------------------------------------------------- 1 | package transformer 2 | 3 | import ( 4 | "io/ioutil" 5 | "testing" 6 | 7 | "github.com/stephen/cssc/internal/parser" 8 | "github.com/stephen/cssc/internal/sources" 9 | "github.com/stretchr/testify/assert" 10 | "github.com/stretchr/testify/require" 11 | ) 12 | 13 | func BenchmarkTransformer(b *testing.B) { 14 | b.ReportAllocs() 15 | 16 | by, err := ioutil.ReadFile("../testdata/bootstrap.css") 17 | require.NoError(b, err) 18 | source := &sources.Source{ 19 | Path: "bootstrap.css", 20 | Content: string(by), 21 | } 22 | s, err := parser.Parse(source) 23 | assert.NoError(b, err) 24 | b.ResetTimer() 25 | 26 | for i := 0; i < b.N; i++ { 27 | Transform(s, Options{ 28 | OriginalSource: source, 29 | }) 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /internal/transformer/custom_media_test.go: -------------------------------------------------------------------------------- 1 | package transformer_test 2 | 3 | 
import ( 4 | "testing" 5 | 6 | "github.com/stephen/cssc/internal/ast" 7 | "github.com/stephen/cssc/internal/parser" 8 | "github.com/stephen/cssc/internal/printer" 9 | "github.com/stephen/cssc/internal/sources" 10 | "github.com/stephen/cssc/internal/transformer" 11 | "github.com/stephen/cssc/transforms" 12 | "github.com/stretchr/testify/assert" 13 | "github.com/stretchr/testify/require" 14 | ) 15 | 16 | func TestCustomMedia(t *testing.T) { 17 | 18 | assert.Equal(t, "@media (max-width:30em){.a{color:green}}@media (max-width:30em) and (script){.c{color:red}}", Transform(t, func(o *transformer.Options) { 19 | o.CustomMediaQueries = transforms.CustomMediaQueriesTransform 20 | }, ` 21 | @custom-media --narrow-window (max-width: 30em); 22 | 23 | @media (--narrow-window) { 24 | .a { color: green; } 25 | } 26 | 27 | @media (--narrow-window) and (script) { 28 | .c { color: red; } 29 | }`)) 30 | 31 | } 32 | 33 | func TestCustomMedia_Unsupported(t *testing.T) { 34 | _, err := parser.Parse(&sources.Source{ 35 | Path: "main.css", 36 | Content: ` 37 | @custom-media --narrow-window (max-width: 30em), print; 38 | 39 | @media (--narrow-window) { 40 | .a { color: green; } 41 | } 42 | 43 | @media (--narrow-window) and (script) { 44 | .c { color: red; } 45 | }`, 46 | }) 47 | assert.EqualError(t, err, "main.css:2:56\n@custom-media rule requires a single media query argument:\n\t @custom-media --narrow-window (max-width: 30em), print;\n\t ~") 48 | } 49 | 50 | func TestCustomMedia_Passthrough(t *testing.T) { 51 | assert.Equal(t, "@media (--narrow-window){.a{color:green}}@media (--narrow-window) and (script){.c{color:red}}", Transform(t, nil, ` 52 | @custom-media --narrow-window (max-width: 30em); 53 | 54 | @media (--narrow-window) { 55 | .a { color: green; } 56 | } 57 | 58 | @media (--narrow-window) and (script) { 59 | .c { color: red; } 60 | }`)) 61 | } 62 | 63 | func TestCustomMediaViaImport(t *testing.T) { 64 | mainSource := &sources.Source{ 65 | Path: "main.css", 66 | Content: ` 67 | @import "other.css"; 68 | 69 | @media (--narrow-window) { 70 | .a { color: green; } 71 | } 72 | 73 | @media (--narrow-window) and (script) { 74 | .c { color: red; } 75 | }`, 76 | } 77 | main, err := parser.Parse(mainSource) 78 | other, err := parser.Parse(&sources.Source{ 79 | Path: "other.css", 80 | Content: `@custom-media --narrow-window (max-width: 30em);`, 81 | }) 82 | 83 | require.NoError(t, err) 84 | o := &transformer.Options{ 85 | OriginalSource: mainSource, 86 | Reporter: &reporter{}, 87 | ImportReplacements: map[*ast.AtRule]*ast.Stylesheet{ 88 | main.Imports[0].AtRule: other, 89 | }, 90 | Options: transforms.Options{ 91 | CustomMediaQueries: transforms.CustomMediaQueriesTransform, 92 | }, 93 | } 94 | 95 | out, err := printer.Print(transformer.Transform(main, *o), printer.Options{}) 96 | assert.NoError(t, err) 97 | 98 | assert.Equal(t, "@media (max-width:30em){.a{color:green}}@media (max-width:30em) and (script){.c{color:red}}", out) 99 | } 100 | -------------------------------------------------------------------------------- /internal/transformer/custom_properties_test.go: -------------------------------------------------------------------------------- 1 | package transformer_test 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stephen/cssc/internal/transformer" 7 | "github.com/stephen/cssc/transforms" 8 | "github.com/stretchr/testify/assert" 9 | ) 10 | 11 | func compileCustomProperties(o *transformer.Options) { 12 | o.CustomProperties = transforms.CustomPropertiesTransformRoot 13 | } 14 | 15 | func 
TestCustomProperties(t *testing.T) { 16 | assert.Equal(t, ".class{margin:0rem 1rem 3rem 5rem}", Transform(t, compileCustomProperties, `:root { 17 | --var-width: 1rem 3rem 5rem; 18 | } 19 | 20 | .class { 21 | margin: 0rem var(--var-width); 22 | }`)) 23 | 24 | assert.Equal(t, `.class{font-family:"Helvetica",sans-serif,other}`, Transform(t, compileCustomProperties, `:root { 25 | --font: "Helvetica", sans-serif, other; 26 | } 27 | 28 | .class { 29 | font-family: var(--font); 30 | }`)) 31 | 32 | } 33 | 34 | func TestCustomProperties_Fallback(t *testing.T) { 35 | assert.Equal(t, ".class{margin:0rem 2rem}", Transform(t, compileCustomProperties, `.class { 36 | margin: 0rem var(--var-width, 2rem); 37 | }`)) 38 | 39 | assert.Equal(t, ".class{margin:0rem 2rem 1rem 3rem}", Transform(t, compileCustomProperties, `.class { 40 | margin: 0rem var(--var-width, 2rem 1rem 3rem); 41 | }`)) 42 | 43 | assert.Equal(t, `.class{font-family:"Helvetica",sans-serif}`, Transform(t, compileCustomProperties, `.class { 44 | font-family: var(--font, "Helvetica", sans-serif); 45 | }`)) 46 | } 47 | -------------------------------------------------------------------------------- /internal/transformer/math_test.go: -------------------------------------------------------------------------------- 1 | package transformer_test 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stephen/cssc/internal/transformer" 7 | "github.com/stephen/cssc/transforms" 8 | "github.com/stretchr/testify/assert" 9 | ) 10 | 11 | func compileMath(o *transformer.Options) { 12 | o.CalcReduction = transforms.CalcReductionReduce 13 | } 14 | 15 | func TestMath(t *testing.T) { 16 | assert.Equal(t, `.class{width:3px}`, Transform(t, compileMath, `.class { width: calc(1px + 2px) }`)) 17 | assert.Equal(t, `.class{width:-1px}`, Transform(t, compileMath, `.class { width: calc(1px - 2px) }`)) 18 | assert.Equal(t, `.class{width:calc(1px+2rem)}`, Transform(t, compileMath, `.class { width: calc(1px + 2rem) }`)) 19 | assert.Equal(t, `.class{width:17%}`, Transform(t, compileMath, `.class { width: calc(22% - 5%) }`)) 20 | assert.Panics(t, func() { Transform(t, compileMath, `.class { width: calc(2 + 25%) }`) }) 21 | 22 | assert.Equal(t, `.class{width:35%}`, Transform(t, compileMath, `.class { width: calc(10% + 25%) }`)) 23 | assert.Equal(t, `.class{width:50%}`, Transform(t, compileMath, `.class { width: calc(2 * 25%) }`)) 24 | assert.Panics(t, func() { Transform(t, compileMath, `.class { width: calc(2% * 25%) }`) }) 25 | 26 | assert.Equal(t, `.class{width:5%}`, Transform(t, compileMath, `.class { width: calc(10% / 2) }`)) 27 | 28 | // XXX: fix precision in below 29 | assert.Equal(t, `.class{width:3.3333333333333335%}`, Transform(t, compileMath, `.class { width: calc(10% / 3) }`)) 30 | assert.Equal(t, `.class{width:4px}`, Transform(t, compileMath, `.class { width: calc(20px / 5) }`)) 31 | assert.Equal(t, `.class{width:20}`, Transform(t, compileMath, `.class { width: calc(20 / 1) }`)) 32 | assert.Panics(t, func() { Transform(t, compileMath, `.class { width: calc(2% / 25%) }`) }) 33 | assert.Panics(t, func() { Transform(t, compileMath, `.class { width: calc(2% / 0) }`) }) 34 | 35 | assert.Equal(t, `.class{width:3px}`, Transform(t, compileMath, `.class { width: calc(1px + 4px / 2) }`)) 36 | 37 | assert.Equal(t, `.class{width:calc(1px+2px)}`, Transform(t, nil, `.class { width: calc(1px + 2px) }`)) 38 | 39 | // XXX: this should work but doesn't. because we treat math as left-associative and binary but 40 | // 22% - 1rem cannot be reduced into something that can be added to 8rem. 
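	// Concretely, calc(22% - 1rem + 8rem) parses as ((22% - 1rem) + 8rem); the inner
	// subtraction mixes % and rem and cannot be folded, so the outer addition sees an
	// unreduced math expression on its left and gives up. Regrouping as 22% + (8rem - 1rem)
	// would allow the reduction to calc(22%+7rem) that the commented-out assertion expects.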
41 | // assert.Equal(t, `.class{width:calc(22%+7rem)}`, Transform(t, compileMath, `.class { width: calc(22% - 1rem + 8rem) }`)) 42 | 43 | // XXX: does this one work? 44 | // calc(var(--x) + 5px) 45 | // ((5px + 22%) + 5px) 46 | // (((5px + 22%) + 5px) + 5px) = 22% + 15px 47 | // (((5px + 22%) + 22%) + 5px) = 44% + 10px 48 | } 49 | -------------------------------------------------------------------------------- /internal/transformer/media_feature_range_test.go: -------------------------------------------------------------------------------- 1 | package transformer_test 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stephen/cssc/internal/transformer" 7 | "github.com/stephen/cssc/transforms" 8 | "github.com/stretchr/testify/assert" 9 | ) 10 | 11 | func compileMediaQueryRanges(o *transformer.Options) { 12 | o.MediaFeatureRanges = transforms.MediaFeatureRangesTransform 13 | } 14 | 15 | func TestMediaQueryRanges(t *testing.T) { 16 | 17 | assert.Equal(t, `@media (min-width:200px) and (max-width:600px),(min-width:200px),(max-width:600px){}`, 18 | Transform(t, compileMediaQueryRanges, `@media (200px <= width <= 600px), (200px <= width), (width <= 600px) {}`)) 19 | 20 | assert.Equal(t, `@media (max-width:200px) and (min-width:600px),(max-width:200px),(min-width:600px){}`, 21 | Transform(t, compileMediaQueryRanges, `@media (200px >= width >= 600px), (200px >= width), (width >= 600px) {}`)) 22 | 23 | assert.Equal(t, `@media (min-width:25.001%) and (max-width:74.999%){}`, Transform(t, compileMediaQueryRanges, `@media (25% < width < 75%) {}`)) 24 | assert.Equal(t, `@media (min-width:200.001px) and (max-width:599.999px){}`, Transform(t, compileMediaQueryRanges, `@media (200px < width < 600px) {}`)) 25 | assert.Equal(t, `@media (max-width:599.999px){}`, Transform(t, compileMediaQueryRanges, `@media (width < 600px) {}`)) 26 | assert.Equal(t, `@media (min-width:200.001px){}`, Transform(t, compileMediaQueryRanges, `@media (200px < width) {}`)) 27 | assert.Equal(t, `@media (max-width:199.999px) and (min-width:600.001px){}`, Transform(t, compileMediaQueryRanges, `@media (200px > width > 600px) {}`)) 28 | assert.Equal(t, `@media (min-width:600.001px){}`, Transform(t, compileMediaQueryRanges, `@media (width > 600px) {}`)) 29 | assert.Equal(t, `@media (max-width:199.999px){}`, Transform(t, compileMediaQueryRanges, `@media (200px > width) {}`)) 30 | } 31 | 32 | func TestMediaQueryRanges_Passthrough(t *testing.T) { 33 | assert.Equal(t, `@media (200px>=width>=600px),(200px>=width),(width>=600px){}`, 34 | Transform(t, nil, `@media (200px >= width >= 600px), (200px >= width), (width >= 600px) {}`)) 35 | } 36 | -------------------------------------------------------------------------------- /internal/transformer/transformer.go: -------------------------------------------------------------------------------- 1 | package transformer 2 | 3 | import ( 4 | "fmt" 5 | "reflect" 6 | "strconv" 7 | "strings" 8 | 9 | "github.com/stephen/cssc/internal/ast" 10 | "github.com/stephen/cssc/internal/logging" 11 | "github.com/stephen/cssc/internal/sources" 12 | "github.com/stephen/cssc/transforms" 13 | ) 14 | 15 | // Options is the set of options for transformation. 16 | type Options struct { 17 | // OriginalSource is used to report error locations. 18 | // XXX: work even if original source is not passed by having janky errors. 19 | OriginalSource *sources.Source 20 | 21 | // Reporter is the reporter for errors and warnings. 22 | Reporter logging.Reporter 23 | 24 | // Options is the set of transform options. 
Some transforms may need additional context passed in. 25 | transforms.Options 26 | 27 | // ImportReplacements is the set of import references to inline. ImportReplacements must be non-nil 28 | // if ImportRules is set to ImportRulesInline. 29 | ImportReplacements map[*ast.AtRule]*ast.Stylesheet 30 | } 31 | 32 | // Transform takes a pass over the input AST and runs various 33 | // transforms. 34 | func Transform(s *ast.Stylesheet, opts Options) *ast.Stylesheet { 35 | t := &transformer{ 36 | Options: opts, 37 | } 38 | 39 | if opts.Reporter == nil { 40 | t.Reporter = logging.DefaultReporter 41 | } 42 | 43 | if opts.CustomProperties != transforms.CustomPropertiesPassthrough { 44 | t.variables = make(map[string][]ast.Value) 45 | } 46 | 47 | if opts.CustomMediaQueries != transforms.CustomMediaQueriesPassthrough { 48 | t.customMedia = make(map[string]*ast.MediaQuery) 49 | } 50 | 51 | if opts.ImportReplacements == nil && opts.ImportRules == transforms.ImportRulesInline { 52 | t.Reporter.AddError(fmt.Errorf("ImportRules is set to ImportRulesInline, but ImportReplacements is not set")) 53 | } 54 | 55 | s.Nodes = t.transformNodes(s.Nodes) 56 | 57 | return s 58 | } 59 | 60 | // transformer takes a pass over the AST and makes 61 | // modifications to the AST, depending on the settings. 62 | type transformer struct { 63 | Options 64 | 65 | variables map[string][]ast.Value 66 | customMedia map[string]*ast.MediaQuery 67 | } 68 | 69 | func (t *transformer) addError(loc ast.Node, fmt string, args ...interface{}) { 70 | t.Reporter.AddError(logging.LocationErrorf(t.OriginalSource, loc.Location(), fmt, args...)) 71 | } 72 | 73 | func (t *transformer) addWarn(loc ast.Node, fmt string, args ...interface{}) { 74 | t.Reporter.AddError(logging.LocationWarnf(t.OriginalSource, loc.Location(), fmt, args...)) 75 | } 76 | 77 | func (t *transformer) transformSelectors(nodes []*ast.Selector) []*ast.Selector { 78 | newNodes := make([]*ast.Selector, 0, len(nodes)) 79 | for _, n := range nodes { 80 | newParts := make([]ast.SelectorPart, 0, len(n.Parts)) 81 | for index, p := range n.Parts { 82 | switch part := p.(type) { 83 | case *ast.PseudoClassSelector: 84 | if part.Name != "any-link" || t.AnyLink == transforms.AnyLinkPassthrough { 85 | newParts = append(newParts, p) 86 | break 87 | } 88 | 89 | // Replace one of them with :link. 90 | newParts = append( 91 | newParts, 92 | &ast.PseudoClassSelector{Name: "link"}, 93 | ) 94 | 95 | // Make a duplicate with :visited. 
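				// The duplicate keeps the selector's original Parts slice, with this index
				// swapped to :visited, and is appended ahead of the rewritten selector, so
				// .test:any-link prints as .test:visited,.test:link (see any_link_test.go).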
96 | duplicate := *n 97 | duplicate.Parts[index] = &ast.PseudoClassSelector{Name: "visited"} 98 | newNodes = append(newNodes, &duplicate) 99 | 100 | default: 101 | newParts = append(newParts, p) 102 | } 103 | } 104 | 105 | n.Parts = newParts 106 | newNodes = append(newNodes, n) 107 | } 108 | return newNodes 109 | } 110 | 111 | func (t *transformer) transformNodes(nodes []ast.Node) []ast.Node { 112 | rv := make([]ast.Node, 0, len(nodes)) 113 | for _, value := range nodes { 114 | switch node := value.(type) { 115 | case *ast.QualifiedRule: 116 | func() { 117 | selList, ok := node.Prelude.(*ast.SelectorList) 118 | if !ok { 119 | return 120 | } 121 | 122 | isRoot := false 123 | for _, sel := range selList.Selectors { 124 | if len(sel.Parts) != 1 { 125 | isRoot = true 126 | break 127 | } 128 | } 129 | 130 | if !isRoot { 131 | return 132 | } 133 | 134 | rootSel, ok := selList.Selectors[0].Parts[0].(*ast.PseudoClassSelector) 135 | if !ok { 136 | return 137 | } 138 | 139 | if rootSel.Name != "root" { 140 | return 141 | } 142 | 143 | declBlock, ok := node.Block.(*ast.DeclarationBlock) 144 | if !ok { 145 | return 146 | } 147 | 148 | newDecls := make([]ast.Declarationish, 0, len(declBlock.Declarations)) 149 | for _, decl := range declBlock.Declarations { 150 | switch d := decl.(type) { 151 | case *ast.Declaration: 152 | 153 | if strings.HasPrefix(d.Property, "--") && t.variables != nil { 154 | t.variables[d.Property] = d.Values 155 | continue 156 | } 157 | newDecls = append(newDecls, d) 158 | default: 159 | newDecls = append(newDecls, d) 160 | } 161 | 162 | } 163 | 164 | declBlock.Declarations = newDecls 165 | }() 166 | 167 | selList, ok := node.Prelude.(*ast.SelectorList) 168 | if !ok { 169 | t.addError(node.Prelude, "expected selector list for qualified rule") 170 | } 171 | selList.Selectors = t.transformSelectors(selList.Selectors) 172 | node.Block = t.transformBlock(node.Block) 173 | 174 | if node.Block == nil { 175 | continue 176 | } 177 | rv = append(rv, node) 178 | 179 | case *ast.AtRule: 180 | switch node.Name { 181 | case "import": 182 | if t.ImportReplacements == nil { 183 | rv = append(rv, node) 184 | } 185 | 186 | imported, ok := t.ImportReplacements[node] 187 | if !ok { 188 | rv = append(rv, node) 189 | break 190 | } 191 | 192 | if len(node.Preludes) > 1 { 193 | t.addWarn(node, "@import transform does not yet support @supports or media queries") 194 | } 195 | 196 | rv = append(rv, t.transformNodes(imported.Nodes)...) 
197 | 198 | case "custom-media": 199 | func() { 200 | if t.customMedia == nil { 201 | return 202 | } 203 | 204 | if len(node.Preludes) != 2 { 205 | return 206 | } 207 | 208 | name, ok := node.Preludes[0].(*ast.Identifier) 209 | if !ok { 210 | return 211 | } 212 | 213 | query, ok := node.Preludes[1].(*ast.MediaQuery) 214 | if !ok { 215 | return 216 | } 217 | 218 | t.customMedia[name.Value] = query 219 | }() 220 | 221 | case "media": 222 | mq := node.Preludes[0].(*ast.MediaQueryList) 223 | mq.Queries = t.transformMediaQueries(mq.Queries) 224 | rv = append(rv, node) 225 | 226 | default: 227 | rv = append(rv, node) 228 | } 229 | 230 | default: 231 | rv = append(rv, value) 232 | } 233 | } 234 | return rv 235 | } 236 | 237 | func (t *transformer) transformMediaQueries(queries []*ast.MediaQuery) []*ast.MediaQuery { 238 | newQueries := make([]*ast.MediaQuery, 0, len(queries)) 239 | for _, q := range queries { 240 | q.Parts = t.transformMediaQueryParts(q.Parts) 241 | newQueries = append(newQueries, q) 242 | } 243 | return newQueries 244 | } 245 | 246 | // addToValue takes an ast.Value and adds diff to it. 247 | // XXX: change this out to only operate on ast.Dimension, since there are no other valid number types now. 248 | func (t *transformer) addToValue(v ast.Value, diff float64) ast.Value { 249 | if diff == 0 { 250 | return v 251 | } 252 | 253 | switch oldValue := v.(type) { 254 | case *ast.Dimension: 255 | f, err := strconv.ParseFloat(oldValue.Value, 10) 256 | if err != nil { 257 | t.addError(oldValue, "could not parse dimension value to lower media range: %s", oldValue.Value) 258 | return oldValue 259 | } 260 | return &ast.Dimension{Value: strconv.FormatFloat(f+diff, 'f', -1, 64), Unit: oldValue.Unit} 261 | 262 | default: 263 | t.addError(oldValue, "tried to modify non-numeric value. expected dimension, percentage, or number, but got: %s", reflect.TypeOf(v).String()) 264 | return v 265 | } 266 | } 267 | 268 | func (t *transformer) transformMediaFeatureRange(part *ast.MediaFeatureRange) []ast.MediaQueryPart { 269 | asIs := []ast.MediaQueryPart{part} 270 | if t.MediaFeatureRanges == transforms.MediaFeatureRangesPassthrough { 271 | return asIs 272 | } 273 | 274 | var newParts []ast.MediaQueryPart 275 | if part.LeftValue != nil { 276 | var direction string 277 | var diff float64 278 | switch part.Operator { 279 | case ">": 280 | diff = -.001 281 | fallthrough 282 | case ">=": 283 | direction = "max" 284 | case "<": 285 | diff = .001 286 | fallthrough 287 | case "<=": 288 | direction = "min" 289 | } 290 | 291 | newParts = append(newParts, &ast.MediaFeaturePlain{ 292 | // XXX: replace this allocation with a lookup. 293 | Property: &ast.Identifier{Value: fmt.Sprintf("%s-%s", direction, part.Property.Value)}, 294 | Value: t.addToValue(part.LeftValue, diff), 295 | }) 296 | } 297 | 298 | if part.RightValue != nil { 299 | if part.LeftValue != nil { 300 | newParts = append(newParts, &ast.Identifier{Value: "and"}) 301 | } 302 | 303 | var direction string 304 | var diff float64 305 | switch part.Operator { 306 | case ">": 307 | diff = .001 308 | fallthrough 309 | case ">=": 310 | direction = "min" 311 | case "<": 312 | diff = -.001 313 | fallthrough 314 | case "<=": 315 | direction = "max" 316 | } 317 | 318 | newParts = append(newParts, &ast.MediaFeaturePlain{ 319 | // XXX: replace this allocation with a lookup. 
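			// e.g. for (200px < width < 600px), this right-hand half becomes
			// (max-width:599.999px), as covered in media_feature_range_test.go.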
320 | Property: &ast.Identifier{Value: fmt.Sprintf("%s-%s", direction, part.Property.Value)}, 321 | Value: t.addToValue(part.RightValue, diff), 322 | }) 323 | } 324 | 325 | return newParts 326 | } 327 | 328 | func (t *transformer) transformMediaQueryParts(parts []ast.MediaQueryPart) []ast.MediaQueryPart { 329 | newParts := make([]ast.MediaQueryPart, 0, len(parts)) 330 | for _, p := range parts { 331 | switch part := p.(type) { 332 | case *ast.MediaFeaturePlain: 333 | if part.Value != nil || !strings.HasPrefix(part.Property.Value, "--") { 334 | newParts = append(newParts, p) 335 | break 336 | } 337 | 338 | if t.customMedia == nil { 339 | newParts = append(newParts, p) 340 | break 341 | } 342 | 343 | replacement, ok := t.customMedia[part.Property.Value] 344 | if !ok { 345 | newParts = append(newParts, p) 346 | break 347 | } 348 | 349 | newParts = append(newParts, replacement.Parts...) 350 | 351 | case *ast.MediaFeatureRange: 352 | newParts = append(newParts, t.transformMediaFeatureRange(part)...) 353 | 354 | default: 355 | newParts = append(newParts, p) 356 | } 357 | } 358 | return newParts 359 | } 360 | 361 | func (t *transformer) transformBlock(block ast.Block) ast.Block { 362 | switch node := block.(type) { 363 | case *ast.QualifiedRuleBlock: 364 | // for _, d := range node.Rules { 365 | // // t.transform(d) 366 | // } 367 | if len(node.Rules) == 0 { 368 | return nil 369 | } 370 | 371 | case *ast.DeclarationBlock: 372 | node.Declarations = t.transformDeclarations(node.Declarations) 373 | if len(node.Declarations) == 0 { 374 | return nil 375 | } 376 | 377 | default: 378 | panic("unknown block") 379 | } 380 | 381 | return block 382 | } 383 | 384 | func (t *transformer) transformDeclarations(decls []ast.Declarationish) []ast.Declarationish { 385 | newDecls := make([]ast.Declarationish, 0, len(decls)) 386 | for _, decl := range decls { 387 | switch d := decl.(type) { 388 | case *ast.Declaration: 389 | d.Values = t.transformValues(d.Values) 390 | newDecls = append(newDecls, d) 391 | default: 392 | newDecls = append(newDecls, d) 393 | } 394 | } 395 | 396 | return newDecls 397 | } 398 | 399 | func (t *transformer) transformValues(values []ast.Value) []ast.Value { 400 | rv := make([]ast.Value, 0, len(values)) 401 | for _, value := range values { 402 | switch v := value.(type) { 403 | case *ast.Function: 404 | newValues := []ast.Value{v} 405 | func() { 406 | if v.Name != "var" { 407 | return 408 | } 409 | 410 | if t.variables == nil { 411 | return 412 | } 413 | 414 | if len(v.Arguments) == 0 { 415 | t.addError(v, "expected at least one argument to var()") 416 | return 417 | } 418 | 419 | varName, ok := v.Arguments[0].(*ast.Identifier) 420 | if !ok { 421 | t.addError(v, "expected identifier as argument to var()") 422 | return 423 | } 424 | 425 | vals, ok := t.variables[varName.Value] 426 | if !ok { 427 | // The first argument is the value, the second is a comma. 
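					// e.g. for var(--var-width, 2rem 1rem 3rem) the identifier is Arguments[0],
					// the comma is Arguments[1], and the fallback values 2rem 1rem 3rem are
					// Arguments[2:] (see TestCustomProperties_Fallback).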
428 | if len(v.Arguments) > 2 { 429 | newValues = v.Arguments[2:] 430 | return 431 | } 432 | 433 | t.addWarn(v, "use of undefined variable without fallback: %s", varName.Value) 434 | return 435 | } 436 | 437 | newValues = vals 438 | }() 439 | 440 | func() { 441 | if v.Name != "calc" { 442 | return 443 | } 444 | 445 | if t.CalcReduction == transforms.CalcReductionPassthrough { 446 | return 447 | } 448 | 449 | if len(v.Arguments) != 1 { 450 | t.addWarn(v, "expected single argument for calc()") 451 | return 452 | } 453 | 454 | args := t.transformValues([]ast.Value{v.Arguments[0]}) 455 | if len(args) != 1 { 456 | t.addWarn(v, "expected single argument for calc()") 457 | return 458 | } 459 | 460 | arg, ok := args[0].(*ast.MathExpression) 461 | if !ok { 462 | return 463 | } 464 | 465 | l, r := t.transformValues([]ast.Value{arg.Left}), t.transformValues([]ast.Value{arg.Right}) 466 | if len(l) != 1 { 467 | t.addWarn(arg.Left, "expected left-hand side of math expression to be a single value") 468 | return 469 | } 470 | if len(r) != 1 { 471 | t.addWarn(arg.Right, "expected right-hand side of math expression to be a single value") 472 | return 473 | } 474 | 475 | newValue := t.evaluateMathExpression(l[0], r[0], arg.Operator) 476 | if newValue == nil { 477 | return 478 | } 479 | 480 | newValues = []ast.Value{newValue} 481 | }() 482 | 483 | rv = append(rv, newValues...) 484 | 485 | default: 486 | rv = append(rv, v) 487 | } 488 | } 489 | 490 | return rv 491 | } 492 | 493 | func (t *transformer) doMath(left, right, op string) (float64, error) { 494 | leftValue, err := strconv.ParseFloat(left, 10) 495 | if err != nil { 496 | return 0, fmt.Errorf("could not parse dimension value: %s", left) 497 | } 498 | 499 | rightValue, err := strconv.ParseFloat(right, 10) 500 | if err != nil { 501 | return 0, fmt.Errorf("could not parse dimension value: %s", right) 502 | } 503 | switch op { 504 | case "+": 505 | return leftValue + rightValue, nil 506 | case "-": 507 | return leftValue - rightValue, nil 508 | case "*": 509 | return leftValue * rightValue, nil 510 | case "/": 511 | if rightValue == 0 { 512 | return 0, fmt.Errorf("cannot divide by zero") 513 | } 514 | return leftValue / rightValue, nil 515 | default: 516 | return 0, fmt.Errorf("unknown op: %s", op) 517 | } 518 | } 519 | 520 | // evaluateMathExpression attempts to add l + r. If sub is true, subtraction will be applied 521 | // instead. For sum to succeed, both l and r must be of the same type. 522 | // See notes from https://www.w3.org/TR/css-values-3/#calc-type-checking. 523 | func (t *transformer) evaluateMathExpression(l, r ast.Value, op string) ast.Value { 524 | if expr, ok := l.(*ast.MathExpression); ok { 525 | if evaluated := t.evaluateMathExpression(expr.Left, expr.Right, expr.Operator); evaluated != nil { 526 | l = evaluated 527 | } 528 | } 529 | 530 | if expr, ok := r.(*ast.MathExpression); ok { 531 | if evaluated := t.evaluateMathExpression(expr.Left, expr.Right, expr.Operator); evaluated != nil { 532 | r = evaluated 533 | } 534 | } 535 | 536 | switch op { 537 | case "+", "-": 538 | switch left := l.(type) { 539 | case *ast.Dimension: 540 | right, ok := r.(*ast.Dimension) 541 | if !ok { 542 | return nil 543 | } 544 | 545 | if left.Unit != right.Unit { 546 | if left.Unit == "" && right.Unit != "" { 547 | // Invalid, because we cannot mix number types and lengths, e.g. (2 + 5rem). 
548 | t.addError(left, "cannot add number type and %s type together", right.Unit) 549 | } 550 | 551 | if left.Unit != "" && right.Unit == "" { 552 | // Invalid, because we cannot mix number types and lengths, e.g. (5rem + 2). 553 | t.addError(left, "cannot add number type and %s type together", left.Unit) 554 | } 555 | 556 | // Valid css, but we cannot reduce (e.g. 2px + 3rem). 557 | return nil 558 | } 559 | 560 | newValue, err := t.doMath(left.Value, right.Value, op) 561 | if err != nil { 562 | t.addError(l, err.Error()) 563 | return nil 564 | } 565 | 566 | return &ast.Dimension{ 567 | Value: strconv.FormatFloat(newValue, 'f', -1, 64), 568 | Unit: left.Unit, 569 | } 570 | 571 | // case *ast.MathExpression: 572 | // if left.op is not + or -, return nil 573 | 574 | // We know left.Right is an ast.Dimension because of the structure of the parse tree 575 | // We know Right is an ast.Dimension 576 | // If left.Right.Unit != right.Unit and left.Left.Unit != right.Unit, return nil 577 | // left.Right.Unit case: doMath on left.Right.Value and right.Value and return left with Right = [the result of doMath] 578 | // left.Left.Unit case: doMath on left.Left.Value and right.Value and return left with Left = [the result of doMath] 579 | 580 | default: 581 | t.addError(l, "cannot perform %s on this type", op) 582 | return nil 583 | } 584 | 585 | case "*": 586 | leftAsDimension, leftIsDimension := l.(*ast.Dimension) 587 | rightAsDimension, rightIsDimension := r.(*ast.Dimension) 588 | if !leftIsDimension || !rightIsDimension { 589 | return nil 590 | } 591 | 592 | if leftAsDimension.Unit != "" && rightAsDimension.Unit != "" { 593 | t.addError(l, "one side of multiplication must be a number (non-percentage/dimension)") 594 | return nil 595 | } 596 | 597 | maybeWithUnit, number := leftAsDimension, rightAsDimension 598 | if leftAsDimension.Unit == "" { 599 | maybeWithUnit, number = rightAsDimension, leftAsDimension 600 | } 601 | 602 | // XXX: handle cases like calc(var(--x) * 2 * 2) 603 | 604 | newValue, err := t.doMath(maybeWithUnit.Value, number.Value, op) 605 | if err != nil { 606 | t.addError(l, err.Error()) 607 | return nil 608 | } 609 | 610 | return &ast.Dimension{ 611 | Value: strconv.FormatFloat(newValue, 'f', -1, 64), 612 | Unit: maybeWithUnit.Unit, 613 | } 614 | 615 | case "/": 616 | rightAsDimension, rightIsDimension := r.(*ast.Dimension) 617 | if !rightIsDimension || rightAsDimension.Unit != "" { 618 | t.addError(l, "right side of division must be a number (non-percentage/dimension)") 619 | return nil 620 | } 621 | 622 | switch left := l.(type) { 623 | case *ast.Dimension: 624 | newValue, err := t.doMath(left.Value, rightAsDimension.Value, op) 625 | if err != nil { 626 | t.addError(l, err.Error()) 627 | return nil 628 | } 629 | 630 | return &ast.Dimension{ 631 | Value: strconv.FormatFloat(newValue, 'f', -1, 64), 632 | Unit: left.Unit, 633 | } 634 | 635 | // case *ast.MathExpression 636 | 637 | default: 638 | t.addError(l, "cannot perform %s on this type", op) 639 | return nil 640 | } 641 | 642 | default: 643 | t.addError(l, "unknown op: %s", op) 644 | return nil 645 | } 646 | } 647 | -------------------------------------------------------------------------------- /internal/transformer/transformer_test.go: -------------------------------------------------------------------------------- 1 | package transformer_test 2 | 3 | import ( 4 | "testing" 5 | 6 | "github.com/stephen/cssc/internal/parser" 7 | "github.com/stephen/cssc/internal/printer" 8 | "github.com/stephen/cssc/internal/sources" 9 | 
"github.com/stephen/cssc/internal/transformer" 10 | "github.com/stretchr/testify/require" 11 | ) 12 | 13 | func Transform(t testing.TB, modifier func(o *transformer.Options), s string) string { 14 | source := &sources.Source{ 15 | Path: "main.css", 16 | Content: s, 17 | } 18 | ast, err := parser.Parse(source) 19 | 20 | require.NoError(t, err) 21 | o := &transformer.Options{ 22 | OriginalSource: source, 23 | Reporter: &reporter{}, 24 | } 25 | 26 | if modifier != nil { 27 | modifier(o) 28 | } 29 | 30 | out, err := printer.Print(transformer.Transform(ast, *o), printer.Options{}) 31 | require.NoError(t, err) 32 | return out 33 | } 34 | 35 | type reporter struct{} 36 | 37 | func (r *reporter) AddError(err error) { 38 | panic(err) 39 | } 40 | -------------------------------------------------------------------------------- /resolver.go: -------------------------------------------------------------------------------- 1 | package cssc 2 | 3 | // Resolver implements a method of resolving an import spec (e.g. @import "test.css") 4 | // into a path on the filesystem. 5 | type Resolver interface { 6 | // Resolve spec relative to from. 7 | Resolve(spec, fromDir string) (path string, err error) 8 | } 9 | -------------------------------------------------------------------------------- /resolver/node_resolver.go: -------------------------------------------------------------------------------- 1 | package resolver 2 | 3 | import ( 4 | "encoding/json" 5 | "errors" 6 | "io/ioutil" 7 | "os" 8 | "path/filepath" 9 | "strings" 10 | 11 | "github.com/samsarahq/go/oops" 12 | ) 13 | 14 | // ErrNotFound is returned when the resolver cannot resolve a path. 15 | var ErrNotFound = errors.New("could not resolve css module") 16 | 17 | // NodeResolver implements the default node import resolution strategy. See 18 | // https://www.typescriptlang.org/docs/handbook/module-resolution.html. 19 | // 20 | // When resolving node_modules, the resolver will use the style attribute in 21 | // package.json for resolution. 22 | type NodeResolver struct { 23 | // BaseURL is the root directory of the project. It serves 24 | // the same purpose as baseUrl in tsconfig.json. If the value is relative, 25 | // it will be resolved against the current working directory. 26 | BaseURL string 27 | } 28 | 29 | // Resolve implements Resolver. 30 | func (r *NodeResolver) Resolve(spec, fromDir string) (string, error) { 31 | if isRelative := strings.HasPrefix(spec, "../") || strings.HasPrefix(spec, "./") || strings.HasPrefix(spec, "/"); isRelative { 32 | path := filepath.Join(fromDir, spec) 33 | if res, err := r.resolve(path); err != nil { 34 | return "", oops.Wrapf(err, "could not resolve %s relative to %s", spec, fromDir) 35 | } else { 36 | return res, nil 37 | } 38 | } 39 | 40 | // For non-relative imports, first try resolving against baseUrl. 41 | if r.BaseURL != "" { 42 | if res, err := r.resolve(filepath.Join(r.BaseURL, spec)); err == nil { 43 | return res, nil 44 | } 45 | } 46 | 47 | // Lastly, try looking through node_modules. 48 | res, err := r.resolveFromNodeModules(spec, fromDir) 49 | if err != nil { 50 | return "", oops.Wrapf(ErrNotFound, "could not resolve absolute path %s from %s", spec, fromDir) 51 | } 52 | 53 | return res, nil 54 | } 55 | 56 | type packageJSON struct { 57 | Style string `json:"style"` 58 | } 59 | 60 | // resolve attempts to resolve given absolute path as a file, then 61 | // as a package folder, then as a folder with an index. 
62 | func (r *NodeResolver) resolve(absPath string) (string, error) { 63 | // Attempt to resolve first as a file. 64 | info, err := os.Stat(absPath) 65 | if err != nil { 66 | if os.IsNotExist(err) { 67 | // If it doesn't exist, try to resolve with an extension. 68 | withExtension := absPath + ".css" 69 | info, err := os.Stat(withExtension) 70 | if err != nil { 71 | if os.IsNotExist(err) { 72 | return "", oops.Wrapf(ErrNotFound, "could not resolve as file: %s or %s", absPath, withExtension) 73 | } 74 | return "", oops.Wrapf(err, "failure during resolution") 75 | } 76 | 77 | if info.IsDir() { 78 | return "", oops.Wrapf(ErrNotFound, "%s exists, but is directory", withExtension) 79 | } 80 | 81 | return withExtension, nil 82 | } 83 | 84 | return "", oops.Wrapf(err, "failure during resolution") 85 | } 86 | 87 | if !info.IsDir() { 88 | return absPath, nil 89 | } 90 | 91 | // Otherwise, try to resolve as a directory. 92 | path, err := r.resolveAsDir(absPath) 93 | if err == nil { 94 | return path, nil 95 | } 96 | 97 | return "", oops.Wrapf(err, "could not resolve path: %s", absPath) 98 | } 99 | 100 | // resolveAsDir takes a directory path and resolves its css entry point. 101 | func (r *NodeResolver) resolveAsDir(path string) (string, error) { 102 | pkgPath := filepath.Join(path, "package.json") 103 | pkg, err := ioutil.ReadFile(pkgPath) 104 | if err != nil { 105 | // If there is no package.json, then try resolving an index.css file. 106 | indexPath := filepath.Join(path, "index.css") 107 | if info, err := os.Stat(indexPath); err != nil || info.IsDir() { 108 | if os.IsNotExist(err) { 109 | return "", oops.Wrapf(ErrNotFound, "could not resolve as directory: %s", path) 110 | } 111 | return "", oops.Wrapf(err, "failure during resolution") 112 | } 113 | 114 | return indexPath, nil 115 | } 116 | 117 | // Look for the style attribute in the package.json. 118 | var pkgContent packageJSON 119 | if err := json.Unmarshal(pkg, &pkgContent); err != nil { 120 | return "", oops.Wrapf(err, "failed to read package.json: %s", pkgPath) 121 | } 122 | 123 | if pkgContent.Style == "" { 124 | return "", oops.Wrapf(ErrNotFound, "package.json exists, but has no style attribute: %s", pkgPath) 125 | } 126 | 127 | stylePath := filepath.Join(path, pkgContent.Style) 128 | if info, err := os.Stat(stylePath); err != nil || info.IsDir() { 129 | if os.IsNotExist(err) { 130 | return "", oops.Wrapf(ErrNotFound, "package.json has style attribute, but it cannot be resolved: %s (to %s)", pkgContent.Style, stylePath) 131 | } 132 | return "", oops.Wrapf(err, "failure during resolution") 133 | } 134 | 135 | return stylePath, nil 136 | } 137 | 138 | // resolveAsNodeModule walks directories from fromDir to find node_modules paths. 
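// For example, starting from /a/b/c it tries /a/b/c/node_modules/<module>, then
// /a/b/node_modules/<module>, and so on, walking up toward the filesystem root.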
139 | func (r *NodeResolver) resolveFromNodeModules(module, fromDir string) (string, error) { 140 | currentDir := fromDir 141 | for currentDir != "/" { 142 | modulePkgPath := filepath.Join(currentDir, "node_modules", module) 143 | 144 | if res, err := r.resolve(modulePkgPath); err == nil { 145 | return res, nil 146 | } 147 | 148 | currentDir = filepath.Dir(currentDir) 149 | } 150 | 151 | return "", oops.Wrapf(ErrNotFound, "could not find absolute path in node_modules") 152 | } 153 | -------------------------------------------------------------------------------- /resolver/node_resolver_test.go: -------------------------------------------------------------------------------- 1 | package resolver_test 2 | 3 | import ( 4 | "path/filepath" 5 | "testing" 6 | 7 | "github.com/stephen/cssc/resolver" 8 | "github.com/stretchr/testify/assert" 9 | "github.com/stretchr/testify/require" 10 | ) 11 | 12 | func TestResolver_Relative(t *testing.T) { 13 | testdata, err := filepath.Abs("testdata/") 14 | require.NoError(t, err) 15 | 16 | r := resolver.NodeResolver{} 17 | 18 | result, err := r.Resolve("./case-1.css", testdata) 19 | assert.NoError(t, err) 20 | assert.Equal(t, filepath.Join(testdata, "case-1.css"), result) 21 | 22 | result, err = r.Resolve("./case-1", testdata) 23 | assert.NoError(t, err) 24 | assert.Equal(t, filepath.Join(testdata, "case-1.css"), result) 25 | 26 | result, err = r.Resolve("./case-2", testdata) 27 | assert.NoError(t, err) 28 | assert.Equal(t, filepath.Join(testdata, "case-2/index.css"), result) 29 | 30 | result, err = r.Resolve("./case-3", testdata) 31 | assert.NoError(t, err) 32 | assert.Equal(t, filepath.Join(testdata, "case-3/whatever.css"), result) 33 | 34 | result, err = r.Resolve("./case-2/index.css", testdata) 35 | assert.NoError(t, err) 36 | assert.Equal(t, filepath.Join(testdata, "case-2/index.css"), result) 37 | 38 | result, err = r.Resolve("./case-3/whatever.css", testdata) 39 | assert.NoError(t, err) 40 | assert.Equal(t, filepath.Join(testdata, "case-3/whatever.css"), result) 41 | 42 | result, err = r.Resolve("./case-0", testdata) 43 | assert.Error(t, err) 44 | assert.Equal(t, "", result) 45 | 46 | result, err = r.Resolve("./case-0.css", testdata) 47 | assert.Error(t, err) 48 | assert.Equal(t, "", result) 49 | 50 | result, err = r.Resolve("./case-7", testdata) 51 | assert.Error(t, err) 52 | assert.Equal(t, "", result) 53 | 54 | result, err = r.Resolve("./case-7.css", testdata) 55 | assert.Error(t, err) 56 | assert.Equal(t, "", result) 57 | 58 | result, err = r.Resolve("./case-8.css", testdata) 59 | assert.NoError(t, err) 60 | assert.Equal(t, filepath.Join(testdata, "case-8.css/index.css"), result) 61 | } 62 | 63 | func TestResolver_Absolute_WithBaseURL(t *testing.T) { 64 | testdata, err := filepath.Abs("testdata/") 65 | require.NoError(t, err) 66 | 67 | r := resolver.NodeResolver{BaseURL: testdata} 68 | 69 | result, err := r.Resolve("case-1.css", testdata) 70 | assert.NoError(t, err) 71 | assert.Equal(t, filepath.Join(testdata, "case-1.css"), result) 72 | 73 | result, err = r.Resolve("case-2", testdata) 74 | assert.NoError(t, err) 75 | assert.Equal(t, filepath.Join(testdata, "case-2/index.css"), result) 76 | 77 | result, err = r.Resolve("case-3", testdata) 78 | assert.NoError(t, err) 79 | assert.Equal(t, filepath.Join(testdata, "case-3/whatever.css"), result) 80 | 81 | result, err = r.Resolve("case-0", testdata) 82 | assert.Error(t, err) 83 | assert.Equal(t, "", result) 84 | 85 | result, err = r.Resolve("case-7", testdata) 86 | assert.Error(t, err) 87 | assert.Equal(t, 
"", result) 88 | 89 | result, err = r.Resolve("case-7.css", testdata) 90 | assert.Error(t, err) 91 | assert.Equal(t, "", result) 92 | } 93 | 94 | func TestResolver_Absolute(t *testing.T) { 95 | testdata, err := filepath.Abs("testdata/nested/1/2/") 96 | require.NoError(t, err) 97 | 98 | r := resolver.NodeResolver{} 99 | 100 | result, err := r.Resolve("case-4", testdata) 101 | assert.NoError(t, err) 102 | assert.Equal(t, filepath.Join(testdata, "./node_modules/case-4.css"), result) 103 | 104 | result, err = r.Resolve("case-5", testdata) 105 | assert.NoError(t, err) 106 | assert.Equal(t, filepath.Join(testdata, "../node_modules/case-5/index.css"), result) 107 | 108 | result, err = r.Resolve("case-6", testdata) 109 | assert.NoError(t, err) 110 | assert.Equal(t, filepath.Join(testdata, "../../node_modules/case-6/dist/whatever.css"), result) 111 | 112 | result, err = r.Resolve("case-6/dist/unreferenced.css", testdata) 113 | assert.NoError(t, err) 114 | assert.Equal(t, filepath.Join(testdata, "../../node_modules/case-6/dist/unreferenced.css"), result) 115 | 116 | result, err = r.Resolve("case-0", testdata) 117 | assert.Error(t, err) 118 | assert.Equal(t, "", result) 119 | } 120 | -------------------------------------------------------------------------------- /resolver/testdata/case-1.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/stephen/cssc/1efd103a49152b8253f62805cae27e34709f1427/resolver/testdata/case-1.css -------------------------------------------------------------------------------- /resolver/testdata/case-2/index.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/stephen/cssc/1efd103a49152b8253f62805cae27e34709f1427/resolver/testdata/case-2/index.css -------------------------------------------------------------------------------- /resolver/testdata/case-3/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "style": "./whatever.css" 3 | } -------------------------------------------------------------------------------- /resolver/testdata/case-3/whatever.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/stephen/cssc/1efd103a49152b8253f62805cae27e34709f1427/resolver/testdata/case-3/whatever.css -------------------------------------------------------------------------------- /resolver/testdata/case-7.css/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/stephen/cssc/1efd103a49152b8253f62805cae27e34709f1427/resolver/testdata/case-7.css/.gitkeep -------------------------------------------------------------------------------- /resolver/testdata/case-8.css/index.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/stephen/cssc/1efd103a49152b8253f62805cae27e34709f1427/resolver/testdata/case-8.css/index.css -------------------------------------------------------------------------------- /resolver/testdata/nested/1/2/node_modules/case-4.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/stephen/cssc/1efd103a49152b8253f62805cae27e34709f1427/resolver/testdata/nested/1/2/node_modules/case-4.css -------------------------------------------------------------------------------- /resolver/testdata/nested/1/node_modules/case-5/index.css: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/stephen/cssc/1efd103a49152b8253f62805cae27e34709f1427/resolver/testdata/nested/1/node_modules/case-5/index.css -------------------------------------------------------------------------------- /resolver/testdata/nested/node_modules/case-6/dist/unreferenced.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/stephen/cssc/1efd103a49152b8253f62805cae27e34709f1427/resolver/testdata/nested/node_modules/case-6/dist/unreferenced.css -------------------------------------------------------------------------------- /resolver/testdata/nested/node_modules/case-6/dist/whatever.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/stephen/cssc/1efd103a49152b8253f62805cae27e34709f1427/resolver/testdata/nested/node_modules/case-6/dist/whatever.css -------------------------------------------------------------------------------- /resolver/testdata/nested/node_modules/case-6/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "style": "./dist/whatever.css" 3 | } -------------------------------------------------------------------------------- /testdata/brokenimports/index.css: -------------------------------------------------------------------------------- 1 | @import "./nonsense.css"; 2 | 3 | div { 4 | background-color: green; 5 | } 6 | 7 | body { 8 | color: white; 9 | } 10 | -------------------------------------------------------------------------------- /testdata/crlf/monaco.css: -------------------------------------------------------------------------------- 1 | /*--------------------------------------------------------------------------------------------- 2 | * Copyright (c) Microsoft Corporation. All rights reserved. 3 | * Licensed under the MIT License. See License.txt in the project root for license information. 
4 | *--------------------------------------------------------------------------------------------*/ 5 | 6 | .monaco-action-bar { 7 | text-align: right; 8 | overflow: hidden; 9 | white-space: nowrap; 10 | } 11 | 12 | .monaco-action-bar .actions-container { 13 | display: flex; 14 | margin: 0 auto; 15 | padding: 0; 16 | width: 100%; 17 | justify-content: flex-end; 18 | } 19 | 20 | .monaco-action-bar.vertical .actions-container { 21 | display: inline-block; 22 | } 23 | 24 | .monaco-action-bar.reverse .actions-container { 25 | flex-direction: row-reverse; 26 | } 27 | 28 | .monaco-action-bar .action-item { 29 | cursor: pointer; 30 | display: inline-block; 31 | transition: transform 50ms ease; 32 | position: relative; /* DO NOT REMOVE - this is the key to preventing the ghosting icon bug in Chrome 42 */ 33 | } 34 | 35 | .monaco-action-bar .action-item.disabled { 36 | cursor: default; 37 | } 38 | 39 | .monaco-action-bar.animated .action-item.active { 40 | transform: scale(1.272019649, 1.272019649); /* 1.272019649 = √φ */ 41 | } 42 | 43 | .monaco-action-bar .action-item .icon, 44 | .monaco-action-bar .action-item .codicon { 45 | display: inline-block; 46 | } 47 | 48 | .monaco-action-bar .action-label { 49 | font-size: 11px; 50 | margin-right: 4px; 51 | } 52 | 53 | .monaco-action-bar .action-item.disabled .action-label, 54 | .monaco-action-bar .action-item.disabled .action-label:hover { 55 | opacity: 0.4; 56 | } 57 | 58 | /* Vertical actions */ 59 | 60 | .monaco-action-bar.vertical { 61 | text-align: left; 62 | } 63 | 64 | .monaco-action-bar.vertical .action-item { 65 | display: block; 66 | } 67 | 68 | .monaco-action-bar.vertical .action-label.separator { 69 | display: block; 70 | border-bottom: 1px solid #bbb; 71 | padding-top: 1px; 72 | margin-left: .8em; 73 | margin-right: .8em; 74 | } 75 | 76 | .monaco-action-bar.animated.vertical .action-item.active { 77 | transform: translate(5px, 0); 78 | } 79 | 80 | .secondary-actions .monaco-action-bar .action-label { 81 | margin-left: 6px; 82 | } 83 | 84 | /* Action Items */ 85 | .monaco-action-bar .action-item.select-container { 86 | overflow: hidden; /* somehow the dropdown overflows its container, we prevent it here to not push */ 87 | flex: 1; 88 | max-width: 170px; 89 | min-width: 60px; 90 | display: flex; 91 | align-items: center; 92 | justify-content: center; 93 | margin-right: 10px; 94 | } 95 | -------------------------------------------------------------------------------- /testdata/imports/another.css: -------------------------------------------------------------------------------- 1 | .another { 2 | margin: 0 auto; 3 | } 4 | -------------------------------------------------------------------------------- /testdata/imports/index.css: -------------------------------------------------------------------------------- 1 | @import "./other.css"; 2 | 3 | div { 4 | background-color: green; 5 | } 6 | 7 | body { 8 | color: white; 9 | } 10 | -------------------------------------------------------------------------------- /testdata/imports/other.css: -------------------------------------------------------------------------------- 1 | .other { 2 | padding: 10rem; 3 | } 4 | 5 | @import "./another.css"; 6 | -------------------------------------------------------------------------------- /testdata/simple/index.css: -------------------------------------------------------------------------------- 1 | div { 2 | background-color: green; 3 | } 4 | 5 | body { 6 | color: white; 7 | } 8 | -------------------------------------------------------------------------------- 
/transforms/transforms.go: -------------------------------------------------------------------------------- 1 | package transforms 2 | 3 | // ImportRules controls transform behavior for @imports. 4 | type ImportRules int 5 | 6 | const ( 7 | // ImportRulesPassthrough passes @imports down without changes. It is the default. 8 | ImportRulesPassthrough ImportRules = iota 9 | // ImportRulesFollow passes @imports down without changes. It also follows import specifiers 10 | // and adds them to the compilation output. 11 | ImportRulesFollow ImportRules = iota 12 | // ImportRulesInline inlines imported content where an @import statement is seen. In this 13 | // version, it ignores @supports rules and media queries. 14 | ImportRulesInline 15 | ) 16 | 17 | // MediaFeatureRanges controls transform options for feature ranges, 18 | // introduced in CSS Media Queries Level 4. 19 | // See: https://www.w3.org/TR/mediaqueries-4/#mq-range-context. 20 | type MediaFeatureRanges int 21 | 22 | const ( 23 | // MediaFeatureRangesPassthrough passes media feature ranges through without changes. It is the default. 24 | MediaFeatureRangesPassthrough MediaFeatureRanges = iota 25 | // MediaFeatureRangesTransform transforms ranges into best-effort min- and max- values. When 26 | // > and < are used, we follow the guidance from https://www.w3.org/TR/mediaqueries-5/#mq-min-max and 27 | // use min/max values offset by .001. 28 | MediaFeatureRangesTransform 29 | ) 30 | 31 | // AnyLink controls transform options for :any-link selectors, 32 | // introduced in CSS Selectors Level 4. 33 | // See: https://www.w3.org/TR/selectors-4/#the-any-link-pseudo 34 | type AnyLink int 35 | 36 | const ( 37 | // AnyLinkPassthrough passes :any-link down without changes. It is the default. 38 | AnyLinkPassthrough AnyLink = iota 39 | // AnyLinkTransform transforms :any-link selectors into selectors for both :visited and :link. 40 | AnyLinkTransform 41 | ) 42 | 43 | // CustomProperties controls transform options for custom properties (--var) and the var() function 44 | // from CSS Variables Level 1. 45 | // See: https://www.w3.org/TR/css-variables-1/. 46 | type CustomProperties int 47 | 48 | const ( 49 | // CustomPropertiesPassthrough passes variable declarations and var() down without changes. It is the default. 50 | CustomPropertiesPassthrough CustomProperties = iota 51 | // CustomPropertiesTransformRoot will transform properties defined in :root selectors. Custom property definitions 52 | // under any other selectors will be ignored and passed through. 53 | CustomPropertiesTransformRoot 54 | ) 55 | 56 | // CustomMediaQueries controls transform options for @custom-media usage, specified in CSS Media Queries Level 5. 57 | // See: https://www.w3.org/TR/mediaqueries-5/#custom-mq. 58 | type CustomMediaQueries int 59 | 60 | const ( 61 | // CustomMediaQueriesPassthrough passes custom media query definitions and usages through. It is the default. 62 | CustomMediaQueriesPassthrough CustomMediaQueries = iota 63 | // CustomMediaQueriesTransform will transform custom media queries when used in @media rules. 64 | CustomMediaQueriesTransform 65 | ) 66 | 67 | // CalcReduction controls transform options for reducing math functions. 68 | // calc(), min(), max(), and clamp() are supported. 69 | // See: https://drafts.csswg.org/css-values-4/#calc-func. 70 | type CalcReduction int 71 | 72 | const ( 73 | // CalcReductionPassthrough passes through any math functions. It is the default.
74 | CalcReductionPassthrough CalcReduction = iota 75 | // CalcReductionReduce will attempt to reduce all math functions. If a call cannot be fully reduced, it 76 | // will be left as-is. 77 | CalcReductionReduce 78 | ) 79 | 80 | // Options controls which transforms to run. By default, 81 | // no transforms are run. 82 | type Options struct { 83 | ImportRules 84 | MediaFeatureRanges 85 | AnyLink 86 | CustomProperties 87 | CustomMediaQueries 88 | CalcReduction 89 | } 90 | --------------------------------------------------------------------------------
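As a quick illustration of how these options plug into the pipeline, below is a minimal sketch that enables only `MediaFeatureRangesTransform`, mirroring the `Transform` helper in `transformer_test.go`. Because the parser, printer, and transformer packages are internal, such a snippet would have to live inside this module (for example, alongside the tests). The `panicReporter` type, the promoted `MediaFeatureRanges` field, and the expected output shown in the final comment are assumptions drawn from how the transformer reads its options, not a documented API.

```go
package transformer_test

import (
	"fmt"

	"github.com/stephen/cssc/internal/parser"
	"github.com/stephen/cssc/internal/printer"
	"github.com/stephen/cssc/internal/sources"
	"github.com/stephen/cssc/internal/transformer"
	"github.com/stephen/cssc/transforms"
)

// panicReporter mirrors the reporter in transformer_test.go: it only needs AddError.
type panicReporter struct{}

func (r *panicReporter) AddError(err error) { panic(err) }

// mediaRangeSketch parses a stylesheet, enables the media-feature-range
// transform, and prints the result.
func mediaRangeSketch() (string, error) {
	source := &sources.Source{
		Path:    "example.css",
		Content: "@media (width >= 768px) and (width < 1024px) { a { color: red; } }",
	}

	stylesheet, err := parser.Parse(source)
	if err != nil {
		return "", err
	}

	opts := transformer.Options{
		OriginalSource: source,
		Reporter:       &panicReporter{},
	}
	// Assumption: MediaFeatureRanges is promoted from an embedded transforms.Options,
	// since the transformer reads t.MediaFeatureRanges directly.
	opts.MediaFeatureRanges = transforms.MediaFeatureRangesTransform

	out, err := printer.Print(transformer.Transform(stylesheet, opts), printer.Options{})
	if err != nil {
		return "", err
	}

	// Expected output, roughly: @media (min-width: 768px) and (max-width: 1023.999px) { ... }
	fmt.Println(out)
	return out, nil
}
```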