├── upstream ├── in │ ├── base.css │ ├── components.css │ ├── utilities.css │ └── tests │ │ ├── container.css │ │ ├── space-x-16.css │ │ ├── btn-blue-hover.css │ │ ├── sm-leading-tight.css │ │ └── btn-blue.css ├── package.json ├── tailwind.config.js └── build.sh ├── twhandler ├── testdata │ └── demo1.css ├── twhandler_test.go └── twhandler.go ├── .gitignore ├── Makefile ├── twembed ├── doc.go ├── twembed.go └── embed_mk.go ├── go.mod ├── dist.go ├── LICENSE ├── twfiles └── twfiles.go ├── twpurge ├── tokenizer.go ├── twpurge_test.go ├── rule-names.go └── twpurge.go ├── go.sum ├── apply.go ├── cmd └── gotailwindcss │ └── main.go ├── README.md ├── converter_test.go └── converter.go /upstream/in/base.css: -------------------------------------------------------------------------------- 1 | @tailwind base; -------------------------------------------------------------------------------- /upstream/in/components.css: -------------------------------------------------------------------------------- 1 | @tailwind components; -------------------------------------------------------------------------------- /upstream/in/utilities.css: -------------------------------------------------------------------------------- 1 | @tailwind utilities; -------------------------------------------------------------------------------- /upstream/in/tests/container.css: -------------------------------------------------------------------------------- 1 | .test1 { 2 | @apply container; 3 | } 4 | -------------------------------------------------------------------------------- /upstream/in/tests/space-x-16.css: -------------------------------------------------------------------------------- 1 | .test1 { 2 | @apply space-x-16; 3 | } 4 | -------------------------------------------------------------------------------- /upstream/in/tests/btn-blue-hover.css: -------------------------------------------------------------------------------- 1 | .btn-blue:hover { 2 | @apply bg-blue-700; 3 | } -------------------------------------------------------------------------------- /twhandler/testdata/demo1.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | 3 | .test1 { 4 | @apply px-1; 5 | } 6 | -------------------------------------------------------------------------------- /upstream/in/tests/sm-leading-tight.css: -------------------------------------------------------------------------------- 1 | .test-leading-tight { 2 | @apply leading-tight; 3 | } 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.out 2 | *.test 3 | upstream/out 4 | .vscode 5 | cmd/gotailwindcss/gotailwindcss 6 | upstream/node_modules 7 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: generate 2 | generate: 3 | cd upstream; npm install; ./build.sh 4 | cd twembed; go run embed_mk.go 5 | -------------------------------------------------------------------------------- /upstream/in/tests/btn-blue.css: -------------------------------------------------------------------------------- 1 | .btn-blue { 2 | @apply bg-blue-500 text-white font-bold py-2 px-4 rounded; 3 | } 4 | -------------------------------------------------------------------------------- /twembed/doc.go: -------------------------------------------------------------------------------- 1 | // 
Package twembed contains an embedded copy of the TailwindCSS distribution. 2 | // TODO: explain how it's used; the idea is to only include this in a project 3 | // if you're okay with adding its size to the resulting executable. 4 | package twembed 5 | -------------------------------------------------------------------------------- /upstream/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "@tailwindcss/aspect-ratio": "^0.2.0", 4 | "@tailwindcss/forms": "^0.3.2", 5 | "@tailwindcss/line-clamp": "^0.2.0", 6 | "@tailwindcss/typography": "^0.4.0", 7 | "npx": "^10.2.2", 8 | "tailwindcss": "^2.0.4" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /upstream/tailwind.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | purge: [], 3 | darkMode: false, 4 | theme: { 5 | extend: {}, 6 | }, 7 | variants: { 8 | extend: {}, 9 | }, 10 | plugins: [ 11 | require('@tailwindcss/typography'), 12 | require('@tailwindcss/forms'), 13 | require('@tailwindcss/line-clamp'), 14 | require('@tailwindcss/aspect-ratio'), 15 | ], 16 | } 17 | -------------------------------------------------------------------------------- /go.mod: -------------------------------------------------------------------------------- 1 | module github.com/gotailwindcss/tailwind 2 | 3 | go 1.14 4 | 5 | require ( 6 | github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 // indirect 7 | github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d // indirect 8 | github.com/cespare/xxhash v1.1.0 9 | github.com/tdewolff/minify/v2 v2.9.0 10 | github.com/tdewolff/parse/v2 v2.5.0 11 | gopkg.in/alecthomas/kingpin.v2 v2.2.6 12 | ) 13 | -------------------------------------------------------------------------------- /upstream/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Build the files we need to update the internal embedded copy of tailwindcss. 4 | # This is only used/needed by the maintainer of this project. 5 | 6 | # Dependencies: 7 | # - node, npm, npx 8 | 9 | mkdir -p out 10 | mkdir -p out/tests 11 | 12 | # run them all, ignore errors 13 | for n in `find in -type f`; do 14 | outn=`echo $n|sed 's/in\//out\//g'` 15 | echo "Processing $n => $outn" 16 | npx tailwindcss build $n -o $outn 17 | done 18 | -------------------------------------------------------------------------------- /dist.go: -------------------------------------------------------------------------------- 1 | package tailwind 2 | 3 | import "io" 4 | 5 | // Dist is where tailwind CSS data can be read. 6 | type Dist interface { 7 | // OpenDist should return a new ReadCloser for the specific tailwind section name. 8 | // Valid names are "base", "utilities" and "components" (only those exact strings, 9 | // without .css or anything like that) and will be updated along with 10 | // what the TailwindCSS project does. The caller is responsible for ensuring 11 | // Close() is called on the returned ReadCloser when the error is nil. 12 | OpenDist(name string) (io.ReadCloser, error) 13 | } 14 | 15 | // // DefaultDist is the Dist used by default by New(). 16 | // // Importing the twembed package sets this. 
17 | // var DefaultDist Dist = nil 18 | -------------------------------------------------------------------------------- /twhandler/twhandler_test.go: -------------------------------------------------------------------------------- 1 | package twhandler_test 2 | 3 | import ( 4 | "io/ioutil" 5 | "net/http" 6 | "net/http/httptest" 7 | "path/filepath" 8 | "strings" 9 | "testing" 10 | 11 | "github.com/gotailwindcss/tailwind/twembed" 12 | "github.com/gotailwindcss/tailwind/twhandler" 13 | ) 14 | 15 | func TestHandler(t *testing.T) { 16 | 17 | td, _ := filepath.Abs("testdata") 18 | h := twhandler.New(http.Dir(td), "/td1", twembed.New()) 19 | 20 | w := httptest.NewRecorder() 21 | r := httptest.NewRequest("GET", "/td1/demo1.css", nil) 22 | h.ServeHTTP(w, r) 23 | res := w.Result() 24 | defer res.Body.Close() 25 | b, err := ioutil.ReadAll(res.Body) 26 | if err != nil { 27 | t.Fatal(err) 28 | } 29 | 30 | bs := string(b) 31 | if !strings.Contains(bs, `b,strong{`) { 32 | t.Errorf("missing expected string") 33 | } 34 | if !strings.Contains(bs, `.test1{padding-left:0.25rem;`) { 35 | t.Errorf("didn't match .test1") 36 | } 37 | if t.Failed() { 38 | t.Logf("b = %s", b) 39 | } 40 | 41 | // TODO: table test with cases for compressor, 304, mod time of file changes, multiple files, cache disabled, etc. 42 | 43 | } 44 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Brad Peabody 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /twembed/twembed.go: -------------------------------------------------------------------------------- 1 | package twembed 2 | 3 | import ( 4 | "fmt" 5 | "io" 6 | "io/ioutil" 7 | "strings" 8 | ) 9 | 10 | //go:generate go run embed_mk.go 11 | 12 | // New returns an instance that implements tailwind.Dist using data embedded in this package. 13 | func New() Dist { 14 | return Dist{} 15 | } 16 | 17 | // func init() { 18 | // // package import causes embedded version to be default if no other is specified 19 | // if tailwind.DefaultDist == nil { 20 | // tailwind.DefaultDist = New() 21 | // } 22 | // } 23 | 24 | // Dist implements tailwind.Dist 25 | type Dist struct{} 26 | 27 | // OpenDist implements the interface and returns embedded data. 
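// A minimal usage sketch (error handling shortened; "base", "components" and
// "utilities" are the only valid names):
//
//	rc, err := twembed.New().OpenDist("base")
//	if err != nil {
//		// handle error
//	}
//	defer rc.Close()
//	baseCSS, _ := ioutil.ReadAll(rc)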
28 | func (d Dist) OpenDist(name string) (io.ReadCloser, error) { 29 | 30 | switch name { 31 | case "base": 32 | return ioutil.NopCloser(strings.NewReader(twbase())), nil 33 | case "utilities": 34 | return ioutil.NopCloser(strings.NewReader(twutilities())), nil 35 | case "components": 36 | return ioutil.NopCloser(strings.NewReader(twcomponents())), nil 37 | } 38 | 39 | return nil, fmt.Errorf("twembed unknown name %q", name) 40 | 41 | } 42 | 43 | // PurgeKeyMap returns a map of all of the possible keys that can be purged. 44 | func (d Dist) PurgeKeyMap() map[string]struct{} { 45 | return twPurgeKeyMap 46 | } 47 | -------------------------------------------------------------------------------- /twembed/embed_mk.go: -------------------------------------------------------------------------------- 1 | // +build ignore 2 | 3 | package main 4 | 5 | import ( 6 | "fmt" 7 | "io/ioutil" 8 | "os" 9 | 10 | "github.com/gotailwindcss/tailwind/twfiles" 11 | "github.com/gotailwindcss/tailwind/twpurge" 12 | ) 13 | 14 | // read from upstream folder and build copy of tailwindcss embedded in this project 15 | 16 | func main() { 17 | 18 | out, err := os.Create("embed_gen.go") 19 | if err != nil { 20 | panic(err) 21 | } 22 | defer out.Close() 23 | 24 | twf := twfiles.New("../upstream/out") 25 | 26 | fmt.Fprintf(out, `package twembed 27 | 28 | // WARNING: DO NOT MODIFY, THIS FILE IS GENERATED BY embed_mk.go 29 | 30 | `) 31 | 32 | for _, name := range []string{"base", "components", "utilities"} { 33 | 34 | rc, err := twf.OpenDist(name) 35 | if err != nil { 36 | panic(err) 37 | } 38 | defer rc.Close() 39 | 40 | b, err := ioutil.ReadAll(rc) 41 | if err != nil { 42 | panic(err) 43 | } 44 | 45 | fmt.Fprintf(out, "func tw%s() string {\n\treturn %q\n}\n\n", name, b) 46 | 47 | } 48 | 49 | // build the purge keys 50 | pkm, err := twpurge.PurgeKeysFromDist(twf) 51 | if err != nil { 52 | panic(err) 53 | } 54 | 55 | fmt.Fprintf(out, "var twPurgeKeyMap = %#v\n\n", pkm) 56 | 57 | // var b []byte 58 | 59 | // b, err = ioutil.ReadFile("../upstream/out/base.css") 60 | // if err != nil { 61 | // panic(err) 62 | // } 63 | // fmt.Fprintf(out, "func twbase() string {\n\treturn %q\n}\n\n", b) 64 | 65 | // b, err = ioutil.ReadFile("../upstream/out/components.css") 66 | // if err != nil { 67 | // panic(err) 68 | // } 69 | // fmt.Fprintf(out, "func twcomponents() string {\n\treturn %q\n}\n\n", b) 70 | 71 | // b, err = ioutil.ReadFile("../upstream/out/utilities.css") 72 | // if err != nil { 73 | // panic(err) 74 | // } 75 | // fmt.Fprintf(out, "func twutilities() string {\n\treturn %q\n}\n\n", b) 76 | 77 | } 78 | -------------------------------------------------------------------------------- /twfiles/twfiles.go: -------------------------------------------------------------------------------- 1 | // Package twfiles implements tailwind.Dist against a filesystem. 2 | // Implementations are provided against the OS filesystem and for net/http.FileSystem. 3 | package twfiles 4 | 5 | import ( 6 | "io" 7 | "net/http" 8 | ) 9 | 10 | // New returns a tailwind.Dist instance that reads from the underlying OS directory you provide. 11 | // Implementation is done via net/http Filesystem. 12 | func New(baseDir string) *HTTPFiles { 13 | return &HTTPFiles{ 14 | FileSystem: http.Dir(baseDir), 15 | } 16 | } 17 | 18 | // NewHTTP returns an HTTP file instance which reads from the underlying net/http.Filesystem. 19 | // Files are mapped by default so e.g. requests for "base" look for "base.css". 
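// A hedged usage sketch (the directory path is illustrative); the default
// name-to-file mapping can be overridden via the NameMapFunc field on the
// returned HTTPFiles:
//
//	dist := twfiles.NewHTTP(http.Dir("./tailwind/dist"))
//	rc, err := dist.OpenDist("base") // opens ./tailwind/dist/base.css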
20 | func NewHTTP(fs http.FileSystem) *HTTPFiles { 21 | return &HTTPFiles{ 22 | FileSystem: fs, 23 | } 24 | } 25 | 26 | // HTTPFiles implements tailwind.Dist against a net/http.FileSystem. 27 | // By default the file name mapings are the name of the tailwind section plus 28 | // ".css", e.g. "base.css", "utilities.css", "components.css". 29 | type HTTPFiles struct { 30 | http.FileSystem // underlying http FileSystem 31 | NameMapFunc func(name string) string // name conversion func, default returns name+".css" 32 | } 33 | 34 | // OpenDist implements tailwind.Dist. 35 | func (hf *HTTPFiles) OpenDist(name string) (io.ReadCloser, error) { 36 | 37 | var fileName string 38 | if hf.NameMapFunc != nil { 39 | fileName = hf.NameMapFunc(name) 40 | } else { 41 | fileName = name + ".css" 42 | } 43 | 44 | f, err := hf.FileSystem.Open(fileName) 45 | if err != nil { 46 | return nil, err 47 | } 48 | 49 | return f, nil 50 | } 51 | 52 | // // PurgeKeyMap returns a map of all of the possible keys that can be purged. 53 | // func (hf *HTTPFiles) PurgeKeyMap() map[string]struct{} { 54 | // ret := make(map[string]struct{}, 1024) 55 | // panic(fmt.Errorf("not yet implemented")) 56 | // return ret 57 | // } 58 | -------------------------------------------------------------------------------- /twpurge/tokenizer.go: -------------------------------------------------------------------------------- 1 | package twpurge 2 | 3 | import ( 4 | "bufio" 5 | "bytes" 6 | "io" 7 | ) 8 | 9 | // Tokenizer returns the next token from a markup file. 10 | type Tokenizer interface { 11 | NextToken() ([]byte, error) // returns a token or error (not both), io.EOF indicates end of stream 12 | } 13 | 14 | func isbr(c byte) bool { 15 | switch c { 16 | // NOTE: We're going to assume ASCII is fine here - we could do some UTF-8 fanciness but I don't know 17 | // of any situation where it would matter for our purposes here. 18 | case '<', '>', '"', '\'', '`', 19 | '\t', '\n', '\v', '\f', '\r', ' ': 20 | return true 21 | } 22 | return false 23 | } 24 | 25 | func NewDefaultTokenizer(r io.Reader) *DefaultTokenizer { 26 | s := bufio.NewScanner(r) 27 | s.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) { 28 | 29 | // log.Printf("Split(data=%q, atEOF=%v)", data, atEOF) 30 | // defer func() { 31 | // log.Printf("Split(data=%q, atEOF=%v) returning (advance=%d, token=%q, err=%v)", data, atEOF, advance, token, err) 32 | // }() 33 | 34 | // consume any break text 35 | for len(data) > 0 { 36 | if !isbr(data[0]) { 37 | break 38 | } 39 | data = data[1:] 40 | advance++ 41 | } 42 | 43 | // now read thorugh any non-break text 44 | var i int 45 | for i = 0; i < len(data); i++ { 46 | 47 | if isbr(data[i]) { 48 | // if we encounter a break, then return what we've read so far as the token 49 | if i > 0 { 50 | token = data[:i] 51 | } 52 | advance += i 53 | return 54 | } 55 | 56 | // otherwise just continue 57 | } 58 | 59 | // if we get here it means we read until the end of the buffer 60 | // and it's still in the middle of non-break text 61 | 62 | if atEOF { // this is the end of the stream, return this last as a token 63 | if i > 0 { 64 | token = data[:i] 65 | } 66 | advance += i 67 | return 68 | } 69 | 70 | // not end of stream, tell it we need more (advance may have been incremented above) 71 | return advance, nil, nil 72 | }) 73 | return &DefaultTokenizer{ 74 | s: s, 75 | } 76 | } 77 | 78 | // DefaultTokenizer implements Tokenizer with a sensible default tokenization. 
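// For example, the input `<a class="px-1 sm:w-full">` is split on whitespace,
// quotes and angle brackets, and NextToken trims the characters /\:= from token
// edges, yielding the tokens "a", "class", "px-1" and "sm:w-full".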
79 | type DefaultTokenizer struct { 80 | s *bufio.Scanner 81 | } 82 | 83 | func (t *DefaultTokenizer) NextToken() ([]byte, error) { 84 | for t.s.Scan() { 85 | // fmt.Println(len(scanner.Bytes()) == 6) 86 | b := t.s.Bytes() 87 | if len(b) == 0 { 88 | continue 89 | } 90 | b = bytes.Trim(b, `/\:=`) 91 | return b, nil 92 | } 93 | if err := t.s.Err(); err != nil { 94 | // fmt.Fprintln(os.Stderr, "shouldn't see an error scanning a string") 95 | return nil, err 96 | } 97 | return nil, io.EOF 98 | } 99 | -------------------------------------------------------------------------------- /twpurge/twpurge_test.go: -------------------------------------------------------------------------------- 1 | package twpurge 2 | 3 | import ( 4 | "errors" 5 | "io" 6 | "reflect" 7 | "strings" 8 | "testing" 9 | ) 10 | 11 | func TestDefaultTokenizer(t *testing.T) { 12 | 13 | tz := NewDefaultTokenizer(strings.NewReader(` 14 | <div class="sm:px-1 lg:w-10 w-1/2"></div>
15 | `)) 16 | 17 | var tokList []string 18 | for { 19 | tok, err := tz.NextToken() 20 | if err != nil { 21 | if errors.Is(err, io.EOF) { 22 | break 23 | } 24 | t.Fatal(err) 25 | } 26 | tokList = append(tokList, string(tok)) 27 | // t.Logf("TOKEN(len=%d): %s", len(tok), tok) 28 | } 29 | 30 | tokMap := make(map[string]bool, len(tokList)) 31 | for _, tok := range tokList { 32 | tokMap[tok] = true 33 | } 34 | if !tokMap["sm:px-1"] { 35 | t.Fail() 36 | } 37 | if !tokMap["lg:w-10"] { 38 | t.Fail() 39 | } 40 | if !tokMap["w-1/2"] { 41 | t.Fail() 42 | } 43 | if tokMap["class="] { // should not have trailing equal sign 44 | t.Fail() 45 | } 46 | 47 | } 48 | 49 | func TestPurgeKeysFromReader(t *testing.T) { 50 | 51 | pk, err := PurgeKeysFromReader(strings.NewReader(` 52 | .md\:bg-purple-500 { 53 | --bg-opacity: 1; 54 | background-color: #9f7aea; 55 | background-color: rgba(159, 122, 234, var(--bg-opacity)) 56 | } 57 | .space-x-0 > :not(template) ~ :not(template) { 58 | --space-x-reverse: 0; 59 | margin-right: calc(0px * var(--space-x-reverse)); 60 | margin-left: calc(0px * calc(1 - var(--space-x-reverse))) 61 | } 62 | .xl\:w-auto { 63 | width: auto 64 | } 65 | .focus\:placeholder-gray-200:focus:-ms-input-placeholder { 66 | --placeholder-opacity: 1; 67 | color: #edf2f7; 68 | color: rgba(237, 242, 247, var(--placeholder-opacity)) 69 | } 70 | .placeholder-indigo-800::placeholder { 71 | --placeholder-opacity: 1; 72 | color: #434190; 73 | color: rgba(67, 65, 144, var(--placeholder-opacity)) 74 | } 75 | .-my-56 { 76 | margin-top: -14rem; 77 | margin-bottom: -14rem 78 | } 79 | @media (min-width: 640px) { 80 | .sm\:space-y-0 > :not(template) ~ :not(template) { 81 | --space-y-reverse: 0; 82 | margin-top: calc(0px * calc(1 - var(--space-y-reverse))); 83 | margin-bottom: calc(0px * var(--space-y-reverse)) 84 | } 85 | } 86 | .scale-y-125 { 87 | --transform-scale-y: 1.25 88 | } 89 | `)) 90 | if err != nil { 91 | t.Fatal(err) 92 | } 93 | 94 | v := struct{}{} 95 | if !reflect.DeepEqual(pk, map[string]struct{}{ 96 | "md:bg-purple-500": v, 97 | "space-x-0": v, 98 | "xl:w-auto": v, 99 | "focus:placeholder-gray-200": v, 100 | "placeholder-indigo-800": v, 101 | "-my-56": v, 102 | "sm:space-y-0": v, 103 | "scale-y-125": v, 104 | }) { 105 | t.Errorf("unexpected result: %+v", pk) 106 | } 107 | 108 | } 109 | -------------------------------------------------------------------------------- /go.sum: -------------------------------------------------------------------------------- 1 | github.com/OneOfOne/xxhash v1.2.2 h1:KMrpdQIwFcEqXDklaen+P1axHaj9BSKzvpUUfnHldSE= 2 | github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= 3 | github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM= 4 | github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= 5 | github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d h1:UQZhZ2O0vMHr2cI+DC1Mbh0TJxzA3RcLoMsFw+aXw7E= 6 | github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= 7 | github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko= 8 | github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc= 9 | github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927/go.mod h1:h/aW8ynjgkuj+NQRlZcDbAbM1ORAbXjXX77sX7T289U= 10 | github.com/davecgh/go-spew v1.1.0 h1:ZDRjVQ15GmhC3fiQ8ni8+OwkZQO4DARzQgrnXU1Liz8= 11 | 
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= 12 | github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= 13 | github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= 14 | github.com/matryer/try v0.0.0-20161228173917-9ac251b645a2/go.mod h1:0KeJpeMD6o+O4hW7qJOT7vyQPKrWmj26uf5wMc/IiIs= 15 | github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= 16 | github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= 17 | github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72 h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ= 18 | github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA= 19 | github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= 20 | github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= 21 | github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= 22 | github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= 23 | github.com/tdewolff/minify/v2 v2.9.0 h1:9c078np6EmaBWPteq/bp43YgIlfylH8qEt+FaMfNWBo= 24 | github.com/tdewolff/minify/v2 v2.9.0/go.mod h1:dkCdk+keSmcckneKfxOgf8/X/3iyCnz1tjIas5Sn1Cs= 25 | github.com/tdewolff/parse/v2 v2.5.0 h1:SE6eIhpSLAEgYx1EpGXIcGyggfw4omUOcSCaeaH7WJU= 26 | github.com/tdewolff/parse/v2 v2.5.0/go.mod h1:WzaJpRSbwq++EIQHYIRTpbYKNA3gn9it1Ik++q4zyho= 27 | github.com/tdewolff/test v1.0.6 h1:76mzYJQ83Op284kMT+63iCNCI7NEERsIN8dLM+RiKr4= 28 | github.com/tdewolff/test v1.0.6/go.mod h1:6DAvZliBAAnD7rhVgwaM7DE5/d9NMOAJ09SqYqeK4QE= 29 | golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 30 | golang.org/x/sys v0.0.0-20200724161237-0e2f3a69832c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 31 | gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc= 32 | gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= 33 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM= 34 | gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= 35 | gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= 36 | gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= 37 | -------------------------------------------------------------------------------- /twpurge/rule-names.go: -------------------------------------------------------------------------------- 1 | package twpurge 2 | 3 | import ( 4 | "bytes" 5 | "errors" 6 | "fmt" 7 | "io" 8 | 9 | "github.com/tdewolff/parse/v2" 10 | "github.com/tdewolff/parse/v2/css" 11 | ) 12 | 13 | type purgeKeyMapper interface { 14 | PurgeKeyMap() map[string]struct{} 15 | } 16 | 17 | // FIXME: this should probably be called RuleNamesFromDist, and document the idea of "rule names" vs "purge keys". 18 | // PurgeKeysFromDist runs PurgeKeysFromReader on the appropriate(s) file from the dist. 19 | // A check is done to see if Dist implements interface { PurgeKeyMap() map[string]struct{} } 20 | // and this is used if avialable. Otherwise the appropriate files(s) are processed from 21 | // the dist using PurgeKeysFromReader. 
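// A hedged usage sketch (twembed's Dist provides PurgeKeyMap, so its precomputed
// map is returned directly):
//
//	keys, err := twpurge.PurgeKeysFromDist(twembed.New())
//	if err != nil {
//		// handle error
//	}
//	_, ok := keys["px-1"] // true when a ".px-1" rule exists in the dist's utilities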
22 | func PurgeKeysFromDist(dist Dist) (map[string]struct{}, error) { 23 | 24 | pkmr, ok := dist.(purgeKeyMapper) 25 | if ok { 26 | return pkmr.PurgeKeyMap(), nil 27 | } 28 | 29 | f, err := dist.OpenDist("utilities") 30 | if err != nil { 31 | return nil, err 32 | } 33 | defer f.Close() 34 | return PurgeKeysFromReader(f) 35 | } 36 | 37 | // takes the rule info from a BeginRulesetGrammar returns the purge key if there is one or else empty string 38 | func ruleToPurgeKey(data []byte, tokens []css.Token) string { 39 | 40 | if len(data) != 0 { 41 | panic("unexpected data") 42 | } 43 | 44 | // we're looking for Delim('.') followed by Ident() - we disregard everything after 45 | if len(tokens) < 2 { 46 | return "" 47 | } 48 | if tokens[0].TokenType != css.DelimToken || !bytes.Equal(tokens[0].Data, []byte(".")) { 49 | return "" 50 | } 51 | if tokens[1].TokenType != css.IdentToken { 52 | return "" 53 | } 54 | 55 | // if we get here we're good, we just need to unescape the ident (e.g. `\:` becomes just `:`) 56 | return cssUnescape(tokens[1].Data) 57 | } 58 | 59 | func cssUnescape(b []byte) string { 60 | 61 | var buf bytes.Buffer 62 | 63 | var i int 64 | for i = 0; i < len(b); i++ { 65 | if b[i] == '\\' { 66 | // set up buf with the stuff we've already scanned 67 | buf.Grow(len(b)) 68 | buf.Write(b[:i]) 69 | goto foundEsc 70 | } 71 | continue 72 | } 73 | // no escaping needed 74 | return string(b) 75 | 76 | foundEsc: 77 | inEsc := false 78 | for ; i < len(b); i++ { 79 | if b[i] == '\\' && !inEsc { 80 | inEsc = true 81 | continue 82 | } 83 | buf.WriteByte(b[i]) 84 | inEsc = false 85 | } 86 | return buf.String() 87 | } 88 | 89 | // PurgeKeysFromReader parses the contents of this reader as CSS and builds a map 90 | // of purge keys. 91 | func PurgeKeysFromReader(cssR io.Reader) (map[string]struct{}, error) { 92 | ret := make(map[string]struct{}) 93 | 94 | inp := parse.NewInput(cssR) 95 | p := css.NewParser(inp, false) 96 | 97 | mainLoop: 98 | for { 99 | 100 | gt, tt, data := p.Next() 101 | _, _ = tt, data 102 | 103 | switch gt { 104 | 105 | case css.ErrorGrammar: 106 | err := p.Err() 107 | if errors.Is(err, io.EOF) { 108 | break mainLoop 109 | } 110 | return ret, err 111 | 112 | case css.AtRuleGrammar: 113 | case css.BeginAtRuleGrammar: 114 | case css.EndAtRuleGrammar: 115 | case css.QualifiedRuleGrammar: 116 | k := ruleToPurgeKey(nil, p.Values()) 117 | if k != "" { 118 | ret[k] = struct{}{} 119 | } 120 | case css.BeginRulesetGrammar: 121 | k := ruleToPurgeKey(nil, p.Values()) 122 | if k != "" { 123 | ret[k] = struct{}{} 124 | } 125 | case css.DeclarationGrammar: 126 | case css.CustomPropertyGrammar: 127 | case css.EndRulesetGrammar: 128 | case css.TokenGrammar: 129 | case css.CommentGrammar: 130 | 131 | default: // verify we aren't missing a type 132 | panic(fmt.Errorf("unexpected grammar type %v at offset %v", gt, inp.Offset())) 133 | 134 | } 135 | 136 | } 137 | 138 | return ret, nil 139 | } 140 | -------------------------------------------------------------------------------- /apply.go: -------------------------------------------------------------------------------- 1 | package tailwind 2 | 3 | import ( 4 | "bytes" 5 | "errors" 6 | "fmt" 7 | "io" 8 | 9 | "github.com/tdewolff/parse/v2" 10 | "github.com/tdewolff/parse/v2/css" 11 | ) 12 | 13 | type applier struct { 14 | m map[string]string 15 | } 16 | 17 | func (a *applier) apply(names []string) ([]byte, error) { 18 | ret := make([]byte, 0, len(names)*8) 19 | for _, name := range names { 20 | css, ok := a.m[name] 21 | if !ok { 22 | return ret, 
fmt.Errorf("unknown @apply name: %s", name) 23 | } 24 | ret = append(ret, css...) 25 | } 26 | return ret, nil 27 | } 28 | 29 | func newApplier(dist Dist) (*applier, error) { 30 | 31 | var a applier 32 | 33 | rc, err := dist.OpenDist("utilities") 34 | if err != nil { 35 | return nil, err 36 | } 37 | defer rc.Close() 38 | 39 | a.m = make(map[string]string, 128) 40 | 41 | depth := 0 42 | entryName := "" 43 | var entryData bytes.Buffer 44 | _ = entryData 45 | 46 | inp := parse.NewInput(rc) 47 | p := css.NewParser(inp, false) 48 | parseLoop: 49 | for { 50 | 51 | gt, tt, data := p.Next() 52 | _ = tt 53 | _ = data 54 | 55 | switch gt { 56 | 57 | case css.ErrorGrammar: 58 | err := p.Err() 59 | if errors.Is(err, io.EOF) { 60 | break parseLoop 61 | } 62 | return nil, fmt.Errorf("applier.setupOnce: %w", err) 63 | 64 | case css.AtRuleGrammar: 65 | // ignored 66 | 67 | case css.BeginAtRuleGrammar: 68 | depth++ 69 | 70 | case css.EndAtRuleGrammar: 71 | depth-- 72 | 73 | case css.BeginRulesetGrammar: 74 | if depth != 0 { // ignore everything not at top level 75 | continue parseLoop 76 | } 77 | 78 | // only handle rules exactly maching pattern [Delim(".") Ident("someclass")] 79 | ts := trimTokenWs(p.Values()) 80 | if !(len(ts) == 2 && 81 | (ts[0].TokenType == css.DelimToken && bytes.Equal(ts[0].Data, []byte(`.`))) && 82 | (ts[1].TokenType == css.IdentToken && len(ts[1].Data) > 0)) { 83 | continue parseLoop 84 | } 85 | name := string(ts[1].Data) 86 | if entryName != "" { // this should not be possible, just make sure 87 | panic(fmt.Errorf("about to start new entry %q but already in entry %q", name, entryName)) 88 | } 89 | entryName = name 90 | 91 | // Delim('.') Ident('space-y-0') 92 | // log.Printf("applier BeginRulesetGrammar: data=%q, p.Values=%v", data, p.Values()) 93 | // log.Printf("BeginRulesetGrammar: classes=%v (values=%v)", toklistClasses(p.Values()), p.Values()) 94 | 95 | // inEntryName 96 | 97 | // err := write(w, data, p.Values(), '{') 98 | // if err != nil { 99 | // return err 100 | // } 101 | 102 | case css.EndRulesetGrammar: 103 | 104 | // we only need to look at entries that are closing 105 | if entryName == "" { 106 | continue parseLoop 107 | } 108 | 109 | b := entryData.Bytes() 110 | b = bytes.TrimSpace(b) 111 | a.m[entryName] = string(b) 112 | 113 | entryData.Reset() 114 | entryName = "" 115 | 116 | case css.DeclarationGrammar: 117 | if entryName == "" { // ignore content not inside an appropriate entry 118 | continue parseLoop 119 | } 120 | 121 | err := write(&entryData, data, ':', p.Values(), ';') 122 | if err != nil { 123 | return nil, err 124 | } 125 | 126 | case css.CustomPropertyGrammar: 127 | if entryName == "" { // ignore content not inside an appropriate entry 128 | continue parseLoop 129 | } 130 | 131 | // panic(fmt.Errorf(`CustomPropertyGrammar unsupported: data=%q, p.Values=%v`, data, p.Values())) 132 | err := write(&entryData, data, ':', p.Values(), ';') 133 | if err != nil { 134 | return nil, err 135 | } 136 | 137 | case css.QualifiedRuleGrammar: 138 | // FIXME: still not sure about when this is used... 139 | // panic(fmt.Errorf("applier: QualifiedRuleGrammar not yet implemented")) 140 | continue // should be okay to skip for now... 
141 | 142 | case css.TokenGrammar: 143 | continue // just skip 144 | 145 | case css.CommentGrammar: 146 | continue // strip comments 147 | 148 | default: // verify we aren't missing a type 149 | panic(fmt.Errorf("unexpected grammar type %v at offset %v", gt, inp.Offset())) 150 | 151 | } 152 | 153 | } 154 | 155 | return &a, nil 156 | } 157 | -------------------------------------------------------------------------------- /cmd/gotailwindcss/main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "io" 6 | "log" 7 | "os" 8 | "path/filepath" 9 | "sort" 10 | "strings" 11 | 12 | "github.com/gotailwindcss/tailwind" 13 | "github.com/gotailwindcss/tailwind/twembed" 14 | "github.com/gotailwindcss/tailwind/twpurge" 15 | "gopkg.in/alecthomas/kingpin.v2" 16 | ) 17 | 18 | // NOTES: 19 | // command line tool should provide: 20 | // - processing similar to npx tailwindcss build 21 | // - output as Go source code - skip for now, the workflow of CSS being served instead of embedded in a client seems to be a better way to go, let's try that first 22 | // - plus internal gzipped format? 23 | // - probably output file auto-detection can work, 24 | // if the output file is .go then it emits go src, 25 | // or if .css emit CSS 26 | // - follow command line format of npx tailwindcss build unless there is a reason not to (i.e. -o for output) 27 | // - should we provide some sort of minimal static server? 28 | // not super useful but easy to do and maybe people would find use for demos 29 | // - for purging we just have code gen which scans a dir and makes the purge keys in a file, includes 30 | // go generate comment so it gets run again 31 | // - option to print all allow/disallow possibilities? 
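//
// Illustrative invocations for reference (file and directory names are placeholders):
//   gotailwindcss build -o out.css in.css
//   gotailwindcss build --purgescan ./views -o out.css in.css
//   gotailwindcss purgescan -o purgekeys.go ./views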
32 | 33 | var ( 34 | app = kingpin.New("gotailwindcss", "Go+TailwindCSS tools") 35 | v = app.Flag("verbose", "Print verbose output").Short('v').Bool() 36 | 37 | build = app.Command("build", "Build CSS output") 38 | buildOutput = build.Flag("output", "Output file name, use hyphen for stdout").Short('o').Default("-").String() 39 | buildPurgescan = build.Flag("purgescan", "Scan file/folder recursively for purge keys").String() 40 | buildPurgeext = build.Flag("purgeext", "Comma separated list of file extensions (no periods) to scan for purge keys").Default("html,vue,jsx,vugu").String() 41 | buildInput = build.Arg("input", "Input file name(s)").Strings() 42 | 43 | purgescan = app.Command("purgescan", "Perform a purge scan of one or more files/dirs and output the purge keys found") 44 | purgescanExt = build.Flag("ext", "Comma separated list of file extensions (no periods) to scan for purge keys").Default("html,vue,jsx,vugu").String() 45 | purgescanOutput = purgescan.Flag("output", "Output file name - extension can be .go, .txt or .json and determines format").Short('o').Default("-").String() 46 | purgescanNogen = purgescan.Flag("nogen", "For .go output, do not emit a //go:generate line").Bool() 47 | purgescanInput = purgescan.Arg("input", "Input files/dirs").Strings() 48 | 49 | // serve 50 | 51 | ) 52 | 53 | var dist = twembed.New() 54 | 55 | func main() { 56 | 57 | switch kingpin.MustParse(app.Parse(os.Args[1:])) { 58 | 59 | case build.FullCommand(): 60 | 61 | runBuild() 62 | 63 | case purgescan.FullCommand(): 64 | 65 | runPurgescan() 66 | 67 | default: 68 | fmt.Fprintf(os.Stderr, "No command specified\n") 69 | os.Exit(1) 70 | } 71 | 72 | } 73 | 74 | func runBuild() { 75 | 76 | if *v { 77 | log.Printf("Starting build...") 78 | } 79 | 80 | w := mkout(*buildOutput) 81 | defer w.Close() 82 | // var w io.Writer 83 | // outpath := *buildOutput 84 | // if outpath == "" || outpath == "-" { 85 | // if *v { 86 | // log.Printf("Using stdout") 87 | // } 88 | // w = os.Stdout 89 | // } else { 90 | // if *v { 91 | // log.Printf("Creating output file: %s", outpath) 92 | // } 93 | // f, err := os.Create(outpath) 94 | // if err != nil { 95 | // log.Fatal(err) 96 | // } 97 | // defer f.Close() 98 | // w = f 99 | // } 100 | 101 | conv := tailwind.New(w, dist) 102 | 103 | if *buildPurgescan != "" { 104 | if *v { 105 | log.Printf("Performing purge scan on: %s", *buildPurgescan) 106 | } 107 | 108 | extParts := strings.Split(*buildPurgeext, ",") 109 | extMap := make(map[string]bool, len(extParts)) 110 | for _, p := range extParts { 111 | extMap["."+strings.TrimPrefix(p, ".")] = true 112 | } 113 | 114 | pscanner, err := twpurge.NewScannerFromDist(dist) 115 | if err != nil { 116 | log.Fatal(err) 117 | } 118 | 119 | err = filepath.Walk(*buildPurgescan, pscanner.WalkFunc(func(fn string) bool { 120 | return extMap[filepath.Ext(fn)] 121 | })) 122 | if err != nil { 123 | log.Fatal(err) 124 | } 125 | 126 | conv.SetPurgeChecker(pscanner.Map()) 127 | } 128 | 129 | for _, inPath := range *buildInput { 130 | if *v { 131 | log.Printf("Adding file: %s", inPath) 132 | } 133 | fin, err := os.Open(inPath) 134 | if err != nil { 135 | log.Fatal(err) 136 | } 137 | defer fin.Close() 138 | conv.AddReader(inPath, fin, false) 139 | } 140 | 141 | if *v { 142 | log.Printf("Performing conversion...") 143 | } 144 | 145 | err := conv.Run() 146 | if err != nil { 147 | log.Fatal(err) 148 | } 149 | 150 | } 151 | 152 | func runPurgescan() { 153 | 154 | if *v { 155 | log.Printf("Starting purge scan...") 156 | } 157 | 158 | outExt := 
filepath.Ext(*purgescanOutput) 159 | 160 | w := mkout(*purgescanOutput) 161 | defer w.Close() 162 | 163 | extParts := strings.Split(*purgescanExt, ",") 164 | extMap := make(map[string]bool, len(extParts)) 165 | for _, p := range extParts { 166 | extMap["."+strings.TrimPrefix(p, ".")] = true 167 | } 168 | 169 | pscanner, err := twpurge.NewScannerFromDist(dist) 170 | if err != nil { 171 | log.Fatal(err) 172 | } 173 | 174 | for _, in := range *purgescanInput { 175 | err = filepath.Walk(in, pscanner.WalkFunc(func(fn string) bool { 176 | return extMap[filepath.Ext(fn)] 177 | })) 178 | if err != nil { 179 | log.Fatal(err) 180 | } 181 | } 182 | 183 | m := pscanner.Map() 184 | mk := mkeys(m) 185 | 186 | pkgName := "test123" 187 | 188 | switch outExt { 189 | case ".go": 190 | 191 | fmt.Fprintf(w, `package %s`+"\n", pkgName) 192 | fmt.Fprintf(w, "\n") 193 | fmt.Fprintf(w, "// WARNING: DO NOT EDIT, THIS IS A GENERATED FILE\n") 194 | fmt.Fprintf(w, "\n") 195 | if !*purgescanNogen { 196 | fmt.Fprintf(w, "//go:generate gotailwindcss -o %s %s\n", *purgescanOutput, strings.Join(*purgescanInput, " ")) 197 | fmt.Fprintf(w, "\n") 198 | } 199 | fmt.Fprintf(w, "// PurgeKeyMap is a list of keys which should not be purged from CSS output.\n") 200 | fmt.Fprintf(w, "var PurgeKeyMap = %#v\n", (map[string]struct{})(m)) 201 | fmt.Fprintf(w, "\n") 202 | 203 | case ".txt": 204 | for _, k := range mk { 205 | fmt.Fprintln(w, k) 206 | } 207 | 208 | case ".json": 209 | fmt.Fprintf(w, "[\n") 210 | for i := 0; i < len(mk); i++ { 211 | k := mk[i] 212 | if i < len(mk)-1 { 213 | fmt.Fprintf(w, `"%s",`, k) 214 | } else { 215 | fmt.Fprintf(w, `"%s"`, k) 216 | } 217 | } 218 | fmt.Fprintf(w, "]\n") 219 | 220 | } 221 | 222 | } 223 | 224 | func mkout(outpath string) io.WriteCloser { 225 | 226 | var ret io.WriteCloser 227 | if outpath == "" || outpath == "-" { 228 | if *v { 229 | log.Printf("Using stdout") 230 | } 231 | ret = nopWriteCloser{Writer: os.Stdout} 232 | } else { 233 | if *v { 234 | log.Printf("Creating output file: %s", outpath) 235 | } 236 | f, err := os.Create(outpath) 237 | if err != nil { 238 | log.Fatal(err) 239 | } 240 | ret = f 241 | } 242 | 243 | return ret 244 | } 245 | 246 | func mkeys(m map[string]struct{}) (ret []string) { 247 | for k := range m { 248 | ret = append(ret, k) 249 | } 250 | sort.Strings(ret) 251 | return 252 | } 253 | 254 | type nopWriteCloser struct { 255 | io.Writer 256 | } 257 | 258 | func (w nopWriteCloser) Close() error { 259 | return nil 260 | } 261 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # A Tailwind CSS implementation in Go 2 | 3 | This project provides an implementation of Tailwind CSS functionality in pure Go. It includes the ability to embed an HTTP handler which processes Tailwind directives on the fly, facilities to purge unneeded styles, and a command line tool. 4 | 5 | ## Documentation 6 | 7 | Godoc can be found in the usual place: https://pkg.go.dev/github.com/gotailwindcss/tailwind?tab=doc 8 | 9 | ## Typical Usage 10 | 11 | For development, the typical use is to integrate the handler found in `twhandler` so Tailwind CSS processing is done as your CSS file is served. Example: 12 | 13 | **main.go** 14 | ```go 15 | // ... 
16 | import "github.com/gotailwindcss/tailwind/twembed" 17 | import "github.com/gotailwindcss/tailwind/twhandler" 18 | 19 | func main() { 20 | mux := http.NewServeMux() 21 | mux.Handle("/", http.FileServer(http.Dir("static"))) 22 | mux.Handle("/css/", twhandler.New(http.Dir("css"), "/css", twembed.New())) 23 | 24 | s := &http.Server{Addr: ":8182", Handler: mux} 25 | log.Fatal(s.ListenAndServe()) 26 | } 27 | ``` 28 | 29 | **static/index.html** 30 | ```html 31 | 32 | 33 | Test Button 34 | 35 | ``` 36 | 37 | **css/main.css** 38 | ```css 39 | @tailwind base; 40 | @tailwind components; 41 | .button { @apply inline-block m-2 p-2 rounded-md bg-green-400; } 42 | @tailwind utilities; 43 | ``` 44 | 45 | ## In Production 46 | 47 | In production we recommend you use a simple static file server whever possible, e.g. `http.FileServer(distDir)`. 48 | 49 | See *Procesing CSS Files* below for more info on how to create output from the command line, or *Library Usage* for how to perform Tailwind CSS conversion from Go. 50 | 51 | ## Supported Tailwind CSS Directives 52 | 53 | The following Tailwind directives are supported: 54 | 55 | - `@tailwind` 56 | - `@apply` 57 | 58 | These are intended to work with the same behavior as the [Tailwind](https://tailwindcss.com/) project. If differences are encountered/necessary this section will be updated as applicable. 59 | 60 | ## Command Line 61 | 62 | To install the gotailwindcss command, do: 63 | 64 | ``` 65 | go get github.com/gotailwindcss/tailwind/cmd/gotailwindcss 66 | ``` 67 | 68 | Once installed, for help: 69 | 70 | ``` 71 | gotailwindcss --help 72 | ``` 73 | 74 | ### Processing CSS Files 75 | 76 | Use the `build` subcommand to perform processing on one or more CSS files. 77 | 78 | ``` 79 | gotailwindcss build -o out.css in1.css in2.css 80 | ``` 81 | 82 | 86 | 87 | ## Library Usage 88 | 89 | This project is organized into the following packages: 90 | 91 | - **[tailwind](https://pkg.go.dev/github.com/gotailwindcss/tailwind)** - Handles CSS conversion and Tailwind processing logic 92 | - **[twhandler](https://pkg.go.dev/github.com/gotailwindcss/tailwind/twhandler)** - HTTP Handler for processing CSS files 93 | - **[twpurge](https://pkg.go.dev/github.com/gotailwindcss/tailwind/twpurge)** - Handles purging unused style rules 94 | - **[twembed](https://pkg.go.dev/github.com/gotailwindcss/tailwind/twembed)** - Contains an embedded copy of Tailwind CSS styles 95 | - **[twfiles](https://pkg.go.dev/github.com/gotailwindcss/tailwind/twfiles)** - Facilitates using a directory as source for Tailwind CSS styles 96 | 97 | ### Embedded TailwindCSS 98 | 99 | To process "convert" files, a "Dist" (distribution) of Tailwind CSS is required. The `twembed` package provides this. Importing it embeds this data into your application, which is usually file for server applications. 100 | 101 | Calling `twembed.New()` will return a new `Dist` corresponding to this embedded CSS. It is intentionally inexpensive to call and there is no need to retain an instance as opposed ot calling `twembed.New()` again. 102 | 103 | ### Performing Conversion 104 | 105 | A `tailwind.Convert` is used to perform processing of directives like `@tailwind` and `@apply`. 
Example: 106 | 107 | ``` 108 | var w bytes.Buffer 109 | conv := tailwind.New(&w, twembed.New()) 110 | conv.AddReader("base.css", strings.NewReader(`@tailwind base;`), false) 111 | err := conv.Run() 112 | // w now has the processed CSS output 113 | ``` 114 | 115 | ## HTTP Handler 116 | 117 | The `twhandler` package has an HTTP handler intended to be useful during development by performing CSS processing on the fly as the file is requested. Creating a handler is simple: 118 | 119 | ``` 120 | h := twhandler.New( 121 | http.Dir("/path/to/css"), // directory from which to read input CSS files 122 | "/css", // HTTP path prefix to expect 123 | twembed.New(), // Tailwind distribution 124 | ) 125 | ``` 126 | 127 | From there it is used like any other `http.Handler`. 128 | 129 | ### Compression 130 | 131 | The [SetWriteCloserFunc](https://pkg.go.dev/github.com/gotailwindcss/tailwind/twhandler?tab=doc#Handler.SetWriteCloserFunc) can be used in conjunction with [brotli.HTTPCompressor](https://pkg.go.dev/github.com/andybalholm/brotli?tab=doc#HTTPCompressor) in order to enable brotli and gzip compression. Example: 132 | 133 | ``` 134 | h := twhandler.New(http.Dir("/path/to/css"), "/css", twembed.New()) 135 | h.SetWriteCloserFunc(brotli.HTTPCompressor) 136 | // ... 137 | ``` 138 | 139 | ### Caching 140 | 141 | By default, caching is enabled on handlers created. Meaning the same output will be served without re-processing as long as the underlying input CSS file's timestamp is not modified. 142 | 143 | And by default, responses do not have a browser caching max-age, so each load results in a new request back to the server to check for a modified file. This can be adjusted with [SetMaxAge](https://pkg.go.dev/github.com/gotailwindcss/tailwind/twhandler?tab=doc#Handler.SetMaxAge) if needed. 144 | 145 | ## Purging 146 | 147 | TODO: write doc and example 148 | 149 | 208 | 209 | ## See Also 210 | 211 | This project was created as part of research while developing [Vugu](https://vugu.org/) ([doc](https://godoc.org/github.com/vugu/vugu)). 212 | 213 | ## Roadmap 214 | 215 | - [x] Command line build tool 216 | - [x] Pure Go library, no npm/node dependency 217 | - [x] HTTP Handler 218 | - [x] Purge functionality to minimize output file size 219 | - [ ] Test server for prototyping 220 | -------------------------------------------------------------------------------- /converter_test.go: -------------------------------------------------------------------------------- 1 | package tailwind_test 2 | 3 | import ( 4 | "bytes" 5 | "fmt" 6 | "io" 7 | "regexp" 8 | "sort" 9 | "strings" 10 | "testing" 11 | 12 | "github.com/gotailwindcss/tailwind" 13 | "github.com/gotailwindcss/tailwind/twembed" 14 | "github.com/gotailwindcss/tailwind/twpurge" 15 | "github.com/tdewolff/minify/v2" 16 | "github.com/tdewolff/minify/v2/css" 17 | ) 18 | 19 | // type mapFS map[string]string 20 | 21 | // // Open implements an http.FileSystem as a map of names and contents. 
22 | // func (m mapFS) Open(name string) (http.File, error) { 23 | // b, ok := m[name] 24 | // rd := bytes.NewReader() 25 | // } 26 | 27 | // type mapFSFile struct { 28 | // bytes.Reader 29 | // } 30 | 31 | // func (mf *mapFSFile) Readdir(count int) ([]os.FileInfo, error) { 32 | // panic("not implemented") 33 | // } 34 | // func (mf *mapFSFile) Stat() (os.FileInfo, error) { 35 | // panic("not implemented") 36 | // } 37 | 38 | func ExampleConverter_SetPostProcFunc() { 39 | 40 | var buf bytes.Buffer 41 | conv := tailwind.New(&buf, twembed.New()) 42 | conv.SetPostProcFunc(func(out io.Writer, in io.Reader) error { 43 | m := minify.New() 44 | m.AddFunc("text/css", css.Minify) 45 | return m.Minify("text/css", out, in) 46 | }) 47 | conv.AddReader("input.css", strings.NewReader(`.test1 { @apply font-bold; }`), false) 48 | err := conv.Run() 49 | if err != nil { 50 | panic(err) 51 | } 52 | fmt.Printf("%s", buf.String()) 53 | 54 | // notice the missing trailing semicolon 55 | 56 | // Output: .test1{font-weight:700} 57 | } 58 | 59 | func TestConverter(t *testing.T) { 60 | 61 | type tcase struct { 62 | name string // test case name 63 | in map[string]string // input files (processed alphabetical by filename) 64 | purgeChecker func() tailwind.PurgeChecker // optional purgeChecker to set on converter 65 | out []*regexp.Regexp // output must match these regexps 66 | outnot []*regexp.Regexp // output must not match these regexps 67 | outerr *regexp.Regexp // must result in an error with text that matches this (if non-nil) 68 | } 69 | 70 | tcaseList := []tcase{ 71 | { 72 | name: "simple1", 73 | in: map[string]string{ 74 | "001.css": `.test1 { display: block; }`, 75 | }, 76 | out: []*regexp.Regexp{ 77 | regexp.MustCompile(`^` + regexp.QuoteMeta(`.test1{display:block;}`) + `$`), 78 | }, 79 | }, 80 | { 81 | name: "two-files1", 82 | in: map[string]string{ 83 | "001.css": `.test1 { display: block; }`, 84 | "002.css": `.test2 { display: inline; }`, 85 | }, 86 | out: []*regexp.Regexp{ 87 | regexp.MustCompile(`^` + regexp.QuoteMeta(`.test1{display:block;}.test2{display:inline;}`) + `$`), 88 | }, 89 | }, 90 | { 91 | name: "two-files2", // verify the sequence is correct 92 | in: map[string]string{ 93 | "012.css": `.test2 { display: inline; }`, 94 | "021.css": `.test1 { display: block; }`, 95 | }, 96 | out: []*regexp.Regexp{ 97 | regexp.MustCompile(`^` + regexp.QuoteMeta(`.test2{display:inline;}.test1{display:block;}`) + `$`), 98 | }, 99 | }, 100 | { 101 | name: "bad1", 102 | in: map[string]string{ 103 | "001.css": `.test1 { display: block; ! 
}`, 104 | }, 105 | outerr: regexp.MustCompile(`expected colon in declaration`), 106 | }, 107 | { 108 | name: "atrule1", 109 | in: map[string]string{ 110 | "001.css": `@charset "utf-8"; .test1 { display: block; }`, 111 | }, 112 | out: []*regexp.Regexp{ 113 | regexp.MustCompile(`^` + regexp.QuoteMeta(`@charset "utf-8";.test1{display:block;}`) + `$`), 114 | }, 115 | }, 116 | { 117 | name: "tailwind-base1", 118 | in: map[string]string{ 119 | "001.css": `@tailwind base;`, 120 | }, 121 | out: []*regexp.Regexp{ 122 | regexp.MustCompile(regexp.QuoteMeta(`html{line-height:1.15;`)), 123 | regexp.MustCompile(regexp.QuoteMeta(`b,strong{`)), // ensure QualifiedRuleGrammar is working 124 | }, 125 | }, 126 | { 127 | name: "tailwind-components1", 128 | in: map[string]string{ 129 | "001.css": `@tailwind components;`, 130 | }, 131 | out: []*regexp.Regexp{ 132 | regexp.MustCompile(regexp.QuoteMeta(`.container{width:100%`)), 133 | }, 134 | }, 135 | { 136 | name: "tailwind-utilities1", 137 | in: map[string]string{ 138 | "001.css": `@tailwind utilities;`, 139 | }, 140 | out: []*regexp.Regexp{ 141 | regexp.MustCompile(regexp.QuoteMeta(`var(--bg-opacity)`)), 142 | }, 143 | }, 144 | { 145 | name: "tailwind-utilities2", 146 | in: map[string]string{ 147 | "001.css": `@tailwind utilities;`, 148 | }, 149 | out: []*regexp.Regexp{ 150 | regexp.MustCompile(regexp.QuoteMeta(`--bg-opacity: 1`)), 151 | }, 152 | }, 153 | { 154 | name: "tailwind-unknown1", 155 | in: map[string]string{ 156 | "001.css": `@tailwind otherthing;`, 157 | }, 158 | outerr: regexp.MustCompile(regexp.QuoteMeta(`@tailwind followed by unknown identifier: otherthing`)), 159 | }, 160 | { 161 | name: "apply1", 162 | in: map[string]string{ 163 | "001.css": `.test { @apply px-1; }`, 164 | }, 165 | out: []*regexp.Regexp{ 166 | regexp.MustCompile(regexp.QuoteMeta(`.test{padding-left:0.25rem;padding-right:0.25rem;}`)), 167 | }, 168 | }, 169 | { 170 | name: "apply2", 171 | in: map[string]string{ 172 | "001.css": `.test { @apply px-1 py-2; }`, 173 | }, 174 | out: []*regexp.Regexp{ 175 | regexp.MustCompile(regexp.QuoteMeta(`.test{padding-left:0.25rem;padding-right:0.25rem;padding-top:0.5rem;padding-bottom:0.5rem;}`)), 176 | }, 177 | }, 178 | { 179 | name: "purge1", 180 | in: map[string]string{ 181 | "001.css": `@tailwind components; @tailwind utilities; .test { @apply px-1 py-2; }`, 182 | }, 183 | purgeChecker: func() tailwind.PurgeChecker { 184 | s, _ := twpurge.NewScannerFromDist(twembed.New()) 185 | _ = s.Scan(strings.NewReader(``)) 186 | return s.Map() 187 | }, 188 | out: []*regexp.Regexp{ 189 | // should not have been purged 190 | regexp.MustCompile(regexp.QuoteMeta(`.p-1{`)), 191 | regexp.MustCompile(regexp.QuoteMeta(`.test{padding-left:0.25rem`)), 192 | regexp.MustCompile(regexp.QuoteMeta(`.md\:bg-purple-500`) + `\b`), 193 | // components stuff is always present 194 | regexp.MustCompile(regexp.QuoteMeta(`.container{`)), 195 | }, 196 | outnot: []*regexp.Regexp{ 197 | // others stuff should not appear 198 | regexp.MustCompile(regexp.QuoteMeta(`.bg-purple-600`)), 199 | }, 200 | }, 201 | } 202 | 203 | for _, tc := range tcaseList { 204 | tc := tc 205 | t.Run(tc.name, func(t *testing.T) { 206 | var buf bytes.Buffer 207 | c := tailwind.New(&buf, twembed.New()) 208 | if tc.purgeChecker != nil { 209 | p := tc.purgeChecker() 210 | if p != nil { 211 | c.SetPurgeChecker(p) 212 | } 213 | } 214 | klist := make([]string, len(tc.in)) 215 | for k := range tc.in { 216 | klist = append(klist, k) 217 | } 218 | sort.Strings(klist) 219 | for _, k := range klist { 220 | 
c.AddReader(k, strings.NewReader(tc.in[k]), !strings.HasSuffix(k, ".css")) 221 | } 222 | err := c.Run() 223 | if err != nil { // got error 224 | if tc.outerr != nil { // expected error 225 | if !tc.outerr.MatchString(err.Error()) { 226 | t.Errorf("error failed to match regexp: %s - %v", tc.outerr.String(), err.Error()) 227 | } 228 | } else { 229 | t.Fatal(err) // error not expected 230 | } 231 | } 232 | bufstr := buf.String() 233 | for _, outre := range tc.out { 234 | if !outre.MatchString(bufstr) { 235 | t.Errorf("output failed to match regexp: %s", outre.String()) 236 | } 237 | } 238 | for _, outnotre := range tc.outnot { 239 | if outnotre.MatchString(bufstr) { 240 | t.Errorf("output unexpectedly matched regexp: %s", outnotre.String()) 241 | } 242 | } 243 | if t.Failed() { 244 | t.Logf("OUTPUT: (err=%v)\n%s", err, bufstr) 245 | } 246 | }) 247 | } 248 | 249 | } 250 | -------------------------------------------------------------------------------- /twhandler/twhandler.go: -------------------------------------------------------------------------------- 1 | // Package twhandler provides an HTTP handler that performs processing on CSS files and serves them. 2 | package twhandler 3 | 4 | import ( 5 | "bytes" 6 | "fmt" 7 | "io" 8 | "net/http" 9 | "os" 10 | "path" 11 | "strings" 12 | "sync" 13 | 14 | "github.com/cespare/xxhash" 15 | "github.com/gotailwindcss/tailwind" 16 | ) 17 | 18 | // New returns a Handler. TODO explain args 19 | // The internal cache is enabled on the Handler returned. 20 | func New(fs http.FileSystem, pathPrefix string, dist tailwind.Dist) *Handler { 21 | return NewFromFunc(fs, pathPrefix, func(w io.Writer) *tailwind.Converter { 22 | return tailwind.New(w, dist) 23 | }) 24 | } 25 | 26 | // allows things like purger to be set 27 | func NewFromFunc(fs http.FileSystem, pathPrefix string, converterFunc func(w io.Writer) *tailwind.Converter) *Handler { 28 | return &Handler{ 29 | converterFunc: converterFunc, 30 | fs: fs, 31 | pathPrefix: pathPrefix, 32 | cache: make(map[string]cacheValue), 33 | headerFunc: defaultHeaderFunc, 34 | } 35 | } 36 | 37 | func defaultHeaderFunc(w http.ResponseWriter, r *http.Request) { 38 | cc := w.Header().Get("Cache-Control") 39 | if cc == "" { 40 | // Force browser to check each time, but 304 still works. 41 | w.Header().Set("Cache-Control", "no-cache") 42 | } 43 | } 44 | 45 | // // TODO: probably a shorthand that sets up the purger, etc. would make sense 46 | // // enables purging for all default file types on dir 47 | // func NewDev(dir, pathPrefix string, dist tailwind.Dist) { 48 | // } 49 | 50 | // Handler serves an HTTP response for a CSS file that is process using tailwind. 51 | type Handler struct { 52 | // dist tailwind.Dist 53 | converterFunc func(w io.Writer) *tailwind.Converter 54 | fs http.FileSystem 55 | notFound http.Handler 56 | pathPrefix string 57 | writeCloserFunc func(w http.ResponseWriter, r *http.Request) io.WriteCloser 58 | cache map[string]cacheValue 59 | rwmu sync.RWMutex 60 | headerFunc func(w http.ResponseWriter, r *http.Request) 61 | } 62 | 63 | // SetMaxAge calls SetHeaderFunc with a function that sets the Cache-Control header (if not already set) 64 | // with a corresponding maximum timeout specified in seconds. If cache-breaking 65 | // URLs are in use, this is a good option to set in production. 
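// A brief sketch (86400 seconds is one day):
//
//	h.SetMaxAge(86400)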
66 | func (h *Handler) SetMaxAge(n int) { 67 | h.SetHeaderFunc(func(w http.ResponseWriter, r *http.Request) { 68 | cc := w.Header().Get("Cache-Control") 69 | if cc == "" { 70 | w.Header().Set("Cache-Control", fmt.Sprintf("public, max-age=%d", n)) 71 | } 72 | }) 73 | } 74 | 75 | // SetHeaderFunc assigns a function that gets called immediately before a valid response is served. 76 | // It was added so applications could customize cache headers. By default, the Cache-Control 77 | // header will be set to "no-cache" if it was not set earlier (causing the browser to check 78 | // each time for an updated resource - which may result in a full response or a 304). 79 | func (h *Handler) SetHeaderFunc(f func(w http.ResponseWriter, r *http.Request)) { 80 | h.headerFunc = f 81 | } 82 | 83 | // SetNotFoundHandler assigns the handler that gets called when something is not found. 84 | func (h *Handler) SetNotFoundHandler(nfh http.Handler) { 85 | h.notFound = nfh 86 | } 87 | 88 | // SetCache with false will disable the cache. 89 | func (h *Handler) SetCache(enabled bool) { 90 | if enabled { 91 | h.cache = make(map[string]cacheValue) 92 | } else { 93 | h.cache = nil 94 | } 95 | } 96 | 97 | // TODO: be sure to have clear example showing brotli 98 | func (h *Handler) SetWriteCloserFunc(f func(w http.ResponseWriter, r *http.Request) io.WriteCloser) { 99 | h.writeCloserFunc = f 100 | } 101 | 102 | // not sure if we need something like this... 103 | // // SetAllowFileFunc 104 | // func (h *Handler) SetAllowFileFunc(f func(p string) bool) { 105 | // } 106 | 107 | // ServeHTTP implements http.Handler. 108 | func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { 109 | 110 | p := path.Clean(r.URL.Path) 111 | p = path.Clean(strings.TrimPrefix(p, h.pathPrefix)) 112 | 113 | f, err := h.fs.Open(p) 114 | if err != nil { 115 | code := 500 116 | if os.IsPermission(err) { 117 | code = 403 118 | } else if os.IsNotExist(err) { 119 | if h.notFound != nil { 120 | h.notFound.ServeHTTP(w, r) 121 | return 122 | } 123 | code = 404 124 | } 125 | http.Error(w, fmt.Sprintf("error opening %s: %v", r.URL.Path, err), code) 126 | return 127 | } 128 | defer f.Close() 129 | 130 | w.Header().Set("Content-Type", "text/css") 131 | 132 | st, err := f.Stat() 133 | if err != nil { 134 | http.Error(w, fmt.Sprintf("stat failed for %s: %v", r.URL.Path, err), 500) 135 | return 136 | } 137 | 138 | if h.headerFunc != nil { 139 | h.headerFunc(w, r) 140 | } 141 | 142 | if h.cache != nil { // if cache enabled 143 | h.rwmu.RLock() 144 | cv, ok := h.cache[p] 145 | h.rwmu.RUnlock() 146 | if ok { 147 | 148 | // if h.check304(w, r, cv) { 149 | // return 150 | // } 151 | 152 | wc := h.makeW(w, r) 153 | defer wc.Close() 154 | 155 | // handle 304s properly with ServeContent 156 | http.ServeContent( 157 | &wwrap{Writer: wc, ResponseWriter: w}, 158 | r, 159 | p, 160 | st.ModTime(), 161 | strings.NewReader(cv.content), 162 | ) 163 | return 164 | } 165 | 166 | cv.content, cv.hash, err = h.process(w, r, f) 167 | if err != nil { 168 | http.Error(w, fmt.Sprintf("processing failed on %s: %v", r.URL.Path, err), 500) 169 | return 170 | } 171 | 172 | h.rwmu.Lock() 173 | h.cache[p] = cv 174 | h.rwmu.Unlock() 175 | return 176 | } 177 | 178 | _, _, err = h.process(w, r, f) 179 | if err != nil { 180 | http.Error(w, fmt.Sprintf("processing failed on %s: %v", r.URL.Path, err), 500) 181 | return 182 | } 183 | 184 | // // ck := cacheKey{ 185 | // // tsnano: st.ModTime().UnixNano(), 186 | // // size: st.Size(), 187 | // // path: p, 188 | // // } 189 | 190 | // conv 
:= tailwind.New(w, h.dist) 191 | // conv.AddReader(p, f, false) 192 | // err = conv.Run() 193 | // if err != nil { 194 | // http.Error(w, err.Error(), 500) 195 | // return 196 | // } 197 | } 198 | 199 | func (h *Handler) makeW(w http.ResponseWriter, r *http.Request) io.WriteCloser { 200 | var wc io.WriteCloser 201 | if h.writeCloserFunc != nil { 202 | wc = h.writeCloserFunc(w, r) 203 | } else { 204 | wc = &nopWriteCloser{Writer: w} 205 | } 206 | return wc 207 | } 208 | 209 | // func (h *Handler) send(w http.ResponseWriter, r *http.Request, r io.Reader) { 210 | // var wc io.WriteCloser 211 | // if h.writeCloserFunc != nil { 212 | // wc = h.writeCloserFunc(w, r) 213 | // } else { 214 | // wc = &nopWriteCloser{Writer: w} 215 | // } 216 | // defer wc.Close() 217 | // } 218 | 219 | // // see if we can respond with a 304, returns true if we did 220 | // func (h *Handler) check304(w http.ResponseWriter, r *http.Request, cv cacheValue) bool { 221 | // // if t, err := time.Parse(TimeFormat, r.Header.Get("If-Modified-Since")); err == nil && modtime.Before(t.Add(1*time.Second)) { 222 | // // TODO: use etag 223 | // return false 224 | // } 225 | 226 | func (h *Handler) process(w http.ResponseWriter, r *http.Request, rd io.Reader) (content string, hash uint64, reterr error) { 227 | 228 | wc := h.makeW(w, r) 229 | defer wc.Close() 230 | 231 | var outbuf bytes.Buffer 232 | // outbuf.Grow(4096) 233 | 234 | d := xxhash.New() 235 | 236 | // write to response (optionally via compressor from makeW), cache buffer, and hash calc'er at the same time 237 | mw := io.MultiWriter(wc, &outbuf, d) 238 | 239 | conv := h.converterFunc(mw) 240 | // conv := tailwind.New(mw, h.dist) 241 | conv.AddReader(r.URL.Path, rd, false) 242 | err := conv.Run() 243 | if err != nil { 244 | reterr = err 245 | return 246 | } 247 | 248 | return outbuf.String(), d.Sum64(), nil 249 | } 250 | 251 | type nopWriteCloser struct { 252 | io.Writer 253 | } 254 | 255 | func (n *nopWriteCloser) Close() error { 256 | return nil 257 | } 258 | 259 | // type cacheKey struct { 260 | // path string 261 | // size int64 262 | // tsnano int64 263 | // } 264 | 265 | type cacheValue struct { 266 | size int64 // in bytes 267 | tsnano int64 // file mod time 268 | content string // output 269 | hash uint64 // for e-tag 270 | } 271 | 272 | // wwrap wraps a ResponseWriter allowing us to override where the Write calls go 273 | type wwrap struct { 274 | io.Writer 275 | http.ResponseWriter 276 | } 277 | 278 | func (w *wwrap) Write(b []byte) (int, error) { 279 | return w.Writer.Write(b) 280 | } 281 | -------------------------------------------------------------------------------- /twpurge/twpurge.go: -------------------------------------------------------------------------------- 1 | package twpurge 2 | 3 | import ( 4 | "errors" 5 | "io" 6 | "os" 7 | "path/filepath" 8 | "strings" 9 | ) 10 | 11 | // Hmmmm 12 | // What if we broke up the purging into two steps - the scan for which classes to include is run manually 13 | // but then it stays in a file and is used to filter results quickly. This way you get purged files 14 | // in development (one benefit of this is you don't have different styles in dev and production). 15 | 16 | // From a naming perspective, breaking up the tasks of "scanning for things to not purge" and "implementing the ShouldPurgeKey method" 17 | // should probably be two different things... 
18 | // twpurge.Scanner - scans text to extract purge keys 19 | // twpurge.Checker - just has ShouldPurgeKey 20 | // twpurge.CheckerFunc - implement ShouldPurgeKey as a function 21 | // twpurge.Map - cast map[string]struct{}{} to type that implements Checker 22 | 23 | // TODO: PurgeDir - reads directory, can reload upon demand (first call to ShouldPurgeKey) after X time, by default only loads first time 24 | // Should we/can we abstract this out to some sort of reload function that gets invoked? Or is that too much, maybe we just go simple. 25 | 26 | // type Purger struct { 27 | // } 28 | 29 | // // ShouldInclude implements ... 30 | // // The name var has a class like "..." (decide what the rules are here - what about these crazy colons and stuff) 31 | // func (p *Purger) ShouldInclude(name string) bool { 32 | // panic(errors.New("not yet implemented")) 33 | // } 34 | 35 | // // NameParser extracts possible names of things to allow 36 | // type NameParser struct { 37 | // } 38 | 39 | // PurgeKeyParser is anything that can read a stream (usually containing HTML or HTML-like layout content) 40 | // and parse purge keys from it. 41 | // 42 | // A "purge key" is an identifier like "px-1"; it can contain layout or other constraints like "sm:px-1", "md:w-full", 43 | // or "sm:focus:placeholder-green-200". It does not have a period prefix, no backslashes should appear, 44 | // and it should not contain any colon suffixes (colon prefixes as shown above are correct, but not things like :focus 45 | // at the end, etc.) 46 | // 47 | // The purgeKeyMap map, if not-nil, provides a list of all possible purge keys, which can be used to 48 | // discard keys found that aren't in the map. 49 | // type PurgeKeyParser interface { 50 | // ParsePurgeKeys(r io.Reader, purgeKeyMap map[string]struct{}) error 51 | // } 52 | 53 | // read a single file 54 | // read a tree 55 | // read a tree every X seconds 56 | // each needs to provide a ShouldPurgeKey(k string) bool 57 | 58 | // Checker is implemented by something that can answer the question "should this CSS rule be purged from the output because it is unused". 59 | type Checker interface { 60 | ShouldPurgeKey(k string) bool 61 | } 62 | 63 | // Map is a set of strings that implements Checker. 64 | // The output of a Scanner is a Map that can be used during conversion 65 | // to rapidly check if a style rule needs to be output. 66 | type Map map[string]struct{} 67 | 68 | // ShouldPurgeKey implements Checker. 69 | func (m Map) ShouldPurgeKey(k string) bool { 70 | _, ok := m[k] 71 | return !ok 72 | } 73 | // Merge copies the entries of fromMap into m. 74 | func (m Map) Merge(fromMap Map) { 75 | for k, v := range fromMap { 76 | m[k] = v 77 | } 78 | } 79 | 80 | // Scanner scans through textual files (generally HTML-like content) and looks for tokens 81 | // to be preserved when purging. The scanning is intentionally naive in order to keep 82 | // its rules simple to understand and reasonably performant
(TODO: explain more) 83 | type Scanner struct { 84 | tokenizerFunc func(r io.Reader) Tokenizer 85 | ruleNames map[string]struct{} 86 | m Map 87 | } 88 | // NewScanner returns a new Scanner which uses ruleNames as the set of possible purge keys to look for (a nil map means every token found is recorded). 89 | func NewScanner(ruleNames map[string]struct{}) *Scanner { 90 | return &Scanner{ruleNames: ruleNames} 91 | } 92 | // NewScannerFromDist returns a new Scanner using the purge keys derived from the given Dist via PurgeKeysFromDist. 93 | func NewScannerFromDist(dist Dist) (*Scanner, error) { 94 | pkmap, err := PurgeKeysFromDist(dist) 95 | if err != nil { 96 | return nil, err 97 | } 98 | return NewScanner(pkmap), nil 99 | } 100 | 101 | var defaultTokenizerFunc = func(r io.Reader) Tokenizer { return NewDefaultTokenizer(r) } 102 | // Scan tokenizes r and records each token that matches a known rule name (or every token if no rule names were provided). 103 | func (s *Scanner) Scan(r io.Reader) error { 104 | 105 | if s.m == nil { 106 | s.m = make(Map, len(s.ruleNames)/16) 107 | } 108 | 109 | tf := s.tokenizerFunc 110 | if tf == nil { 111 | tf = defaultTokenizerFunc 112 | } 113 | t := tf(r) 114 | 115 | for { 116 | b, err := t.NextToken() 117 | if err != nil { 118 | if errors.Is(err, io.EOF) { 119 | return nil 120 | } 121 | return err 122 | } 123 | bstr := string(b) // FIXME: cheating with a zero-alloc unsafe cast would be appropriate here 124 | found := true 125 | if s.ruleNames != nil { 126 | _, found = s.ruleNames[bstr] 127 | } 128 | if found { 129 | s.m[bstr] = struct{}{} 130 | } 131 | } 132 | } 133 | // ScanFile opens the file at fpath and calls Scan on its contents. 134 | func (s *Scanner) ScanFile(fpath string) error { 135 | f, err := os.Open(fpath) 136 | if err != nil { 137 | return err 138 | } 139 | defer f.Close() 140 | return s.Scan(f) 141 | } 142 | 143 | // WalkFunc returns a function which can be called by filepath.Walk to scan each matching file encountered. 144 | // The fnmatch func says which files to scan; if nil is passed then MatchDefault will be used. 145 | func (s *Scanner) WalkFunc(fnmatch func(fn string) bool) filepath.WalkFunc { 146 | if fnmatch == nil { 147 | fnmatch = MatchDefault 148 | } 149 | return filepath.WalkFunc(func(fpath string, info os.FileInfo, err error) error { 150 | if err != nil { // any stat errors get returned as-is (checked first, since info may be nil when err is set) 151 | return err 152 | } 153 | if info.IsDir() { // ignore dirs 154 | return nil 155 | } 156 | if !fnmatch(fpath) { // ignore if filename doesn't match 157 | return nil 158 | } 159 | return s.ScanFile(fpath) 160 | }) 161 | } 162 | 163 | // Map returns the Map which is the result of all previous Scan calls. 164 | func (s *Scanner) Map() Map { 165 | return s.m 166 | } 167 | 168 | // MatchDefault is a filename matcher function which will return true for files 169 | // ending in .html, .vugu, .jsx or .vue. 170 | var MatchDefault = func(fn string) bool { 171 | ext := strings.ToLower(filepath.Ext(fn)) 172 | switch ext { 173 | case ".html", ".vugu", ".jsx", ".vue": 174 | return true 175 | } 176 | return false 177 | } 178 | 179 | // // Purger can parse markup and accumulate a list of purge keys which can be used to 180 | // // vet the output of tailwind.Converter to eliminate unused styles. 181 | // type Purger struct { 182 | // purgeKeyMap map[string]struct{} // all possible purge keys, passed in from New 183 | // parsedKeyMap map[string]struct{} // the keys parsed from the markup (filtered to include only purgeKeyMap entries if not nil), these are the keys to be kept during conversion 184 | // tokenizer Tokenizer 185 | // } 186 | 187 | // // TODO: 188 | // // New(Dist) 189 | // // NewFromMap 190 | // // BuildPurgeKeyMap(Dist) map 191 | // // and then twfiles does not have purge support, but twembed can have it all preprocessed 192 | // // New(Dist) looks for interface and calls BuildPurgeKeyMap if not implemented 193 | 194 | // // MustNew is like New but panics upon error.
195 | // func MustNew(dist Dist) *Purger { 196 | // ret, err := New(dist) 197 | // if err != nil { 198 | // panic(err) 199 | // } 200 | // return ret 201 | // } 202 | 203 | // // New returns a new Purger instance. Uses PurgeKeysFromDist. 204 | // func New(dist Dist) (*Purger, error) { 205 | 206 | // // moved to PurgeKeysFromDist 207 | // // pkmr, ok := dist.(purgeKeyMapper) 208 | // // if ok { 209 | // // return NewFromMap(pkmr.PurgeKeyMap()), nil 210 | // // } 211 | 212 | // pkmap, err := PurgeKeysFromDist(dist) 213 | // if err != nil { 214 | // return nil, err 215 | // } 216 | // return NewFromMap(pkmap), nil 217 | // } 218 | 219 | // // NewFromMap returns a new Purger instance. If purgeKeyMap is not nil, it is a map 220 | // // of all the possible keys that can be purged, which is then used during 221 | // // markup parsing to be able to scan for just the purge keys that are relevant. 222 | // // Passing nil will still result in proper function but will use more memory 223 | // // and potentially be slower. 224 | // func NewFromMap(purgeKeyMap map[string]struct{}) *Purger { 225 | // return &Purger{purgeKeyMap: purgeKeyMap, parsedKeyMap: make(map[string]struct{})} 226 | // } 227 | 228 | // // WalkFunc returns a function which can be called by filepath.Walk 229 | // func (p *Purger) WalkFunc(fnmatch func(fn string) bool) filepath.WalkFunc { 230 | // if fnmatch == nil { 231 | // fnmatch = MatchDefault 232 | // } 233 | // return filepath.WalkFunc(func(fpath string, info os.FileInfo, err error) error { 234 | // if info.IsDir() { // ignore dirs 235 | // return nil 236 | // } 237 | // if err != nil { // any stat errors get returned as-is 238 | // return err 239 | // } 240 | // if !fnmatch(fpath) { // ignore if filename doesn't match 241 | // return nil 242 | // } 243 | // return p.ParseFile(fpath) 244 | // }) 245 | // } 246 | 247 | // func (p *Purger) SetTokenizer(t Tokenizer) { 248 | // p.tokenizer = t 249 | // } 250 | 251 | // func (p *Purger) ParseReader(r io.Reader) error { 252 | // if p.parsedKeyMap == nil { 253 | // p.parsedKeyMap = make(map[string]struct{}) 254 | // } 255 | 256 | // t := NewDefaultTokenizer(r) 257 | // for { 258 | // b, err := t.NextToken() 259 | // if err != nil { 260 | // if errors.Is(err, io.EOF) { 261 | // return nil 262 | // } 263 | // return err 264 | // } 265 | // bstr := string(b) // FIXME: cheating with zero-alloc unsafe cast would be appropriate here 266 | // found := true 267 | // if p.purgeKeyMap != nil { 268 | // _, found = p.purgeKeyMap[bstr] 269 | // } 270 | // if found { 271 | // p.parsedKeyMap[bstr] = struct{}{} 272 | // } 273 | // } 274 | 275 | // } 276 | 277 | // func (p *Purger) ParseFile(fpath string) error { 278 | // f, err := os.Open(fpath) 279 | // if err != nil { 280 | // return err 281 | // } 282 | // defer f.Close() 283 | // return p.ParseReader(f) 284 | // } 285 | 286 | // func (p *Purger) ShouldPurgeKey(k string) bool { 287 | // _, ok := p.parsedKeyMap[k] 288 | // return !ok 289 | // } 290 | 291 | // Dist matches tailwind.Dist 292 | type Dist interface { 293 | OpenDist(name string) (io.ReadCloser, error) 294 | } 295 | -------------------------------------------------------------------------------- /converter.go: -------------------------------------------------------------------------------- 1 | package tailwind 2 | 3 | import ( 4 | "bytes" 5 | "errors" 6 | "fmt" 7 | "io" 8 | "sync" 9 | 10 | "github.com/tdewolff/parse/v2" 11 | "github.com/tdewolff/parse/v2/css" 12 | ) 13 | 14 | // TODO: types that are: 15 | // useful for output 16 | // 
useful for filtering (callback func to veto rules from being output for smaller file size) 17 | // can represent tree (so you can look and see that a rule is nested in a media query) 18 | // workable with stream processing (to whatever extent possible) 19 | // do not directly expose tdewolff css types (but can depend on them and include them unexported) 20 | 21 | // New returns an initialized instance of Converter. The out param 22 | // indicates where output is written; it must not be nil. 23 | func New(out io.Writer, dist Dist) *Converter { 24 | if out == nil { 25 | panic(fmt.Errorf("tailwind.Converter.out is nil, cannot continue")) 26 | } 27 | return &Converter{ 28 | out: out, 29 | dist: dist, 30 | } 31 | } 32 | 33 | // Converter does processing of CSS input files and writes a single output 34 | // CSS file with the appropriate @ directives processed. 35 | // Inputs are processed in the order they are added (see e.g. AddReader()). 36 | type Converter struct { 37 | out io.Writer 38 | inputs []*input 39 | dist Dist // tailwind is sourced from here 40 | *applier // initialized as needed 41 | postProcFunc func(out io.Writer, in io.Reader) error 42 | purgeChecker PurgeChecker // the purgeChecker, if any 43 | } 44 | 45 | type input struct { 46 | name string // display file name 47 | r io.Reader // read input from here 48 | isInline bool 49 | } 50 | 51 | // TODO: compare to https://tailwindcss.com/docs/controlling-file-size 52 | // func (c *Converter) SetAllow(rule ...string) { 53 | // panic(fmt.Errorf("not yet implemented")) 54 | // } 55 | // func (c *Converter) SetDisallow(rule ...string) { 56 | // panic(fmt.Errorf("not yet implemented")) 57 | // } 58 | 59 | // SetPostProcFunc sets the function that is called to post-process the output of the converter. 60 | // The typical use of this is for minification. 61 | func (c *Converter) SetPostProcFunc(f func(out io.Writer, in io.Reader) error) { 62 | c.postProcFunc = f 63 | } 64 | // SetPurgeChecker assigns a PurgeChecker which is consulted while emitting the utilities section so that unused rules can be omitted from the output (see package twpurge). 65 | func (c *Converter) SetPurgeChecker(purgeChecker PurgeChecker) { 66 | c.purgeChecker = purgeChecker 67 | } 68 | 69 | // AddReader adds an input source. The name is used only in error 70 | // messages to indicate the source. The r param is the CSS source to be processed; 71 | // it must not be nil. If isInline is true, it indicates this CSS is from an HTML 72 | // style attribute, otherwise it's from the contents of a style tag or a 73 | // standalone CSS file. 74 | func (c *Converter) AddReader(name string, r io.Reader, isInline bool) { 75 | if r == nil { 76 | panic(fmt.Errorf("tailwind.Converter.AddReader(%q, r): r is nil, cannot continue", name)) 77 | } 78 | c.inputs = append(c.inputs, &input{name: name, r: r, isInline: isInline}) 79 | } 80 | 81 | // Run performs the conversion. The output is written to the writer specified 82 | // in New().
83 | func (c *Converter) Run() (reterr error) { 84 | 85 | if c.out == nil { 86 | panic(fmt.Errorf("tailwind.Converter.out is nil, cannot continue")) 87 | } 88 | 89 | defer func() { 90 | if r := recover(); r != nil { 91 | e, ok := r.(error) 92 | if ok { 93 | reterr = e 94 | } else { 95 | reterr = fmt.Errorf("%v", r) 96 | } 97 | } 98 | }() 99 | 100 | var w io.Writer = c.out 101 | 102 | // if postProcFunc is specified then use a pipe to integrate it 103 | if c.postProcFunc != nil { 104 | pr, pw := io.Pipe() 105 | w = pw 106 | var wg sync.WaitGroup 107 | wg.Add(1) 108 | defer wg.Wait() 109 | defer pw.Close() 110 | 111 | go func() { 112 | defer wg.Done() 113 | err := c.postProcFunc(c.out, pr) 114 | if err != nil && reterr == nil { 115 | reterr = err 116 | } 117 | }() 118 | } 119 | 120 | for _, in := range c.inputs { 121 | inp := parse.NewInput(in.r) 122 | p := css.NewParser(inp, in.isInline) 123 | 124 | err := c.runParse(in.name, p, inp, w, false) 125 | if err != nil { 126 | return err 127 | } 128 | 129 | } 130 | 131 | return nil 132 | } 133 | 134 | func (c *Converter) runParse(name string, p *css.Parser, inp *parse.Input, w io.Writer, doPurge bool) error { 135 | 136 | // set to true when we enter a ruleset that we're omitting from the output 137 | inPurgeRule := false 138 | // set to true when we find a rule with a comma in it, which we then just decline to purge 139 | isQualifiedRule := false 140 | 141 | for { 142 | 143 | gt, tt, data := p.Next() 144 | _ = tt 145 | 146 | // TODO: it's unfortunate we cannot get some sort of context from p, 147 | // although in the ErrorGrammar it does give it's own line number; 148 | // so for now we just print the name in front of every error 149 | 150 | switch gt { 151 | 152 | case css.ErrorGrammar: 153 | err := p.Err() 154 | if errors.Is(err, io.EOF) { 155 | return nil 156 | } 157 | return fmt.Errorf("%s: %w", name, err) 158 | 159 | case css.AtRuleGrammar: 160 | 161 | switch { 162 | 163 | case bytes.Equal(data, []byte("@tailwind")): 164 | tokens := trimTokenWs(p.Values()) 165 | if len(tokens) != 1 { 166 | return fmt.Errorf("%s: @tailwind should be followed by exactly one token, instead found: %v", name, tokens) 167 | } 168 | token := tokens[0] 169 | if token.TokenType != css.IdentToken { 170 | return fmt.Errorf("%s: @tailwind should be followed by an identifier token, instead found: %v", name, token) 171 | } 172 | switch string(token.Data) { 173 | case "base": 174 | 175 | rc, err := c.dist.OpenDist("base") 176 | if err != nil { 177 | return err 178 | } 179 | defer rc.Close() 180 | 181 | subpi := parse.NewInput(rc) 182 | subp := css.NewParser(subpi, false) 183 | err = c.runParse("[tailwind-dist/base]", subp, subpi, w, false) 184 | if err != nil { 185 | return err 186 | } 187 | 188 | case "components": 189 | 190 | rc, err := c.dist.OpenDist("components") 191 | if err != nil { 192 | return err 193 | } 194 | defer rc.Close() 195 | 196 | subpi := parse.NewInput(rc) 197 | subp := css.NewParser(subpi, false) 198 | err = c.runParse("[tailwind-dist/components]", subp, subpi, w, false) 199 | if err != nil { 200 | return err 201 | } 202 | 203 | case "utilities": 204 | 205 | rc, err := c.dist.OpenDist("utilities") 206 | if err != nil { 207 | return err 208 | } 209 | defer rc.Close() 210 | 211 | subpi := parse.NewInput(rc) 212 | subp := css.NewParser(subpi, false) 213 | err = c.runParse("[tailwind-dist/utilities]", subp, subpi, w, true) // for utilities we enable purging (if available) 214 | if err != nil { 215 | return err 216 | } 217 | 218 | default: 219 | return 
fmt.Errorf("%s: @tailwind followed by unknown identifier: %s", name, token.Data) 220 | } 221 | 222 | case bytes.Equal(data, []byte("@apply")): 223 | 224 | if c.applier == nil { 225 | var err error 226 | c.applier, err = newApplier(c.dist) 227 | if err != nil { 228 | return fmt.Errorf("error while creating applier: %w", err) 229 | } 230 | } 231 | 232 | idents, err := tokensToIdents(p.Values()) 233 | if err != nil { 234 | return err 235 | } 236 | 237 | b, err := c.applier.apply(idents) 238 | if err != nil { 239 | return err 240 | } 241 | 242 | _, err = w.Write(b) 243 | if err != nil { 244 | return err 245 | } 246 | 247 | default: // other @ rules just get copied verbatim 248 | err := write(w, data, p.Values(), ';') 249 | if err != nil { 250 | return err 251 | } 252 | 253 | } 254 | 255 | case css.BeginAtRuleGrammar: 256 | err := write(w, data, p.Values(), '{') 257 | if err != nil { 258 | return err 259 | } 260 | 261 | case css.EndAtRuleGrammar: 262 | err := write(w, data) 263 | if err != nil { 264 | return err 265 | } 266 | 267 | case css.QualifiedRuleGrammar: 268 | // NOTE: this is used for rules like: b,strong { ... 269 | // we'll get a QualifiedRuleGrammar entry with empty data and p.Values() 270 | // has the 'b' in it. 271 | isQualifiedRule = true 272 | err := write(w, p.Values(), ',') 273 | if err != nil { 274 | return err 275 | } 276 | 277 | case css.BeginRulesetGrammar: 278 | // log.Printf("BeginRulesetGrammar: data=%s; tokens = %v", data, p.Values()) 279 | if doPurge && !isQualifiedRule && c.purgeChecker != nil { 280 | key := ruleToPurgeKey(data, p.Values()) 281 | if c.purgeChecker.ShouldPurgeKey(key) { 282 | inPurgeRule = true 283 | } 284 | } 285 | isQualifiedRule = false // once we start a ruleset, this goes away 286 | if !inPurgeRule { 287 | err := write(w, data, p.Values(), '{') 288 | if err != nil { 289 | return err 290 | } 291 | } 292 | 293 | case css.DeclarationGrammar: 294 | if !inPurgeRule { 295 | err := write(w, data, ':', p.Values(), ';') 296 | if err != nil { 297 | return err 298 | } 299 | } 300 | 301 | case css.CustomPropertyGrammar: 302 | if !inPurgeRule { 303 | err := write(w, data, ':', p.Values(), ';') 304 | if err != nil { 305 | return err 306 | } 307 | } 308 | 309 | case css.EndRulesetGrammar: 310 | if !inPurgeRule { 311 | err := write(w, data) 312 | if err != nil { 313 | return err 314 | } 315 | } 316 | inPurgeRule = false 317 | 318 | case css.TokenGrammar: 319 | continue // HTML-style comment, just skip 320 | 321 | case css.CommentGrammar: 322 | continue // strip comments 323 | 324 | default: // verify we aren't missing a type 325 | panic(fmt.Errorf("%s: unexpected grammar type %v at offset %v", name, gt, inp.Offset())) 326 | 327 | } 328 | 329 | } 330 | 331 | } 332 | 333 | // scan tokens and extract just class names 334 | func toklistClasses(toklist []css.Token) (ret []string) { // FIXME: think about efficiency - we should probably be using []byte and then have a static list of strings for stuff that can be @apply'd and only one copy of each of those strings 335 | priorDot := false 336 | for _, tok := range toklist { 337 | if tok.TokenType == css.DelimToken && bytes.Equal(tok.Data, []byte(".")) { 338 | priorDot = true 339 | continue 340 | } 341 | if priorDot && tok.TokenType == css.IdentToken { 342 | // parser will give us escapes and colons as part of identifiers, which indicate entires we can skip over for our purposes here 343 | if bytes.IndexByte(tok.Data, '\\') < 0 && 344 | bytes.IndexByte(tok.Data, ':') < 0 { 345 | ret = append(ret, string(tok.Data)) 346 | } 
347 | } 348 | priorDot = false 349 | } 350 | return 351 | } 352 | 353 | // a general purpose write so we can just do one error check, 354 | // check later for performance implications of interface{} 355 | // and fmt.Fprintf here but I suspect it'll be minimal 356 | func write(w io.Writer, what ...interface{}) error { 357 | for _, i := range what { 358 | 359 | switch v := i.(type) { 360 | 361 | case byte: 362 | fmt.Fprintf(w, "%c", v) 363 | 364 | case rune: 365 | fmt.Fprintf(w, "%c", v) 366 | 367 | case []byte: 368 | fmt.Fprintf(w, "%s", v) 369 | 370 | case []css.Token: 371 | err := writeTokens(w, v...) 372 | if err != nil { 373 | return err 374 | } 375 | 376 | default: 377 | _, err := fmt.Fprint(w, v) 378 | if err != nil { 379 | return err 380 | } 381 | } 382 | 383 | } 384 | return nil 385 | } 386 | 387 | func writeTokens(w io.Writer, tokens ...css.Token) error { 388 | for _, val := range tokens { 389 | _, err := w.Write(val.Data) 390 | if err != nil { 391 | return err 392 | } 393 | } 394 | return nil 395 | } 396 | 397 | func trimTokenWs(tokens []css.Token) []css.Token { 398 | for len(tokens) > 0 && tokens[0].TokenType == css.WhitespaceToken { 399 | tokens = tokens[1:] 400 | } 401 | for len(tokens) > 0 && tokens[len(tokens)-1].TokenType == css.WhitespaceToken { 402 | tokens = tokens[:len(tokens)-1] 403 | } 404 | return tokens 405 | } 406 | 407 | func tokensToIdents(tokens []css.Token) ([]string, error) { 408 | 409 | ret := make([]string, 0, len(tokens)/2) 410 | 411 | for _, token := range tokens { 412 | switch token.TokenType { 413 | case css.IdentToken: 414 | ret = append(ret, string(token.Data)) 415 | case css.CommentToken, css.WhitespaceToken: 416 | // ignore 417 | default: 418 | return ret, fmt.Errorf("unexpected token while looking for ident: %v", token) 419 | } 420 | } 421 | 422 | return ret, nil 423 | } 424 | 425 | // takes the rule info from a BeginRulesetGrammar returns the purge key if there is one or else empty string 426 | func ruleToPurgeKey(data []byte, tokens []css.Token) string { 427 | 428 | if len(data) != 0 { 429 | panic("unexpected data") 430 | } 431 | 432 | // we're looking for Delim('.') followed by Ident() - we disregard everything after 433 | if len(tokens) < 2 { 434 | return "" 435 | } 436 | if tokens[0].TokenType != css.DelimToken || !bytes.Equal(tokens[0].Data, []byte(".")) { 437 | return "" 438 | } 439 | if tokens[1].TokenType != css.IdentToken { 440 | return "" 441 | } 442 | 443 | // if we get here we're good, we just need to unescape the ident (e.g. `\:` becomes just `:`) 444 | return cssUnescape(tokens[1].Data) 445 | } 446 | 447 | // PurgeChecker is something which can tell us if a key should be purged from the final output (because it is not used). 448 | // See package twpurge for default implementation. 449 | type PurgeChecker interface { 450 | ShouldPurgeKey(k string) bool 451 | } 452 | 453 | func cssUnescape(b []byte) string { 454 | 455 | var buf bytes.Buffer 456 | 457 | var i int 458 | for i = 0; i < len(b); i++ { 459 | if b[i] == '\\' { 460 | // set up buf with the stuff we've already scanned 461 | buf.Grow(len(b)) 462 | buf.Write(b[:i]) 463 | goto foundEsc 464 | } 465 | continue 466 | } 467 | // no escaping needed 468 | return string(b) 469 | 470 | foundEsc: 471 | inEsc := false 472 | for ; i < len(b); i++ { 473 | if b[i] == '\\' && !inEsc { 474 | inEsc = true 475 | continue 476 | } 477 | buf.WriteByte(b[i]) 478 | inEsc = false 479 | } 480 | return buf.String() 481 | } 482 | --------------------------------------------------------------------------------
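For orientation, a minimal usage sketch of the Converter API shown in converter.go above (New, AddReader, Run). The localDist type, the "upstream/out" directory, and the output/input file names are hypothetical stand-ins; only the tailwind.* calls come from the source above, and the import path is assumed to follow the module path in go.mod.

package main

import (
	"io"
	"os"
	"path/filepath"
	"strings"

	"github.com/gotailwindcss/tailwind"
)

// localDist is a hypothetical Dist implementation that reads the "base",
// "components" and "utilities" sections from CSS files in a local directory;
// any other Dist implementation would be used the same way.
type localDist struct{ dir string }

func (d localDist) OpenDist(name string) (io.ReadCloser, error) {
	return os.Open(filepath.Join(d.dir, name+".css"))
}

func main() {
	out, err := os.Create("out.css")
	if err != nil {
		panic(err)
	}
	defer out.Close()

	conv := tailwind.New(out, localDist{dir: "upstream/out"}) // hypothetical location of the built CSS sections
	// isInline=false: this input is standalone CSS, not a style attribute.
	conv.AddReader("input.css", strings.NewReader("@tailwind base;\n@tailwind utilities;\n"), false)
	if err := conv.Run(); err != nil {
		panic(err)
	}
}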
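And a sketch of how the purge pieces above might fit together: scan an HTML template tree with twpurge.Scanner, then hand the resulting Map to Converter.SetPurgeChecker so unused utility rules are dropped during conversion. The package name, the BuildPurgedCSS helper, and the templatesDir parameter are illustrative assumptions; NewScannerFromDist, WalkFunc, Map, SetPurgeChecker, AddReader and Run are the calls visible in the source above.

package example

import (
	"io"
	"path/filepath"

	"github.com/gotailwindcss/tailwind"
	"github.com/gotailwindcss/tailwind/twpurge"
)

// BuildPurgedCSS (hypothetical helper) scans templatesDir for class-name tokens
// and then converts the CSS read from in, keeping only the utility rules whose
// purge keys were actually seen in the templates.
func BuildPurgedCSS(out io.Writer, in io.Reader, dist tailwind.Dist, templatesDir string) error {
	// tailwind.Dist and twpurge.Dist declare the same OpenDist method,
	// so the same value satisfies both interfaces.
	scanner, err := twpurge.NewScannerFromDist(dist)
	if err != nil {
		return err
	}
	// A nil matcher means MatchDefault: .html, .vugu, .jsx and .vue files are scanned.
	if err := filepath.Walk(templatesDir, scanner.WalkFunc(nil)); err != nil {
		return err
	}

	conv := tailwind.New(out, dist)
	conv.SetPurgeChecker(scanner.Map()) // twpurge.Map implements ShouldPurgeKey
	conv.AddReader("input.css", in, false)
	return conv.Run()
}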