├── .gitignore ├── js ├── .gitignore ├── .prettierignore ├── format.wasm ├── tsconfig.json ├── .prettierrc.json ├── wasm_exec.d.ts ├── README.md ├── format.go ├── node.ts ├── package.json ├── format.ts ├── package-lock.json └── wasm_exec.js ├── tests ├── in │ ├── run.dockerfile │ ├── onbuild-arg-env.dockerfile │ ├── comment.dockerfile │ ├── andissue.dockerfile │ ├── onbuild.dockerfile │ ├── crash.dockerfile │ ├── env.dockerfile │ ├── flags.dockerfile │ ├── heredoc.dockerfile │ ├── quoting.dockerfile │ ├── run3.dockerfile │ ├── moretests.dockerfile │ ├── run5.dockerfile │ ├── whitespace.dockerfile │ ├── misc.dockerfile │ ├── formatted.dockerfile │ ├── run4.dockerfile │ ├── issue32.dockerfile │ ├── run2.dockerfile │ ├── shell.dockerfile │ └── label.dockerfile └── out │ ├── run.dockerfile │ ├── onbuild-arg-env.dockerfile │ ├── comment.dockerfile │ ├── andissue.dockerfile │ ├── onbuild.dockerfile │ ├── crash.dockerfile │ ├── env.dockerfile │ ├── heredoc.dockerfile │ ├── flags.dockerfile │ ├── quoting.dockerfile │ ├── run3.dockerfile │ ├── whitespace.dockerfile │ ├── run5.dockerfile │ ├── moretests.dockerfile │ ├── misc.dockerfile │ ├── formatted.dockerfile │ ├── run4.dockerfile │ ├── issue32.dockerfile │ ├── label.dockerfile │ ├── shell.dockerfile │ └── run2.dockerfile ├── main.go ├── cmd ├── version.go └── root.go ├── .pre-commit-hooks.yaml ├── .github └── workflows │ ├── release-js.yaml │ └── release.yaml ├── go.mod ├── LICENSE ├── dockerfmt_test.go ├── README.md ├── go.sum └── lib └── format.go /.gitignore: -------------------------------------------------------------------------------- 1 | dockerfmt -------------------------------------------------------------------------------- /js/.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | dist -------------------------------------------------------------------------------- /js/.prettierignore: -------------------------------------------------------------------------------- 1 | /package-lock.json 2 | /wasm_exec.* 3 | -------------------------------------------------------------------------------- /tests/in/run.dockerfile: -------------------------------------------------------------------------------- 1 | RUN ["ls" , "-la"] 2 | 3 | RUN echo foo >> bar -------------------------------------------------------------------------------- /tests/out/run.dockerfile: -------------------------------------------------------------------------------- 1 | RUN ["ls", "-la"] 2 | 3 | RUN echo foo >>bar 4 | -------------------------------------------------------------------------------- /js/format.wasm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/reteps/dockerfmt/HEAD/js/format.wasm -------------------------------------------------------------------------------- /tests/out/onbuild-arg-env.dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine 2 | ONBUILD ARG 3 | ONBUILD ENV 4 | -------------------------------------------------------------------------------- /tests/in/onbuild-arg-env.dockerfile: -------------------------------------------------------------------------------- 1 | FROM alpine 2 | ONBUiLD arG 3 | onbuild enV 4 | 5 | 6 | -------------------------------------------------------------------------------- /tests/in/comment.dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/jessfraz/dockfmt/issues/12 2 | FROM 
scratch 3 | # a comment -------------------------------------------------------------------------------- /tests/out/comment.dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/jessfraz/dockfmt/issues/12 2 | FROM scratch 3 | # a comment 4 | -------------------------------------------------------------------------------- /tests/in/andissue.dockerfile: -------------------------------------------------------------------------------- 1 | RUN foo \ 2 | # comment 3 | && \ 4 | # comment 2 5 | bar && \ 6 | # comment 3 7 | baz -------------------------------------------------------------------------------- /tests/out/andissue.dockerfile: -------------------------------------------------------------------------------- 1 | RUN foo \ 2 | # comment 3 | # comment 2 4 | && bar \ 5 | # comment 3 6 | && baz 7 | -------------------------------------------------------------------------------- /main.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "github.com/reteps/dockerfmt/cmd" 5 | ) 6 | 7 | func main() { 8 | cmd.Execute() 9 | } 10 | -------------------------------------------------------------------------------- /tests/out/onbuild.dockerfile: -------------------------------------------------------------------------------- 1 | # syntax=docker/dockerfile:1.11 2 | FROM alpine AS baseimage 3 | ONBUILD COPY --from=build /usr/bin/app /app 4 | ONBUILD RUN --mount=from=config,target=/opt/appconfig ... 5 | -------------------------------------------------------------------------------- /js/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES2020", 4 | "moduleResolution": "NodeNext", 5 | "module": "NodeNext", 6 | "declaration": true, 7 | "outDir": "./dist" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /tests/in/onbuild.dockerfile: -------------------------------------------------------------------------------- 1 | # syntax=docker/dockerfile:1.11 2 | FROM alpine AS baseimage 3 | ONBUILD COPY --from=build /usr/bin/app /app 4 | ONBUILD RUN --mount=from=config,target=/opt/appconfig ... 
-------------------------------------------------------------------------------- /tests/in/crash.dockerfile: -------------------------------------------------------------------------------- 1 | FROM nginx 2 | WORKDIR /app 3 | ARG PROJECT_DIR=/ 4 | ARG NGINX_CONF=nginx.conf 5 | COPY $NGINX_CONF /etc/nginx/conf.d/nginx.conf 6 | COPY $PROJECT_DIR /app 7 | CMD mkdir --parents /var/log/nginx && nginx -g "daemon off;" -------------------------------------------------------------------------------- /tests/out/crash.dockerfile: -------------------------------------------------------------------------------- 1 | FROM nginx 2 | WORKDIR /app 3 | ARG PROJECT_DIR=/ 4 | ARG NGINX_CONF=nginx.conf 5 | COPY $NGINX_CONF /etc/nginx/conf.d/nginx.conf 6 | COPY $PROJECT_DIR /app 7 | CMD mkdir --parents /var/log/nginx && nginx -g "daemon off;" 8 | -------------------------------------------------------------------------------- /tests/in/env.dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/un-ts/prettier/issues/398 2 | ENV a=1 \ 3 | b=2 \ 4 | # comment 5 | c=3 \ 6 | d=4 \ 7 | # comment 8 | e=5 9 | 10 | ENV MY_VAR my-value 11 | ENV MY_VAR=my-value2 \ 12 | c=4 13 | ENV MY_VAR=my-value3 -------------------------------------------------------------------------------- /tests/out/env.dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/un-ts/prettier/issues/398 2 | ENV a=1 \ 3 | b=2 \ 4 | # comment 5 | c=3 \ 6 | d=4 \ 7 | # comment 8 | e=5 9 | 10 | ENV MY_VAR=my-value 11 | ENV MY_VAR=my-value2 \ 12 | c=4 13 | ENV MY_VAR=my-value3 14 | -------------------------------------------------------------------------------- /js/.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": ["prettier-plugin-pkg"], 3 | "singleQuote": true, 4 | "semi": false, 5 | "tabWidth": 4, 6 | "overrides": [ 7 | { 8 | "files": "*.json", 9 | "options": { 10 | "tabWidth": 2 11 | } 12 | } 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /js/wasm_exec.d.ts: -------------------------------------------------------------------------------- 1 | declare class Go { 2 | argv: string[]; 3 | env: { [envKey: string]: string }; 4 | exit: (code: number) => void; 5 | importObject: WebAssembly.Imports; 6 | exited: boolean; 7 | mem: DataView; 8 | run(instance: WebAssembly.Instance): Promise; 9 | } -------------------------------------------------------------------------------- /tests/in/flags.dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/jessfraz/dockfmt/issues/23 2 | FROM --platform=linux/arm64 debian 3 | 4 | RUN --network=host apt-get install vim 5 | RUN --security echo "test" 6 | COPY --chown=my-user:my-group --chmod=644 ./config.conf /data/config.conf 7 | COPY --link ./another-file /data/linked-file 8 | ADD --keep-git-dir ./ /data/src -------------------------------------------------------------------------------- /tests/out/heredoc.dockerfile: -------------------------------------------------------------------------------- 1 | RUN <>/hello 3 | echo "World!" >>/hello 4 | EOF 5 | RUN ls 6 | 7 | RUN <<-EOF 8 | echo "Hello" >>/hello 9 | echo "World!" 
>>/hello 10 | EOF 11 | 12 | COPY <<-EOF /x 13 | x 14 | EOF 15 | 16 | COPY <<-EOT /script.sh 17 | echo "hello ${FOO}" 18 | EOT 19 | COPY <<-EOF /x 20 | x 21 | EOF 22 | -------------------------------------------------------------------------------- /tests/out/flags.dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/jessfraz/dockfmt/issues/23 2 | FROM --platform=linux/arm64 debian 3 | 4 | RUN --network=host apt-get install vim 5 | RUN --security echo "test" 6 | COPY --chown=my-user:my-group --chmod=644 ./config.conf /data/config.conf 7 | COPY --link ./another-file /data/linked-file 8 | ADD --keep-git-dir ./ /data/src 9 | -------------------------------------------------------------------------------- /tests/in/heredoc.dockerfile: -------------------------------------------------------------------------------- 1 | RUN <> /hello 3 | echo "World!">>/hello 4 | EOF 5 | RUN ls 6 | 7 | RUN <<-EOF 8 | echo "Hello" >> /hello 9 | echo "World!">>/hello 10 | EOF 11 | 12 | COPY <<-EOF /x 13 | x 14 | EOF 15 | 16 | COPY <<-EOT /script.sh 17 | echo "hello ${FOO}" 18 | EOT 19 | COPY <<-EOF /x 20 | x 21 | EOF 22 | 23 | -------------------------------------------------------------------------------- /cmd/version.go: -------------------------------------------------------------------------------- 1 | package cmd 2 | 3 | import ( 4 | "fmt" 5 | 6 | "github.com/spf13/cobra" 7 | ) 8 | 9 | func init() { 10 | rootCmd.AddCommand(versionCmd) 11 | } 12 | 13 | var versionCmd = &cobra.Command{ 14 | Use: "version", 15 | Short: "Print the version number of dockerfmt", 16 | Run: func(cmd *cobra.Command, args []string) { 17 | fmt.Println("dockerfmt 0.3.9") 18 | }, 19 | } 20 | -------------------------------------------------------------------------------- /tests/in/quoting.dockerfile: -------------------------------------------------------------------------------- 1 | FROM hackernews 2 | # https://news.ycombinator.com/item?id=43630653 3 | ENTRYPOINT service ssh restart && bash 4 | 5 | ENTRYPOINT sh -c 'service ssh restart && bash' 6 | 7 | # https://github.com/reteps/dockerfmt/issues/20 8 | FROM nginx 9 | ENTRYPOINT ["nginx", "-g", "daemon off;"] 10 | 11 | # https://github.com/reteps/dockerfmt/issues/20 12 | FROM nginx 13 | ENTRYPOINT nginx -g 'daemon off;' -------------------------------------------------------------------------------- /tests/out/quoting.dockerfile: -------------------------------------------------------------------------------- 1 | FROM hackernews 2 | # https://news.ycombinator.com/item?id=43630653 3 | ENTRYPOINT service ssh restart && bash 4 | 5 | ENTRYPOINT sh -c 'service ssh restart && bash' 6 | 7 | # https://github.com/reteps/dockerfmt/issues/20 8 | FROM nginx 9 | ENTRYPOINT ["nginx", "-g", "daemon off;"] 10 | 11 | # https://github.com/reteps/dockerfmt/issues/20 12 | FROM nginx 13 | ENTRYPOINT nginx -g 'daemon off;' 14 | -------------------------------------------------------------------------------- /tests/out/run3.dockerfile: -------------------------------------------------------------------------------- 1 | RUN apt-get update \ 2 | # Comment here 3 | && apt-get install -y --no-install-recommends \ 4 | # Another comment here 5 | man-db unminimize \ 6 | # Multiline comment here 7 | # Here 8 | # And here 9 | gosu curl git htop less nano unzip vim wget zip \ 10 | && yes | unminimize \ 11 | && apt-get clean \ 12 | && rm -rf /var/lib/apt/lists/* \ 13 | && find /tmp -not -path /tmp -delete 14 | 
-------------------------------------------------------------------------------- /tests/in/run3.dockerfile: -------------------------------------------------------------------------------- 1 | RUN apt-get update \ 2 | # Comment here 3 | && apt-get install -y --no-install-recommends \ 4 | # Another comment here 5 | man-db unminimize \ 6 | # Multiline comment here 7 | # Here 8 | # And here 9 | gosu curl git htop less nano unzip vim wget zip && \ 10 | yes | unminimize && \ 11 | apt-get clean && \ 12 | rm -rf /var/lib/apt/lists/* \ 13 | && find /tmp -not -path /tmp -delete -------------------------------------------------------------------------------- /.pre-commit-hooks.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | - additional_dependencies: 3 | - ./cmd 4 | - ./lib 5 | alias: dockerfmt 6 | args: 7 | - --write 8 | - --newline 9 | - --indent=4 10 | - --space-redirects 11 | description: Format Dockerfile files 12 | entry: dockerfmt 13 | files: ^.*(Container|Docker)file.*$ 14 | id: dockerfmt 15 | language: golang 16 | name: dockerfmt 17 | pass_filenames: true 18 | require_serial: false 19 | exclude: ".dockerignore$" 20 | types_or: 21 | - dockerfile 22 | -------------------------------------------------------------------------------- /tests/out/whitespace.dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/reteps/dockerfmt/issues/1#issuecomment-2785329824 2 | FROM node:lts-alpine as builder 3 | 4 | # 安装与编译代码 5 | COPY . /app 6 | WORKDIR /app 7 | RUN yarn --frozen-lockfile \ 8 | && yarn build \ 9 | && find . -name '*.map' -type f -exec rm -f {} \; 10 | 11 | # 最终的应用 12 | FROM abiosoft/caddy 13 | COPY --from=builder /app/packages/ufc-host-app/build /srv 14 | EXPOSE 2015 15 | 16 | FROM foobar 17 | RUN ls 18 | LABEL foo=bar 19 | HEALTHCHECK NONE 20 | CMD ls 21 | COPY . . 22 | ADD . . 23 | -------------------------------------------------------------------------------- /js/README.md: -------------------------------------------------------------------------------- 1 | # `@reteps/dockerfmt` 2 | 3 | Bindings around the Golang `dockerfmt` tooling. It uses [tinygo](https://github.com/tinygo-org/tinygo) to compile the Go code to WebAssembly, which is then used in the JS bindings. 4 | 5 | 6 | ```js 7 | import { formatDockerfile } from '@reteps/dockerfmt' 8 | // Alternatively, you can use `formatDockerfileContents` to format a string instead of a file. 
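// For example (a minimal sketch, assuming the same options object shape as `formatDockerfile`):
//   import { formatDockerfileContents } from '@reteps/dockerfmt'
//   const formatted = await formatDockerfileContents('RUN ["ls" , "-la"]\n', { indent: 4, trailingNewline: true })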
9 | 10 | const result = await formatDockerfile('../tests/comment.dockerfile', { indent: 4, trailingNewline: true }) 11 | 12 | console.log(result) 13 | ``` 14 | -------------------------------------------------------------------------------- /tests/out/run5.dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/un-ts/prettier/issues/441#issuecomment-2793674631 2 | FROM ghcr.io/zerocluster/node/app 3 | 4 | RUN \ 5 | # install dependencies 6 | NODE_ENV=production npm install-clean \ 7 | # cleanup 8 | && /usr/bin/env bash <(curl -fsSL https://raw.githubusercontent.com/softvisio/scripts/main/env-build-node.sh) cleanup 9 | 10 | RUN \ 11 | # install dependencies 12 | # multiline comment 13 | NODE_ENV=production npm install-clean \ 14 | # cleanup 15 | && /usr/bin/env bash <(curl -fsSL https://raw.githubusercontent.com/softvisio/scripts/main/env-build-node.sh) cleanup 16 | -------------------------------------------------------------------------------- /tests/in/moretests.dockerfile: -------------------------------------------------------------------------------- 1 | ENV a=1 \ 2 | b=2 \ 3 | # comment 4 | c=3 \ 5 | d=4 \ 6 | # comment 7 | e=5 8 | 9 | MAINTAINER Jean Luc Picard 10 | 11 | FROM debian:12.6-slim 12 | 13 | RUN set -eux; for x in {1..3}; do echo 'foo'; echo 'bar'; echo "$x"; done 14 | 15 | RUN < { 11 | return fs.readFile( 12 | path.resolve( 13 | path.dirname(fileURLToPath(import.meta.url)), 14 | 'format.wasm', 15 | ), 16 | ) 17 | } 18 | 19 | export const formatDockerfileContents = async ( 20 | fileContents: string, 21 | options: FormatOptions, 22 | ) => { 23 | return formatDockerfileContents_(fileContents, options, getWasm) 24 | } 25 | 26 | export const formatDockerfile = async ( 27 | fileName: string, 28 | options: FormatOptions, 29 | ) => { 30 | const fileBuffer = await fs.readFile(fileName) 31 | const fileContents = fileBuffer.toString() 32 | return formatDockerfileContents(fileContents, options) 33 | } 34 | 35 | export { FormatOptions } 36 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | on: 2 | workflow_dispatch: 3 | release: 4 | types: [created] 5 | 6 | permissions: 7 | contents: write 8 | packages: write 9 | 10 | jobs: 11 | release-docker: 12 | name: Release Docker Image 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | - uses: actions/setup-go@v5 17 | with: 18 | go-version: '1.24.x' 19 | - uses: ko-build/setup-ko@v0.8 20 | - run: ko build --bare 21 | releases-matrix: 22 | name: Release Go Binary 23 | runs-on: ubuntu-latest 24 | strategy: 25 | matrix: 26 | # build and publish in parallel: linux/386, linux/amd64, linux/arm64, windows/386, windows/amd64, darwin/amd64, darwin/arm64 27 | goos: [linux, darwin] 28 | goarch: [amd64, arm64] 29 | exclude: 30 | - goarch: amd64 31 | goos: darwin 32 | steps: 33 | - uses: actions/checkout@v4 34 | - uses: wangyoucao577/go-release-action@v1 35 | with: 36 | github_token: ${{ secrets.GITHUB_TOKEN }} 37 | goos: ${{ matrix.goos }} 38 | goarch: ${{ matrix.goarch }} -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Pete Stenger 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated 
documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /tests/in/formatted.dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/jessfraz/dockfmt/issues/2 2 | # set up PrairieLearn and run migrations to initialize the DB 3 | RUN chmod +x /PrairieLearn/scripts/init.sh \ 4 | && mkdir /course{,{2..9}} \ 5 | && mkdir -p /workspace_{main,host}_zips \ 6 | && mkdir -p /jobs \ 7 | # Here is a comment in the middle of my command -- `docker run -it --rm` 8 | && /PrairieLearn/scripts/start_postgres.sh \ 9 | && cd /PrairieLearn \ 10 | && make build \ 11 | # Here is a multiline comment in my command 12 | # The parser has to handle this case, and strip out the 13 | # $() chars surrounding 14 | && node apps/prairielearn/dist/server.js --migrate-and-exit \ 15 | && su postgres -c "createuser -s root" \ 16 | && /PrairieLearn/scripts/start_postgres.sh stop \ 17 | && /PrairieLearn/scripts/gen_ssl.sh \ 18 | && git config --global user.email "dev@example.com" \ 19 | && git config --global user.name "Dev User" \ 20 | && git config --global safe.directory '*' 21 | 22 | HEALTHCHECK --interval=5m --timeout=3s \ 23 | CMD curl -f http://localhost/ || exit 1 24 | 25 | CMD ["/PrairieLearn/scripts/init.sh"] -------------------------------------------------------------------------------- /tests/out/formatted.dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/jessfraz/dockfmt/issues/2 2 | # set up PrairieLearn and run migrations to initialize the DB 3 | RUN chmod +x /PrairieLearn/scripts/init.sh \ 4 | && mkdir /course{,{2..9}} \ 5 | && mkdir -p /workspace_{main,host}_zips \ 6 | && mkdir -p /jobs \ 7 | # Here is a comment in the middle of my command -- `docker run -it --rm` 8 | && /PrairieLearn/scripts/start_postgres.sh \ 9 | && cd /PrairieLearn \ 10 | && make build \ 11 | # Here is a multiline comment in my command 12 | # The parser has to handle this case, and strip out the 13 | # $() chars surrounding 14 | && node apps/prairielearn/dist/server.js --migrate-and-exit \ 15 | && su postgres -c "createuser -s root" \ 16 | && /PrairieLearn/scripts/start_postgres.sh stop \ 17 | && /PrairieLearn/scripts/gen_ssl.sh \ 18 | && git config --global user.email "dev@example.com" \ 19 | && git config --global user.name "Dev User" \ 20 | && git config --global safe.directory '*' 21 | 22 | HEALTHCHECK --interval=5m --timeout=3s \ 23 | CMD curl -f http://localhost/ || exit 1 24 | 25 | CMD 
["/PrairieLearn/scripts/init.sh"] 26 | -------------------------------------------------------------------------------- /tests/in/run4.dockerfile: -------------------------------------------------------------------------------- 1 | RUN arch="$(uname -m)" \ 2 | && curl -sfLO "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-${arch}.sh" \ 3 | && chmod +x "Miniforge3-Linux-${arch}.sh" \ 4 | && ./"Miniforge3-Linux-${arch}.sh" -b -p /home/coder/conda \ 5 | # Install conda and mamba hooks for future interactive bash sessions: 6 | && /home/coder/conda/bin/mamba init bash \ 7 | # Activate hooks in the current noninteractive session: 8 | && . "/home/coder/conda/etc/profile.d/conda.sh" \ 9 | && . "/home/coder/conda/etc/profile.d/mamba.sh" \ 10 | && mamba activate \ 11 | # Installing `pygraphviz` with pip would require `build-essentials`, `graphviz`, 12 | # and `graphviz-dev` to be installed at the OS level, which would increase the 13 | # image size. Instead, we install it from Conda, which prebuilds it and also 14 | # automatically installs a Conda-specific `graphviz` dependency. 15 | && mamba install --yes "$(grep pygraphviz /requirements.txt | head -n 1)" \ 16 | && pip install --no-cache-dir -r /requirements.txt \ 17 | && rm "Miniforge3-Linux-${arch}.sh" \ 18 | && mamba clean --all --yes --quiet \ 19 | && pip cache purge -------------------------------------------------------------------------------- /tests/out/run4.dockerfile: -------------------------------------------------------------------------------- 1 | RUN arch="$(uname -m)" \ 2 | && curl -sfLO "https://github.com/conda-forge/miniforge/releases/latest/download/Miniforge3-Linux-${arch}.sh" \ 3 | && chmod +x "Miniforge3-Linux-${arch}.sh" \ 4 | && ./"Miniforge3-Linux-${arch}.sh" -b -p /home/coder/conda \ 5 | # Install conda and mamba hooks for future interactive bash sessions: 6 | && /home/coder/conda/bin/mamba init bash \ 7 | # Activate hooks in the current noninteractive session: 8 | && . "/home/coder/conda/etc/profile.d/conda.sh" \ 9 | && . "/home/coder/conda/etc/profile.d/mamba.sh" \ 10 | && mamba activate \ 11 | # Installing `pygraphviz` with pip would require `build-essentials`, `graphviz`, 12 | # and `graphviz-dev` to be installed at the OS level, which would increase the 13 | # image size. Instead, we install it from Conda, which prebuilds it and also 14 | # automatically installs a Conda-specific `graphviz` dependency. 
15 | && mamba install --yes "$(grep pygraphviz /requirements.txt | head -n 1)" \ 16 | && pip install --no-cache-dir -r /requirements.txt \ 17 | && rm "Miniforge3-Linux-${arch}.sh" \ 18 | && mamba clean --all --yes --quiet \ 19 | && pip cache purge 20 | -------------------------------------------------------------------------------- /js/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@reteps/dockerfmt", 3 | "version": "0.3.6", 4 | "type": "module", 5 | "description": "", 6 | "repository": "git+https://github.com/reteps/dockerfmt/tree/main/js", 7 | "author": "Peter Stenger ", 8 | "license": "MIT", 9 | "engines": { 10 | "node": "^v12.20.0 || ^14.13.0 || >=16.0.0" 11 | }, 12 | "exports": { 13 | ".": { 14 | "browser": { 15 | "types": "./dist/format.d.ts", 16 | "default": "./dist/format.js" 17 | }, 18 | "default": { 19 | "types": "./dist/node.d.ts", 20 | "default": "./dist/node.js" 21 | } 22 | }, 23 | "./format.wasm": "./dist/format.wasm", 24 | "./package.json": "./package.json", 25 | "./wasm_exec": "./dist/wasm_exec.js", 26 | "./wasm_exec.js": "./dist/wasm_exec.js" 27 | }, 28 | "files": [ 29 | "dist" 30 | ], 31 | "scripts": { 32 | "//": "Requires tinygo 0.38.0 or later", 33 | "build": "npm run build-go && npm run build-js", 34 | "build-go": "tinygo build -o format.wasm -target wasm --no-debug", 35 | "build-js": "tsc && cp format.wasm wasm_exec.js dist", 36 | "format": "prettier --write \"**/*.{js,ts,json}\"" 37 | }, 38 | "devDependencies": { 39 | "@types/node": "^22.14.0", 40 | "prettier": "^3.5.3", 41 | "prettier-plugin-pkg": "^0.19.0", 42 | "typescript": "^5.8.3" 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /dockerfmt_test.go: -------------------------------------------------------------------------------- 1 | package main 2 | 3 | import ( 4 | "fmt" 5 | "path/filepath" 6 | "strings" 7 | "testing" 8 | 9 | "github.com/reteps/dockerfmt/lib" 10 | "github.com/stretchr/testify/assert" 11 | ) 12 | 13 | func TestFormatter(t *testing.T) { 14 | // assert equality 15 | assert.Equal(t, 123, 123, "they should be equal") 16 | matchingFiles, err := filepath.Glob("tests/in/*.dockerfile") 17 | if err != nil { 18 | t.Fatalf("Failed to find test files: %v", err) 19 | } 20 | c := &lib.Config{ 21 | IndentSize: 4, 22 | TrailingNewline: true, 23 | SpaceRedirects: false, 24 | } 25 | for _, fileName := range matchingFiles { 26 | t.Run(fileName, func(t *testing.T) { 27 | outFile := strings.Replace(fileName, "in", "out", 1) 28 | originalLines, err := lib.GetFileLines(fileName) 29 | if err != nil { 30 | t.Fatalf("Failed to read file %s: %v", fileName, err) 31 | } 32 | fmt.Printf("Comparing file %s with %s\n", fileName, outFile) 33 | formattedLines := lib.FormatFileLines(originalLines, c) 34 | 35 | // Write outFile to directory 36 | // err = os.WriteFile(outFile, []byte(formattedLines), 0644) 37 | // if err != nil { 38 | // t.Fatalf("Failed to write to file %s: %v", outFile, err) 39 | // } 40 | 41 | // Read outFile 42 | outLines, err := lib.GetFileLines(outFile) 43 | if err != nil { 44 | t.Fatalf("Failed to read file %s: %v", outFile, err) 45 | } 46 | // Compare outLines with formattedLines 47 | assert.Equal(t, strings.Join(outLines, ""), formattedLines, "Files should be equal") 48 | }) 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /tests/in/issue32.dockerfile: -------------------------------------------------------------------------------- 1 | FROM 
debian:bullseye-slim 2 | LABEL maintainer="... <...>" 3 | 4 | COPY sources.list /etc/apt/sources.list 5 | 6 | # Upgrade the system + Install all packages 7 | ARG DEBIAN_FRONTEND=noninteractive 8 | # Install all packages below : 9 | RUN apt-get update && \ 10 | apt-get install --no-install-recommends -y \ 11 | ca-certificates \ 12 | imagemagick \ 13 | php-bcmath \ 14 | php-curl \ 15 | php-db \ 16 | php-fpm \ 17 | php-gd \ 18 | php-imagick \ 19 | php-intl \ 20 | php-ldap \ 21 | php-mail \ 22 | php-mail-mime \ 23 | php-mbstring \ 24 | php-mysql \ 25 | php-redis \ 26 | php-soap \ 27 | php-sqlite3 \ 28 | php-xml \ 29 | php-zip \ 30 | ssmtp \ 31 | # bind9-host iputils-ping lsof iproute2 netcat-openbsd procps strace tcpdump traceroute \ 32 | && \ 33 | # Clean and save space 34 | rm -rf /var/lib/apt/lists/* && \ 35 | # Set timezone 36 | ln -sf /usr/share/zoneinfo/Europe/Berlin /etc/localtime && \ 37 | dpkg-reconfigure tzdata 38 | 39 | WORKDIR /var/www/html 40 | 41 | ADD https://.../check_mk/agents/check_mk_agent.linux /usr/bin/check_mk_agent 42 | COPY php_fpm_pools /usr/lib/check_mk_agent/plugins/ 43 | COPY php_fpm_pools.cfg /etc/check_mk/ 44 | RUN chmod 755 /usr/bin/check_mk_agent /usr/lib/check_mk_agent/plugins/* 45 | 46 | COPY www.conf /etc/php/7.4/fpm/pool.d/ 47 | COPY ssmtp.conf /etc/ssmtp/ssmtp.conf 48 | COPY environment /etc/environment 49 | 50 | RUN mkdir -m 0755 /run/php 51 | 52 | ENV http_proxy ... 53 | ENV https_proxy ... 54 | ENV no_proxy ... 55 | ENV HTTP_PROXY ... 56 | ENV HTTPS_PROXY ... 57 | ENV NO_PROXY ... 58 | 59 | EXPOSE 9000 60 | 61 | CMD ["/usr/sbin/php-fpm7.4", "--nodaemonize", "--fpm-config", "/etc/php/7.4/fpm/php-fpm.conf"] -------------------------------------------------------------------------------- /tests/out/issue32.dockerfile: -------------------------------------------------------------------------------- 1 | FROM debian:bullseye-slim 2 | LABEL maintainer="... <...>" 3 | 4 | COPY sources.list /etc/apt/sources.list 5 | 6 | # Upgrade the system + Install all packages 7 | ARG DEBIAN_FRONTEND=noninteractive 8 | # Install all packages below : 9 | RUN apt-get update \ 10 | && apt-get install --no-install-recommends -y \ 11 | ca-certificates \ 12 | imagemagick \ 13 | php-bcmath \ 14 | php-curl \ 15 | php-db \ 16 | php-fpm \ 17 | php-gd \ 18 | php-imagick \ 19 | php-intl \ 20 | php-ldap \ 21 | php-mail \ 22 | php-mail-mime \ 23 | php-mbstring \ 24 | php-mysql \ 25 | php-redis \ 26 | php-soap \ 27 | php-sqlite3 \ 28 | php-xml \ 29 | php-zip \ 30 | ssmtp \ 31 | # bind9-host iputils-ping lsof iproute2 netcat-openbsd procps strace tcpdump traceroute \ 32 | # Clean and save space 33 | && rm -rf /var/lib/apt/lists/* \ 34 | # Set timezone 35 | && ln -sf /usr/share/zoneinfo/Europe/Berlin /etc/localtime \ 36 | && dpkg-reconfigure tzdata 37 | 38 | WORKDIR /var/www/html 39 | 40 | ADD https://.../check_mk/agents/check_mk_agent.linux /usr/bin/check_mk_agent 41 | COPY php_fpm_pools /usr/lib/check_mk_agent/plugins/ 42 | COPY php_fpm_pools.cfg /etc/check_mk/ 43 | RUN chmod 755 /usr/bin/check_mk_agent /usr/lib/check_mk_agent/plugins/* 44 | 45 | COPY www.conf /etc/php/7.4/fpm/pool.d/ 46 | COPY ssmtp.conf /etc/ssmtp/ssmtp.conf 47 | COPY environment /etc/environment 48 | 49 | RUN mkdir -m 0755 /run/php 50 | 51 | ENV http_proxy=... 52 | ENV https_proxy=... 53 | ENV no_proxy=... 54 | ENV HTTP_PROXY=... 55 | ENV HTTPS_PROXY=... 56 | ENV NO_PROXY=... 
57 | 58 | EXPOSE 9000 59 | 60 | CMD ["/usr/sbin/php-fpm7.4", "--nodaemonize", "--fpm-config", "/etc/php/7.4/fpm/php-fpm.conf"] 61 | -------------------------------------------------------------------------------- /tests/out/label.dockerfile: -------------------------------------------------------------------------------- 1 | # QLC Docker Container 2 | # https://www.qlcplus.org 3 | 4 | # https://github.com/phusion/baseimage-docker/blob/master/Changelog.md 5 | FROM phusion/baseimage:0.11 6 | 7 | LABEL maintainer="REDACTED" 8 | 9 | ARG BUILD_DATE 10 | ARG VCS_REF 11 | ARG BUILD_VERSION 12 | 13 | # Labels. 14 | LABEL org.label-schema.schema-version="1.0" 15 | LABEL org.label-schema.build-date=$BUILD_DATE 16 | LABEL org.label-schema.name="djarbz/qlcplus" 17 | LABEL org.label-schema.description="QLC+ Docker Image with GUI" 18 | LABEL org.label-schema.url="https://www.qlcplus.org" 19 | LABEL org.label-schema.vcs-url="https://github.com/djarbz/qlcplus" 20 | LABEL org.label-schema.vcs-ref=$VCS_REF 21 | LABEL org.label-schema.vendor="DJArbz" 22 | LABEL org.label-schema.version=$BUILD_VERSION 23 | LABEL org.label-schema.docker.cmd="docker run -it --rm --name QLCplus --device /dev/snd -p 9999:80 --volume='/tmp/.X11-unix:/tmp/.X11-unix:rw' --env=DISPLAY=unix${DISPLAY} djarbz/qlcplus" 24 | LABEL org.label-schema.docker.cmd.devel="docker run -it --rm --name QLCplus djarbz/qlcplus:4.11.2 xvfb-run qlcplus" 25 | 26 | VOLUME /QLC 27 | 28 | WORKDIR /QLC 29 | 30 | ENV QLC_DEPENDS="\ 31 | libasound2 \ 32 | libfftw3-double3 \ 33 | libftdi1 \ 34 | libqt4-network \ 35 | libqt4-script \ 36 | libqtcore4 \ 37 | libqtgui4 \ 38 | libusb-0.1-4" 39 | 40 | # XVFB is used to fake an X server for testing and headless mode. 41 | RUN apt-get update \ 42 | && apt-get install -y --no-install-recommends \ 43 | ${QLC_DEPENDS} \ 44 | xvfb \ 45 | && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* 46 | 47 | # https://github.com/mcallegari/qlcplus/releases/tag/QLC+_4.11.2 48 | ARG QLC_VERSION=4.11.2 49 | 50 | ADD https://www.qlcplus.org/downloads/${QLC_VERSION}/qlcplus_${QLC_VERSION}_amd64.deb /opt/qlcplus.deb 51 | 52 | RUN dpkg -i /opt/qlcplus.deb 53 | 54 | # https://www.qlcplus.org/docs/html_en_EN/commandlineparameters.html 55 | CMD ["/usr/bin/qlcplus", "--operate", "--web", "--open /QLC/default_workspace.qxw"] 56 | -------------------------------------------------------------------------------- /tests/out/shell.dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/jessfraz/dockfmt/issues/2 2 | # set up PrairieLearn and run migrations to initialize the DB 3 | RUN chmod +x /PrairieLearn/scripts/init.sh \ 4 | && mkdir /course{,{2..9}} \ 5 | && mkdir -p /workspace_{main,host}_zips \ 6 | && mkdir -p /jobs \ 7 | # Here is a comment in the middle of my command -- `docker run -it --rm` 8 | && /PrairieLearn/scripts/start_postgres.sh \ 9 | && cd /PrairieLearn \ 10 | && make build \ 11 | # Here is a multiline comment in my command 12 | # The parser has to handle this case, and strip out the 13 | # $() chars surrounding 14 | && node apps/prairielearn/dist/server.js --migrate-and-exit \ 15 | && su postgres -c "createuser -s root" \ 16 | && /PrairieLearn/scripts/start_postgres.sh stop \ 17 | && /PrairieLearn/scripts/gen_ssl.sh \ 18 | && git config --global user.email "dev@example.com" \ 19 | && git config --global user.name "Dev User" \ 20 | && git config --global safe.directory '*' 21 | 22 | RUN chmod +x /PrairieLearn/scripts/init.sh \ 23 | && mkdir /course{,{2..9}} \ 24 | && mkdir 
-p /workspace_{main,host}_zips \ 25 | && mkdir -p /jobs \ 26 | # Here is a comment in the middle of my command -- `docker run -it --rm` 27 | && /PrairieLearn/scripts/start_postgres.sh \ 28 | && cd /PrairieLearn \ 29 | && make build \ 30 | # Here is a multiline comment in my command 31 | # The parser has to handle this case, and strip out the 32 | # $() chars surrounding 33 | && node apps/prairielearn/dist/server.js --migrate-and-exit \ 34 | && su postgres -c "createuser -s root" \ 35 | && /PrairieLearn/scripts/start_postgres.sh stop \ 36 | && /PrairieLearn/scripts/gen_ssl.sh \ 37 | && git config --global user.email "dev@example.com" \ 38 | && git config --global user.name "Dev User" \ 39 | && git config --global safe.directory '*' 40 | 41 | HEALTHCHECK --interval=5m --timeout=3s \ 42 | CMD curl -f http://localhost/ || exit 1 43 | CMD /PrairieLearn/scripts/init.sh 44 | -------------------------------------------------------------------------------- /tests/in/run2.dockerfile: -------------------------------------------------------------------------------- 1 | RUN apt-get update \ 2 | # Comment here 3 | && apt-get install -y --no-install-recommends \ 4 | # Another comment here 5 | man-db unminimize \ 6 | # Multiline comment here 7 | # Here 8 | gosu curl git htop less nano unzip vim wget zip && \ 9 | yes | unminimize && \ 10 | apt-get clean && \ 11 | rm -rf /var/lib/apt/lists/* \ 12 | && find /tmp -not -path /tmp -delete 13 | 14 | RUN apt-get update && \ 15 | # Run 'unminimize' to add docs 16 | apt-get install -y --no-install-recommends man-db unminimize \ 17 | && yes | unminimize \ 18 | && apt-get install -y --no-install-recommends \ 19 | # Reverse proxy workaround for PrairieLearn: 20 | nginx \ 21 | gettext \ 22 | gosu \ 23 | fonts-dejavu \ 24 | # Utilities for convenience debugging this container: 25 | less htop vim nano silversearcher-ag zip unzip git cmake curl wget sqlite3 && \ 26 | # Test: 27 | gosu nobody true && \ 28 | # Cleanup: 29 | apt-get clean \ 30 | && rm -rf /var/lib/apt/lists/* \ 31 | && find /tmp -not -path /tmp -delete 32 | 33 | RUN apt-get update && \ 34 | apt-get install -y --no-install-recommends man-db unminimize \ 35 | && yes | unminimize \ 36 | && apt-get install -y --no-install-recommends \ 37 | nginx \ 38 | gettext \ 39 | gosu \ 40 | fonts-dejavu \ 41 | less htop vim nano silversearcher-ag zip unzip git cmake curl wget sqlite3 && \ 42 | gosu nobody true && \ 43 | apt-get clean \ 44 | && rm -rf /var/lib/apt/lists/* \ 45 | && find /tmp -not -path /tmp -delete 46 | 47 | CMD apt-get update && \ 48 | apt-get install -y --no-install-recommends man-db unminimize \ 49 | && yes | unminimize \ 50 | && apt-get install -y --no-install-recommends \ 51 | nginx \ 52 | gettext \ 53 | gosu \ 54 | fonts-dejavu \ 55 | less htop vim nano silversearcher-ag zip unzip git cmake curl wget sqlite3 && \ 56 | gosu nobody true && \ 57 | apt-get clean \ 58 | && rm -rf /var/lib/apt/lists/* \ 59 | && find /tmp -not -path /tmp -delete -------------------------------------------------------------------------------- /tests/in/shell.dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/jessfraz/dockfmt/issues/2 2 | # set up PrairieLearn and run migrations to initialize the DB 3 | RUN chmod +x /PrairieLearn/scripts/init.sh \ 4 | && mkdir /course{,{2..9}} \ 5 | && mkdir -p /workspace_{main,host}_zips \ 6 | && mkdir -p /jobs \ 7 | # Here is a comment in the middle of my command -- `docker run -it --rm` 8 | && 
/PrairieLearn/scripts/start_postgres.sh \ 9 | && cd /PrairieLearn \ 10 | && make build \ 11 | # Here is a multiline comment in my command 12 | # The parser has to handle this case, and strip out the 13 | # $() chars surrounding 14 | && node apps/prairielearn/dist/server.js --migrate-and-exit \ 15 | && su postgres -c "createuser -s root" \ 16 | && /PrairieLearn/scripts/start_postgres.sh stop \ 17 | && /PrairieLearn/scripts/gen_ssl.sh \ 18 | && git config --global user.email "dev@example.com" \ 19 | && git config --global user.name "Dev User" \ 20 | && git config --global safe.directory '*' 21 | 22 | RUN chmod +x /PrairieLearn/scripts/init.sh \ 23 | && mkdir /course{,{2..9}} \ 24 | && mkdir -p /workspace_{main,host}_zips \ 25 | && mkdir -p /jobs && \ 26 | # Here is a comment in the middle of my command -- `docker run -it --rm` 27 | /PrairieLearn/scripts/start_postgres.sh \ 28 | && cd /PrairieLearn && \ 29 | make build && \ 30 | # Here is a multiline comment in my command 31 | # The parser has to handle this case, and strip out the 32 | # $() chars surrounding 33 | node apps/prairielearn/dist/server.js --migrate-and-exit \ 34 | && su postgres -c "createuser -s root" \ 35 | && /PrairieLearn/scripts/start_postgres.sh stop \ 36 | && /PrairieLearn/scripts/gen_ssl.sh \ 37 | && git config --global user.email "dev@example.com" \ 38 | && git config --global user.name "Dev User" \ 39 | && git config --global safe.directory '*' 40 | 41 | healthcheck --interval=5m --timeout=3s \ 42 | CMD curl -f http://localhost/ || exit 1 43 | CMD /PrairieLearn/scripts/init.sh 44 | -------------------------------------------------------------------------------- /tests/in/label.dockerfile: -------------------------------------------------------------------------------- 1 | # QLC Docker Container 2 | # https://www.qlcplus.org 3 | 4 | # https://github.com/phusion/baseimage-docker/blob/master/Changelog.md 5 | FROM phusion/baseimage:0.11 6 | 7 | LABEL maintainer="REDACTED" 8 | 9 | ARG BUILD_DATE 10 | ARG VCS_REF 11 | ARG BUILD_VERSION 12 | 13 | # Labels. 14 | LABEL org.label-schema.schema-version="1.0" 15 | LABEL org.label-schema.build-date=$BUILD_DATE 16 | LABEL org.label-schema.name="djarbz/qlcplus" 17 | LABEL org.label-schema.description="QLC+ Docker Image with GUI" 18 | LABEL org.label-schema.url="https://www.qlcplus.org" 19 | LABEL org.label-schema.vcs-url="https://github.com/djarbz/qlcplus" 20 | LABEL org.label-schema.vcs-ref=$VCS_REF 21 | LABEL org.label-schema.vendor="DJArbz" 22 | LABEL org.label-schema.version=$BUILD_VERSION 23 | LABEL org.label-schema.docker.cmd="docker run -it --rm --name QLCplus --device /dev/snd -p 9999:80 --volume='/tmp/.X11-unix:/tmp/.X11-unix:rw' --env=DISPLAY=unix${DISPLAY} djarbz/qlcplus" 24 | LABEL org.label-schema.docker.cmd.devel="docker run -it --rm --name QLCplus djarbz/qlcplus:4.11.2 xvfb-run qlcplus" 25 | 26 | VOLUME /QLC 27 | 28 | WORKDIR /QLC 29 | 30 | ENV QLC_DEPENDS="\ 31 | libasound2 \ 32 | libfftw3-double3 \ 33 | libftdi1 \ 34 | libqt4-network \ 35 | libqt4-script \ 36 | libqtcore4 \ 37 | libqtgui4 \ 38 | libusb-0.1-4" 39 | 40 | # XVFB is used to fake an X server for testing and headless mode. 
41 | RUN apt-get update \ 42 | && apt-get install -y --no-install-recommends \ 43 | ${QLC_DEPENDS} \ 44 | xvfb \ 45 | && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* 46 | 47 | # https://github.com/mcallegari/qlcplus/releases/tag/QLC+_4.11.2 48 | ARG QLC_VERSION=4.11.2 49 | 50 | ADD https://www.qlcplus.org/downloads/${QLC_VERSION}/qlcplus_${QLC_VERSION}_amd64.deb /opt/qlcplus.deb 51 | 52 | RUN dpkg -i /opt/qlcplus.deb 53 | 54 | # https://www.qlcplus.org/docs/html_en_EN/commandlineparameters.html 55 | CMD ["/usr/bin/qlcplus","--operate","--web","--open /QLC/default_workspace.qxw" ] -------------------------------------------------------------------------------- /tests/out/run2.dockerfile: -------------------------------------------------------------------------------- 1 | RUN apt-get update \ 2 | # Comment here 3 | && apt-get install -y --no-install-recommends \ 4 | # Another comment here 5 | man-db unminimize \ 6 | # Multiline comment here 7 | # Here 8 | gosu curl git htop less nano unzip vim wget zip \ 9 | && yes | unminimize \ 10 | && apt-get clean \ 11 | && rm -rf /var/lib/apt/lists/* \ 12 | && find /tmp -not -path /tmp -delete 13 | 14 | RUN apt-get update \ 15 | # Run 'unminimize' to add docs 16 | && apt-get install -y --no-install-recommends man-db unminimize \ 17 | && yes | unminimize \ 18 | && apt-get install -y --no-install-recommends \ 19 | # Reverse proxy workaround for PrairieLearn: 20 | nginx \ 21 | gettext \ 22 | gosu \ 23 | fonts-dejavu \ 24 | # Utilities for convenience debugging this container: 25 | less htop vim nano silversearcher-ag zip unzip git cmake curl wget sqlite3 \ 26 | # Test: 27 | && gosu nobody true \ 28 | # Cleanup: 29 | && apt-get clean \ 30 | && rm -rf /var/lib/apt/lists/* \ 31 | && find /tmp -not -path /tmp -delete 32 | 33 | RUN apt-get update \ 34 | && apt-get install -y --no-install-recommends man-db unminimize \ 35 | && yes | unminimize \ 36 | && apt-get install -y --no-install-recommends \ 37 | nginx \ 38 | gettext \ 39 | gosu \ 40 | fonts-dejavu \ 41 | less htop vim nano silversearcher-ag zip unzip git cmake curl wget sqlite3 \ 42 | && gosu nobody true \ 43 | && apt-get clean \ 44 | && rm -rf /var/lib/apt/lists/* \ 45 | && find /tmp -not -path /tmp -delete 46 | 47 | CMD apt-get update \ 48 | && apt-get install -y --no-install-recommends man-db unminimize \ 49 | && yes | unminimize \ 50 | && apt-get install -y --no-install-recommends \ 51 | nginx \ 52 | gettext \ 53 | gosu \ 54 | fonts-dejavu \ 55 | less htop vim nano silversearcher-ag zip unzip git cmake curl wget sqlite3 \ 56 | && gosu nobody true \ 57 | && apt-get clean \ 58 | && rm -rf /var/lib/apt/lists/* \ 59 | && find /tmp -not -path /tmp -delete 60 | -------------------------------------------------------------------------------- /js/format.ts: -------------------------------------------------------------------------------- 1 | import './wasm_exec.js' 2 | 3 | export interface FormatOptions { 4 | indent: number 5 | trailingNewline: boolean 6 | spaceRedirects: boolean 7 | } 8 | 9 | export const formatDockerfileContents = async ( 10 | fileContents: string, 11 | options: FormatOptions, 12 | getWasm: () => Promise, 13 | ) => { 14 | const go = new Go() // Defined in wasm_exec.js 15 | const encoder = new TextEncoder() 16 | const decoder = new TextDecoder() 17 | 18 | // get current working directory 19 | const wasmBuffer = await getWasm() 20 | const wasm = await WebAssembly.instantiate(wasmBuffer, go.importObject) 21 | 22 | /** 23 | * Do not await this promise, because it only resolves once the go 
main() 24 | * function has exited. But we need the main function to stay alive to be 25 | * able to call the `parse` and `print` function. 26 | */ 27 | go.run(wasm.instance) 28 | 29 | const { memory, malloc, free, formatBytes } = wasm.instance.exports as { 30 | memory: WebAssembly.Memory 31 | malloc: (size: number) => number 32 | free: (pointer: number) => void 33 | formatBytes: ( 34 | pointer: number, 35 | length: number, 36 | indent: number, 37 | trailingNewline: boolean, 38 | spaceRedirects: boolean, 39 | ) => number 40 | } 41 | 42 | const fileBufferBytes = encoder.encode(fileContents) 43 | const filePointer = malloc(fileBufferBytes.byteLength) 44 | 45 | new Uint8Array(memory.buffer).set(fileBufferBytes, filePointer) 46 | 47 | // Call formatBytes function from WebAssembly 48 | const resultPointer = formatBytes( 49 | filePointer, 50 | fileBufferBytes.byteLength, 51 | options.indent, 52 | options.trailingNewline, 53 | options.spaceRedirects, 54 | ) 55 | 56 | // Decode the result 57 | const resultBytes = new Uint8Array(memory.buffer).subarray(resultPointer) 58 | const end = resultBytes.indexOf(0) 59 | const result = decoder.decode(resultBytes.subarray(0, end)) 60 | free(filePointer) 61 | 62 | return result 63 | } 64 | 65 | export const formatDockerfile = () => { 66 | throw new Error( 67 | '`formatDockerfile` is not implemented in the browser. Use `formatDockerfileContents` instead.', 68 | ) 69 | } 70 | -------------------------------------------------------------------------------- /js/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@reteps/dockerfmt", 3 | "version": "0.3.5", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@reteps/dockerfmt", 9 | "version": "0.3.5", 10 | "license": "MIT", 11 | "devDependencies": { 12 | "@types/node": "^22.14.0", 13 | "prettier": "^3.5.3", 14 | "prettier-plugin-pkg": "^0.19.0", 15 | "typescript": "^5.8.3" 16 | }, 17 | "engines": { 18 | "node": "^v12.20.0 || ^14.13.0 || >=16.0.0" 19 | } 20 | }, 21 | "node_modules/@types/node": { 22 | "version": "22.14.0", 23 | "resolved": "https://registry.npmjs.org/@types/node/-/node-22.14.0.tgz", 24 | "integrity": "sha512-Kmpl+z84ILoG+3T/zQFyAJsU6EPTmOCj8/2+83fSN6djd6I4o7uOuGIH6vq3PrjY5BGitSbFuMN18j3iknubbA==", 25 | "dev": true, 26 | "license": "MIT", 27 | "dependencies": { 28 | "undici-types": "~6.21.0" 29 | } 30 | }, 31 | "node_modules/prettier": { 32 | "version": "3.5.3", 33 | "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.5.3.tgz", 34 | "integrity": "sha512-QQtaxnoDJeAkDvDKWCLiwIXkTgRhwYDEQCghU9Z6q03iyek/rxRh/2lC3HB7P8sWT2xC/y5JDctPLBIGzHKbhw==", 35 | "dev": true, 36 | "license": "MIT", 37 | "bin": { 38 | "prettier": "bin/prettier.cjs" 39 | }, 40 | "engines": { 41 | "node": ">=14" 42 | }, 43 | "funding": { 44 | "url": "https://github.com/prettier/prettier?sponsor=1" 45 | } 46 | }, 47 | "node_modules/prettier-plugin-pkg": { 48 | "version": "0.19.0", 49 | "resolved": "https://registry.npmjs.org/prettier-plugin-pkg/-/prettier-plugin-pkg-0.19.0.tgz", 50 | "integrity": "sha512-wlBvVhAZQ+iOH8/4gWc1SxJbf5++xwKmnFkqHYUsmoQIg6hgdyL1055Z9FOWa6cumqL/QwqdOzY9aH4McdjKyw==", 51 | "dev": true, 52 | "license": "MPL-2.0", 53 | "engines": { 54 | "node": "^14.18.0 || >=16.0.0" 55 | }, 56 | "funding": { 57 | "url": "https://opencollective.com/unts" 58 | }, 59 | "peerDependencies": { 60 | "prettier": "^3.0.3" 61 | } 62 | }, 63 | "node_modules/typescript": { 64 | "version": "5.8.3", 65 | "resolved": 
"https://registry.npmjs.org/typescript/-/typescript-5.8.3.tgz", 66 | "integrity": "sha512-p1diW6TqL9L07nNxvRMM7hMMw4c5XOo/1ibL4aAIGmSAt9slTE1Xgw5KWuof2uTOvCg9BY7ZRi+GaF+7sfgPeQ==", 67 | "dev": true, 68 | "license": "Apache-2.0", 69 | "bin": { 70 | "tsc": "bin/tsc", 71 | "tsserver": "bin/tsserver" 72 | }, 73 | "engines": { 74 | "node": ">=14.17" 75 | } 76 | }, 77 | "node_modules/undici-types": { 78 | "version": "6.21.0", 79 | "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz", 80 | "integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==", 81 | "dev": true, 82 | "license": "MIT" 83 | } 84 | } 85 | } 86 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # dockerfmt 2 | 3 | Dockerfile formatter, and a modern version of [dockfmt](https://github.com/jessfraz/dockfmt). Built on top of the internal [buildkit](https://github.com/moby/buildkit) parser. 4 | 5 | ## Installation 6 | 7 | ### Binaries 8 | 9 | Binaries are available from the [releases](https://github.com/reteps/dockerfmt/releases) page. 10 | 11 | ### go install 12 | 13 | ```bash 14 | go install github.com/reteps/dockerfmt@latest 15 | ``` 16 | 17 | ### docker 18 | 19 | ```bash 20 | docker run --rm -v $(pwd):/pwd ghcr.io/reteps/dockerfmt:latest /pwd/tests/in/run2.dockerfile 21 | ``` 22 | 23 | ## Usage 24 | 25 | ```output 26 | A updated version of the dockfmt. Uses the dockerfile parser from moby/buildkit and the shell formatter from mvdan/sh. 27 | 28 | Usage: 29 | dockerfmt [Dockerfile...] [flags] 30 | dockerfmt [command] 31 | 32 | Available Commands: 33 | completion Generate the autocompletion script for the specified shell 34 | help Help about any command 35 | version Print the version number of dockerfmt 36 | 37 | Flags: 38 | -c, --check Check if the file(s) are formatted 39 | -h, --help help for dockerfmt 40 | -i, --indent uint Number of spaces to use for indentation (default 4) 41 | -n, --newline End the file with a trailing newline 42 | -s, --space-redirects Redirect operators will be followed by a space 43 | -w, --write Write the formatted output back to the file(s) 44 | 45 | Use "dockerfmt [command] --help" for more information about a command. 46 | ``` 47 | 48 | ## Pre-commit 49 | 50 | You can add the following entry to your `.pre-commit-config.yaml` file to use 51 | `dockerfmt` as a pre-commit hook: 52 | 53 | ```yaml 54 | repos: 55 | - repo: https://github.com/reteps/dockerfmt 56 | # run `pre-commit autoupdate` to pin the version 57 | rev: main 58 | hooks: 59 | - id: dockerfmt 60 | args: 61 | # optional: add additional arguments here 62 | - --indent=4 63 | - --write 64 | ``` 65 | 66 | ## Limitations 67 | 68 | - The `RUN` parser currently doesn't support grouping or semicolons in commands. Adding semicolon support is a non-trivial task. 69 | 70 | - No line wrapping is performed for long JSON commands 71 | - The `# escape=X` directive is not supported 72 | 73 | Contributions are welcome! 74 | 75 | ## Issues 76 | 77 | - This is not production software until the `1.0.0` release, please treat it as such. 78 | - Please file issues for any bugs or feature requests! 79 | 80 | ## Features 81 | 82 | - Format `RUN` steps with 83 | - Support for basic heredocs: 84 | 85 | ```dockerfile 86 | RUN < { 9 | // Map multiple JavaScript environments to a single common API, 10 | // preferring web standards over Node.js API. 
11 | // 12 | // Environments considered: 13 | // - Browsers 14 | // - Node.js 15 | // - Electron 16 | // - Parcel 17 | 18 | if (typeof global !== "undefined") { 19 | // global already exists 20 | } else if (typeof window !== "undefined") { 21 | window.global = window; 22 | } else if (typeof self !== "undefined") { 23 | self.global = self; 24 | } else { 25 | throw new Error("cannot export Go (neither global, window nor self is defined)"); 26 | } 27 | 28 | if (!global.require && typeof require !== "undefined") { 29 | global.require = require; 30 | } 31 | 32 | if (!global.fs && global.require) { 33 | global.fs = require("node:fs"); 34 | } 35 | 36 | const enosys = () => { 37 | const err = new Error("not implemented"); 38 | err.code = "ENOSYS"; 39 | return err; 40 | }; 41 | 42 | if (!global.fs) { 43 | let outputBuf = ""; 44 | global.fs = { 45 | constants: { O_WRONLY: -1, O_RDWR: -1, O_CREAT: -1, O_TRUNC: -1, O_APPEND: -1, O_EXCL: -1 }, // unused 46 | writeSync(fd, buf) { 47 | outputBuf += decoder.decode(buf); 48 | const nl = outputBuf.lastIndexOf("\n"); 49 | if (nl != -1) { 50 | console.log(outputBuf.substr(0, nl)); 51 | outputBuf = outputBuf.substr(nl + 1); 52 | } 53 | return buf.length; 54 | }, 55 | write(fd, buf, offset, length, position, callback) { 56 | if (offset !== 0 || length !== buf.length || position !== null) { 57 | callback(enosys()); 58 | return; 59 | } 60 | const n = this.writeSync(fd, buf); 61 | callback(null, n); 62 | }, 63 | chmod(path, mode, callback) { callback(enosys()); }, 64 | chown(path, uid, gid, callback) { callback(enosys()); }, 65 | close(fd, callback) { callback(enosys()); }, 66 | fchmod(fd, mode, callback) { callback(enosys()); }, 67 | fchown(fd, uid, gid, callback) { callback(enosys()); }, 68 | fstat(fd, callback) { callback(enosys()); }, 69 | fsync(fd, callback) { callback(null); }, 70 | ftruncate(fd, length, callback) { callback(enosys()); }, 71 | lchown(path, uid, gid, callback) { callback(enosys()); }, 72 | link(path, link, callback) { callback(enosys()); }, 73 | lstat(path, callback) { callback(enosys()); }, 74 | mkdir(path, perm, callback) { callback(enosys()); }, 75 | open(path, flags, mode, callback) { callback(enosys()); }, 76 | read(fd, buffer, offset, length, position, callback) { callback(enosys()); }, 77 | readdir(path, callback) { callback(enosys()); }, 78 | readlink(path, callback) { callback(enosys()); }, 79 | rename(from, to, callback) { callback(enosys()); }, 80 | rmdir(path, callback) { callback(enosys()); }, 81 | stat(path, callback) { callback(enosys()); }, 82 | symlink(path, link, callback) { callback(enosys()); }, 83 | truncate(path, length, callback) { callback(enosys()); }, 84 | unlink(path, callback) { callback(enosys()); }, 85 | utimes(path, atime, mtime, callback) { callback(enosys()); }, 86 | }; 87 | } 88 | 89 | if (!global.process) { 90 | global.process = { 91 | getuid() { return -1; }, 92 | getgid() { return -1; }, 93 | geteuid() { return -1; }, 94 | getegid() { return -1; }, 95 | getgroups() { throw enosys(); }, 96 | pid: -1, 97 | ppid: -1, 98 | umask() { throw enosys(); }, 99 | cwd() { throw enosys(); }, 100 | chdir() { throw enosys(); }, 101 | } 102 | } 103 | 104 | if (!global.crypto) { 105 | const nodeCrypto = require("node:crypto"); 106 | global.crypto = { 107 | getRandomValues(b) { 108 | nodeCrypto.randomFillSync(b); 109 | }, 110 | }; 111 | } 112 | 113 | if (!global.performance) { 114 | global.performance = { 115 | now() { 116 | const [sec, nsec] = process.hrtime(); 117 | return sec * 1000 + nsec / 1000000; 118 | }, 119 | }; 
120 | } 121 | 122 | if (!global.TextEncoder) { 123 | global.TextEncoder = require("node:util").TextEncoder; 124 | } 125 | 126 | if (!global.TextDecoder) { 127 | global.TextDecoder = require("node:util").TextDecoder; 128 | } 129 | 130 | // End of polyfills for common API. 131 | 132 | const encoder = new TextEncoder("utf-8"); 133 | const decoder = new TextDecoder("utf-8"); 134 | let reinterpretBuf = new DataView(new ArrayBuffer(8)); 135 | var logLine = []; 136 | const wasmExit = {}; // thrown to exit via proc_exit (not an error) 137 | 138 | global.Go = class { 139 | constructor() { 140 | this._callbackTimeouts = new Map(); 141 | this._nextCallbackTimeoutID = 1; 142 | 143 | const mem = () => { 144 | // The buffer may change when requesting more memory. 145 | return new DataView(this._inst.exports.memory.buffer); 146 | } 147 | 148 | const unboxValue = (v_ref) => { 149 | reinterpretBuf.setBigInt64(0, v_ref, true); 150 | const f = reinterpretBuf.getFloat64(0, true); 151 | if (f === 0) { 152 | return undefined; 153 | } 154 | if (!isNaN(f)) { 155 | return f; 156 | } 157 | 158 | const id = v_ref & 0xffffffffn; 159 | return this._values[id]; 160 | } 161 | 162 | 163 | const loadValue = (addr) => { 164 | let v_ref = mem().getBigUint64(addr, true); 165 | return unboxValue(v_ref); 166 | } 167 | 168 | const boxValue = (v) => { 169 | const nanHead = 0x7FF80000n; 170 | 171 | if (typeof v === "number") { 172 | if (isNaN(v)) { 173 | return nanHead << 32n; 174 | } 175 | if (v === 0) { 176 | return (nanHead << 32n) | 1n; 177 | } 178 | reinterpretBuf.setFloat64(0, v, true); 179 | return reinterpretBuf.getBigInt64(0, true); 180 | } 181 | 182 | switch (v) { 183 | case undefined: 184 | return 0n; 185 | case null: 186 | return (nanHead << 32n) | 2n; 187 | case true: 188 | return (nanHead << 32n) | 3n; 189 | case false: 190 | return (nanHead << 32n) | 4n; 191 | } 192 | 193 | let id = this._ids.get(v); 194 | if (id === undefined) { 195 | id = this._idPool.pop(); 196 | if (id === undefined) { 197 | id = BigInt(this._values.length); 198 | } 199 | this._values[id] = v; 200 | this._goRefCounts[id] = 0; 201 | this._ids.set(v, id); 202 | } 203 | this._goRefCounts[id]++; 204 | let typeFlag = 1n; 205 | switch (typeof v) { 206 | case "string": 207 | typeFlag = 2n; 208 | break; 209 | case "symbol": 210 | typeFlag = 3n; 211 | break; 212 | case "function": 213 | typeFlag = 4n; 214 | break; 215 | } 216 | return id | ((nanHead | typeFlag) << 32n); 217 | } 218 | 219 | const storeValue = (addr, v) => { 220 | let v_ref = boxValue(v); 221 | mem().setBigUint64(addr, v_ref, true); 222 | } 223 | 224 | const loadSlice = (array, len, cap) => { 225 | return new Uint8Array(this._inst.exports.memory.buffer, array, len); 226 | } 227 | 228 | const loadSliceOfValues = (array, len, cap) => { 229 | const a = new Array(len); 230 | for (let i = 0; i < len; i++) { 231 | a[i] = loadValue(array + i * 8); 232 | } 233 | return a; 234 | } 235 | 236 | const loadString = (ptr, len) => { 237 | return decoder.decode(new DataView(this._inst.exports.memory.buffer, ptr, len)); 238 | } 239 | 240 | const timeOrigin = Date.now() - performance.now(); 241 | this.importObject = { 242 | wasi_snapshot_preview1: { 243 | // https://github.com/WebAssembly/WASI/blob/main/phases/snapshot/docs.md#fd_write 244 | fd_write: function(fd, iovs_ptr, iovs_len, nwritten_ptr) { 245 | let nwritten = 0; 246 | if (fd == 1) { 247 | for (let iovs_i=0; iovs_i 0, // dummy 273 | fd_fdstat_get: () => 0, // dummy 274 | fd_seek: () => 0, // dummy 275 | proc_exit: (code) => { 276 | this.exited = 
true; 277 | this.exitCode = code; 278 | this._resolveExitPromise(); 279 | throw wasmExit; 280 | }, 281 | random_get: (bufPtr, bufLen) => { 282 | crypto.getRandomValues(loadSlice(bufPtr, bufLen)); 283 | return 0; 284 | }, 285 | }, 286 | gojs: { 287 | // func ticks() float64 288 | "runtime.ticks": () => { 289 | return timeOrigin + performance.now(); 290 | }, 291 | 292 | // func sleepTicks(timeout float64) 293 | "runtime.sleepTicks": (timeout) => { 294 | // Do not sleep, only reactivate scheduler after the given timeout. 295 | setTimeout(() => { 296 | if (this.exited) return; 297 | try { 298 | this._inst.exports.go_scheduler(); 299 | } catch (e) { 300 | if (e !== wasmExit) throw e; 301 | } 302 | }, timeout); 303 | }, 304 | 305 | // func finalizeRef(v ref) 306 | "syscall/js.finalizeRef": (v_ref) => { 307 | // Note: TinyGo does not support finalizers so this is only called 308 | // for one specific case, by js.go:jsString. and can/might leak memory. 309 | const id = v_ref & 0xffffffffn; 310 | if (this._goRefCounts?.[id] !== undefined) { 311 | this._goRefCounts[id]--; 312 | if (this._goRefCounts[id] === 0) { 313 | const v = this._values[id]; 314 | this._values[id] = null; 315 | this._ids.delete(v); 316 | this._idPool.push(id); 317 | } 318 | } else { 319 | console.error("syscall/js.finalizeRef: unknown id", id); 320 | } 321 | }, 322 | 323 | // func stringVal(value string) ref 324 | "syscall/js.stringVal": (value_ptr, value_len) => { 325 | value_ptr >>>= 0; 326 | const s = loadString(value_ptr, value_len); 327 | return boxValue(s); 328 | }, 329 | 330 | // func valueGet(v ref, p string) ref 331 | "syscall/js.valueGet": (v_ref, p_ptr, p_len) => { 332 | let prop = loadString(p_ptr, p_len); 333 | let v = unboxValue(v_ref); 334 | let result = Reflect.get(v, prop); 335 | return boxValue(result); 336 | }, 337 | 338 | // func valueSet(v ref, p string, x ref) 339 | "syscall/js.valueSet": (v_ref, p_ptr, p_len, x_ref) => { 340 | const v = unboxValue(v_ref); 341 | const p = loadString(p_ptr, p_len); 342 | const x = unboxValue(x_ref); 343 | Reflect.set(v, p, x); 344 | }, 345 | 346 | // func valueDelete(v ref, p string) 347 | "syscall/js.valueDelete": (v_ref, p_ptr, p_len) => { 348 | const v = unboxValue(v_ref); 349 | const p = loadString(p_ptr, p_len); 350 | Reflect.deleteProperty(v, p); 351 | }, 352 | 353 | // func valueIndex(v ref, i int) ref 354 | "syscall/js.valueIndex": (v_ref, i) => { 355 | return boxValue(Reflect.get(unboxValue(v_ref), i)); 356 | }, 357 | 358 | // valueSetIndex(v ref, i int, x ref) 359 | "syscall/js.valueSetIndex": (v_ref, i, x_ref) => { 360 | Reflect.set(unboxValue(v_ref), i, unboxValue(x_ref)); 361 | }, 362 | 363 | // func valueCall(v ref, m string, args []ref) (ref, bool) 364 | "syscall/js.valueCall": (ret_addr, v_ref, m_ptr, m_len, args_ptr, args_len, args_cap) => { 365 | const v = unboxValue(v_ref); 366 | const name = loadString(m_ptr, m_len); 367 | const args = loadSliceOfValues(args_ptr, args_len, args_cap); 368 | try { 369 | const m = Reflect.get(v, name); 370 | storeValue(ret_addr, Reflect.apply(m, v, args)); 371 | mem().setUint8(ret_addr + 8, 1); 372 | } catch (err) { 373 | storeValue(ret_addr, err); 374 | mem().setUint8(ret_addr + 8, 0); 375 | } 376 | }, 377 | 378 | // func valueInvoke(v ref, args []ref) (ref, bool) 379 | "syscall/js.valueInvoke": (ret_addr, v_ref, args_ptr, args_len, args_cap) => { 380 | try { 381 | const v = unboxValue(v_ref); 382 | const args = loadSliceOfValues(args_ptr, args_len, args_cap); 383 | storeValue(ret_addr, Reflect.apply(v, undefined, args)); 
384 | mem().setUint8(ret_addr + 8, 1); 385 | } catch (err) { 386 | storeValue(ret_addr, err); 387 | mem().setUint8(ret_addr + 8, 0); 388 | } 389 | }, 390 | 391 | // func valueNew(v ref, args []ref) (ref, bool) 392 | "syscall/js.valueNew": (ret_addr, v_ref, args_ptr, args_len, args_cap) => { 393 | const v = unboxValue(v_ref); 394 | const args = loadSliceOfValues(args_ptr, args_len, args_cap); 395 | try { 396 | storeValue(ret_addr, Reflect.construct(v, args)); 397 | mem().setUint8(ret_addr + 8, 1); 398 | } catch (err) { 399 | storeValue(ret_addr, err); 400 | mem().setUint8(ret_addr+ 8, 0); 401 | } 402 | }, 403 | 404 | // func valueLength(v ref) int 405 | "syscall/js.valueLength": (v_ref) => { 406 | return unboxValue(v_ref).length; 407 | }, 408 | 409 | // valuePrepareString(v ref) (ref, int) 410 | "syscall/js.valuePrepareString": (ret_addr, v_ref) => { 411 | const s = String(unboxValue(v_ref)); 412 | const str = encoder.encode(s); 413 | storeValue(ret_addr, str); 414 | mem().setInt32(ret_addr + 8, str.length, true); 415 | }, 416 | 417 | // valueLoadString(v ref, b []byte) 418 | "syscall/js.valueLoadString": (v_ref, slice_ptr, slice_len, slice_cap) => { 419 | const str = unboxValue(v_ref); 420 | loadSlice(slice_ptr, slice_len, slice_cap).set(str); 421 | }, 422 | 423 | // func valueInstanceOf(v ref, t ref) bool 424 | "syscall/js.valueInstanceOf": (v_ref, t_ref) => { 425 | return unboxValue(v_ref) instanceof unboxValue(t_ref); 426 | }, 427 | 428 | // func copyBytesToGo(dst []byte, src ref) (int, bool) 429 | "syscall/js.copyBytesToGo": (ret_addr, dest_addr, dest_len, dest_cap, src_ref) => { 430 | let num_bytes_copied_addr = ret_addr; 431 | let returned_status_addr = ret_addr + 4; // Address of returned boolean status variable 432 | 433 | const dst = loadSlice(dest_addr, dest_len); 434 | const src = unboxValue(src_ref); 435 | if (!(src instanceof Uint8Array || src instanceof Uint8ClampedArray)) { 436 | mem().setUint8(returned_status_addr, 0); // Return "not ok" status 437 | return; 438 | } 439 | const toCopy = src.subarray(0, dst.length); 440 | dst.set(toCopy); 441 | mem().setUint32(num_bytes_copied_addr, toCopy.length, true); 442 | mem().setUint8(returned_status_addr, 1); // Return "ok" status 443 | }, 444 | 445 | // copyBytesToJS(dst ref, src []byte) (int, bool) 446 | // Originally copied from upstream Go project, then modified: 447 | // https://github.com/golang/go/blob/3f995c3f3b43033013013e6c7ccc93a9b1411ca9/misc/wasm/wasm_exec.js#L404-L416 448 | "syscall/js.copyBytesToJS": (ret_addr, dst_ref, src_addr, src_len, src_cap) => { 449 | let num_bytes_copied_addr = ret_addr; 450 | let returned_status_addr = ret_addr + 4; // Address of returned boolean status variable 451 | 452 | const dst = unboxValue(dst_ref); 453 | const src = loadSlice(src_addr, src_len); 454 | if (!(dst instanceof Uint8Array || dst instanceof Uint8ClampedArray)) { 455 | mem().setUint8(returned_status_addr, 0); // Return "not ok" status 456 | return; 457 | } 458 | const toCopy = src.subarray(0, dst.length); 459 | dst.set(toCopy); 460 | mem().setUint32(num_bytes_copied_addr, toCopy.length, true); 461 | mem().setUint8(returned_status_addr, 1); // Return "ok" status 462 | }, 463 | } 464 | }; 465 | 466 | // Go 1.20 uses 'env'. Go 1.21 uses 'gojs'. 467 | // For compatibility, we use both as long as Go 1.20 is supported. 
468 | this.importObject.env = this.importObject.gojs; 469 | } 470 | 471 | async run(instance) { 472 | this._inst = instance; 473 | this._values = [ // JS values that Go currently has references to, indexed by reference id 474 | NaN, 475 | 0, 476 | null, 477 | true, 478 | false, 479 | global, 480 | this, 481 | ]; 482 | this._goRefCounts = []; // number of references that Go has to a JS value, indexed by reference id 483 | this._ids = new Map(); // mapping from JS values to reference ids 484 | this._idPool = []; // unused ids that have been garbage collected 485 | this.exited = false; // whether the Go program has exited 486 | this.exitCode = 0; 487 | 488 | if (this._inst.exports._start) { 489 | let exitPromise = new Promise((resolve, reject) => { 490 | this._resolveExitPromise = resolve; 491 | }); 492 | 493 | // Run program, but catch the wasmExit exception that's thrown 494 | // to return back here. 495 | try { 496 | this._inst.exports._start(); 497 | } catch (e) { 498 | if (e !== wasmExit) throw e; 499 | } 500 | 501 | await exitPromise; 502 | return this.exitCode; 503 | } else { 504 | this._inst.exports._initialize(); 505 | } 506 | } 507 | 508 | _resume() { 509 | if (this.exited) { 510 | throw new Error("Go program has already exited"); 511 | } 512 | try { 513 | this._inst.exports.resume(); 514 | } catch (e) { 515 | if (e !== wasmExit) throw e; 516 | } 517 | if (this.exited) { 518 | this._resolveExitPromise(); 519 | } 520 | } 521 | 522 | _makeFuncWrapper(id) { 523 | const go = this; 524 | return function () { 525 | const event = { id: id, this: this, args: arguments }; 526 | go._pendingEvent = event; 527 | go._resume(); 528 | return event.result; 529 | }; 530 | } 531 | } 532 | })(); 533 | -------------------------------------------------------------------------------- /lib/format.go: -------------------------------------------------------------------------------- 1 | package lib 2 | 3 | import ( 4 | "bytes" 5 | "encoding/json" 6 | "fmt" 7 | "io" 8 | "log" 9 | "os" 10 | "regexp" 11 | "slices" 12 | "strings" 13 | 14 | "github.com/google/shlex" 15 | "github.com/moby/buildkit/frontend/dockerfile/command" 16 | "github.com/moby/buildkit/frontend/dockerfile/parser" 17 | "mvdan.cc/sh/v3/syntax" 18 | ) 19 | 20 | type ExtendedNode struct { 21 | *parser.Node 22 | Children []*ExtendedNode 23 | Next *ExtendedNode 24 | OriginalMultiline string 25 | } 26 | 27 | type ParseState struct { 28 | CurrentLine int 29 | Output string 30 | // Needed to pull in comments 31 | AllOriginalLines []string 32 | Config *Config 33 | } 34 | 35 | type Config struct { 36 | IndentSize uint 37 | TrailingNewline bool 38 | SpaceRedirects bool 39 | } 40 | 41 | func FormatNode(ast *ExtendedNode, c *Config) (string, bool) { 42 | nodeName := strings.ToLower(ast.Node.Value) 43 | dispatch := map[string]func(*ExtendedNode, *Config) string{ 44 | command.Add: formatSpaceSeparated, 45 | command.Arg: formatBasic, 46 | command.Cmd: formatCmd, 47 | command.Copy: formatSpaceSeparated, 48 | command.Entrypoint: formatEntrypoint, 49 | command.Env: formatEnv, 50 | command.Expose: formatSpaceSeparated, 51 | command.From: formatSpaceSeparated, 52 | command.Healthcheck: formatBasic, 53 | command.Label: formatBasic, // TODO: order labels? 
54 | command.Maintainer: formatMaintainer, 55 | command.Onbuild: FormatOnBuild, 56 | command.Run: formatRun, 57 | command.Shell: formatCmd, 58 | command.StopSignal: formatBasic, 59 | command.User: formatBasic, 60 | command.Volume: formatBasic, 61 | command.Workdir: formatSpaceSeparated, 62 | } 63 | 64 | fmtFunc := dispatch[nodeName] 65 | if fmtFunc == nil { 66 | return "", false 67 | // log.Fatalf("Unknown command: %s %s\n", nodeName, ast.OriginalMultiline) 68 | } 69 | return fmtFunc(ast, c), true 70 | } 71 | 72 | func (df *ParseState) processNode(ast *ExtendedNode) { 73 | 74 | // We don't want to process nodes that don't have a start or end line. 75 | if ast.Node.StartLine == 0 || ast.Node.EndLine == 0 { 76 | return 77 | } 78 | 79 | // check if we are on the correct line, 80 | // otherwise get the comments we are missing 81 | if df.CurrentLine != ast.StartLine { 82 | df.Output += FormatComments(df.AllOriginalLines[df.CurrentLine : ast.StartLine-1]) 83 | df.CurrentLine = ast.StartLine 84 | } 85 | // if df.Output != "" { 86 | // // If the previous line isn't a comment or newline, add a newline 87 | // lastTwoChars := df.Output[len(df.Output)-2 : len(df.Output)] 88 | // lastNonTrailingNewline := strings.LastIndex(strings.TrimRight(df.Output, "\n"), "\n") 89 | // if lastTwoChars != "\n\n" && df.Output[lastNonTrailingNewline+1] != '#' { 90 | // df.Output += "\n" 91 | // } 92 | // } 93 | 94 | output, ok := FormatNode(ast, df.Config) 95 | if ok { 96 | df.Output += output 97 | df.CurrentLine = ast.EndLine 98 | } 99 | // fmt.Printf("CurrentLine: %d, %d\n", df.CurrentLine, ast.EndLine) 100 | // fmt.Printf("Unknown command: %s %s\n", nodeName, ast.OriginalMultiline) 101 | 102 | for _, child := range ast.Children { 103 | df.processNode(child) 104 | } 105 | 106 | // fmt.Printf("CurrentLine2: %d, %d\n", df.CurrentLine, ast.EndLine) 107 | 108 | if ast.Node.Next != nil { 109 | df.processNode(ast.Next) 110 | } 111 | } 112 | 113 | func FormatOnBuild(n *ExtendedNode, c *Config) string { 114 | if len(n.Node.Next.Children) == 1 { 115 | // fmt.Printf("Onbuild: %s\n", n.Node.Next.Children[0].Value) 116 | output, ok := FormatNode(n.Next.Children[0], c) 117 | if ok { 118 | return strings.ToUpper(n.Node.Value) + " " + output 119 | } 120 | } 121 | 122 | return n.OriginalMultiline 123 | } 124 | 125 | func FormatFileLines(fileLines []string, c *Config) string { 126 | result, err := parser.Parse(strings.NewReader(strings.Join(fileLines, ""))) 127 | if err != nil { 128 | log.Printf("%s\n", strings.Join(fileLines, "")) 129 | log.Fatalf("Error parsing file: %v", err) 130 | } 131 | 132 | parseState := &ParseState{ 133 | CurrentLine: 0, 134 | Output: "", 135 | AllOriginalLines: fileLines, 136 | } 137 | rootNode := BuildExtendedNode(result.AST, fileLines) 138 | parseState.Config = c 139 | parseState.processNode(rootNode) 140 | 141 | // After all directives are processed, we need to check if we have any trailing comments to add. 
142 | if parseState.CurrentLine < len(parseState.AllOriginalLines) { 143 | // Add the rest of the file 144 | parseState.Output += FormatComments(parseState.AllOriginalLines[parseState.CurrentLine:]) 145 | } 146 | 147 | parseState.Output = strings.TrimRight(parseState.Output, "\n") 148 | // Ensure the output ends with a newline if requested 149 | if c.TrailingNewline { 150 | parseState.Output += "\n" 151 | } 152 | return parseState.Output 153 | } 154 | 155 | func BuildExtendedNode(n *parser.Node, fileLines []string) *ExtendedNode { 156 | // Build an extended node from the parser node 157 | // This is used to add the original multiline string to the node 158 | // and to add the original line numbers 159 | 160 | if n == nil { 161 | return nil 162 | } 163 | 164 | // Create the extended node with the current parser node 165 | en := &ExtendedNode{ 166 | Node: n, 167 | Next: nil, 168 | Children: nil, 169 | OriginalMultiline: "", // Default to empty string 170 | } 171 | 172 | // If we have valid start and end lines, construct the multiline representation 173 | if n.StartLine > 0 && n.EndLine > 0 { 174 | // Subtract 1 from StartLine because fileLines is 0-indexed while StartLine is 1-indexed 175 | for i := n.StartLine - 1; i < n.EndLine; i++ { 176 | en.OriginalMultiline += fileLines[i] 177 | } 178 | } 179 | 180 | // Process all children recursively 181 | if len(n.Children) > 0 { 182 | childrenNodes := make([]*ExtendedNode, 0, len(n.Children)) 183 | for _, child := range n.Children { 184 | extChild := BuildExtendedNode(child, fileLines) 185 | if extChild != nil { 186 | childrenNodes = append(childrenNodes, extChild) 187 | } 188 | } 189 | // Replace the children with the processed ones 190 | en.Children = childrenNodes 191 | } 192 | 193 | // Process the next node recursively 194 | if n.Next != nil { 195 | extNext := BuildExtendedNode(n.Next, fileLines) 196 | if extNext != nil { 197 | en.Next = extNext 198 | } 199 | } 200 | 201 | return en 202 | } 203 | 204 | func formatEnv(n *ExtendedNode, c *Config) string { 205 | // Handle missing arguments safely 206 | if n.Next == nil { 207 | return strings.ToUpper(n.Node.Value) 208 | } 209 | 210 | // Only the legacy format will have an empty 3rd child 211 | if n.Next.Next.Next.Value == "" { 212 | return strings.ToUpper(n.Node.Value) + " " + n.Next.Node.Value + "=" + n.Next.Next.Node.Value + "\n" 213 | } 214 | 215 | // Otherwise, we have a valid env command; fall back to original if parsing fails 216 | originalTrimmed := strings.TrimLeft(n.OriginalMultiline, " \t") 217 | parts := regexp.MustCompile("[ \t]").Split(originalTrimmed, 2) 218 | if len(parts) < 2 { 219 | return n.OriginalMultiline 220 | } 221 | content := StripWhitespace(parts[1], true) 222 | // Indent all lines with indentSize spaces 223 | re := regexp.MustCompile("(?m)^ *") 224 | content = strings.Trim(re.ReplaceAllString(content, strings.Repeat(" ", int(c.IndentSize))), " ") 225 | return strings.ToUpper(n.Value) + " " + content 226 | } 227 | 228 | func formatShell(content string, hereDoc bool, c *Config) string { 229 | // Semicolons require special handling so we don't break the command 230 | // TODO: support semicolons in commands 231 | 232 | // check for [^\;] 233 | if regexp.MustCompile(`[^\\];`).MatchString(content) { 234 | return content 235 | } 236 | // Grouped expressions aren't formatted well 237 | // See: https://github.com/mvdan/sh/issues/1148 238 | if strings.Contains(content, "{ \\") { 239 | return content 240 | } 241 | 242 | if !hereDoc { 243 | // Here lies some cursed magic. Be careful. 
244 | 245 | // Replace comments with a subshell evaluation -- they won't be run so we can do this. 246 | content = StripWhitespace(content, true) 247 | lineComment := regexp.MustCompile(`(\n\s*)(#.*)`) 248 | lines := strings.SplitAfter(content, "\n") 249 | for i := range lines { 250 | lineTrim := strings.TrimLeft(lines[i], " \t") 251 | if len(lineTrim) >= 1 && lineTrim[0] == '#' { 252 | lines[i] = strings.ReplaceAll(lines[i], "`", "×") 253 | } 254 | } 255 | content = strings.Join(lines, "") 256 | 257 | content = lineComment.ReplaceAllString(content, "$1`$2#`\\") 258 | // fmt.Printf("Content-1: %s\n", content) 259 | 260 | /* 261 | ``` 262 | foo \ 263 | `#comment#`\ 264 | && bar 265 | ``` 266 | 267 | ``` 268 | foo && \ 269 | `#comment#` \ 270 | bar 271 | ``` 272 | */ 273 | 274 | // The (.[^\\]) prevents an edge case with '&& \'. See tests/in/andissue.dockerfile 275 | commentContinuation := regexp.MustCompile(`(\\(?:\s*` + "`#.*#`" + `\\){1,}\s*)&&(.[^\\])`) 276 | content = commentContinuation.ReplaceAllString(content, "&&$1$2") 277 | 278 | // fmt.Printf("Content0: %s\n", content) 279 | lines = strings.SplitAfter(content, "\n") 280 | /** 281 | if the next line is not a comment, and we didn't start with a continuation, don't add the `&&`. 282 | */ 283 | inContinuation := false 284 | for i := range lines { 285 | lineTrim := strings.Trim(lines[i], " \t\\\n") 286 | // fmt.Printf("LineTrim: %s\n", lineTrim) 287 | nextLine := "" 288 | isComment := false 289 | nextLineIsComment := false 290 | if i+1 < len(lines) { 291 | nextLine = strings.Trim(lines[i+1], " \t\\\n") 292 | } 293 | if len(nextLine) >= 2 && nextLine[:2] == "`#" { 294 | nextLineIsComment = true 295 | } 296 | if len(lineTrim) >= 2 && lineTrim[:2] == "`#" { 297 | isComment = true 298 | } 299 | 300 | // fmt.Printf("isComment: %v, nextLineIsComment: %v, inContinuation: %v\n", isComment, nextLineIsComment, inContinuation) 301 | if isComment && (inContinuation || nextLineIsComment) { 302 | lines[i] = strings.Replace(lines[i], "#`\\", "#`&&\\", 1) 303 | } 304 | 305 | if len(lineTrim) >= 2 && !isComment && lineTrim[len(lineTrim)-2:] == "&&" { 306 | inContinuation = true 307 | } else if !isComment { 308 | inContinuation = false 309 | } 310 | } 311 | 312 | content = strings.Join(lines, "") 313 | } 314 | 315 | // Now that we have a valid bash-style command, we can format it with shfmt 316 | // log.Printf("Content1: %s\n", content) 317 | content = formatBash(content, c) 318 | 319 | // log.Printf("Content2: %s\n", content) 320 | 321 | if !hereDoc { 322 | reBacktickComment := regexp.MustCompile(`([ \t]*)(?:&& )?` + "`(#.*)#` " + `\\`) 323 | content = reBacktickComment.ReplaceAllString(content, "$1$2") 324 | 325 | // Fixup the comment indentation 326 | lines := strings.SplitAfter(content, "\n") 327 | prevIsComment := false 328 | prevCommentSpacing := "" 329 | firstLineIsComment := false 330 | for i := range lines { 331 | lineTrim := strings.TrimLeft(lines[i], " \t") 332 | // fmt.Printf("LineTrim: %s, %v\n", lineTrim, prevIsComment) 333 | if len(lineTrim) >= 1 && lineTrim[0] == '#' { 334 | if i == 0 { 335 | firstLineIsComment = true 336 | lines[i] = strings.Repeat(" ", int(c.IndentSize)) + lineTrim 337 | } 338 | lineParts := strings.SplitN(lines[i], "#", 2) 339 | 340 | if prevIsComment { 341 | lines[i] = prevCommentSpacing + "#" + lineParts[1] 342 | } else { 343 | prevCommentSpacing = lineParts[0] 344 | } 345 | prevIsComment = true 346 | } else { 347 | prevIsComment = false 348 | } 349 | } 350 | // TODO: this formatting isn't perfect (see 
tests/out/run5.dockerfile) 351 | if firstLineIsComment { 352 | lines = slices.Insert(lines, 0, "\\\n") 353 | } 354 | content = strings.Join(lines, "") 355 | content = strings.ReplaceAll(content, "×", "`") 356 | 357 | } 358 | return content 359 | } 360 | func formatRun(n *ExtendedNode, c *Config) string { 361 | // Get the original RUN command text 362 | hereDoc := false 363 | flags := n.Node.Flags 364 | 365 | var content string 366 | if len(n.Node.Heredocs) >= 1 { 367 | content = n.Node.Heredocs[0].Content 368 | hereDoc = true 369 | // TODO: check if doc.FileDescriptor == 0? 370 | } else { 371 | // We split the original multiline string by whitespace 372 | originalText := n.OriginalMultiline 373 | if n.OriginalMultiline == "" { 374 | // If the original multiline string is empty, use the original value 375 | originalText = n.Node.Original 376 | } 377 | 378 | originalTrimmed := strings.TrimLeft(originalText, " \t") 379 | parts := regexp.MustCompile("[ \t]").Split(originalTrimmed, 2+len(flags)) 380 | content = parts[1+len(flags)] 381 | } 382 | // Try to parse as JSON 383 | var jsonItems []string 384 | err := json.Unmarshal([]byte(content), &jsonItems) 385 | if err == nil { 386 | out, err := Marshal(jsonItems) 387 | if err != nil { 388 | panic(err) 389 | } 390 | outStr := strings.ReplaceAll(string(out), "\",\"", "\", \"") 391 | content = outStr + "\n" 392 | } else { 393 | content = formatShell(content, hereDoc, c) 394 | if hereDoc { 395 | n.Node.Heredocs[0].Content = content 396 | content, _ = GetHeredoc(n) 397 | } 398 | } 399 | 400 | if len(flags) > 0 { 401 | content = strings.Join(flags, " ") + " " + content 402 | } 403 | 404 | return strings.ToUpper(n.Value) + " " + content 405 | } 406 | 407 | func GetHeredoc(n *ExtendedNode) (string, bool) { 408 | if len(n.Node.Heredocs) == 0 { 409 | return "", false 410 | } 411 | 412 | // printAST(n, 0) 413 | args := []string{} 414 | cur := n.Next 415 | for cur != nil { 416 | if cur.Node.Value != "" { 417 | args = append(args, cur.Node.Value) 418 | } 419 | cur = cur.Next 420 | } 421 | content := strings.Join(args, " ") + "\n" + n.Node.Heredocs[0].Content + n.Node.Heredocs[0].Name + "\n" 422 | return content, true 423 | } 424 | func formatBasic(n *ExtendedNode, c *Config) string { 425 | // Uppercases the command, and indent the following lines 426 | originalTrimmed := strings.TrimLeft(n.OriginalMultiline, " \t") 427 | 428 | value, success := GetHeredoc(n) 429 | if !success { 430 | parts := regexp.MustCompile("[ \t]").Split(originalTrimmed, 2) 431 | if len(parts) < 2 { 432 | // No argument after directive; just return the directive itself 433 | return strings.ToUpper(n.Value) + "\n" 434 | } 435 | value = strings.TrimLeft(parts[1], " \t") 436 | } 437 | return IndentFollowingLines(strings.ToUpper(n.Value)+" "+value, c.IndentSize) 438 | } 439 | 440 | // Marshal is a UTF-8 friendly marshaler. Go's json.Marshal is not UTF-8 441 | // friendly because it replaces the valid UTF-8 and JSON characters "&". "<", 442 | // ">" with the "slash u" unicode escaped forms (e.g. \u0026). It preemptively 443 | // escapes for HTML friendliness. Where text may include any of these 444 | // characters, json.Marshal should not be used. 
Playground of Go breaking a 445 | // title: https://play.golang.org/p/o2hiX0c62oN 446 | // Source: https://stackoverflow.com/a/69502657/5684541 447 | func Marshal(i interface{}) ([]byte, error) { 448 | buffer := &bytes.Buffer{} 449 | encoder := json.NewEncoder(buffer) 450 | encoder.SetEscapeHTML(false) 451 | err := encoder.Encode(i) 452 | return bytes.TrimRight(buffer.Bytes(), "\n"), err 453 | } 454 | 455 | func getCmd(n *ExtendedNode, shouldSplitNode bool) []string { 456 | cmd := []string{} 457 | for node := n; node != nil; node = node.Next { 458 | // Split value by whitespace 459 | rawValue := strings.Trim(node.Node.Value, " \t") 460 | if len(node.Node.Flags) > 0 { 461 | cmd = append(cmd, node.Node.Flags...) 462 | } 463 | // log.Printf("ShouldSplitNode: %v\n", shouldSplitNode) 464 | if shouldSplitNode { 465 | parts, err := shlex.Split(rawValue) 466 | if err != nil { 467 | log.Fatalf("Error splitting: %s\n", node.Node.Value) 468 | } 469 | cmd = append(cmd, parts...) 470 | } else { 471 | cmd = append(cmd, rawValue) 472 | } 473 | } 474 | // log.Printf("getCmd: %v\n", cmd) 475 | return cmd 476 | } 477 | 478 | func formatEntrypoint(n *ExtendedNode, c *Config) string { 479 | return formatCmd(n, c) 480 | } 481 | func formatCmd(n *ExtendedNode, c *Config) string { 482 | // Determine JSON form from parser attributes 483 | isJSON, ok := n.Node.Attributes["json"] 484 | if !ok { 485 | isJSON = false 486 | } 487 | 488 | // Extract raw content after directive (and any flags) 489 | flags := n.Node.Flags 490 | originalText := n.OriginalMultiline 491 | if originalText == "" { 492 | originalText = n.Node.Original 493 | } 494 | originalTrimmed := strings.TrimLeft(originalText, " \t") 495 | parts := regexp.MustCompile("[ \t]").Split(originalTrimmed, 2+len(flags)) 496 | if len(parts) < 1+len(flags) { 497 | return strings.ToUpper(n.Value) + "\n" 498 | } 499 | var content string 500 | if len(parts) >= 2+len(flags) { 501 | content = parts[1+len(flags)] 502 | } 503 | 504 | // If JSON form (attribute or decodable), format as JSON array with spaces 505 | var jsonItems []string 506 | if isJSON || json.Unmarshal([]byte(content), &jsonItems) == nil { 507 | items := getCmd(n.Next, false) 508 | if !isJSON && len(items) == 0 { 509 | items = jsonItems 510 | } 511 | b, err := Marshal(items) 512 | if err != nil { 513 | return "" 514 | } 515 | bWithSpace := strings.ReplaceAll(string(b), "\",\"", "\", \"") 516 | return strings.ToUpper(n.Node.Value) + " " + bWithSpace + "\n" 517 | } 518 | 519 | // Otherwise, format as shell command 520 | shell := formatShell(content, false, c) 521 | if len(flags) > 0 { 522 | shell = strings.Join(flags, " ") + " " + shell 523 | } 524 | return strings.ToUpper(n.Node.Value) + " " + shell 525 | } 526 | 527 | func formatSpaceSeparated(n *ExtendedNode, c *Config) string { 528 | isJSON, ok := n.Node.Attributes["json"] 529 | if !ok { 530 | isJSON = false 531 | } 532 | cmd, success := GetHeredoc(n) 533 | if !success { 534 | cmd = strings.Join(getCmd(n.Next, isJSON), " ") 535 | if len(n.Node.Flags) > 0 { 536 | cmd = strings.Join(n.Node.Flags, " ") + " " + cmd 537 | } 538 | cmd += "\n" 539 | } 540 | 541 | return strings.ToUpper(n.Node.Value) + " " + cmd 542 | } 543 | 544 | func formatMaintainer(n *ExtendedNode, c *Config) string { 545 | 546 | // Get text between quotes 547 | maintainer := strings.Trim(n.Next.Node.Value, "\"") 548 | return "LABEL org.opencontainers.image.authors=\"" + maintainer + "\"\n" 549 | } 550 | 551 | func GetFileLines(fileName string) ([]string, error) { 552 | // Open the file 553 
| f, err := os.Open(fileName) 554 | if err != nil { 555 | return []string{}, err 556 | } 557 | defer f.Close() 558 | 559 | // Read the file contents 560 | b := new(strings.Builder) 561 | io.Copy(b, f) 562 | fileLines := strings.SplitAfter(b.String(), "\n") 563 | 564 | return fileLines, nil 565 | } 566 | 567 | func StripWhitespace(lines string, rightOnly bool) string { 568 | // Split the string into lines by newlines 569 | // log.Printf("Lines: .%s.\n", lines) 570 | linesArray := strings.SplitAfter(lines, "\n") 571 | // Create a new slice to hold the stripped lines 572 | var strippedLines string 573 | // Iterate over each line 574 | for _, line := range linesArray { 575 | // Trim leading and trailing whitespace 576 | // log.Printf("Line .%s.\n", line) 577 | hadNewline := len(line) > 0 && line[len(line)-1] == '\n' 578 | if rightOnly { 579 | // Only trim trailing whitespace 580 | line = strings.TrimRight(line, " \t\n") 581 | } else { 582 | // Trim both leading and trailing whitespace 583 | line = strings.Trim(line, " \t\n") 584 | } 585 | 586 | // log.Printf("Line2 .%s.", line) 587 | if hadNewline { 588 | line += "\n" 589 | } 590 | strippedLines += line 591 | } 592 | return strippedLines 593 | } 594 | 595 | func FormatComments(lines []string) string { 596 | // Adds lines to the output, collapsing multiple newlines into a single newline 597 | // and removing leading / trailing whitespace. We can do this because 598 | // we are adding comments and we don't care about the formatting. 599 | missingContent := StripWhitespace(strings.Join(lines, ""), false) 600 | // Replace multiple newlines with a single newline 601 | re := regexp.MustCompile(`\n{3,}`) 602 | return re.ReplaceAllString(missingContent, "\n") 603 | } 604 | 605 | func IndentFollowingLines(lines string, indentSize uint) string { 606 | // Split the input by lines 607 | allLines := strings.SplitAfter(lines, "\n") 608 | 609 | // If there's only one line or no lines, return the original 610 | if len(allLines) <= 1 { 611 | return lines 612 | } 613 | 614 | // Keep the first line as is 615 | result := allLines[0] 616 | // Indent all subsequent lines 617 | for i := 1; i < len(allLines); i++ { 618 | if allLines[i] != "" { // Skip empty lines 619 | // Remove existing indentation and add new indentation 620 | trimmedLine := strings.TrimLeft(allLines[i], " \t") 621 | allLines[i] = strings.Repeat(" ", int(indentSize)) + trimmedLine 622 | } 623 | 624 | // Add to result (with newline except for the last line) 625 | result += allLines[i] 626 | } 627 | 628 | return result 629 | } 630 | 631 | func formatBash(s string, c *Config) string { 632 | r := strings.NewReader(s) 633 | f, err := syntax.NewParser(syntax.KeepComments(true)).Parse(r, "") 634 | if err != nil { 635 | fmt.Printf("Error parsing: %s\n", s) 636 | panic(err) 637 | } 638 | buf := new(bytes.Buffer) 639 | syntax.NewPrinter( 640 | syntax.Minify(false), 641 | syntax.SingleLine(false), 642 | syntax.SpaceRedirects(c.SpaceRedirects), 643 | syntax.Indent(c.IndentSize), 644 | syntax.BinaryNextLine(true), 645 | ).Print(buf, f) 646 | return buf.String() 647 | } 648 | 649 | /* 650 | * 651 | // Node is a structure used to represent a parse tree. 652 | // 653 | // In the node there are three fields, Value, Next, and Children. Value is the 654 | // current token's string value. Next is always the next non-child token, and 655 | // children contains all the children. 
Here's an example: 656 | // 657 | // (value next (child child-next child-next-next) next-next) 658 | // 659 | */ 660 | func printAST(n *ExtendedNode, indent int) { 661 | 662 | fmt.Printf("\n%sNode: %s\n", strings.Repeat("\t", indent), n.Node.Value) 663 | fmt.Printf("%sOriginal: %s\n", strings.Repeat("\t", indent), n.Node.Original) 664 | fmt.Printf("%sOriginalMultiline\n%s=====\n%s%s======\n", strings.Repeat("\t", indent), strings.Repeat("\t", indent), n.OriginalMultiline, strings.Repeat("\t", indent)) 665 | fmt.Printf("%sAttributes: %v\n", strings.Repeat("\t", indent), n.Node.Attributes) 666 | fmt.Printf("%sHeredocs: %v\n", strings.Repeat("\t", indent), n.Node.Heredocs) 667 | // n.PrevComment 668 | fmt.Printf("%sPrevComment: %v\n", strings.Repeat("\t", indent), n.Node.PrevComment) 669 | fmt.Printf("%sStartLine: %d\n", strings.Repeat("\t", indent), n.Node.StartLine) 670 | fmt.Printf("%sEndLine: %d\n", strings.Repeat("\t", indent), n.Node.EndLine) 671 | fmt.Printf("%sFlags: %v\n", strings.Repeat("\t", indent), n.Node.Flags) 672 | 673 | if n.Children != nil { 674 | fmt.Printf("\n%s!!!! Children\n%s==========\n", strings.Repeat("\t", indent), strings.Repeat("\t", indent)) 675 | for _, c := range n.Children { 676 | printAST(c, indent+1) 677 | } 678 | } 679 | if n.Next != nil { 680 | fmt.Printf("\n%s!!!! Next\n%s==========\n", strings.Repeat("\t", indent), strings.Repeat("\t", indent)) 681 | printAST(n.Next, indent+1) 682 | } 683 | 684 | } 685 | --------------------------------------------------------------------------------