├── .github ├── ISSUE_TEMPLATE │ └── bug_template.yml ├── labeler.yml └── workflows │ ├── cargo.yml │ ├── deno.yml │ ├── dotnet.yml │ ├── label.yml │ ├── node.yaml │ ├── pip.yml │ ├── poetry.yml │ └── publish-jsr.yml ├── .gitignore ├── CODEOWNERS ├── cargo ├── README.md ├── flatpak-cargo-generator.py ├── poetry.lock ├── pyproject.toml └── quickstart │ ├── Cargo.lock │ ├── Cargo.toml │ ├── com.flatpak.quickstart.json │ └── src │ └── main.rs ├── cpan ├── README.md └── flatpak-cpan-generator.pl ├── deno ├── README.md ├── deno.json ├── deno.lock ├── src │ ├── main.ts │ └── utils.ts └── tests │ ├── main.test.ts │ ├── main_function.test.ts │ └── utils.test.ts ├── dotnet ├── README.md ├── flatpak-dotnet-generator.py ├── poetry.lock └── pyproject.toml ├── dub └── flatpak-dub-generator.py ├── flatpak-json2yaml.py ├── go-get ├── README.md ├── flatpak-go-get-generator.py └── flatpak-go-vendor-generator.py ├── go-modules └── README.md ├── gradle ├── README.md └── flatpak-gradle-generator.py ├── node ├── .gitattributes ├── .gitignore ├── README.md ├── flatpak_node_generator │ ├── __init__.py │ ├── __main__.py │ ├── cache.py │ ├── electron.py │ ├── integrity.py │ ├── main.py │ ├── manifest.py │ ├── node_headers.py │ ├── package.py │ ├── progress.py │ ├── providers │ │ ├── __init__.py │ │ ├── npm.py │ │ ├── special.py │ │ └── yarn.py │ ├── py.typed │ ├── requests.py │ └── url_metadata.py ├── poetry.lock ├── poetry.toml ├── pyproject.toml ├── tests │ ├── conftest.py │ ├── data │ │ └── packages │ │ │ ├── electron │ │ │ ├── package-lock.v1.json │ │ │ ├── package-lock.v2.json │ │ │ ├── package-lock.v3.json │ │ │ ├── package.json │ │ │ └── yarn.lock │ │ │ ├── local-link-yarn │ │ │ ├── package.json │ │ │ ├── subdir │ │ │ │ ├── index.js │ │ │ │ └── package.json │ │ │ └── yarn.lock │ │ │ ├── local │ │ │ ├── package-lock.v1.json │ │ │ ├── package-lock.v2.json │ │ │ ├── package-lock.v3.json │ │ │ ├── package.json │ │ │ ├── subdir │ │ │ │ ├── index.js │ │ │ │ └── package.json │ │ │ └── yarn.lock │ │ │ ├── minimal-git │ │ │ ├── package-lock.json │ │ │ ├── package-lock.v1.json │ │ │ ├── package-lock.v2.json │ │ │ ├── package-lock.v3.json │ │ │ ├── package.json │ │ │ └── yarn.lock │ │ │ ├── missing-resolved-npm │ │ │ ├── package-lock.v2.json │ │ │ ├── package-lock.v3.json │ │ │ └── package.json │ │ │ └── url-as-dep │ │ │ ├── package-lock.v1.json │ │ │ ├── package-lock.v2.json │ │ │ ├── package-lock.v3.json │ │ │ └── package.json │ ├── test_electron.py │ ├── test_integrity.py │ ├── test_manifest.py │ ├── test_package.py │ ├── test_providers.py │ ├── test_requests.py │ └── test_yarn.py ├── tools │ ├── b64-integrity.sh │ ├── b64-to-hex.sh │ ├── hex-to-b64.sh │ └── lockfile-utils.sh ├── vanilla-quick-start │ ├── README.md │ ├── generated-sources.json │ ├── org.electronjs.ElectronQuickStart.yaml │ └── package-lock.json └── webpack-quick-start │ ├── README.md │ ├── build.electron.webpack.ElectronWebpackQuickStart.yaml │ └── generated-sources.json ├── npm ├── .gitignore ├── README.md ├── electron-quick-start-package-lock.json ├── flatpak-npm-generator.py └── io.atom.electron.ElectronQuickStart.json ├── opam ├── README.md └── flatpak-opam-generator.py ├── pip ├── flatpak-pip-generator ├── flatpak-pip-generator.py ├── pyproject.toml ├── readme.md └── uv.lock ├── poetry ├── flatpak-poetry-generator.py ├── poetry.lock ├── pyproject.toml └── readme.md ├── readme.md ├── rubygems ├── LICENSE ├── README.md └── flatpak_rubygems_generator.rb ├── spm ├── README.md ├── flatpak-spm-generator.swift └── quickstart │ ├── Package.swift │ ├── 
Sources │ └── quickstart │ │ └── main.swift │ └── org.flatpak.quickstart.json └── yarn ├── README.md ├── electron-quick-start-yarn.lock ├── flatpak-yarn-generator.py ├── generated-sources.json ├── io.atom.electron.ElectronQuickStart.json └── yarnrc /.github/ISSUE_TEMPLATE/bug_template.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: Report a bug in some of the flatpak-builder tools 3 | labels: 4 | - "bug" 5 | body: 6 | - id: flatpak-builder-version 7 | type: input 8 | attributes: 9 | label: flatpak-builder version 10 | description: >- 11 | What version of flatpak-builder are you using? 12 | If unsure, run `flatpak-builder --version` in the terminal. 13 | placeholder: 1.4.1 14 | validations: 15 | required: true 16 | 17 | - id: os-info 18 | type: input 19 | attributes: 20 | label: Linux distribution and version 21 | description: >- 22 | What Linux distribution are you using? If unsure, run `uname -a` in the terminal. 23 | placeholder: "Ubuntu 20.04" 24 | validations: 25 | required: true 26 | 27 | - id: tool-name 28 | type: dropdown 29 | attributes: 30 | label: Affected flatpak-builder tool 31 | description: >- 32 | What tool are you experiencing problem with? 33 | Please specify the script file name in this repository. 34 | options: 35 | - cargo/flatpak-cargo-generator.py 36 | - cpan/flatpak-cpan-generator.pl 37 | - dotnet/flatpak-dotnet-generator.py 38 | - dub/flatpak-dub-generator.py 39 | - go-get/flatpak-go-get-generator.py 40 | - go-get/flatpak-go-vendor-generator.py 41 | - node/flatpak-node-generator.py 42 | - npm/flatpak-npm-generator.py 43 | - pip/flatpak-pip-generator 44 | - poetry/flatpak-poetry-generator.py 45 | - rubygems/flatpak_rubygems_generator.rb 46 | - spm/flatpak-spm-generator.swift 47 | - yarn/flatpak-yarn-generator.py 48 | validations: 49 | required: true 50 | 51 | - id: tool-args 52 | type: input 53 | attributes: 54 | label: flatpak-builder tool cli args 55 | description: >- 56 | If you're passing additional command-line options to the tool, specify them here. 57 | placeholder: --xdg-layout --electron-ffmpeg=archive 58 | validations: 59 | required: false 60 | 61 | - id: source-url 62 | type: input 63 | attributes: 64 | label: Source repository URL 65 | description: >- 66 | Source URL for the project you're running the tool against. 67 | If the tool is taking a lockfile as input (e.g. node or cargo generators), 68 | please link the repository containing the lockfile. 69 | placeholder: https://github.com/microsoft/vscode.git 70 | validations: 71 | required: false 72 | 73 | - id: manifest-url 74 | type: input 75 | attributes: 76 | label: Flatpak-builder manifest URL 77 | description: >- 78 | URL for the flatpak-builder manifest you're building. 79 | Please link the app manifest which references generated-sources produced by the tool, 80 | not the generated-sources manifest itself. 81 | placeholder: https://github.com/flathub/com.visualstudio.code-oss.git 82 | validations: 83 | required: false 84 | 85 | - id: description 86 | type: textarea 87 | attributes: 88 | label: Description 89 | description: A clear and concise description of the problem. 90 | placeholder: | 91 | When trying to build the manifest generated by TOOL from LOCKFILE, it gives this error: ... 
92 | validations: 93 | required: true 94 | -------------------------------------------------------------------------------- /.github/labeler.yml: -------------------------------------------------------------------------------- 1 | cargo: 2 | - changed-files: 3 | - any-glob-to-any-file: cargo/** 4 | 5 | cpan: 6 | - changed-files: 7 | - any-glob-to-any-file: cpan/** 8 | 9 | dotnet: 10 | - changed-files: 11 | - any-glob-to-any-file: dotnet/** 12 | 13 | deno: 14 | - changed-files: 15 | - any-glob-to-any-file: deno/** 16 | 17 | dub: 18 | - changed-files: 19 | - any-glob-to-any-file: dub/** 20 | 21 | go: 22 | - changed-files: 23 | - any-glob-to-any-file: 24 | - go-get/** 25 | - go-modules/** 26 | 27 | gradle: 28 | - changed-files: 29 | - any-glob-to-any-file: gradle/** 30 | 31 | node: 32 | - changed-files: 33 | - any-glob-to-any-file: node/** 34 | 35 | npm: 36 | - changed-files: 37 | - any-glob-to-any-file: npm/** 38 | 39 | opam: 40 | - changed-files: 41 | - any-glob-to-any-file: opam/** 42 | 43 | pip: 44 | - changed-files: 45 | - any-glob-to-any-file: pip/** 46 | 47 | poetry: 48 | - changed-files: 49 | - any-glob-to-any-file: poetry/** 50 | 51 | ruby: 52 | - changed-files: 53 | - any-glob-to-any-file: ruby/** 54 | 55 | swift: 56 | - changed-files: 57 | - any-glob-to-any-file: spm/** 58 | 59 | yarn: 60 | - changed-files: 61 | - any-glob-to-any-file: yarn/** 62 | 63 | CI: 64 | - changed-files: 65 | - any-glob-to-any-file: .github/** 66 | -------------------------------------------------------------------------------- /.github/workflows/cargo.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: master 6 | paths: 7 | - cargo/** 8 | pull_request: 9 | branches: master 10 | paths: 11 | - cargo/** 12 | 13 | defaults: 14 | run: 15 | working-directory: cargo 16 | 17 | jobs: 18 | cargo: 19 | permissions: 20 | contents: read 21 | runs-on: ubuntu-latest 22 | timeout-minutes: 30 23 | steps: 24 | # 4.2.2 25 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 26 | with: 27 | persist-credentials: false 28 | 29 | - name: Setup Poetry 30 | run: | 31 | curl -sSL https://install.python-poetry.org | python3 - 32 | sudo ln -s /github/home/.local/bin/poetry /usr/bin/poetry 33 | 34 | - name: Install python dependencies 35 | run: poetry sync --all-groups --all-extras 36 | 37 | - name: Check code formatting 38 | run: poetry run ruff format --check 39 | 40 | - name: Lint 41 | run: poetry run ruff check --output-format=github 42 | 43 | - name: Check python types 44 | run: poetry run mypy . 45 | -------------------------------------------------------------------------------- /.github/workflows/deno.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | paths: 6 | - deno/** 7 | branches: 8 | - master 9 | pull_request: 10 | paths: 11 | - deno/** 12 | branches: 13 | - master 14 | 15 | defaults: 16 | run: 17 | working-directory: deno 18 | 19 | permissions: 20 | contents: read 21 | 22 | jobs: 23 | deno: 24 | runs-on: ubuntu-latest 25 | 26 | steps: 27 | - name: Setup repo 28 | uses: actions/checkout@v4 29 | 30 | - name: Setup Deno 31 | uses: denoland/setup-deno@v2 32 | 33 | - name: Verify formatting 34 | run: deno fmt --check 35 | 36 | - name: Run linter 37 | run: deno lint 38 | 39 | - name: Run doc linter 40 | run: deno doc --lint src/main.ts 41 | 42 | - name: Run type check 43 | run: deno check . 
44 | 45 | - name: Run tests 46 | run: deno test -A 47 | -------------------------------------------------------------------------------- /.github/workflows/dotnet.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: master 6 | paths: 7 | - dotnet/** 8 | pull_request: 9 | branches: master 10 | paths: 11 | - dotnet/** 12 | 13 | defaults: 14 | run: 15 | working-directory: dotnet 16 | 17 | jobs: 18 | dotnet: 19 | permissions: 20 | contents: read 21 | runs-on: ubuntu-latest 22 | timeout-minutes: 30 23 | steps: 24 | # 4.2.2 25 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 26 | with: 27 | persist-credentials: false 28 | 29 | - name: Setup Poetry 30 | run: | 31 | curl -sSL https://install.python-poetry.org | python3 - 32 | sudo ln -s /github/home/.local/bin/poetry /usr/bin/poetry 33 | 34 | - name: Install python dependencies 35 | run: poetry sync --all-groups --all-extras 36 | 37 | - name: Check code formatting 38 | run: poetry run ruff format --check 39 | 40 | - name: Lint 41 | run: poetry run ruff check --output-format=github 42 | 43 | - name: Check python types 44 | run: poetry run mypy . 45 | -------------------------------------------------------------------------------- /.github/workflows/label.yml: -------------------------------------------------------------------------------- 1 | name: "Label PRs" 2 | 3 | # WARNING: Do NOT use org level secrets or checkout any code in this 4 | # workflow 5 | 6 | on: 7 | pull_request_target: 8 | types: [opened, edited, reopened, synchronize] 9 | workflow_dispatch: 10 | 11 | jobs: 12 | labeler: 13 | permissions: 14 | contents: read 15 | pull-requests: write 16 | runs-on: ubuntu-latest 17 | steps: 18 | # 5.0.0 19 | - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 20 | with: 21 | repo-token: ${{ secrets.GITHUB_TOKEN }} 22 | sync-labels: true 23 | -------------------------------------------------------------------------------- /.github/workflows/node.yaml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: master 6 | paths: 7 | - node/** 8 | pull_request: 9 | branches: master 10 | paths: 11 | - node/** 12 | 13 | defaults: 14 | run: 15 | working-directory: node 16 | 17 | jobs: 18 | node: 19 | permissions: 20 | contents: read 21 | timeout-minutes: 60 22 | strategy: 23 | fail-fast: false 24 | matrix: 25 | python-version: 26 | - '3.9' 27 | - '3.10' 28 | - '3.11' 29 | - '3.12' 30 | - '3.13' 31 | runs-on: ubuntu-latest 32 | steps: 33 | # 4.2.2 34 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 35 | with: 36 | persist-credentials: false 37 | 38 | - name: Configure git 39 | run: | 40 | git config --global user.name 'github-actions[bot]' 41 | git config --global user.email '41898282+github-actions[bot]@users.noreply.github.com' 42 | 43 | # 5.6.0 44 | - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 45 | with: 46 | python-version: ${{ matrix.python-version }} 47 | 48 | - name: Setup Poetry 49 | run: | 50 | curl -sSL https://install.python-poetry.org | python3 - 51 | sudo ln -s /github/home/.local/bin/poetry /usr/bin/poetry 52 | 53 | - name: Install OS dependencies 54 | run: sudo apt-get update && sudo apt-get install -y flatpak-builder 55 | 56 | - name: Install Flatpak dependencies 57 | run: | 58 | flatpak --user remote-add flathub https://flathub.org/repo/flathub.flatpakrepo 59 | flatpak --user install -y flathub \ 60 | 
org.freedesktop.{Platform,Sdk{,.Extension.node{14,16,18}}}//22.08 61 | flatpak --user install -y flathub \ 62 | org.freedesktop.{Platform,Sdk{,.Extension.node{20,22}}}//24.08 63 | 64 | - name: Install dependencies 65 | run: poetry install --with=dev 66 | 67 | - name: Run checks 68 | run: poetry run poe check 69 | -------------------------------------------------------------------------------- /.github/workflows/pip.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: master 6 | paths: 7 | - pip/** 8 | pull_request: 9 | branches: master 10 | paths: 11 | - pip/** 12 | 13 | defaults: 14 | run: 15 | working-directory: pip 16 | 17 | jobs: 18 | pip: 19 | permissions: 20 | contents: read 21 | runs-on: ubuntu-latest 22 | timeout-minutes: 30 23 | steps: 24 | # 4.2.2 25 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 26 | with: 27 | persist-credentials: false 28 | 29 | - name: Install uv 30 | # 5.4.0 31 | uses: astral-sh/setup-uv@22695119d769bdb6f7032ad67b9bca0ef8c4a174 32 | with: 33 | version: "0.6.9" 34 | enable-cache: true 35 | cache-dependency-glob: | 36 | **/uv.lock 37 | **/pyproject.toml 38 | 39 | - name: Install python dependencies 40 | run: uv sync -v --all-groups --frozen 41 | 42 | - name: Check code formatting 43 | run: uv run ruff format --check 44 | 45 | - name: Lint 46 | run: uv run ruff check --output-format=github 47 | 48 | - name: Check python types 49 | run: uv run mypy . 50 | -------------------------------------------------------------------------------- /.github/workflows/poetry.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: master 6 | paths: 7 | - poetry/** 8 | pull_request: 9 | branches: master 10 | paths: 11 | - poetry/** 12 | 13 | defaults: 14 | run: 15 | working-directory: poetry 16 | 17 | jobs: 18 | poetry: 19 | permissions: 20 | contents: read 21 | runs-on: ubuntu-latest 22 | timeout-minutes: 30 23 | steps: 24 | # 4.2.2 25 | - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 26 | with: 27 | persist-credentials: false 28 | 29 | - name: Setup Poetry 30 | run: | 31 | curl -sSL https://install.python-poetry.org | python3 - 32 | sudo ln -s /github/home/.local/bin/poetry /usr/bin/poetry 33 | 34 | - name: Install python dependencies 35 | run: poetry sync --all-groups --all-extras 36 | 37 | - name: Check code formatting 38 | run: poetry run ruff format --check 39 | 40 | - name: Lint 41 | run: poetry run ruff check --output-format=github 42 | 43 | - name: Check python types 44 | run: poetry run mypy . 
45 | -------------------------------------------------------------------------------- /.github/workflows/publish-jsr.yml: -------------------------------------------------------------------------------- 1 | name: Publish 2 | on: 3 | push: 4 | paths: 5 | - deno/** 6 | branches: 7 | - master 8 | 9 | defaults: 10 | run: 11 | working-directory: deno 12 | 13 | jobs: 14 | publish: 15 | runs-on: ubuntu-latest 16 | 17 | permissions: 18 | contents: read 19 | id-token: write 20 | 21 | steps: 22 | - uses: actions/checkout@v4 23 | 24 | - name: Setup Deno 25 | uses: denoland/setup-deno@v2 26 | 27 | - name: Publish package 28 | run: deno publish 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | deno/coverage 2 | -------------------------------------------------------------------------------- /CODEOWNERS: -------------------------------------------------------------------------------- 1 | # Fallback 2 | * @flatpak/flatpak-builder-tools 3 | 4 | /spm/ @david-swift 5 | /gradle/ @hadess 6 | /deno/ @sigmasd 7 | /.github/workflows/deno.yml @sigmasd 8 | /.github/workflows/publish-jsr.yml @sigmasd 9 | -------------------------------------------------------------------------------- /cargo/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | package-mode = false 3 | 4 | [project] 5 | name = "flatpak_cargo_generator" 6 | version = "0.0.1" 7 | description = "Script to generate flatpak-builder manifest from Cargo lockfiles" 8 | license = {text = "MIT"} 9 | readme = "README.md" 10 | requires-python = "<4.0,>=3.9" 11 | dependencies = [ 12 | "aiohttp<4.0.0,>=3.9.5", 13 | "toml<1.0.0,>=0.10.2", 14 | "PyYAML<7.0.0,>=6.0.2", 15 | ] 16 | 17 | [tool.poetry.group.dev.dependencies] 18 | ruff = "^0.6.7" 19 | mypy = "^1.11.2" 20 | types-toml = "^0.10.8" 21 | types-pyyaml = "^6.0.12" 22 | 23 | [tool.ruff] 24 | line-length = 88 25 | include = ["*.py"] 26 | target-version = "py39" 27 | 28 | [tool.ruff.lint] 29 | preview = true 30 | extend-select = [ 31 | "B", 32 | "ERA", 33 | "I", 34 | "PLE", 35 | "PLW", 36 | "W", 37 | ] 38 | 39 | [tool.ruff.format] 40 | line-ending = "lf" 41 | quote-style = "double" 42 | 43 | [tool.mypy] 44 | disallow_untyped_defs = true 45 | disallow_any_unimported = true 46 | no_implicit_optional = true 47 | check_untyped_defs = true 48 | warn_unused_ignores = true 49 | show_error_codes = true 50 | warn_return_any = true 51 | -------------------------------------------------------------------------------- /cargo/quickstart/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "quickstart" 3 | version = "0.1.0" 4 | authors = ["Bilal Elmoussaoui "] 5 | edition = "2018" 6 | 7 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 8 | 9 | [dependencies] 10 | gtk = "0.8" 11 | gio = "0.8" -------------------------------------------------------------------------------- /cargo/quickstart/com.flatpak.quickstart.json: -------------------------------------------------------------------------------- 1 | { 2 | "app-id": "com.flatpak.quickstart", 3 | "runtime": "org.freedesktop.Platform", 4 | "runtime-version": "19.08", 5 | "sdk": "org.freedesktop.Sdk", 6 | "sdk-extensions": ["org.freedesktop.Sdk.Extension.rust-stable"], 7 | "command": "quickstart", 8 | "finish-args": [ 9 | "--share=ipc", 10 | "--socket=fallback-x11", 11 | 
"--socket=wayland", 12 | "--device=dri" 13 | ], 14 | "build-options": { 15 | "append-path": "/usr/lib/sdk/rust-stable/bin" 16 | }, 17 | "modules": [{ 18 | "name": "quickstart", 19 | "buildsystem": "simple", 20 | "build-options": { 21 | "env": { 22 | "CARGO_HOME": "/run/build/quickstart/cargo" 23 | } 24 | }, 25 | "build-commands": [ 26 | "cargo --offline fetch --manifest-path Cargo.toml --verbose", 27 | "cargo --offline build --release --verbose", 28 | "install -Dm755 ./target/release/quickstart -t /app/bin/" 29 | ], 30 | "sources": [{ 31 | "type": "dir", 32 | "path": "." 33 | }, 34 | "generated-sources.json" 35 | ] 36 | }] 37 | } 38 | -------------------------------------------------------------------------------- /cargo/quickstart/src/main.rs: -------------------------------------------------------------------------------- 1 | use gio::prelude::*; 2 | use gtk::prelude::*; 3 | use std::env::args; 4 | 5 | fn build_ui(application: >k::Application) { 6 | let window = gtk::ApplicationWindow::new(application); 7 | 8 | window.set_title("Flatpak!"); 9 | window.set_border_width(10); 10 | window.set_position(gtk::WindowPosition::Center); 11 | window.set_default_size(350, 70); 12 | 13 | let button = gtk::Button::new_with_label("Click me!"); 14 | 15 | window.add(&button); 16 | 17 | window.show_all(); 18 | } 19 | 20 | fn main() { 21 | let application = 22 | gtk::Application::new(Some("com.flatpak.quickstart"), Default::default()) 23 | .expect("Initialization failed..."); 24 | 25 | application.connect_activate(|app| { 26 | build_ui(app); 27 | }); 28 | 29 | application.run(&args().collect::>()); 30 | } -------------------------------------------------------------------------------- /cpan/README.md: -------------------------------------------------------------------------------- 1 | # flatpak-cpan-generator 2 | 3 | Generates a sources file for a list of CPAN modules. 4 | 5 | ## Requirements 6 | 7 | - App::cpanminus 8 | - Getopt::Long::Descriptive 9 | - JSON::MaybeXS 10 | - LWP::UserAgent 11 | - MetaCPAN::Client 12 | - Capture::Tiny 13 | 14 | Example installation on Fedora: 15 | 16 | ```bash 17 | $ sudo dnf install 'perl(App::cpanminus)' 'perl(Getopt::Long::Descriptive)' ... 18 | ``` 19 | 20 | ## Usage 21 | 22 | You can run flatpak-cpan-generator like this: 23 | 24 | ```bash 25 | $ flatpak-cpan-generator LWP::UserAgent Some::Other::Package ... 26 | ``` 27 | 28 | This will write a generated-sources.json (the filename can be changed via `-o/--output`) that: 29 | 30 | - Downloads the dependencies into a directory named `perl-libs`. (You can change the directory 31 | name via `-d/--dir`.) 32 | - Saves a file `install.sh` into the same directory as above that installs all the dependencies 33 | by running `perl Makefile.PL && make install` or `perl Build.PL && ./Build && ./Build install` 34 | inside each directory. 35 | 36 | Note both Perl itself and modules installed via Makefile.PL tend to not have write 37 | permission on shared objects they installed, which can break flatpak-builder. The example 38 | below shows how to work around that via chmod. 39 | 40 | ## Example 41 | 42 | ```yaml 43 | # Installs Perl. 44 | # (Based on: https://github.com/flathub/io.github.Hexchat.Plugin.Perl) 45 | - name: perl 46 | no-autogen: true 47 | config-opts: 48 | - '-des' 49 | # Build a shared library. 
50 | - '-Duseshrplib' 51 | build-options: 52 | cflags: '-fPIC' 53 | ldflags: '-fpic' 54 | sources: 55 | - type: archive 56 | url: https://www.cpan.org/src/5.0/perl-5.30.0.tar.gz 57 | sha256: 851213c754d98ccff042caa40ba7a796b2cee88c5325f121be5cbb61bbf975f2 58 | - type: shell 59 | commands: 60 | # Have Flatpak run the GNU-compatible configure script. 61 | - 'ln -s configure{.gnu,}' 62 | # Restore write permission to the Perl libraries. 63 | post-install: 64 | - 'chmod -R u+w /app/lib/perl5' 65 | # Clean up a bunch of stuff we don't need. Depending on your application, 66 | # you may have to drop some of these (e.g. *.pod). 67 | cleanup: 68 | - '/bin/corelist' 69 | - '/bin/cpan' 70 | - '/bin/enc2xs' 71 | - '/bin/encguess' 72 | - '/bin/h2ph' 73 | - '/bin/h2xs' 74 | - '/bin/instmodsh' 75 | - '/bin/json_pp' 76 | - '/bin/libnetcfg' 77 | - '/bin/perlbug' 78 | - '/bin/perldoc' 79 | - '/bin/perlivp' 80 | - '/bin/perlthanks' 81 | - '/bin/piconv' 82 | - '/bin/pl2pm' 83 | - '/bin/pod*' 84 | - '/bin/prove' 85 | - '/bin/ptar*' 86 | - '/bin/shasum' 87 | - '/bin/splain' 88 | - '/bin/xsubpp' 89 | - '/bin/zipdetails' 90 | - '/include' 91 | - '/man' 92 | - '*.pod' 93 | 94 | # Installs the modules generated by flatpak-cpan-generator. 95 | # Note that *any* Makefile.PL-style modules MUST be installed in this one step, 96 | # as once perllocal.pod is written in one module, it cannot be modified by others. 97 | - name: perl-libs 98 | buildsystem: simple 99 | build-commands: 100 | - 'perl-libs/install.sh' 101 | # Same as with the Perl module, we need to restore write permission. 102 | # However, -f is now passed to avoid errors from trying to touch files from the 103 | # above module that are now marked as read-only. 104 | post-install: 105 | - 'chmod -Rf u+w /app/lib/perl5/site_perl' 106 | sources: 107 | - generated-sources.json 108 | # This step should be customized based on the CPAN packages you're using. 109 | cleanup: 110 | - '/bin' 111 | - '/man' 112 | ``` 113 | 114 | ## Background 115 | 116 | flatpak-cpan-generator works by: 117 | 118 | - Using cpanminus to install all the needed libraries into a separate directory. cpanminus is 119 | used over vanilla CPAN because the latter does not seem to support this use case at all. 120 | - Parsing the output of cpanm in order to extract the names and versions of the installed packages. 121 | - Querying CPAN to grab the distribution URLs for each and saving them into the sources file. 122 | 123 | We can't just resolve the dependency list without cpanminus, because many CPAN packages don't 124 | properly declare all their dependencies in the metadata. Therefore, the only way to find them 125 | is to run or parse Makefile.PL, the latter of which cpanminus already does. 
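For illustration, the lines the generator scans for in cpanm's output look like the ones below; each `Successfully installed <Distribution>-<version>` match is then looked up with MetaCPAN::Client and written to the sources file as an `archive` entry. The distribution names and versions here are placeholders only, since the real output depends on the packages you request:

```
# illustrative cpanm output only; actual distributions and versions will differ
Successfully installed Try-Tiny-0.31
Successfully installed URI-5.21
Successfully installed HTTP-Message-6.45
```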
126 | -------------------------------------------------------------------------------- /cpan/flatpak-cpan-generator.pl: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env perl 2 | 3 | use v5.14; 4 | 5 | use strict; 6 | use warnings; 7 | 8 | use Digest::SHA; 9 | require File::Temp; 10 | use File::Temp (); 11 | 12 | use Getopt::Long::Descriptive; 13 | use JSON::MaybeXS; 14 | use LWP::UserAgent; 15 | use MetaCPAN::Client; 16 | use Capture::Tiny qw(tee); 17 | 18 | 19 | sub scan_deps { 20 | my @deps = grep(/^Successfully installed/, @_); 21 | 22 | for (@deps) 23 | { 24 | s/^Successfully installed (\S+).*/$1/; 25 | } 26 | 27 | @deps 28 | } 29 | 30 | sub get_url_sha256 { 31 | my ($url) = @_; 32 | 33 | my $state = Digest::SHA->new(256); 34 | my $ua = LWP::UserAgent->new; 35 | $ua->env_proxy; 36 | 37 | my $resp = $ua->get($url, ':read_size_hint' => 1024, 38 | ':content_cb' => sub { 39 | my ($data) = @_; 40 | $state->add($data); 41 | }); 42 | 43 | die "Failed to get sha256 of $url: @{[$resp->status_line]}\n" if !$resp->is_success; 44 | $state->hexdigest; 45 | 46 | } 47 | sub get_source_for_dep { 48 | my ($cpan, $dep, $outdir) = @_; 49 | my $release_set = $cpan->release({ name => $dep }); 50 | 51 | die "Unexpected @{[$release_set->total]} releases for $dep" 52 | if $release_set->total != 1; 53 | my $release = $release_set->next; 54 | 55 | my $url = $release->download_url; 56 | my $sha256 = get_url_sha256 $url; 57 | 58 | { 59 | type => 'archive', 60 | url => $url, 61 | sha256 => $sha256, 62 | dest => "$outdir/@{[$release->distribution]}", 63 | }; 64 | } 65 | 66 | sub write_module_to_file { 67 | my ($output, $root) = @_; 68 | 69 | my $serializer = JSON::MaybeXS->new(indent => 1, space_after => 1, canonical => 1); 70 | my $json = $serializer->encode($root); 71 | 72 | open my $fh, '>', $output or die "Could not open $output for writing\n"; 73 | print $fh $json; 74 | close $fh; 75 | } 76 | 77 | sub main { 78 | my ($opts, $usage) = describe_options( 79 | 'flatpak-cpan-generator %o ', 80 | ['output|o=s', 'The generated sources file', { default => 'generated-sources.json' }], 81 | ['dir|d=s', 'The output directory used inside the sources file', { default => 'perl-libs' }], 82 | ['help|h', 'Show this screen', { shortcircuit => 1, hidden => 1 }], 83 | ); 84 | 85 | if ($opts->help) { 86 | print $usage->text; 87 | exit; 88 | } 89 | 90 | die "At least one package is required.\n" if @ARGV == 0; 91 | 92 | my $cpan = MetaCPAN::Client->new; 93 | 94 | say '** Installing dependencies with cpanm...'; 95 | 96 | my $tmpdir = File::Temp->newdir; 97 | my ($stdout, $stderr, $exit) = tee { 98 | system ('cpanm', '-n', '-L', $tmpdir, "--", @ARGV); 99 | }; 100 | die "cpanm failed with exit status $exit\n" if $exit != 0; 101 | 102 | say '** Scanning dependencies...'; 103 | 104 | my @stdout = split "\n", $stdout; 105 | my @deps = scan_deps @stdout; 106 | # my @deps = scan_deps 'lib'; 107 | my @sources = (); 108 | 109 | foreach my $dep (@deps) { 110 | say "** Processing: $dep"; 111 | my $source = get_source_for_dep $cpan, $dep, $opts->dir; 112 | push @sources, $source; 113 | } 114 | 115 | push @sources, { 116 | type => 'script', 117 | dest => $opts->dir, 118 | 'dest-filename' => 'install.sh', 119 | commands => [ 120 | "set -e", 121 | "function make_install {", 122 | " mod_dir=\$1", 123 | " cd \$mod_dir", 124 | " if [ -f 'Makefile.PL' ]; then", 125 | " perl Makefile.PL PREFIX=\${FLATPAK_DEST} && make install PREFIX=\${FLATPAK_DEST}", 126 | " elif [ -f 'Build.PL' ]; then", 127 | " perl 
Build.PL && ./Build && ./Build install", 128 | "  else", 129 | "    echo 'No Makefile.PL or Build.PL found. Do not know how to install this module'", 130 | "    exit 1", 131 | "  fi", 132 | "}", 133 | map { "(make_install $_->{dest})" } @sources 134 | ], 135 | }; 136 | 137 | write_module_to_file $opts->output, \@sources; 138 | } 139 | 140 | main; 141 | -------------------------------------------------------------------------------- /deno/README.md: -------------------------------------------------------------------------------- 1 | # Flatpak Deno Generator 2 | 3 | Run it from JSR: 4 | 5 | ``` 6 | deno -RN -W=. jsr:@flatpak-contrib/flatpak-deno-generator deno.lock 7 | ``` 8 | 9 | or locally from this repo: 10 | 11 | ``` 12 | deno -RN -W=. src/main.ts deno.lock --output sources.json 13 | ``` 14 | 15 | This will create a `deno-sources.json` (or the name specified with --output) 16 | that can be used in flatpak build files. The sources file provides these two 17 | directories: 18 | 19 | - It creates and populates `./deno_dir` with npm dependencies 20 | - It creates and populates `./vendor` with jsr + http dependencies 21 | 22 | ## Usage 23 | 24 | - Use the sources file as a source, for example: 25 | 26 | ```yml 27 | sources: 28 | - deno-sources.json 29 | ``` 30 | 31 | - To use `deno_dir` (when your project has npm dependencies) point the `DENO_DIR` 32 | environment variable to it, like so: 33 | 34 | ```yml 35 | - name: someModule 36 | buildsystem: simple 37 | build-options: 38 | env: 39 | # sources provides deno_dir directory 40 | DENO_DIR: deno_dir 41 | ``` 42 | 43 | - To use `vendor` (when your project has http or jsr dependencies) move it next 44 | to your `deno.json` file and make sure to compile or run with the `--vendor` flag, 45 | for example: 46 | 47 | ```yml 48 | - # sources provides vendor directory 49 | - # src is where my deno project lives (deno.json is under the src directory), so I'm moving vendor next to it 50 | - mv ./vendor src/ 51 | - DENORT_BIN=$PWD/denort ./deno compile --vendor --no-check --output virtaudio-bin --cached-only 52 | --allow-all --include ./src/gui.slint --include ./src/client.html ./src/gui.ts 53 | ``` 54 | 55 | ## Notes 56 | 57 | Currently this only supports lockfile v5 (available since Deno version 2.3). 58 | 59 | ## License 60 | 61 | MIT 62 | 63 | ## Example 64 | 65 | - Check out https://github.com/flathub/io.github.sigmasd.VirtAudio/ 66 | 67 | ## Technical Info 68 | 69 | Theoretically it would've been better to put all the dependencies in `DENO_DIR`, 70 | but currently that's not possible because jsr and https dependencies have some 71 | special metadata checks made by Deno; more info here: 72 | https://github.com/denoland/deno/issues/29212 73 | -------------------------------------------------------------------------------- /deno/deno.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-contrib/flatpak-deno-generator", 3 | "version": "1.3.4", 4 | "exports": "./src/main.ts", 5 | "license": "MIT" 6 | } 7 | -------------------------------------------------------------------------------- /deno/deno.lock: -------------------------------------------------------------------------------- 1 | { 2 | "version": "5", 3 | "specifiers": { 4 | "jsr:@std/assert@0.221": "0.221.0", 5 | "jsr:@std/assert@0.221.0": "0.221.0", 6 | "jsr:@std/encoding@1": "1.0.10", 7 | "jsr:@std/fmt@0.221": "0.221.0", 8 | "jsr:@std/fs@0.221.0": "0.221.0", 9 | "jsr:@std/path@0.221": "0.221.0", 10 | "jsr:@std/path@0.221.0": "0.221.0" 11 | }, 12 | "jsr": {
"@std/assert@0.221.0": { 14 | "integrity": "a5f1aa6e7909dbea271754fd4ab3f4e687aeff4873b4cef9a320af813adb489a", 15 | "dependencies": [ 16 | "jsr:@std/fmt" 17 | ] 18 | }, 19 | "@std/encoding@1.0.10": { 20 | "integrity": "8783c6384a2d13abd5e9e87a7ae0520a30e9f56aeeaa3bdf910a3eaaf5c811a1" 21 | }, 22 | "@std/fmt@0.221.0": { 23 | "integrity": "379fed69bdd9731110f26b9085aeb740606b20428ce6af31ef6bd45ef8efa62a" 24 | }, 25 | "@std/fs@0.221.0": { 26 | "integrity": "028044450299de8ed5a716ade4e6d524399f035513b85913794f4e81f07da286", 27 | "dependencies": [ 28 | "jsr:@std/assert@0.221", 29 | "jsr:@std/path@0.221" 30 | ] 31 | }, 32 | "@std/path@0.221.0": { 33 | "integrity": "0a36f6b17314ef653a3a1649740cc8db51b25a133ecfe838f20b79a56ebe0095", 34 | "dependencies": [ 35 | "jsr:@std/assert@0.221" 36 | ] 37 | } 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /deno/src/utils.ts: -------------------------------------------------------------------------------- 1 | // LICENSE = MIT 2 | 3 | import { encodeHex } from "jsr:@std/encoding@1/hex"; 4 | import { decodeBase64 } from "jsr:@std/encoding@1/base64"; 5 | 6 | export async function sha256(text: string): Promise { 7 | const encoder = new TextEncoder(); 8 | const data = encoder.encode(text); 9 | const hashBuffer = await crypto.subtle.digest("SHA-256", data); 10 | // Convert buffer to hex string 11 | return Array.from(new Uint8Array(hashBuffer)) 12 | .map((b) => b.toString(16).padStart(2, "0")) 13 | .join(""); 14 | } 15 | 16 | /** 17 | * Converts a Base64 encoded string to its hexadecimal representation 18 | * 19 | * @param base64String The Base64 encoded string. 20 | * @returns The hexadecimal representation of the decoded string. 21 | */ 22 | export function base64ToHex(base64String: string): string { 23 | // Step 1: Base64 decode the string into a Uint8Array. 24 | const binaryData: Uint8Array = decodeBase64(base64String); 25 | // Step 2: Convert the Uint8Array (raw binary data) to a hexadecimal string. 26 | const hexString: string = encodeHex(binaryData); 27 | return hexString; 28 | } 29 | 30 | export function splitOnce( 31 | str: string, 32 | separator: string, 33 | dir: "left" | "right" = "left", 34 | ) { 35 | const idx = dir === "left" 36 | ? 
str.indexOf(separator) 37 | : str.lastIndexOf(separator); 38 | if (idx === -1) return [str]; 39 | return [str.slice(0, idx), str.slice(idx + separator.length)]; 40 | } 41 | 42 | const FORBIDDEN_CHARS = new Set([ 43 | "?", 44 | "<", 45 | ">", 46 | ":", 47 | "*", 48 | "|", 49 | "\\", 50 | ":", 51 | '"', 52 | "'", 53 | "/", 54 | ]); 55 | 56 | // https://github.com/denoland/deno_cache_dir/blob/0b2dbb2553019dd829d71665bed7f48f610b64f0/rs_lib/src/local.rs#L594 57 | export function hasForbiddenChars(segment: string): boolean { 58 | for (const c of segment) { 59 | const isUppercase = /[A-Z]/.test(c); 60 | if (FORBIDDEN_CHARS.has(c) || isUppercase) { 61 | // do not allow uppercase letters in order to make this work 62 | // the same on case insensitive file systems 63 | return true; 64 | } 65 | } 66 | return false; 67 | } 68 | 69 | // https://github.com/denoland/deno_cache_dir/blob/0b2dbb2553019dd829d71665bed7f48f610b64f0/rs_lib/src/local.rs#L651 70 | export function shouldHash(fileName: string): boolean { 71 | return fileName.length === 0 || 72 | fileName.length > 30 || 73 | hasForbiddenChars(fileName); 74 | } 75 | 76 | // https://github.com/denoland/deno_cache_dir/blob/0b2dbb2553019dd829d71665bed7f48f610b64f0/rs_lib/src/local.rs#L621 77 | export async function shortHash(fileName: string): Promise { 78 | const hash = await sha256(fileName); 79 | const MAX_LENGTH = 20; 80 | let sub = ""; 81 | let count = 0; 82 | for (const c of fileName) { 83 | if (count >= MAX_LENGTH) break; 84 | if (c === "?") break; 85 | if (FORBIDDEN_CHARS.has(c)) { 86 | sub += "_"; 87 | } else { 88 | sub += c.toLowerCase(); 89 | } 90 | count++; 91 | } 92 | 93 | sub = splitOnce(sub, ".", "right")[0]; 94 | let ext = splitOnce(fileName, ".", "right").at(1); 95 | ext = ext ? `.${ext}` : ""; 96 | 97 | if (sub.length === 0) { 98 | return `#${hash.slice(0, 7)}${ext}`; 99 | } else { 100 | return `#${sub}_${hash.slice(0, 5)}${ext}`; 101 | } 102 | } 103 | 104 | export function urlSegments(url: string | URL) { 105 | return new URL(url).pathname.replace(/^\//, "").split("/"); 106 | } 107 | -------------------------------------------------------------------------------- /deno/tests/main.test.ts: -------------------------------------------------------------------------------- 1 | // LICENSE = MIT 2 | // deno-lint-ignore-file require-await 3 | import { assert, assertEquals, assertMatch } from "jsr:@std/assert@0.221.0"; 4 | import { jsrPkgToFlatpakData, npmPkgToFlatpakData } from "../src/main.ts"; 5 | 6 | Deno.test("jsrPkgToFlatpakData returns correct flatpak data", async () => { 7 | // Mock fetch for meta.json and versioned meta 8 | const metaJson = JSON.stringify({ 9 | dummy: true, 10 | }); 11 | const metaVerJson = JSON.stringify({ 12 | moduleGraph2: { 13 | "/mod.ts": {}, 14 | "/deno.json": {}, 15 | }, 16 | moduleGraph1: {}, 17 | manifest: { 18 | "/mod.ts": { 19 | checksum: "sha256-abcdef1234567890", 20 | }, 21 | "/deno.json": { 22 | checksum: "sha256-ffeeddccbbaa9988", 23 | }, 24 | }, 25 | }); 26 | 27 | let fetchCallCount = 0; 28 | const origFetch = globalThis.fetch; 29 | Object.defineProperty(globalThis, "fetch", { 30 | configurable: true, 31 | writable: true, 32 | value: async (input: URL | RequestInfo, _init?: RequestInit) => { 33 | const url = typeof input === "string" 34 | ? input 35 | : input instanceof URL 36 | ? 
input.toString() 37 | : (input as Request).url; 38 | fetchCallCount++; 39 | if (url.endsWith("_meta.json")) { 40 | return { 41 | text: async () => metaVerJson, 42 | } as Response; 43 | } 44 | if (url.endsWith("meta.json")) { 45 | return { 46 | text: async () => metaJson, 47 | } as Response; 48 | } 49 | throw new Error("Unexpected fetch url: " + url); 50 | }, 51 | }); 52 | 53 | const pkg = { 54 | module: "@std/encoding", 55 | version: "1.0.10", 56 | name: "encoding", 57 | }; 58 | 59 | const data = await jsrPkgToFlatpakData(pkg); 60 | // Restore fetch after test 61 | Object.defineProperty(globalThis, "fetch", { 62 | configurable: true, 63 | writable: true, 64 | value: origFetch, 65 | }); 66 | 67 | // Should have meta.json, versioned meta, /mod.ts, deno.json, and duplicate deno.json 68 | assertEquals(data.length, 5); 69 | 70 | // meta.json 71 | assertEquals(data[0].url, "https://jsr.io/@std/encoding/meta.json"); 72 | assertEquals(data[0]["dest-filename"], "meta.json"); 73 | assertEquals(data[1].url, "https://jsr.io/@std/encoding/1.0.10_meta.json"); 74 | assertEquals(data[1]["dest-filename"], "1.0.10_meta.json"); 75 | 76 | // /mod.ts 77 | assertEquals(data[2].url, "https://jsr.io/@std/encoding/1.0.10/mod.ts"); 78 | assertEquals(data[2].sha256, "abcdef1234567890"); 79 | // Accept either "mod.ts" or a hashed filename 80 | assertMatch( 81 | data[2]["dest-filename"] as string, 82 | /^mod\.ts$|^#mod_[a-f0-9]{5}\.ts$/, 83 | ); 84 | 85 | // /deno.json 86 | assertEquals(data[3].url, "https://jsr.io/@std/encoding/1.0.10/deno.json"); 87 | assertEquals(data[3].sha256, "ffeeddccbbaa9988"); 88 | assertEquals(data[3]["dest-filename"], "deno.json"); 89 | }); 90 | 91 | Deno.test("npmPkgToFlatpakData returns correct flatpak data", async () => { 92 | // Mock fetch for npm meta 93 | const metaJson = { 94 | versions: { 95 | "2.18.4": { 96 | dist: { 97 | // "abcdefg" in base64 is "YWJjZGVmZw==" 98 | integrity: "sha512-YWJjZGVmZw==", 99 | }, 100 | }, 101 | }, 102 | }; 103 | 104 | const origFetch = globalThis.fetch; 105 | Object.defineProperty(globalThis, "fetch", { 106 | configurable: true, 107 | writable: true, 108 | value: async (input: URL | RequestInfo, _init?: RequestInit) => { 109 | const url = typeof input === "string" 110 | ? input 111 | : input instanceof URL 112 | ? 
input.toString() 113 | : (input as Request).url; 114 | if (url === "https://registry.npmjs.org/@napi-rs/cli") { 115 | return { 116 | json: async () => metaJson, 117 | } as Response; 118 | } 119 | throw new Error("Unexpected fetch url: " + url); 120 | }, 121 | }); 122 | 123 | const pkg = { 124 | module: "@napi-rs/cli", 125 | version: "2.18.4", 126 | name: "cli", 127 | cpu: "x86_64" as const, 128 | }; 129 | 130 | const data = await npmPkgToFlatpakData(pkg); 131 | // Restore fetch after test 132 | Object.defineProperty(globalThis, "fetch", { 133 | configurable: true, 134 | writable: true, 135 | value: origFetch, 136 | }); 137 | 138 | // Should have registry.json and archive 139 | assertEquals(data.length, 2); 140 | 141 | // registry.json 142 | const registryContents = data.at(0)?.contents; 143 | assert(registryContents !== undefined); 144 | assert("2.18.4" in JSON.parse(registryContents).versions); 145 | assertEquals(data[0]["dest-filename"], "registry.json"); 146 | 147 | // archive 148 | assertEquals( 149 | data[1].url, 150 | "https://registry.npmjs.org/@napi-rs/cli/-/cli-2.18.4.tgz", 151 | ); 152 | assertEquals( 153 | data[1]["archive-type"], 154 | "tar-gzip", 155 | ); 156 | assertEquals( 157 | data[1].dest, 158 | "deno_dir/npm/registry.npmjs.org/@napi-rs/cli/2.18.4", 159 | ); 160 | assertEquals( 161 | (data[1]["only-arches"] as string[])[0], 162 | "x86_64", 163 | ); 164 | // sha512 should be present and hex 165 | assertMatch( 166 | String(data[1].sha512), 167 | /^[a-f0-9]+$/, 168 | ); 169 | }); 170 | -------------------------------------------------------------------------------- /deno/tests/main_function.test.ts: -------------------------------------------------------------------------------- 1 | // LICENSE = MIT 2 | // deno-lint-ignore-file no-explicit-any 3 | import { main } from "../src/main.ts"; 4 | import { assert } from "jsr:@std/assert@0.221.0"; 5 | import { join } from "jsr:@std/path@0.221.0"; 6 | import { existsSync } from "jsr:@std/fs@0.221.0"; 7 | 8 | Deno.test("main function: generates deno-sources.json from lockfile", async () => { 9 | const tmpDir = "./tests/tmp_main"; 10 | await Deno.mkdir(tmpDir, { recursive: true }); 11 | const lockPath = join(tmpDir, "deno.lock"); 12 | const sourcesPath = join(tmpDir, "deno-sources.json"); 13 | 14 | // Lockfile with jsr, npm, and https deps 15 | await Deno.writeTextFile( 16 | lockPath, 17 | JSON.stringify( 18 | { 19 | version: "5", 20 | jsr: { 21 | "@std/encoding@1.0.10": { 22 | integrity: 23 | "8783c6384a2d13abd5e9e87a7ae0520a30e9f56aeeaa3bdf910a3eaaf5c811a1", 24 | }, 25 | }, 26 | npm: { 27 | "left-pad@1.3.0": { 28 | integrity: 29 | "sha512-1r9Z1tcHTul3e8DqRLVQjaxAg/P6nxsVXni4eWh05rq6ArlTc95xJMu38xpv8uKXuX4nHCqB6f+GO6zkRgLr1w==", 30 | engines: { node: ">=0.10.0" }, 31 | }, 32 | // peer dep 33 | "update-browserslist-db@1.1.3_browserslist@4.24.4": { 34 | "integrity": 35 | "sha512-UxhIZQ+QInVdunkDAaiazvvT/+fXL5Osr0JZlJulepYu6Jd7qJtDZjlur0emRlT71EN3ScPoE7gvsuIKKNavKw==", 36 | "dependencies": [ 37 | "browserslist", 38 | "escalade", 39 | "picocolors", 40 | ], 41 | "bin": true, 42 | }, 43 | // peer deps can have multiple peers 44 | "@sveltejs/vite-plugin-svelte-inspector@4.0.1_@sveltejs+vite-plugin-svelte@5.0.3__svelte@5.25.3___acorn@8.14.1__vite@6.2.3_svelte@5.25.3__acorn@8.14.1_vite@6.2.3": 45 | { 46 | "integrity": 47 | "sha512-J/Nmb2Q2y7mck2hyCX4ckVHcR5tu2J+MtBEQqpDrrgELZ2uvraQcK/ioCV61AqkdXFgriksOKIceDcQmqnGhVw==", 48 | "dependencies": [ 49 | "@sveltejs/vite-plugin-svelte", 50 | "debug", 51 | "svelte", 52 | "vite", 53 | ], 54 | }, 55 | }, 
56 | remote: { 57 | "https://deno.land/std@0.203.0/uuid/v1.ts": 58 | "b6e2e2c1e2c1e2c1e2c1e2c1e2c1e2c1e2c1e2c1e2c1e2c1e2c1e2c1e2c1e2c1", 59 | }, 60 | }, 61 | null, 62 | 2, 63 | ), 64 | ); 65 | 66 | try { 67 | await main(lockPath, sourcesPath); 68 | assert(existsSync(sourcesPath), "deno-sources.json should be created"); 69 | const sources = JSON.parse(await Deno.readTextFile(sourcesPath)); 70 | assert(Array.isArray(sources)); 71 | // jsr checks 72 | assert(sources.some((s: any) => s["dest-filename"] === "meta.json")); 73 | assert(sources.some((s: any) => s["dest-filename"] === "1.0.10_meta.json")); 74 | // npm checks 75 | assert(sources.some((s: any) => s["dest-filename"] === "registry.json")); 76 | assert(sources.some((s: any) => 77 | typeof s.dest === "string" && 78 | s.dest.includes("left-pad/1.3.0") 79 | )); 80 | // https checks 81 | assert(sources.some((s: any) => 82 | typeof s.url === "string" && 83 | s.url.startsWith("https://deno.land/std@0.203.0/uuid/v1.ts") 84 | )); 85 | } finally { 86 | await Deno.remove(tmpDir, { recursive: true }); 87 | } 88 | }); 89 | -------------------------------------------------------------------------------- /deno/tests/utils.test.ts: -------------------------------------------------------------------------------- 1 | // LICENSE = MIT 2 | import { assert, assertEquals, assertMatch } from "jsr:@std/assert@0.221.0"; 3 | import { 4 | base64ToHex, 5 | sha256, 6 | shortHash, 7 | shouldHash, 8 | splitOnce, 9 | urlSegments, 10 | } from "../src/utils.ts"; 11 | 12 | Deno.test("sha256 produces correct hash", async () => { 13 | const hash = await sha256("hello world"); 14 | assertEquals( 15 | hash, 16 | "b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9", 17 | ); 18 | }); 19 | 20 | Deno.test("base64ToHex converts base64 to hex", () => { 21 | // "hello" in base64 is "aGVsbG8=" 22 | // "hello" in hex is "68656c6c6f" 23 | assertEquals(base64ToHex("aGVsbG8="), "68656c6c6f"); 24 | }); 25 | 26 | Deno.test("splitOnce splits string correctly (left)", () => { 27 | assertEquals(splitOnce("foo:bar:baz", ":"), ["foo", "bar:baz"]); 28 | assertEquals(splitOnce("foo", ":"), ["foo"]); 29 | }); 30 | 31 | Deno.test("splitOnce splits string correctly (right)", () => { 32 | assertEquals(splitOnce("foo:bar:baz", ":", "right"), ["foo:bar", "baz"]); 33 | assertEquals(splitOnce("foo", ":", "right"), ["foo"]); 34 | }); 35 | 36 | Deno.test("shouldHash returns true for forbidden or long file names", () => { 37 | assert(shouldHash("ThisIsUppercase.txt")); 38 | assert(shouldHash("file?name.txt")); 39 | assert(shouldHash("a".repeat(31))); 40 | assert(shouldHash("")); 41 | }); 42 | 43 | Deno.test("shouldHash returns false for safe short lowercase names", () => { 44 | assert(!shouldHash("file.txt")); 45 | assert(!shouldHash("abc")); 46 | }); 47 | 48 | Deno.test("shortHash returns hashed filename for forbidden/long names", async () => { 49 | const result = await shortHash("ThisIsUppercase.txt"); 50 | assertMatch(result, /^#thisisuppercase_[a-f0-9]{5}\.txt$/); 51 | const result2 = await shortHash("file?name.txt"); 52 | assertMatch(result2, /^#file_[a-f0-9]{5}.txt$/); 53 | const result3 = await shortHash("a".repeat(40)); 54 | assertMatch(result3, /^#aaaaaaaaaaaaaaaaaaaa_[a-f0-9]{5}$/); 55 | const result4 = await shortHash("file { 64 | const result = await shortHash(""); 65 | assertMatch(result, /^#[a-f0-9]{7}$/); 66 | }); 67 | 68 | Deno.test("urlSegments splits URL path into segments", () => { 69 | assertEquals( 70 | urlSegments("https://example.com/foo/bar/baz.txt"), 71 | ["foo", 
"bar", "baz.txt"], 72 | ); 73 | assertEquals( 74 | urlSegments(new URL("https://example.com/a/b/c")), 75 | ["a", "b", "c"], 76 | ); 77 | assertEquals( 78 | urlSegments("https://example.com/"), 79 | [""], 80 | ); 81 | }); 82 | -------------------------------------------------------------------------------- /dotnet/README.md: -------------------------------------------------------------------------------- 1 | # Flatpak .NET Generator 2 | 3 | Tool to automatically generate a `flatpak-builder` sources file from a .NET Core .csproj file. 4 | 5 | ## Requirements 6 | 7 | You need to have `org.freedesktop.Sdk` and `org.freedesktop.Sdk.Extension.dotnet` installed. 8 | both branch 22.08 (21.08 is selectable by passing it in via the `--dotnet` or `-d` arguments) 9 | 10 | ## Usage 11 | 12 | Run `flatpak-dotnet-generator.py my-output-sources.json my.input.Desktop.csproj`. 13 | 14 | Then, you can use the sources file like this: 15 | 16 | ```yaml 17 | modules: 18 | - name: my-module 19 | buildsystem: simple 20 | build-commands: 21 | - '. /usr/lib/sdk/dotnet/enable.sh; dotnet build -f netcoreapp2.1 -c Release --source nuget-sources my.input.Desktop.csproj' 22 | sources: 23 | - my-output-sources.json 24 | ``` 25 | 26 | When using `dotnet build` or `dotnet publish` make sure you add the `--source nuget-sources` argument in order for `dotnet` 27 | to pick up the source files generated by this tool. 28 | 29 | If you want to change the directory name, run `flatpak-dotnet-generator.py` with `--destdir=my-destdir`. 30 | 31 | ## Arguments 32 | - `--runtime` or `-r` The target [runtime](https://learn.microsoft.com/en-us/dotnet/core/rid-catalog#linux-rids) to restore packages for. 33 | - `--dotnet` or `-d` The target version of dotnet to use. (Defaults to latest LTS version) 34 | - `--freedesktop` or `-f` The target version of the freedesktop sdk to use. (Defaults to latest version) 35 | - `--destdir` The directory the generated sources file will save sources to `nuget-sources` by default. 36 | - `--dotnet-args` or `-a` Pass additional arguments to the `dotnet` command. 37 | 38 | ## Example 39 | 40 | To pass multiple arguments to the `dotnet` command, use the `--dotnet-args` option: 41 | 42 | ```bash 43 | python3 flatpak-dotnet-generator.py my-output-sources.json my.input.Desktop.csproj --runtime linux-x64 --dotnet-args --no-cache --verbosity detailed 44 | ``` 45 | 46 | In this example: 47 | - `--no-cache` and `--verbosity detailed` are additional arguments passed to the `dotnet` command. 48 | - You can add as many arguments as needed after `--dotnet-args`. 49 | 50 | ## Development 51 | 52 | 1. Install Poetry v2 https://python-poetry.org/docs/#installation 53 | 2. `poetry install --with dev` 54 | 3. Format and lint: `poetry run ruff format && poetry run ruff check --fix --exit-non-zero-on-fix` 55 | 4. Type check: `poetry run mypy .` 56 | -------------------------------------------------------------------------------- /dotnet/flatpak-dotnet-generator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | __license__ = "MIT" 4 | 5 | import argparse 6 | import base64 7 | import binascii 8 | import concurrent.futures 9 | import json 10 | import subprocess 11 | import tempfile 12 | from pathlib import Path 13 | 14 | 15 | def main() -> None: 16 | # Bump this to latest freedesktop runtime version. 17 | freedesktop_default = "24.08" 18 | # Bump this to an LTS dotnet version. 
19 | dotnet_default = "8" 20 | 21 | parser = argparse.ArgumentParser() 22 | parser.add_argument("output", help="The output JSON sources file") 23 | parser.add_argument("project", nargs="+", help="The project file(s)") 24 | parser.add_argument( 25 | "--runtime", 26 | "-r", 27 | nargs="+", 28 | default=[None], 29 | help="The target runtime(s) to restore packages for", 30 | ) 31 | parser.add_argument( 32 | "--freedesktop", 33 | "-f", 34 | help="The target version of the freedesktop sdk to use", 35 | default=freedesktop_default, 36 | ) 37 | parser.add_argument( 38 | "--dotnet", 39 | "-d", 40 | help="The target version of dotnet to use", 41 | default=dotnet_default, 42 | ) 43 | parser.add_argument( 44 | "--destdir", 45 | help="The directory the generated sources file will save sources to", 46 | default="nuget-sources", 47 | ) 48 | parser.add_argument( 49 | "--dotnet-args", 50 | "-a", 51 | nargs=argparse.REMAINDER, 52 | help="Additional arguments to pass to the dotnet command", 53 | ) 54 | args = parser.parse_args() 55 | 56 | sources = [] 57 | with tempfile.TemporaryDirectory(dir=Path()) as tmp: 58 | 59 | def restore_project(project: str, runtime: str | None) -> None: 60 | subprocess.run( 61 | [ 62 | "flatpak", 63 | "run", 64 | "--env=DOTNET_CLI_TELEMETRY_OPTOUT=true", 65 | "--env=DOTNET_SKIP_FIRST_TIME_EXPERIENCE=true", 66 | "--command=sh", 67 | f"--runtime=org.freedesktop.Sdk//{args.freedesktop}", 68 | "--share=network", 69 | "--filesystem=host", 70 | f"org.freedesktop.Sdk.Extension.dotnet{args.dotnet}//{args.freedesktop}", 71 | "-c", 72 | f'PATH="${{PATH}}:/usr/lib/sdk/dotnet{args.dotnet}/bin" LD_LIBRARY_PATH="$LD_LIBRARY_PATH:/usr/lib/sdk/dotnet{args.dotnet}/lib" exec dotnet restore "$@"', 73 | "--", 74 | "--packages", 75 | tmp, 76 | project, 77 | ] 78 | + (["-r", runtime] if runtime else []) 79 | + (args.dotnet_args or []), 80 | check=False, 81 | ) 82 | 83 | with concurrent.futures.ThreadPoolExecutor() as executor: 84 | futures = [] 85 | for project in args.project: 86 | if args.runtime: 87 | for runtime in args.runtime: 88 | futures.append( 89 | executor.submit(restore_project, project, runtime) 90 | ) 91 | else: 92 | futures.append(executor.submit(restore_project, project, None)) 93 | concurrent.futures.wait(futures) 94 | 95 | for path in Path(tmp).glob("**/*.nupkg.sha512"): 96 | name = path.parent.parent.name 97 | version = path.parent.name 98 | filename = "{}.{}.nupkg".format(name, version) 99 | url = "https://api.nuget.org/v3-flatcontainer/{}/{}/{}".format( 100 | name, version, filename 101 | ) 102 | 103 | with path.open() as fp: 104 | sha512 = binascii.hexlify(base64.b64decode(fp.read())).decode("ascii") 105 | 106 | sources.append( 107 | { 108 | "type": "file", 109 | "url": url, 110 | "sha512": sha512, 111 | "dest": args.destdir, 112 | "dest-filename": filename, 113 | } 114 | ) 115 | 116 | with open(args.output, "w", encoding="utf-8") as fp: 117 | json.dump(sorted(sources, key=lambda n: n.get("dest-filename")), fp, indent=4) 118 | 119 | 120 | if __name__ == "__main__": 121 | main() 122 | -------------------------------------------------------------------------------- /dotnet/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | package-mode = false 3 | 4 | [project] 5 | name = "flatpak_dotnet_generator" 6 | version = "0.0.1" 7 | description = "Script to generate flatpak-builder manifest from csproj files" 8 | license = {text = "MIT"} 9 | readme = "README.md" 10 | requires-python = "<4.0,>=3.10" 11 | dependencies = [] 12 
| 13 | [tool.poetry.group.dev.dependencies] 14 | ruff = "^0.6.7" 15 | mypy = "^1.11.2" 16 | 17 | [tool.ruff] 18 | line-length = 88 19 | include = ["*.py"] 20 | target-version = "py39" 21 | 22 | [tool.ruff.lint] 23 | preview = true 24 | extend-select = [ 25 | "B", 26 | "ERA", 27 | "I", 28 | "PLE", 29 | "PLW", 30 | "W", 31 | ] 32 | 33 | [tool.ruff.format] 34 | line-ending = "lf" 35 | quote-style = "double" 36 | 37 | [tool.mypy] 38 | disallow_untyped_defs = true 39 | disallow_any_unimported = true 40 | no_implicit_optional = true 41 | check_untyped_defs = true 42 | warn_unused_ignores = true 43 | show_error_codes = true 44 | warn_return_any = true 45 | -------------------------------------------------------------------------------- /dub/flatpak-dub-generator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | __license__ = 'MIT' 4 | import json 5 | import urllib.parse 6 | import hashlib 7 | import logging 8 | import argparse 9 | import asyncio 10 | import aiohttp 11 | 12 | REGISTRY_URL = "https://code.dlang.org/" 13 | 14 | async def get_remote_sha256(http_session, url): 15 | logging.info(f"started sha256({url})") 16 | sha256 = hashlib.sha256() 17 | async with http_session.get(url) as response: 18 | while True: 19 | data = await response.content.read(4096) 20 | if not data: 21 | break 22 | sha256.update(data) 23 | logging.info(f"done sha256({url})") 24 | return sha256.hexdigest() 25 | 26 | def load_dub_selections(dub_selections_file="dub.selections.json"): 27 | with open(dub_selections_file, "r") as f: 28 | dub_selections = json.load(f) 29 | assert dub_selections.get("fileVersion") == 1 30 | return dub_selections 31 | 32 | async def get_sources(http_session, name, version_obj): 33 | if isinstance(version_obj, dict): 34 | if "path" in version_obj: 35 | logging.warning(f"Skipping path based dependency {name}") 36 | return 37 | version = version_obj["version"] 38 | else: 39 | version = version_obj 40 | dl_url = urllib.parse.urljoin(REGISTRY_URL, f"/packages/{name}/{version}.zip") 41 | source = { 42 | "type": "archive", 43 | "url": dl_url, 44 | "sha256": await get_remote_sha256(http_session, dl_url), 45 | "dest": f".flatpak-dub/{name}-{version}" 46 | } 47 | local_package = { 48 | "name": name, 49 | "version": version, 50 | "path": f".flatpak-dub/{name}-{version}" 51 | } 52 | return (source, local_package) 53 | 54 | async def generate_sources(dub_selections): 55 | sources = [] 56 | local_packages = [] 57 | 58 | async with aiohttp.ClientSession() as http_session: 59 | coros = [] 60 | for name, version_obj in dub_selections["versions"].items(): 61 | coros.append(get_sources(http_session, name, version_obj)) 62 | dub_sources = await asyncio.gather(*coros) 63 | for dub_source in dub_sources: 64 | if dub_source is not None: 65 | source, local_package = dub_source 66 | sources.append(source) 67 | local_packages.append(local_package) 68 | sources += [ 69 | { 70 | "type": "inline", 71 | "contents": json.dumps(local_packages), 72 | "dest": ".dub/packages", 73 | "dest-filename": "local-packages.json" 74 | }, 75 | { 76 | "type": "shell", 77 | "commands": [ 78 | ( 79 | "jq 'map(.path = ([$ENV.PWD] + (.path | split(\"/\")) | join(\"/\")))' " 80 | "<<<$(<.dub/packages/local-packages.json) > .dub/packages/local-packages.json" 81 | ) 82 | ] 83 | } 84 | ] 85 | 86 | return sources 87 | 88 | async def main(): 89 | parser = argparse.ArgumentParser() 90 | parser.add_argument('dub_selections_file', help='Path to the dub.selections.json file') 91 | 
parser.add_argument('-o', '--output', required=False, help='Where to write generated sources') 92 | args = parser.parse_args() 93 | if args.output is not None: 94 | outfile = args.output 95 | else: 96 | outfile = 'generated-sources.json' 97 | 98 | generated_sources = await generate_sources(load_dub_selections(args.dub_selections_file)) 99 | with open(outfile, 'w') as out: 100 | json.dump(generated_sources, out, indent=4, sort_keys=False) 101 | 102 | if __name__ == '__main__': 103 | logging.basicConfig(level=logging.DEBUG) 104 | asyncio.run(main()) 105 | -------------------------------------------------------------------------------- /flatpak-json2yaml.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Copyright 2018 Christoph Reiter 3 | # 4 | # Permission is hereby granted, free of charge, to any person obtaining 5 | # a copy of this software and associated documentation files (the 6 | # "Software"), to deal in the Software without restriction, including 7 | # without limitation the rights to use, copy, modify, merge, publish, 8 | # distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so, subject to 10 | # the following conditions: 11 | # 12 | # The above copyright notice and this permission notice shall be included 13 | # in all copies or substantial portions of the Software. 14 | # 15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 16 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 17 | # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 18 | # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 19 | # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 20 | # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 21 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
22 | 23 | import os 24 | import re 25 | import sys 26 | import argparse 27 | import json 28 | import yaml 29 | from collections import OrderedDict 30 | 31 | 32 | def json_remove_comments(json_data): 33 | 34 | def repl_func(match): 35 | comment = match.group(1).decode("utf-8") 36 | line = match.string[:match.start()].count(b"\n") + 1 37 | print("Removed comment in line {}: {}".format(line, repr(comment)), 38 | file=sys.stderr) 39 | return b"" 40 | 41 | return re.sub(br"^\s*(/\*.*?\*/)\s*$", repl_func, json_data, 42 | flags=re.MULTILINE | re.DOTALL) 43 | 44 | 45 | def test_json_remove_comments(): 46 | assert json_remove_comments(b"") == b"" 47 | assert json_remove_comments(b"\"/tmp/*.txt\"") == b"\"/tmp/*.txt\"" 48 | assert json_remove_comments(b"/*foo*/\nbar") == b"\nbar" 49 | assert json_remove_comments(b" /*fo\no*/ \nbar") == b"\nbar" 50 | assert json_remove_comments(b"/*foo*/\nquux\n/*bar*/") == b"\nquux\n" 51 | 52 | 53 | def json_to_yaml(json_data): 54 | """Takes encoded json and returns encoded yaml""" 55 | 56 | json_data = json_remove_comments(json_data) 57 | data = json.loads(json_data, object_pairs_hook=OrderedDict) 58 | 59 | class OrderedDumper(yaml.Dumper): 60 | 61 | # to get indented lists 62 | def increase_indent(self, flow=False, indentless=False): 63 | return super(OrderedDumper, self).increase_indent(flow, False) 64 | 65 | # to make pyyaml understand OrderedDict 66 | def dict_representer(dumper, data): 67 | return dumper.represent_dict(data.items()) 68 | 69 | OrderedDumper.add_representer(OrderedDict, dict_representer) 70 | 71 | return yaml.dump( 72 | data, Dumper=OrderedDumper, 73 | default_flow_style=False, encoding='utf-8') 74 | 75 | 76 | def main(): 77 | parser = argparse.ArgumentParser( 78 | description='Flatpak JSON to YAML converter') 79 | parser.add_argument('json_file', type=str, 80 | help='The flatpak JSON file to convert') 81 | parser.add_argument('-f', '--force', dest='force', 82 | default=False, action='store_true', 83 | help='Overwrite existing file') 84 | parser.add_argument('-o', '--output', type=str, dest='out_file', 85 | help='The yaml target path') 86 | args = parser.parse_args() 87 | 88 | with open(args.json_file, "rb") as h: 89 | out_file = args.out_file 90 | if out_file is None: 91 | out_file = os.path.splitext(args.json_file)[0] + '.yml' 92 | 93 | yaml_data = json_to_yaml(h.read()) 94 | 95 | if args.force: 96 | flags = "wb" 97 | else: 98 | flags = "xb" 99 | 100 | with open(out_file, flags) as out: 101 | out.write(yaml_data) 102 | 103 | 104 | if __name__ == '__main__': 105 | main() 106 | -------------------------------------------------------------------------------- /go-get/README.md: -------------------------------------------------------------------------------- 1 | # Flatpak Go Get Generator 2 | Tool to automatically create the source list for a Go module (legacy). 3 | 4 | It runs the build in a legacy `GOPATH` mode. 5 | For a module-aware mode, see [go-modules](../go-modules/README.md) or [flatpak-go-vendor-generator](./flatpak-go-vendor-generator.py) script. 6 | 7 | The script does not require Go in the host system. 8 | 9 | ## Usage 10 | 1. In the manifest, give the Go module network access and set GOPATH to $PWD. 11 | 12 | Example manifest module (json): 13 | ```json 14 | { 15 | "name": "writeas-cli", 16 | "buildsystem": "simple", 17 | "build-options": { 18 | "env": { 19 | "GOBIN": "/app/bin/" 20 | }, 21 | "build-args": [ 22 | "--share=network" 23 | ] 24 | }, 25 | "build-commands": [ 26 | ". 
/usr/lib/sdk/golang/enable.sh; export GOPATH=$PWD; go get github.com/writeas/writeas-cli/cmd/writeas" 27 | ] 28 | } 29 | ``` 30 | 31 | Example manifest (yaml): 32 | ```yaml 33 | app-id: writeas-cli 34 | runtime: org.freedesktop.Platform 35 | runtime-version: '21.08' 36 | sdk: org.freedesktop.Sdk 37 | sdk-extensions: 38 | - org.freedesktop.Sdk.Extension.golang 39 | command: echo "Done" 40 | modules: 41 | - name: writeas 42 | buildsystem: simple 43 | build-options: 44 | append-path: /usr/lib/sdk/golang/bin 45 | env: 46 | GOBIN: /app/bin 47 | GO111MODULE: off 48 | GOPATH: /run/build/writeas 49 | build-args: 50 | - --share=network 51 | build-commands: 52 | - go get github.com/writeas/writeas-cli/cmd/writeas 53 | ``` 54 | 55 | 2. Run flatpak-builder with `--keep-build-dirs`. 56 | 3. Run `go-get/flatpak-go-get-generator.py <build-dir>` with build-dir pointing to the build directory in `.flatpak-builder/build`. 57 | 4. Convert the source list to YAML if necessary. 58 | 5. Add the list to the `sources` field of the Go module in the manifest. 59 | 6. Change the build command from `go get` to `go install`. 60 | 7. Remove network access. 61 | 62 | **The script assumes the networked build was run with `GOPATH=$PWD`.** 63 | 64 | ## Example final module 65 | ```json 66 | { 67 | "name": "writeas-cli", 68 | "buildsystem": "simple", 69 | "build-options": { 70 | "env": { 71 | "GOBIN": "/app/bin/" 72 | } 73 | }, 74 | "build-commands": [ 75 | ". /usr/lib/sdk/golang/enable.sh; export GOPATH=$PWD; go install github.com/writeas/writeas-cli/cmd/writeas" 76 | ], 77 | "sources": [ 78 | { 79 | "type": "git", 80 | "url": "https://github.com/atotto/clipboard", 81 | "commit": "aa9549103943c05f3e8951009cdb6a0bec2c8949", 82 | "dest": "src/github.com/atotto/clipboard" 83 | }, 84 | ... 85 | ] 86 | } 87 | ``` 88 | 89 | -------------------------------------------------------------------------------- /go-get/flatpak-go-get-generator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # Copyright 2018 Çağatay Yiğit Şahin 3 | # 4 | # Permission is hereby granted, free of charge, to any person obtaining 5 | # a copy of this software and associated documentation files (the 6 | # "Software"), to deal in the Software without restriction, including 7 | # without limitation the rights to use, copy, modify, merge, publish, 8 | # distribute, sublicense, and/or sell copies of the Software, and to 9 | # permit persons to whom the Software is furnished to do so, subject to 10 | # the following conditions: 11 | # 12 | # The above copyright notice and this permission notice shall be included 13 | # in all copies or substantial portions of the Software. 14 | # 15 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 16 | # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 17 | # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 18 | # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 19 | # CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 20 | # TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 21 | # SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
22 | 23 | from pathlib import Path 24 | from typing import List, Dict 25 | import subprocess 26 | import argparse 27 | import json 28 | 29 | def is_git_repository(p): 30 | is_git_repo = p.is_dir() and (p / ".git").is_dir() 31 | return is_git_repo 32 | 33 | def repo_paths(build_dir: Path) -> List[Path]: 34 | src_dir = build_dir / 'src' 35 | repo_paths: List[Path] = [] 36 | 37 | domains = src_dir.iterdir() 38 | for domain in domains: 39 | domain_users = domain.iterdir() 40 | for user in domain_users: 41 | if is_git_repository(user): 42 | repo_paths.append(user) 43 | else: 44 | user_repos = user.iterdir() 45 | for ur in user_repos: 46 | if is_git_repository(ur): 47 | repo_paths.append(ur) 48 | return repo_paths 49 | 50 | def repo_source(repo_path: Path) -> Dict[str, str]: 51 | def current_commit(repo_path: Path) -> str: 52 | output = subprocess.check_output(['git', 'rev-parse', 'HEAD'], 53 | cwd=repo_path).decode('ascii').strip() 54 | return output 55 | 56 | def remote_url(repo_path: Path) -> str: 57 | output = subprocess.check_output( 58 | ['git', 'remote', 'get-url', 'origin'], 59 | cwd=repo_path).decode('ascii').strip() 60 | return output 61 | 62 | repo_path_str = str(repo_path) 63 | dest_path = repo_path_str[repo_path_str.rfind('src/'):] 64 | source_object = {'type': 'git', 'url': remote_url(repo_path), 'commit': current_commit(repo_path), 'dest': dest_path} 65 | return source_object 66 | 67 | def sources(build_dir: Path) -> List[Dict[str, str]]: 68 | return list(map(repo_source, repo_paths(build_dir))) 69 | 70 | def main(): 71 | def directory(string: str) -> Path: 72 | path = Path(string) 73 | if not path.is_dir(): 74 | msg = 'build-dir should be a directory.' 75 | raise argparse.ArgumentTypeError(msg) 76 | return path 77 | 78 | parser = argparse.ArgumentParser(description='For a Go module’s dependencies, output array of sources in flatpak-manifest format.') 79 | parser.add_argument('build_dir', help='Build directory of the module in .flatpak-builder/build', type=directory) 80 | parser.add_argument('-o', '--output', dest='output_file', help='The file to write the source list to. Default is -sources.json', type=str) 81 | args = parser.parse_args() 82 | source_list = sources(args.build_dir) 83 | 84 | output_file = args.output_file 85 | if output_file is None: 86 | output_file = args.build_dir.absolute().name + '-sources.json' 87 | 88 | with open(output_file, 'w') as out: 89 | json.dump(source_list, out, indent=2) 90 | 91 | if __name__ == '__main__': 92 | main() 93 | -------------------------------------------------------------------------------- /go-get/flatpak-go-vendor-generator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | """ 3 | This is a very pragmatic (i.e. simple) tool for using Go vendor 4 | with flatpak. 5 | 6 | To make use of the tool you need to produce a vendor/modules.txt 7 | through `go mod vendor`. One approach is to modify your manifest to 8 | include 9 | 10 | build-options: 11 | build-args: 12 | - --share=network 13 | 14 | and run `go mod vendor` just before you start to build. 15 | 16 | Once that is done, you should see a "vendors/modules.txt" which 17 | you can point this tool at. 18 | 19 | This tool has a few rough edges, such as special-casing a few things. 20 | For example, it assumes that everything is git-clonable. 21 | Except for certain URLs which are rewritten. 
22 | 23 | The real solution is https://github.com/golang/go/issues/35922 24 | """ 25 | import json 26 | import logging 27 | import sys 28 | import urllib.request 29 | from html.parser import HTMLParser 30 | 31 | import attr 32 | 33 | log = logging.getLogger(__name__) 34 | 35 | @attr.s 36 | class GoModule: 37 | name = attr.ib() 38 | version = attr.ib() 39 | revision = attr.ib() 40 | 41 | def parse_modules(fh): 42 | for line in (l.strip() for l in fh if l.strip()): 43 | log.debug("Read line: %s", line) 44 | if line.startswith("# "): 45 | splits = line.split(" ") 46 | name, line_version = splits[-2], splits[-1] 47 | if '-' in line_version: 48 | log.debug("Parsing version: %s", line_version) 49 | _version, date_revision = line_version.strip().split("-", 1) 50 | try: 51 | log.debug("Splitting %s", date_revision) 52 | date, revision = date_revision.split('-') 53 | except ValueError: 54 | log.debug("no further split of %s", date_revision) 55 | date = None 56 | version = revision = line_version 57 | else: 58 | version = _version 59 | 60 | log.debug("Parsed version into: %s %s %s", version, date, revision) 61 | else: 62 | revision = None 63 | version = line_version 64 | 65 | m = GoModule(name, version, revision) 66 | yield m 67 | 68 | def get_go_redirect(html_data): 69 | class GoImportParser(HTMLParser): 70 | _repo = None 71 | 72 | def handle_starttag(self, tag, attrs): 73 | if self._repo is not None: 74 | return 75 | 76 | # Make a dict of the attribute name/values 77 | # since it's easier to work with and understand. 78 | _attrs = {} 79 | for attr, value in attrs: 80 | _attrs[attr] = value 81 | 82 | name_attr = _attrs.get('name') 83 | if name_attr != 'go-import': 84 | return 85 | content = _attrs.get('content') 86 | if content is not None: 87 | self._repo = content.split(' ')[-1] 88 | 89 | def get_repo(self): 90 | return self._repo 91 | 92 | parser = GoImportParser() 93 | parser.feed(html_data) 94 | return parser.get_repo() 95 | 96 | 97 | def go_module_to_flatpak(m): 98 | if not m.name.startswith("github.com"): 99 | url = m.name 100 | else: 101 | splits = m.name.split('/') 102 | if len(splits) > 3: 103 | url = '/'.join(splits[:3]) 104 | else: 105 | url = m.name 106 | url = "https://" + url 107 | 108 | print('Checking {}...'.format(url), file=sys.stderr) 109 | 110 | try: 111 | with urllib.request.urlopen(url + '?go-get=1') as response: 112 | page_contents = str(response.read()) 113 | except urllib.request.URLError as e: 114 | print('Failed to check {}: {}'.format(url, e), file=sys.stderr) 115 | sys.exit(1) 116 | else: 117 | repo = get_go_redirect(page_contents) 118 | url_found = repo 119 | if url_found != url: 120 | print(' got {}'.format(url_found), file=sys.stderr) 121 | else: 122 | print(' done', file=sys.stderr) 123 | url = url_found 124 | 125 | if not '+' in m.version: 126 | tag = m.version 127 | else: 128 | splits = m.version.split('+') 129 | log.debug(f"Splitting version for {url}: {m.version} {splits}") 130 | tag = splits[0] 131 | 132 | rev = m.revision 133 | source = { 134 | "type": "git", 135 | "url": url, 136 | "tag": tag, 137 | "dest": "vendor/" + m.name, 138 | } 139 | if m.revision: 140 | del source["tag"] 141 | source["commit"] = m.revision 142 | 143 | return source 144 | 145 | def main(): 146 | modules_file = sys.argv[1] 147 | fh = open(modules_file) 148 | fp_modules = [go_module_to_flatpak(m) for m in parse_modules(fh)] 149 | print (json.dumps(fp_modules, indent=4)) 150 | 151 | if __name__ == "__main__": 152 | main() 153 | 
-------------------------------------------------------------------------------- /go-modules/README.md: -------------------------------------------------------------------------------- 1 | # Flatpak with Go Modules 2 | Automatically create the source list for a Go module. 3 | 4 | This method generates sources section with references to ZIP files hosted by the Go modules proxy. 5 | 6 | ## Usage 7 | 8 | Example manifest (yaml): 9 | 10 | ```yaml 11 | app-id: io.github.golang.tools.gorename 12 | runtime: org.freedesktop.Platform 13 | runtime-version: '23.08' 14 | sdk: org.freedesktop.Sdk 15 | sdk-extensions: 16 | - org.freedesktop.Sdk.Extension.golang 17 | command: gorename 18 | modules: 19 | - name: gorename 20 | buildsystem: simple 21 | build-options: 22 | append-path: /usr/lib/sdk/golang/bin 23 | build-commands: 24 | - go install -mod=vendor ./cmd/gorename 25 | sources: 26 | - type: git 27 | url: https://github.com/golang/tools 28 | commit: e81af27852c63b9432c0b5bb49707a7f207ef21b 29 | ``` 30 | 31 | 1. Install `flatpak-go-mod`: `go install github.com/dennwc/flatpak-go-mod@latest` 32 | 2. Run it in Flatpak project directory: `flatpak-go-mod ./path/to/your/go/project` 33 | 3. Append generated `go.mod.yaml` file to `sources` section of the manifest. 34 | 4. Add `modules.txt` to your Flatpak project. 35 | 5. Build/install your binary in vendor mode (with `-mod=vendor`). 36 | 37 | ## Final manifest example 38 | 39 | ```yaml 40 | app-id: io.github.golang.tools.gorename 41 | runtime: org.freedesktop.Platform 42 | runtime-version: '23.08' 43 | sdk: org.freedesktop.Sdk 44 | sdk-extensions: 45 | - org.freedesktop.Sdk.Extension.golang 46 | command: gorename 47 | modules: 48 | - name: gorename 49 | buildsystem: simple 50 | build-options: 51 | append-path: /usr/lib/sdk/golang/bin 52 | build-commands: 53 | - go install -mod=vendor ./cmd/gorename 54 | sources: 55 | - type: git 56 | url: https://github.com/golang/tools 57 | commit: e81af27852c63b9432c0b5bb49707a7f207ef21b 58 | 59 | # Workaround for Go modules generated by github.com/dennwc/flatpak-go-mod 60 | - type: file 61 | path: modules.txt 62 | dest: vendor 63 | 64 | - type: archive 65 | url: https://proxy.golang.org/github.com/yuin/goldmark/@v/v1.4.13.zip 66 | strip-components: 3 67 | dest: vendor/github.com/yuin/goldmark 68 | sha256: bb41a602b174345fda392c8ad83fcc93217c285c763699677630be90feb7a5e3 69 | 70 | - type: archive 71 | url: https://proxy.golang.org/golang.org/x/mod/@v/v0.7.0.zip 72 | strip-components: 3 73 | dest: vendor/golang.org/x/mod 74 | sha256: 24abd1db13329873d72034dc27efad09cbc37d39cf28b8ff7bb3c2adc8eedef7 75 | 76 | - type: archive 77 | url: https://proxy.golang.org/golang.org/x/net/@v/v0.5.0.zip 78 | strip-components: 3 79 | dest: vendor/golang.org/x/net 80 | sha256: 0e606881eeb2f572b3d61ad2a639e79cad002064090c75c838aa2f4feca61c8e 81 | 82 | - type: archive 83 | url: https://proxy.golang.org/golang.org/x/sys/@v/v0.4.0.zip 84 | strip-components: 3 85 | dest: vendor/golang.org/x/sys 86 | sha256: efa9354fcaa709825bbb1c86b83e2347cebb5349f4326cc4c8ccb972ad32032c 87 | 88 | - type: archive 89 | url: https://proxy.golang.org/golang.org/x/sync/@v/v0.1.0.zip 90 | strip-components: 3 91 | dest: vendor/golang.org/x/sync 92 | sha256: f510bec6009e19882d19953e7273137d34df86c65949345d72f123a255c2ecd2 93 | ``` -------------------------------------------------------------------------------- /gradle/README.md: -------------------------------------------------------------------------------- 1 | # Flatpak Gradle Generator 2 | 3 | Tool to automatically 
generate a `flatpak-builder` sources file from a Gradle log. 4 | 5 | ## Requirements 6 | 7 | You need to have `org.freedesktop.Sdk` and `org.freedesktop.Sdk.Extension.openjdk11` installed, 8 | both branch 21.08. 9 | 10 | ## Usage 11 | 12 | From within the application's source directory, run: 13 | 14 | ``` 15 | flatpak run --command=bash --share=network --filesystem=`pwd` -d org.freedesktop.Sdk//21.08 16 | ``` 17 | 18 | This will enter the sandbox, sharing just the current directory between your host system 19 | and the application's build environment; then run whatever is needed to compile the application, for example, 20 | for Ghidra: 21 | 22 | ```sh 23 | $ source /usr/lib/sdk/openjdk11/enable.sh 24 | $ rm -rf gradle-cache 25 | $ mkdir -p dependencies/flatRepo/ 26 | 27 | # Install some ghidra specific files, should probably be installed by hand/outside this script 28 | $ wget https://github.com/pxb1988/dex2jar/releases/download/2.0/dex-tools-2.0.zip 29 | $ unzip -j dex-tools-2.0.zip "*.jar" -d dependencies/flatRepo/ 30 | $ wget -P dependencies/flatRepo/ https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/android4me/AXMLPrinter2.jar 31 | 32 | # Launch gradle build with `--info` to log all the http(s) URLs 33 | $ gradle -g gradle-cache/ --info --console plain buildGhidra > gradle-log.txt 34 | ``` 35 | 36 | Then exit the sandbox (Ctrl+D or `exit`), and parse the build log by running: 37 | 38 | ``` 39 | flatpak-gradle-generator.py gradle-log.txt gradle-dependencies.json 40 | ``` 41 | 42 | To make reproducing the build easier, we recommend that you create a `script.sh` with 43 | the manual commands above to ship in your Flatpak repository, so you can run: 44 | 45 | ``` 46 | $ flatpak run --command=bash --share=network --filesystem=`pwd` -d org.freedesktop.Sdk//21.08 ./script.sh 47 | $ flatpak-gradle-generator.py gradle-log.txt gradle-dependencies.json 48 | ``` 49 | -------------------------------------------------------------------------------- /gradle/flatpak-gradle-generator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | __license__ = 'MIT' 4 | import aiohttp 5 | import argparse 6 | import asyncio 7 | import json 8 | import hashlib 9 | import logging 10 | import re 11 | 12 | arches = { 13 | 'linux-x86_64': 'x86_64', 14 | 'linux-x86_32': 'i386', 15 | 'linux-aarch_64': 'aarch64', 16 | 'linux-aarch_32': 'arm' 17 | } 18 | 19 | async def get_remote_sha256(url): 20 | logging.info(f"started sha256({url})") 21 | sha256 = hashlib.sha256() 22 | async with aiohttp.ClientSession(raise_for_status=True) as http_session: 23 | async with http_session.get(url) as response: 24 | while True: 25 | data = await response.content.read(4096) 26 | if not data: 27 | break 28 | sha256.update(data) 29 | logging.info(f"done sha256({url})") 30 | return sha256.hexdigest() 31 | 32 | async def parse_url(url, destdir, arch=None): 33 | ret = [{ 'type': 'file', 34 | 'url': url, 35 | 'sha256': await get_remote_sha256(url), 36 | 'dest': destdir, }] 37 | if arch: 38 | ret[0]['only-arches'] = [arch] 39 | return ret 40 | 41 | def arch_for_url(url, urls_arch): 42 | arch = None 43 | try: 44 | arch = urls_arch[url] 45 | except KeyError: 46 | pass 47 | return arch 48 | 49 | async def parse_urls(urls, urls_arch, destdir): 50 | sources = [] 51 | sha_coros = [] 52 | for url in urls: 53 | arch = arch_for_url(url, urls_arch) 54 | sha_coros.append(parse_url(str(url), destdir, arch)) 55 | sources.extend(sum(await asyncio.gather(*sha_coros), [])) 56 | return
sources 57 | 58 | def gradle_arch_to_flatpak_arch(arch): 59 | return arches[arch] 60 | 61 | def flatpak_arch_to_gradle_arch(arch): 62 | rev_arches = dict((v, k) for k, v in arches.items()) 63 | return rev_arches[arch] 64 | 65 | def main(): 66 | parser = argparse.ArgumentParser() 67 | parser.add_argument('input', help='The gradle log file') 68 | parser.add_argument('output', help='The output JSON sources file') 69 | parser.add_argument('--destdir', 70 | help='The directory the generated sources file will save sources to', 71 | default='dependencies') 72 | parser.add_argument('--arches', 73 | help='Comma-separated list of architectures the generated sources will be for', 74 | default='x86_64,aarch64,i386,arm') 75 | args = parser.parse_args() 76 | req_flatpak_arches = args.arches.split(',') 77 | req_gradle_arches = [] 78 | for arch in req_flatpak_arches: 79 | req_gradle_arches.append(flatpak_arch_to_gradle_arch(arch)) 80 | 81 | urls = [] 82 | urls_arch = {} 83 | r = re.compile('https://[\\w/\\-?=%.]+\\.[\\w/\\-?=%.]+') 84 | with open(args.input,'r') as f: 85 | for lines in f: 86 | res = r.findall(lines) 87 | for url in res: 88 | if url.endswith('.jar'): 89 | urls.append(url) 90 | elif url.endswith('.exe'): 91 | for host in req_gradle_arches: 92 | if host in url: 93 | for arch in req_gradle_arches: 94 | new_url = url.replace(host, arch) 95 | urls.append(new_url) 96 | urls_arch[new_url] = gradle_arch_to_flatpak_arch(arch) 97 | 98 | # print(urls) 99 | # print(urls_arch) 100 | 101 | sources = asyncio.run(parse_urls(urls, urls_arch, args.destdir)) 102 | 103 | with open(args.output, 'w') as fp: 104 | json.dump(sources, fp, indent=4) 105 | fp.write('\n') 106 | 107 | 108 | if __name__ == '__main__': 109 | main() 110 | -------------------------------------------------------------------------------- /node/.gitattributes: -------------------------------------------------------------------------------- 1 | **/generated-sources.json linguist-generated=true 2 | **/package-lock.json linguist-generated=true 3 | -------------------------------------------------------------------------------- /node/.gitignore: -------------------------------------------------------------------------------- 1 | .mypy_cache/ 2 | .flatpak-builder/ 3 | .pytest_cache/ 4 | .venv/ 5 | _build/ 6 | __pycache__/ 7 | node_modules/ 8 | npm-cache-*/ 9 | yarn-mirror/ 10 | -------------------------------------------------------------------------------- /node/flatpak_node_generator/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flatpak/flatpak-builder-tools/ea9bfa22d175066dd3044544cc55aa070f8282f4/node/flatpak_node_generator/__init__.py -------------------------------------------------------------------------------- /node/flatpak_node_generator/__main__.py: -------------------------------------------------------------------------------- 1 | from .main import main 2 | 3 | main() 4 | -------------------------------------------------------------------------------- /node/flatpak_node_generator/cache.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import IO, Iterator, Optional, Type 3 | 4 | import os 5 | import re 6 | import tempfile 7 | import types 8 | 9 | 10 | class Cache: 11 | instance: 'Cache' 12 | 13 | @classmethod 14 | def get_working_instance_if(cls, condition: bool) -> 'Cache': 15 | return cls.instance if condition else NullCache() 16 | 17 | class BucketReader: 18 | def 
read_parts(self, size: int) -> Iterator[bytes]: 19 | raise NotImplementedError 20 | 21 | def read_all(self) -> bytes: 22 | raise NotImplementedError 23 | 24 | def close(self) -> None: 25 | raise NotImplementedError 26 | 27 | def __enter__(self) -> 'Cache.BucketReader': 28 | return self 29 | 30 | def __exit__( 31 | self, 32 | exc_type: Optional[Type[BaseException]], 33 | exc_value: Optional[BaseException], 34 | traceback: Optional[types.TracebackType], 35 | ) -> None: 36 | self.close() 37 | 38 | class BucketWriter: 39 | def write(self, data: bytes) -> None: 40 | raise NotImplementedError 41 | 42 | def cancel(self) -> None: 43 | raise NotImplementedError 44 | 45 | def seal(self) -> None: 46 | raise NotImplementedError 47 | 48 | def __enter__(self) -> 'Cache.BucketWriter': 49 | return self 50 | 51 | def __exit__( 52 | self, 53 | exc_type: Optional[Type[BaseException]], 54 | exc_value: Optional[BaseException], 55 | traceback: Optional[types.TracebackType], 56 | ) -> None: 57 | if traceback is None: 58 | self.seal() 59 | else: 60 | self.cancel() 61 | 62 | class BucketRef: 63 | def __init__(self, key: str) -> None: 64 | self.key = key 65 | 66 | def open_read(self) -> Optional['Cache.BucketReader']: 67 | raise NotImplementedError 68 | 69 | def open_write(self) -> 'Cache.BucketWriter': 70 | raise NotImplementedError 71 | 72 | def get(self, key: str) -> BucketRef: 73 | raise NotImplementedError 74 | 75 | 76 | class NullCache(Cache): 77 | class NullBucketWriter(Cache.BucketWriter): 78 | def write(self, data: bytes) -> None: 79 | pass 80 | 81 | def cancel(self) -> None: 82 | pass 83 | 84 | def seal(self) -> None: 85 | pass 86 | 87 | class NullBucketRef(Cache.BucketRef): 88 | def __init__(self, key: str) -> None: 89 | super().__init__(key) 90 | 91 | def open_read(self) -> Optional[Cache.BucketReader]: 92 | return None 93 | 94 | def open_write(self) -> Cache.BucketWriter: 95 | return NullCache.NullBucketWriter() 96 | 97 | def get(self, key: str) -> Cache.BucketRef: 98 | return NullCache.NullBucketRef(key) 99 | 100 | 101 | class FilesystemBasedCache(Cache): 102 | _SUBDIR = 'flatpak-node-generator' 103 | _KEY_CHAR_ESCAPE_RE = re.compile(r'[^A-Za-z0-9._\-]') 104 | 105 | def __init__(self, cache_root: Optional[Path] = None) -> None: 106 | self._cache_root = cache_root or self._default_cache_root() 107 | 108 | @staticmethod 109 | def _escape_key(key: str) -> str: 110 | return FilesystemBasedCache._KEY_CHAR_ESCAPE_RE.sub( 111 | lambda m: f'_{ord(m.group()):02X}', key 112 | ) 113 | 114 | class FilesystemBucketReader(Cache.BucketReader): 115 | def __init__(self, file: IO[bytes]) -> None: 116 | self.file = file 117 | 118 | def close(self) -> None: 119 | self.file.close() 120 | 121 | def read_parts(self, size: int) -> Iterator[bytes]: 122 | while True: 123 | data = self.file.read(size) 124 | if not data: 125 | break 126 | 127 | yield data 128 | 129 | def read_all(self) -> bytes: 130 | return self.file.read() 131 | 132 | class FilesystemBucketWriter(Cache.BucketWriter): 133 | def __init__(self, file: IO[bytes], temp: Path, target: Path) -> None: 134 | self.file = file 135 | self.temp = temp 136 | self.target = target 137 | 138 | def write(self, data: bytes) -> None: 139 | self.file.write(data) 140 | 141 | def cancel(self) -> None: 142 | self.file.close() 143 | self.temp.unlink() 144 | 145 | def seal(self) -> None: 146 | self.file.close() 147 | self.temp.rename(self.target) 148 | 149 | class FilesystemBucketRef(Cache.BucketRef): 150 | def __init__(self, key: str, cache_root: Path) -> None: 151 | 
super().__init__(key) 152 | self._cache_root = cache_root 153 | 154 | self._cache_path = self._cache_root / FilesystemBasedCache._escape_key(key) 155 | 156 | def open_read(self) -> Optional[Cache.BucketReader]: 157 | try: 158 | fp = self._cache_path.open('rb') 159 | except FileNotFoundError: 160 | return None 161 | else: 162 | return FilesystemBasedCache.FilesystemBucketReader(fp) 163 | 164 | def open_write(self) -> Cache.BucketWriter: 165 | target = self._cache_path 166 | if not target.parent.exists(): 167 | target.parent.mkdir(exist_ok=True, parents=True) 168 | 169 | fd, temp = tempfile.mkstemp(dir=self._cache_root, prefix='__temp__') 170 | return FilesystemBasedCache.FilesystemBucketWriter( 171 | os.fdopen(fd, 'wb'), Path(temp), target 172 | ) 173 | 174 | @classmethod 175 | def _default_cache_root(cls) -> Path: 176 | xdg_cache_home = os.environ.get( 177 | 'XDG_CACHE_HOME', os.path.expanduser('~/.cache') 178 | ) 179 | return Path(xdg_cache_home) / cls._SUBDIR 180 | 181 | def get(self, key: str) -> Cache.BucketRef: 182 | return FilesystemBasedCache.FilesystemBucketRef(key, self._cache_root) 183 | 184 | 185 | Cache.instance = NullCache() 186 | -------------------------------------------------------------------------------- /node/flatpak_node_generator/electron.py: -------------------------------------------------------------------------------- 1 | from typing import Dict, Iterator, NamedTuple, Optional 2 | 3 | import hashlib 4 | import os.path 5 | import urllib.parse 6 | 7 | from .integrity import Integrity 8 | from .package import SemVer 9 | from .requests import Requests 10 | 11 | 12 | class ElectronBinaryManager: 13 | class Arch(NamedTuple): 14 | electron: str 15 | flatpak: str 16 | 17 | class Binary(NamedTuple): 18 | filename: str 19 | url: str 20 | integrity: Integrity 21 | 22 | arch: Optional['ElectronBinaryManager.Arch'] = None 23 | 24 | @property 25 | def url_hash(self) -> str: 26 | url = urllib.parse.urlparse(self.url) 27 | url_dir = urllib.parse.urlunparse( 28 | url._replace(path=os.path.dirname(url.path)) 29 | ) 30 | return hashlib.sha256(url_dir.encode()).hexdigest() 31 | 32 | ELECTRON_ARCHES_TO_FLATPAK = { 33 | 'ia32': 'i386', 34 | 'x64': 'x86_64', 35 | 'armv7l': 'arm', 36 | 'arm64': 'aarch64', 37 | } 38 | 39 | INTEGRITY_BASE_FILENAME = 'SHASUMS256.txt' 40 | 41 | def __init__( 42 | self, version: str, base_url: str, integrities: Dict[str, Integrity] 43 | ) -> None: 44 | self.version = version 45 | self.base_url = base_url 46 | self.integrities = integrities 47 | 48 | def child_url(self, child: str) -> str: 49 | return f'{self.base_url}/{child}' 50 | 51 | def find_binaries(self, binary: str) -> Iterator['ElectronBinaryManager.Binary']: 52 | for electron_arch, flatpak_arch in self.ELECTRON_ARCHES_TO_FLATPAK.items(): 53 | # Electron v19+ drop linux-ia32 support. 
54 | if ( 55 | SemVer.parse(self.version) >= SemVer.parse('19.0.0') 56 | and electron_arch == 'ia32' 57 | ): 58 | continue 59 | 60 | binary_filename = f'{binary}-v{self.version}-linux-{electron_arch}.zip' 61 | binary_url = self.child_url(binary_filename) 62 | 63 | arch = ElectronBinaryManager.Arch( 64 | electron=electron_arch, flatpak=flatpak_arch 65 | ) 66 | yield ElectronBinaryManager.Binary( 67 | filename=binary_filename, 68 | url=binary_url, 69 | integrity=self.integrities[binary_filename], 70 | arch=arch, 71 | ) 72 | 73 | @property 74 | def integrity_file(self) -> 'ElectronBinaryManager.Binary': 75 | return ElectronBinaryManager.Binary( 76 | filename=f'SHASUMS256.txt-{self.version}', 77 | url=self.child_url(self.INTEGRITY_BASE_FILENAME), 78 | integrity=self.integrities[self.INTEGRITY_BASE_FILENAME], 79 | ) 80 | 81 | @staticmethod 82 | async def for_version( 83 | version: str, *, base_url: Optional[str] = None 84 | ) -> 'ElectronBinaryManager': 85 | if base_url is None: 86 | base_url = ( 87 | f'https://github.com/electron/electron/releases/download/v{version}' 88 | ) 89 | 90 | integrity_url = f'{base_url}/{ElectronBinaryManager.INTEGRITY_BASE_FILENAME}' 91 | integrity_data = ( 92 | await Requests.instance.read_all(integrity_url, cachable=True) 93 | ).decode() 94 | 95 | integrities: Dict[str, Integrity] = {} 96 | for line in integrity_data.splitlines(): 97 | digest, star_filename = line.split() 98 | filename = star_filename.strip('*') 99 | integrities[filename] = Integrity(algorithm='sha256', digest=digest) 100 | 101 | integrities[ElectronBinaryManager.INTEGRITY_BASE_FILENAME] = Integrity.generate( 102 | integrity_data 103 | ) 104 | 105 | return ElectronBinaryManager( 106 | version=version, base_url=base_url, integrities=integrities 107 | ) 108 | -------------------------------------------------------------------------------- /node/flatpak_node_generator/integrity.py: -------------------------------------------------------------------------------- 1 | from typing import Any, NamedTuple, Union 2 | 3 | import base64 4 | import binascii 5 | import hashlib 6 | 7 | 8 | class Integrity(NamedTuple): 9 | algorithm: str 10 | digest: str 11 | 12 | @staticmethod 13 | def parse(value: str) -> 'Integrity': 14 | algorithm, encoded_digest = value.split('-', 1) 15 | assert algorithm.startswith('sha'), algorithm 16 | digest = binascii.hexlify(base64.b64decode(encoded_digest)).decode() 17 | 18 | return Integrity(algorithm, digest) 19 | 20 | @staticmethod 21 | def from_sha1(sha1: str) -> 'Integrity': 22 | assert len(sha1) == 40, f'Invalid length of sha1: {sha1}' 23 | return Integrity('sha1', sha1) 24 | 25 | @staticmethod 26 | def generate(data: Union[str, bytes], *, algorithm: str = 'sha256') -> 'Integrity': 27 | builder = IntegrityBuilder(algorithm) 28 | builder.update(data) 29 | return builder.build() 30 | 31 | @staticmethod 32 | def from_json_object(data: Any) -> 'Integrity': 33 | return Integrity(algorithm=data['algorithm'], digest=data['digest']) 34 | 35 | def to_json_object(self) -> Any: 36 | return {'algorithm': self.algorithm, 'digest': self.digest} 37 | 38 | def to_base64(self) -> str: 39 | return base64.b64encode(binascii.unhexlify(self.digest)).decode() 40 | 41 | 42 | class IntegrityBuilder: 43 | def __init__(self, algorithm: str = 'sha256') -> None: 44 | self.algorithm = algorithm 45 | self._hasher = hashlib.new(algorithm) 46 | 47 | def update(self, data: Union[str, bytes]) -> None: 48 | data_bytes: bytes 49 | if isinstance(data, str): 50 | data_bytes = data.encode() 51 | else: 52 | data_bytes 
= data 53 | self._hasher.update(data_bytes) 54 | 55 | def build(self) -> Integrity: 56 | return Integrity(algorithm=self.algorithm, digest=self._hasher.hexdigest()) 57 | -------------------------------------------------------------------------------- /node/flatpak_node_generator/manifest.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import ( 3 | Any, 4 | ContextManager, 5 | Dict, 6 | Iterator, 7 | List, 8 | Optional, 9 | Set, 10 | Tuple, 11 | Type, 12 | Union, 13 | ) 14 | 15 | import base64 16 | import json 17 | import types 18 | 19 | from .integrity import Integrity 20 | 21 | 22 | class ManifestGenerator(ContextManager['ManifestGenerator']): 23 | MAX_GITHUB_SIZE = 49 * 1000 * 1000 24 | JSON_INDENT = 4 25 | 26 | def __init__(self) -> None: 27 | # Store the dicts as a set of tuples, then rebuild the dict when returning it. 28 | # That way, we ensure uniqueness. 29 | self._sources: Set[Tuple[Tuple[str, Any], ...]] = set() 30 | self._commands: List[str] = [] 31 | 32 | def __exit__( 33 | self, 34 | exc_type: Optional[Type[BaseException]], 35 | exc_value: Optional[BaseException], 36 | tb: Optional[types.TracebackType], 37 | ) -> None: 38 | self._finalize() 39 | 40 | @property 41 | def data_root(self) -> Path: 42 | return Path('flatpak-node') 43 | 44 | @property 45 | def tmp_root(self) -> Path: 46 | return self.data_root / 'tmp' 47 | 48 | @property 49 | def source_count(self) -> int: 50 | return len(self._sources) 51 | 52 | def ordered_sources(self) -> Iterator[Dict[Any, Any]]: 53 | return map(dict, sorted(self._sources)) 54 | 55 | def split_sources(self) -> Iterator[List[Dict[Any, Any]]]: 56 | BASE_CURRENT_SIZE = len('[\n]') 57 | current_size = BASE_CURRENT_SIZE 58 | current: List[Dict[Any, Any]] = [] 59 | 60 | for source in self.ordered_sources(): 61 | # Generate one source by itself, then check the length without the closing and 62 | # opening brackets. 
63 | source_json = json.dumps([source], indent=ManifestGenerator.JSON_INDENT) 64 | source_json_len = len('\n'.join(source_json.splitlines()[1:-1])) 65 | if current_size + source_json_len >= ManifestGenerator.MAX_GITHUB_SIZE: 66 | yield current 67 | current = [] 68 | current_size = BASE_CURRENT_SIZE 69 | current.append(source) 70 | current_size += source_json_len 71 | 72 | if current: 73 | yield current 74 | 75 | def _add_source(self, source: Dict[str, Any]) -> None: 76 | self._sources.add(tuple(source.items())) 77 | 78 | def _add_source_with_destination( 79 | self, 80 | source: Dict[str, Any], 81 | destination: Optional[Path], 82 | *, 83 | is_dir: bool, 84 | only_arches: Optional[List[str]] = None, 85 | ) -> None: 86 | if destination is not None: 87 | if is_dir: 88 | source['dest'] = str(destination) 89 | else: 90 | source['dest-filename'] = destination.name 91 | if len(destination.parts) > 1: 92 | source['dest'] = str(destination.parent) 93 | 94 | if only_arches: 95 | source['only-arches'] = tuple(only_arches) 96 | 97 | self._add_source(source) 98 | 99 | def add_local_file_source( 100 | self, 101 | path: Path, 102 | destination: Optional[Path] = None, 103 | *, 104 | only_arches: Optional[List[str]] = None, 105 | ) -> None: 106 | source: Dict[str, Any] = { 107 | 'type': 'file', 108 | 'path': str(path), 109 | } 110 | self._add_source_with_destination( 111 | source, destination, is_dir=False, only_arches=only_arches 112 | ) 113 | 114 | def add_url_source( 115 | self, 116 | url: str, 117 | integrity: Integrity, 118 | destination: Optional[Path] = None, 119 | *, 120 | only_arches: Optional[List[str]] = None, 121 | ) -> None: 122 | source: Dict[str, Any] = { 123 | 'type': 'file', 124 | 'url': url, 125 | integrity.algorithm: integrity.digest, 126 | } 127 | self._add_source_with_destination( 128 | source, destination, is_dir=False, only_arches=only_arches 129 | ) 130 | 131 | def add_archive_source( 132 | self, 133 | url: str, 134 | integrity: Integrity, 135 | destination: Optional[Path] = None, 136 | only_arches: Optional[List[str]] = None, 137 | strip_components: int = 1, 138 | ) -> None: 139 | source: Dict[str, Any] = { 140 | 'type': 'archive', 141 | 'url': url, 142 | 'strip-components': strip_components, 143 | integrity.algorithm: integrity.digest, 144 | } 145 | self._add_source_with_destination( 146 | source, destination, is_dir=True, only_arches=only_arches 147 | ) 148 | 149 | def add_data_source(self, data: Union[str, bytes], destination: Path) -> None: 150 | if isinstance(data, bytes): 151 | source = { 152 | 'type': 'inline', 153 | 'contents': base64.b64encode(data).decode('ascii'), 154 | 'base64': True, 155 | } 156 | else: 157 | assert isinstance(data, str) 158 | source = { 159 | 'type': 'inline', 160 | 'contents': data, 161 | } 162 | self._add_source_with_destination(source, destination, is_dir=False) 163 | 164 | def add_git_source( 165 | self, url: str, commit: str, destination: Optional[Path] = None 166 | ) -> None: 167 | source = {'type': 'git', 'url': url, 'commit': commit} 168 | self._add_source_with_destination(source, destination, is_dir=True) 169 | 170 | def add_script_source(self, commands: List[str], destination: Path) -> None: 171 | source = {'type': 'script', 'commands': tuple(commands)} 172 | self._add_source_with_destination(source, destination, is_dir=False) 173 | 174 | def add_shell_source( 175 | self, 176 | commands: List[str], 177 | destination: Optional[Path] = None, 178 | only_arches: Optional[List[str]] = None, 179 | ) -> None: 180 | """This might be slow for 
multiple instances. Use `add_command()` instead.""" 181 | source = {'type': 'shell', 'commands': tuple(commands)} 182 | self._add_source_with_destination( 183 | source, 184 | destination=destination, 185 | only_arches=only_arches, 186 | is_dir=True, 187 | ) 188 | 189 | def add_command(self, command: str) -> None: 190 | self._commands.append(command) 191 | 192 | def _finalize(self) -> None: 193 | if self._commands: 194 | self._add_source({'type': 'shell', 'commands': tuple(self._commands)}) 195 | -------------------------------------------------------------------------------- /node/flatpak_node_generator/node_headers.py: -------------------------------------------------------------------------------- 1 | from typing import NamedTuple, Optional 2 | 3 | 4 | class NodeHeaders(NamedTuple): 5 | target: str 6 | runtime: str 7 | disturl: str 8 | 9 | @classmethod 10 | def with_defaults( 11 | cls, 12 | target: str, 13 | runtime: Optional[str] = None, 14 | disturl: Optional[str] = None, 15 | ) -> 'NodeHeaders': 16 | if runtime is None: 17 | runtime = 'node' 18 | if disturl is None: 19 | if runtime == 'node': 20 | disturl = 'http://nodejs.org/dist' 21 | elif runtime == 'electron': 22 | disturl = 'https://www.electronjs.org/headers' 23 | else: 24 | raise ValueError( 25 | f"Can't guess `disturl` for {runtime} version {target}" 26 | ) 27 | return cls(target, runtime, disturl) 28 | 29 | @property 30 | def url(self) -> str: 31 | # TODO it may be better to retrieve urls from disturl/index.json 32 | return f'{self.disturl}/v{self.target}/node-v{self.target}-headers.tar.gz' 33 | 34 | @property 35 | def install_version(self) -> str: 36 | # FIXME not sure if this static value will always work 37 | return '9' 38 | -------------------------------------------------------------------------------- /node/flatpak_node_generator/package.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from pathlib import Path 3 | from typing import List, NamedTuple, Optional, Tuple, Union 4 | 5 | import abc 6 | import functools 7 | import re 8 | 9 | from .integrity import Integrity 10 | from .url_metadata import RemoteUrlMetadata 11 | 12 | 13 | @dataclass(frozen=True, order=True, eq=True) 14 | class SemVer: 15 | # Note that we ignore the metadata part, since all we do is version 16 | # comparisons. 17 | _SEMVER_RE = re.compile(r'(\d+)\.(\d+)\.(\d+)(?:-(?P[^+]+)(\+|$))?') 18 | 19 | @functools.total_ordering 20 | class Prerelease: 21 | def __init__(self, parts: Tuple[Union[str, int], ...]) -> None: 22 | self._parts = parts 23 | 24 | @staticmethod 25 | def parse(rel: str) -> Optional['SemVer.Prerelease']: 26 | if not rel: 27 | return None 28 | 29 | parts: List[Union[str, int]] = [] 30 | 31 | for part in rel.split('.'): # type: Union[str, int] 32 | try: 33 | part = int(part) 34 | except ValueError: 35 | pass 36 | 37 | parts.append(part) 38 | 39 | return SemVer.Prerelease(tuple(parts)) 40 | 41 | @property 42 | def parts(self) -> Tuple[Union[str, int], ...]: 43 | return self._parts 44 | 45 | def __lt__(self, other: object) -> bool: 46 | if not isinstance(other, SemVer.Prerelease): 47 | return NotImplemented 48 | 49 | for our_part, other_part in zip(self._parts, other._parts): 50 | if type(our_part) == type(other_part): 51 | if our_part < other_part: # type: ignore 52 | return True 53 | # Number parts are always less than strings. 
54 | elif isinstance(our_part, int): 55 | return True 56 | 57 | return len(self._parts) < len(other._parts) 58 | 59 | def __eq__(self, other: object) -> bool: 60 | if not isinstance(other, SemVer.Prerelease): 61 | return NotImplemented 62 | 63 | return self._parts == other._parts 64 | 65 | def __repr__(self) -> str: 66 | return f'Prerelease(parts={self.parts})' 67 | 68 | major: int 69 | minor: int 70 | patch: int 71 | prerelease: Optional[Prerelease] = None 72 | 73 | @staticmethod 74 | def parse(version: str) -> 'SemVer': 75 | match = SemVer._SEMVER_RE.match(version) 76 | if match is None: 77 | raise ValueError(f'Invalid semver version: {version}') 78 | 79 | major, minor, patch = map(int, match.groups()[:3]) 80 | prerelease = SemVer.Prerelease.parse(match.group('prerelease')) 81 | 82 | return SemVer(major, minor, patch, prerelease) 83 | 84 | 85 | class PackageSource(abc.ABC): 86 | pass 87 | 88 | 89 | @dataclass(frozen=True, eq=True) 90 | class PackageFileSource(PackageSource): 91 | integrity: Optional[Integrity] 92 | 93 | 94 | @dataclass(frozen=True, eq=True) 95 | class PackageURLSource(PackageFileSource): 96 | resolved: str 97 | 98 | async def retrieve_integrity(self) -> Integrity: 99 | if self.integrity is not None: 100 | return self.integrity 101 | else: 102 | url = self.resolved 103 | assert url is not None, 'registry source has no resolved URL' 104 | metadata = await RemoteUrlMetadata.get(url, cachable=True) 105 | return metadata.integrity 106 | 107 | 108 | @dataclass(frozen=True, eq=True) 109 | class RegistrySource(PackageFileSource): 110 | pass 111 | 112 | 113 | @dataclass(frozen=True, eq=True) 114 | class ResolvedSource(RegistrySource, PackageURLSource): 115 | pass 116 | 117 | 118 | @dataclass(frozen=True, eq=True) 119 | class GitSource(PackageSource): 120 | original: str 121 | url: str 122 | commit: str 123 | from_: Optional[str] 124 | 125 | 126 | @dataclass(frozen=True, eq=True) 127 | class LocalSource(PackageSource): 128 | path: str 129 | 130 | 131 | class Package(NamedTuple): 132 | name: str 133 | version: str 134 | source: PackageSource 135 | lockfile: Path 136 | -------------------------------------------------------------------------------- /node/flatpak_node_generator/progress.py: -------------------------------------------------------------------------------- 1 | from typing import Collection, ContextManager, Optional, Type 2 | 3 | import asyncio 4 | import shutil 5 | import sys 6 | import types 7 | 8 | from .package import Package 9 | from .providers import ModuleProvider 10 | 11 | 12 | class GeneratorProgress(ContextManager['GeneratorProgress']): 13 | def __init__( 14 | self, 15 | packages: Collection[Package], 16 | module_provider: ModuleProvider, 17 | max_parallel: int, 18 | ) -> None: 19 | self.finished = 0 20 | self.packages = packages 21 | self.module_provider = module_provider 22 | self.parallel_limit = asyncio.Semaphore(max_parallel) 23 | self.previous_package: Optional[Package] = None 24 | self.current_package: Optional[Package] = None 25 | 26 | def __exit__( 27 | self, 28 | exc_type: Optional[Type[BaseException]], 29 | exc_value: Optional[BaseException], 30 | tb: Optional[types.TracebackType], 31 | ) -> None: 32 | print() 33 | 34 | def _format_package(self, package: Package, max_width: int) -> str: 35 | result = f'{package.name} @ {package.version}' 36 | 37 | if len(result) > max_width: 38 | result = result[: max_width - 3] + '...' 
39 | 40 | return result 41 | 42 | def _update(self) -> None: 43 | columns, _ = shutil.get_terminal_size() 44 | 45 | sys.stdout.write('\r' + ' ' * columns) 46 | 47 | prefix_string = f'\rGenerating packages [{self.finished}/{len(self.packages)}] ' 48 | sys.stdout.write(prefix_string) 49 | max_package_width = columns - len(prefix_string) 50 | 51 | if self.current_package is not None: 52 | sys.stdout.write( 53 | self._format_package(self.current_package, max_package_width) 54 | ) 55 | 56 | sys.stdout.flush() 57 | 58 | def _update_with_package(self, package: Package) -> None: 59 | self.previous_package, self.current_package = ( 60 | self.current_package, 61 | package, 62 | ) 63 | self._update() 64 | 65 | async def _generate(self, package: Package) -> None: 66 | async with self.parallel_limit: 67 | self._update_with_package(package) 68 | await self.module_provider.generate_package(package) 69 | self.finished += 1 70 | self._update_with_package(package) 71 | 72 | async def run(self) -> None: 73 | self._update() 74 | 75 | tasks = [asyncio.create_task(self._generate(pkg)) for pkg in self.packages] 76 | for coro in asyncio.as_completed(tasks): 77 | try: 78 | await coro 79 | except: 80 | # If an exception occurred, make sure to cancel all the other 81 | # tasks. 82 | for task in tasks: 83 | task.cancel() 84 | 85 | raise 86 | -------------------------------------------------------------------------------- /node/flatpak_node_generator/providers/__init__.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | from typing import ContextManager, Dict, Iterator, List, Optional 3 | 4 | import re 5 | import urllib.parse 6 | 7 | from ..manifest import ManifestGenerator 8 | from ..node_headers import NodeHeaders 9 | from ..package import GitSource, Package 10 | from .special import SpecialSourceProvider 11 | 12 | _GIT_SCHEMES: Dict[str, Dict[str, str]] = { 13 | 'github': {'scheme': 'https', 'netloc': 'github.com'}, 14 | 'gitlab': {'scheme': 'https', 'netloc': 'gitlab.com'}, 15 | 'bitbucket': {'scheme': 'https', 'netloc': 'bitbucket.com'}, 16 | 'git': {}, 17 | 'git+http': {'scheme': 'http'}, 18 | 'git+https': {'scheme': 'https'}, 19 | } 20 | 21 | 22 | class LockfileProvider: 23 | def parse_git_source(self, version: str, from_: Optional[str] = None) -> GitSource: 24 | # https://github.com/microsoft/pyright/issues/1589 25 | # pyright: reportPrivateUsage=false 26 | 27 | original_url = urllib.parse.urlparse(version) 28 | assert original_url.scheme and original_url.path and original_url.fragment 29 | 30 | replacements = _GIT_SCHEMES.get(original_url.scheme, {}) 31 | new_url = original_url._replace(fragment='', **replacements) 32 | # Replace e.g. 
git:github.com/owner/repo with git://github.com/owner/repo 33 | if not new_url.netloc: 34 | path = new_url.path.split('/') 35 | new_url = new_url._replace(netloc=path[0], path='/'.join(path[1:])) 36 | 37 | return GitSource( 38 | original=original_url.geturl(), 39 | url=new_url.geturl(), 40 | commit=original_url.fragment, 41 | from_=from_, 42 | ) 43 | 44 | def process_lockfile(self, lockfile: Path) -> Iterator[Package]: 45 | raise NotImplementedError() 46 | 47 | 48 | class RCFileProvider: 49 | RCFILE_NAME: str 50 | 51 | def parse_rcfile(self, rcfile: Path) -> Dict[str, str]: 52 | with open(rcfile, 'r') as r: 53 | rcfile_text = r.read() 54 | parser_re = re.compile( 55 | r'^(?!#|;)(\S+)(?:\s+|\s*=\s*)(?:"(.+)"|(\S+))$', re.MULTILINE 56 | ) 57 | result: Dict[str, str] = {} 58 | for key, quoted_val, val in parser_re.findall(rcfile_text): 59 | result[key] = quoted_val or val 60 | return result 61 | 62 | def get_node_headers(self, rcfile: Path) -> Optional[NodeHeaders]: 63 | rc_data = self.parse_rcfile(rcfile) 64 | if 'target' not in rc_data: 65 | return None 66 | target = rc_data['target'] 67 | runtime = rc_data.get('runtime') 68 | disturl = rc_data.get('disturl') 69 | 70 | assert isinstance(runtime, str) and isinstance(disturl, str) 71 | 72 | return NodeHeaders.with_defaults(target, runtime, disturl) 73 | 74 | 75 | class ModuleProvider(ContextManager['ModuleProvider']): 76 | async def generate_package(self, package: Package) -> None: 77 | raise NotImplementedError() 78 | 79 | 80 | class ProviderFactory: 81 | def create_lockfile_provider(self) -> LockfileProvider: 82 | raise NotImplementedError() 83 | 84 | def create_rcfile_providers(self) -> List[RCFileProvider]: 85 | raise NotImplementedError() 86 | 87 | def create_module_provider( 88 | self, gen: ManifestGenerator, special: SpecialSourceProvider 89 | ) -> ModuleProvider: 90 | raise NotImplementedError() 91 | -------------------------------------------------------------------------------- /node/flatpak_node_generator/py.typed: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/flatpak/flatpak-builder-tools/ea9bfa22d175066dd3044544cc55aa070f8282f4/node/flatpak_node_generator/py.typed -------------------------------------------------------------------------------- /node/flatpak_node_generator/requests.py: -------------------------------------------------------------------------------- 1 | from typing import AsyncIterator, ClassVar 2 | 3 | import contextlib 4 | 5 | import aiohttp 6 | 7 | from .cache import Cache 8 | 9 | DEFAULT_PART_SIZE = 4096 10 | 11 | 12 | class Requests: 13 | instance: 'Requests' 14 | 15 | DEFAULT_RETRIES = 5 16 | retries: ClassVar[int] = DEFAULT_RETRIES 17 | 18 | def __get_cache_bucket(self, cachable: bool, url: str) -> Cache.BucketRef: 19 | return Cache.get_working_instance_if(cachable).get(f'requests:{url}') 20 | 21 | @contextlib.asynccontextmanager 22 | async def _open_stream(self, url: str) -> AsyncIterator[aiohttp.StreamReader]: 23 | async with aiohttp.ClientSession(raise_for_status=True) as session: 24 | async with session.get(url) as response: 25 | yield response.content 26 | 27 | async def _read_parts( 28 | self, url: str, size: int = DEFAULT_PART_SIZE 29 | ) -> AsyncIterator[bytes]: 30 | async with self._open_stream(url) as stream: 31 | while True: 32 | data = await stream.read(size) 33 | if not data: 34 | return 35 | 36 | yield data 37 | 38 | async def _read_all(self, url: str) -> bytes: 39 | async with self._open_stream(url) as stream: 40 | return 
await stream.read() 41 | 42 | async def read_parts( 43 | self, url: str, *, cachable: bool = False, size: int = DEFAULT_PART_SIZE 44 | ) -> AsyncIterator[bytes]: 45 | bucket = self.__get_cache_bucket(cachable, url) 46 | 47 | bucket_reader = bucket.open_read() 48 | if bucket_reader is not None: 49 | for part in bucket_reader.read_parts(size): 50 | yield part 51 | 52 | return 53 | 54 | for i in range(1, Requests.retries + 1): 55 | try: 56 | with bucket.open_write() as bucket_writer: 57 | async for part in self._read_parts(url, size): 58 | bucket_writer.write(part) 59 | yield part 60 | 61 | return 62 | except Exception: 63 | if i == Requests.retries: 64 | raise 65 | 66 | async def read_all(self, url: str, *, cachable: bool = False) -> bytes: 67 | bucket = self.__get_cache_bucket(cachable, url) 68 | 69 | bucket_reader = bucket.open_read() 70 | if bucket_reader is not None: 71 | return bucket_reader.read_all() 72 | 73 | for i in range(1, Requests.retries + 1): 74 | try: 75 | with bucket.open_write() as bucket_writer: 76 | data = await self._read_all(url) 77 | bucket_writer.write(data) 78 | return data 79 | except Exception: 80 | if i == Requests.retries: 81 | raise 82 | 83 | assert False 84 | 85 | 86 | class StubRequests(Requests): 87 | async def _read_parts( 88 | self, url: str, size: int = DEFAULT_PART_SIZE 89 | ) -> AsyncIterator[bytes]: 90 | yield b'' 91 | 92 | async def _read_all(self, url: str) -> bytes: 93 | return b'' 94 | 95 | 96 | Requests.instance = Requests() 97 | -------------------------------------------------------------------------------- /node/flatpak_node_generator/url_metadata.py: -------------------------------------------------------------------------------- 1 | from typing import Any, NamedTuple 2 | 3 | import json 4 | 5 | from .cache import Cache 6 | from .integrity import Integrity, IntegrityBuilder 7 | from .requests import Requests 8 | 9 | 10 | class RemoteUrlMetadata(NamedTuple): 11 | integrity: Integrity 12 | size: int 13 | 14 | @staticmethod 15 | def __get_cache_bucket(cachable: bool, kind: str, url: str) -> Cache.BucketRef: 16 | return Cache.get_working_instance_if(cachable).get( 17 | f'remote-url-metadata:{kind}:{url}' 18 | ) 19 | 20 | @staticmethod 21 | def from_json_object(data: Any) -> 'RemoteUrlMetadata': 22 | return RemoteUrlMetadata( 23 | integrity=Integrity.from_json_object(data['integrity']), 24 | size=data['size'], 25 | ) 26 | 27 | @classmethod 28 | async def get( 29 | cls, url: str, *, cachable: bool, integrity_algorithm: str = 'sha256' 30 | ) -> 'RemoteUrlMetadata': 31 | bucket = cls.__get_cache_bucket(cachable, 'full', url) 32 | 33 | bucket_reader = bucket.open_read() 34 | if bucket_reader is not None: 35 | data = json.loads(bucket_reader.read_all()) 36 | return RemoteUrlMetadata.from_json_object(data) 37 | 38 | builder = IntegrityBuilder(integrity_algorithm) 39 | size = 0 40 | 41 | async for part in Requests.instance.read_parts(url, cachable=False): 42 | builder.update(part) 43 | size += len(part) 44 | 45 | metadata = RemoteUrlMetadata(integrity=builder.build(), size=size) 46 | 47 | with bucket.open_write() as bucket_writer: 48 | bucket_writer.write(json.dumps(metadata.to_json_object()).encode('ascii')) 49 | 50 | return metadata 51 | 52 | @classmethod 53 | async def get_size(cls, url: str, *, cachable: bool) -> int: 54 | bucket = cls.__get_cache_bucket(cachable, 'size', url) 55 | 56 | bucket_reader = bucket.open_read() 57 | if bucket_reader is not None: 58 | return int(bucket_reader.read_all()) 59 | 60 | size = 0 61 | async for part in 
Requests.instance.read_parts(url, cachable=False): 62 | size += len(part) 63 | 64 | with bucket.open_write() as bucket_writer: 65 | bucket_writer.write(str(size).encode('ascii')) 66 | 67 | return size 68 | 69 | def to_json_object(self) -> Any: 70 | return { 71 | 'integrity': self.integrity.to_json_object(), 72 | 'size': self.size, 73 | } 74 | -------------------------------------------------------------------------------- /node/poetry.toml: -------------------------------------------------------------------------------- 1 | [virtualenvs] 2 | in-project = true 3 | -------------------------------------------------------------------------------- /node/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "flatpak_node_generator" 3 | version = "0.1.0" 4 | description = "Script to generate Flatpak manifests from NPM and Yarn lockfiles" 5 | license = {text = "MIT"} 6 | readme = "README.md" 7 | authors = [ 8 | {name = "Ryan Gonzalez", email = "ryan.gonzalez@collabora.com"}, 9 | {name = "Filippe LeMarchand", email = "gasinvein@gmail.com"}, 10 | ] 11 | requires-python = "<4.0,>=3.9" 12 | dependencies = [ 13 | "aiohttp<4.0.0,>=3.9.0", 14 | ] 15 | 16 | [tool.poetry.group.dev.dependencies] 17 | blue = {git = "https://github.com/grantjenks/blue"} 18 | isort = "^6.0.1" 19 | mypy = "^1.11.2" 20 | poethepoet = "^0.34.0" 21 | pytest = "^8.3.5" 22 | pytest-asyncio = "^0.26.0" 23 | pytest-datadir = "^1.6.1" 24 | pytest-httpserver = "^1.1.3" 25 | pytest-xdist = "^3.6.1" 26 | 27 | [tool.poetry.scripts] 28 | flatpak-node-generator = "flatpak_node_generator.main:main" 29 | 30 | [tool.blue] 31 | line-length = 88 32 | target-version = ["py39"] 33 | 34 | [tool.isort] 35 | from_first = true 36 | lines_between_types = 1 37 | profile = "black" 38 | src_paths = ["flatpak_node_generator", "tests"] 39 | 40 | [tool.mypy] 41 | python_version = "3.9" 42 | exclude = "^(.*/)?((([^/]+)-quick-start)|(\\.venv)|npm-cache|yarn-mirror)/.*$" 43 | strict = true 44 | 45 | [tool.poe.tasks] 46 | check-format = "blue --check flatpak_node_generator tests" 47 | check-isort = "isort --check flatpak_node_generator tests" 48 | check-mypy = "mypy ." 
49 | check-test = "pytest -vvvs -n auto" 50 | check = ["check-format", "check-isort", "check-mypy", "check-test"] 51 | 52 | apply-format = "blue flatpak_node_generator" 53 | apply-isort = "isort flatpak_node_generator" 54 | 55 | [tool.pytest.ini_options] 56 | asyncio_mode = "auto" 57 | testpaths = ["tests"] 58 | 59 | [build-system] 60 | requires = ["poetry-core>=1.0.0"] 61 | build-backend = "poetry.core.masonry.api" 62 | -------------------------------------------------------------------------------- /node/tests/data/packages/electron/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/electron", 3 | "version": "1.0.0", 4 | "dependencies": { 5 | "@electron/get": "^2.0.3", 6 | "electron": "^26.3.0" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /node/tests/data/packages/local-link-yarn/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/local", 3 | "version": "1.0.0", 4 | "dependencies": { 5 | "subdir": "link:subdir" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /node/tests/data/packages/local-link-yarn/subdir/index.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | 3 | module.exports = { 4 | sayHello: () => { 5 | fs.writeFileSync('hello.txt', 'Hello!') 6 | }, 7 | } 8 | -------------------------------------------------------------------------------- /node/tests/data/packages/local-link-yarn/subdir/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/subdir", 3 | "version": "1.0.0" 4 | } 5 | -------------------------------------------------------------------------------- /node/tests/data/packages/local-link-yarn/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
2 | # yarn lockfile v1 3 | 4 | 5 | "subdir@link:subdir": 6 | version "0.0.0" 7 | uid "" 8 | -------------------------------------------------------------------------------- /node/tests/data/packages/local/package-lock.v1.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/local", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "subdir": { 8 | "version": "file:subdir" 9 | } 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /node/tests/data/packages/local/package-lock.v2.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/local", 3 | "version": "1.0.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@flatpak-node-generator-tests/local", 9 | "version": "1.0.0", 10 | "dependencies": { 11 | "subdir": "file:subdir" 12 | } 13 | }, 14 | "node_modules/subdir": { 15 | "resolved": "subdir", 16 | "link": true 17 | }, 18 | "subdir": { 19 | "name": "@flatpak-node-generator-tests/subdir", 20 | "version": "1.0.0" 21 | } 22 | }, 23 | "dependencies": { 24 | "subdir": { 25 | "version": "file:subdir" 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /node/tests/data/packages/local/package-lock.v3.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/local", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@flatpak-node-generator-tests/local", 9 | "version": "1.0.0", 10 | "dependencies": { 11 | "subdir": "file:subdir" 12 | } 13 | }, 14 | "node_modules/subdir": { 15 | "name": "@flatpak-node-generator-tests/subdir", 16 | "version": "1.0.0", 17 | "resolved": "file:subdir" 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /node/tests/data/packages/local/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/local", 3 | "version": "1.0.0", 4 | "dependencies": { 5 | "subdir": "file:subdir" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /node/tests/data/packages/local/subdir/index.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | 3 | module.exports = { 4 | sayHello: () => { 5 | fs.writeFileSync('hello.txt', 'Hello!') 6 | }, 7 | } 8 | -------------------------------------------------------------------------------- /node/tests/data/packages/local/subdir/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/subdir", 3 | "version": "1.0.0" 4 | } 5 | -------------------------------------------------------------------------------- /node/tests/data/packages/local/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 
2 | # yarn lockfile v1 3 | 4 | 5 | "subdir@file:subdir": 6 | version "1.0.0" 7 | -------------------------------------------------------------------------------- /node/tests/data/packages/minimal-git/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/minimal-git", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "nop": { 8 | "version": "github:supershabam/nop#f110e75f62cfe3bf4468ac3b74e3dc72ab9ae4bf", 9 | "from": "github:supershabam/nop" 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /node/tests/data/packages/minimal-git/package-lock.v1.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/minimal-git", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "nop": { 8 | "version": "git+https://git@github.com/supershabam/nop.git#f110e75f62cfe3bf4468ac3b74e3dc72ab9ae4bf", 9 | "from": "git+https://git@github.com/supershabam/nop.git" 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /node/tests/data/packages/minimal-git/package-lock.v2.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/minimal-git", 3 | "version": "1.0.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@flatpak-node-generator-tests/minimal-git", 9 | "version": "1.0.0", 10 | "dependencies": { 11 | "nop": "https://git@github.com/supershabam/nop.git" 12 | } 13 | }, 14 | "node_modules/nop": { 15 | "version": "1.0.0", 16 | "resolved": "git+https://git@github.com/supershabam/nop.git#f110e75f62cfe3bf4468ac3b74e3dc72ab9ae4bf", 17 | "license": "MIT" 18 | } 19 | }, 20 | "dependencies": { 21 | "nop": { 22 | "version": "git+https://git@github.com/supershabam/nop.git#f110e75f62cfe3bf4468ac3b74e3dc72ab9ae4bf", 23 | "from": "nop@https://git@github.com/supershabam/nop.git" 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /node/tests/data/packages/minimal-git/package-lock.v3.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/minimal-git", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@flatpak-node-generator-tests/minimal-git", 9 | "version": "1.0.0", 10 | "dependencies": { 11 | "nop": "https://git@github.com/supershabam/nop.git" 12 | } 13 | }, 14 | "node_modules/nop": { 15 | "version": "1.0.0", 16 | "resolved": "git+ssh://git@github.com/supershabam/nop.git#f110e75f62cfe3bf4468ac3b74e3dc72ab9ae4bf", 17 | "integrity": "sha512-xCwdA7C4QIORvTMytKHMlkEN6axJGimR0gv5vgjKpEKRvQrPOwhjJnrZEcd5g0LP+7IY38+TY7MP59HRY6gcwA==", 18 | "license": "MIT" 19 | } 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /node/tests/data/packages/minimal-git/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/minimal-git", 3 | "version": "1.0.0", 4 | "dependencies": { 5 | "nop": "https://git@github.com/supershabam/nop.git" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- 
/node/tests/data/packages/minimal-git/yarn.lock: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 2 | # yarn lockfile v1 3 | 4 | 5 | "nop@https://git@github.com/supershabam/nop.git": 6 | version "1.0.0" 7 | resolved "https://git@github.com/supershabam/nop.git#f110e75f62cfe3bf4468ac3b74e3dc72ab9ae4bf" 8 | -------------------------------------------------------------------------------- /node/tests/data/packages/missing-resolved-npm/package-lock.v2.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/missing-resolved-npm", 3 | "version": "1.0.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@flatpak-node-generator-tests/missing-resolved-npm", 9 | "version": "1.0.0", 10 | "dependencies": { 11 | "word-wrap": "^1.2.3" 12 | } 13 | }, 14 | "node_modules/word-wrap": { 15 | "version": "1.2.3", 16 | "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", 17 | "engines": { 18 | "node": ">=0.10.0" 19 | } 20 | } 21 | }, 22 | "dependencies": { 23 | "word-wrap": { 24 | "version": "1.2.3", 25 | "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==" 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /node/tests/data/packages/missing-resolved-npm/package-lock.v3.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/missing-resolved-npm", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@flatpak-node-generator-tests/missing-resolved-npm", 9 | "version": "1.0.0", 10 | "dependencies": { 11 | "word-wrap": "^1.2.3" 12 | } 13 | }, 14 | "node_modules/word-wrap": { 15 | "version": "1.2.3", 16 | "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", 17 | "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", 18 | "engines": { 19 | "node": ">=0.10.0" 20 | } 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /node/tests/data/packages/missing-resolved-npm/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/missing-resolved-npm", 3 | "version": "1.0.0", 4 | "dependencies": { 5 | "word-wrap": "^1.2.3" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /node/tests/data/packages/url-as-dep/package-lock.v1.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/url-as-dep", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "word-wrap": { 8 | "version": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", 9 | "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==" 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /node/tests/data/packages/url-as-dep/package-lock.v2.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": 
"@flatpak-node-generator-tests/url-as-dep", 3 | "version": "1.0.0", 4 | "lockfileVersion": 2, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@flatpak-node-generator-tests/url-as-dep", 9 | "version": "1.0.0", 10 | "dependencies": { 11 | "word-wrap": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz" 12 | } 13 | }, 14 | "node_modules/word-wrap": { 15 | "version": "1.2.3", 16 | "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", 17 | "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", 18 | "license": "MIT", 19 | "engines": { 20 | "node": ">=0.10.0" 21 | } 22 | } 23 | }, 24 | "dependencies": { 25 | "word-wrap": { 26 | "version": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", 27 | "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==" 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /node/tests/data/packages/url-as-dep/package-lock.v3.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/url-as-dep", 3 | "version": "1.0.0", 4 | "lockfileVersion": 3, 5 | "requires": true, 6 | "packages": { 7 | "": { 8 | "name": "@flatpak-node-generator-tests/url-as-dep", 9 | "version": "1.0.0", 10 | "dependencies": { 11 | "word-wrap": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz" 12 | } 13 | }, 14 | "node_modules/word-wrap": { 15 | "version": "1.2.3", 16 | "resolved": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz", 17 | "integrity": "sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ==", 18 | "license": "MIT", 19 | "engines": { 20 | "node": ">=0.10.0" 21 | } 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /node/tests/data/packages/url-as-dep/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@flatpak-node-generator-tests/url-as-dep", 3 | "version": "1.0.0", 4 | "dependencies": { 5 | "word-wrap": "https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.3.tgz" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /node/tests/test_electron.py: -------------------------------------------------------------------------------- 1 | from typing import List 2 | 3 | from conftest import RequestsController 4 | from flatpak_node_generator.electron import ElectronBinaryManager 5 | from flatpak_node_generator.integrity import Integrity 6 | 7 | VERSION = '18.0.0' 8 | 9 | INTEGRITY_DATA = """ 10 | b3c088672deab866b0da85ec02338a3e2c541332a946814b7cd09e9a16cd41d6 *electron-v18.0.0-linux-arm64.zip 11 | 991c54c875102c1440f45a97509693003c8c5c3f1c69123e0913236396202dba *electron-v18.0.0-linux-armv7l.zip 12 | 995fd56b0c03abcac575b920092876b85185d3e13d63b2007e8e49f395f64061 *electron-v18.0.0-linux-ia32.zip 13 | 115737fb1e6759bcb9822e642f9457c2ee1ae7c3413dc3f356bf5d15576fec4d *electron-v18.0.0-linux-x64.zip 14 | 3f020561f8cd30e0fb51b82686fbb3b2a81a92990e9dd009234f19fcac48a2ce *electron.d.ts 15 | 9d80371fc2a146f6671aeb70dea36638400569fd130f2773a2f79b83b3e2c127 *ffmpeg-v18.0.0-linux-arm64.zip 16 | 60cf19bf219cb7ccf190f86e1bc6e8cc0ed0d7e9889faffc0138f443aaf17baf *ffmpeg-v18.0.0-linux-armv7l.zip 17 | af80bceef443bf01bced73eea3e8f700cca1f20560c97a0b588b49577d0a9141 *ffmpeg-v18.0.0-linux-ia32.zip 18 | 
1d7c771bdfd727b7707cf7635c8b0fad66c89d03610b9bac793429ae3ddb458c *ffmpeg-v18.0.0-linux-x64.zip 19 | """.strip() 20 | 21 | INTEGRITY_SHA256 = '0cba8a5d7280d8cf264c0b8399e93e4cc75d8e18c3b89ae351e443d2c4437f47' 22 | 23 | 24 | def _expect_integrity_request(requests: RequestsController) -> None: 25 | requests.server.expect_oneshot_request('/SHASUMS256.txt', 'GET').respond_with_data( 26 | INTEGRITY_DATA 27 | ) 28 | 29 | 30 | def _list_binaries( 31 | manager: ElectronBinaryManager, 32 | binary: str, 33 | ) -> List[ElectronBinaryManager.Binary]: 34 | return list(sorted(manager.find_binaries(binary), key=lambda b: b.filename)) 35 | 36 | 37 | async def test_integrity_file(requests: RequestsController) -> None: 38 | _expect_integrity_request(requests) 39 | manager = await ElectronBinaryManager.for_version( 40 | VERSION, base_url=requests.url.rstrip('/') 41 | ) 42 | 43 | assert manager.integrity_file == ElectronBinaryManager.Binary( 44 | filename=f'SHASUMS256.txt-{VERSION}', 45 | url=requests.server.url_for('SHASUMS256.txt'), 46 | integrity=Integrity('sha256', INTEGRITY_SHA256), 47 | arch=None, 48 | ) 49 | 50 | 51 | async def test_electron_binaries(requests: RequestsController) -> None: 52 | _expect_integrity_request(requests) 53 | manager = await ElectronBinaryManager.for_version( 54 | VERSION, base_url=requests.url.rstrip('/') 55 | ) 56 | 57 | assert _list_binaries(manager, 'electron') == [ 58 | ElectronBinaryManager.Binary( 59 | filename='electron-v18.0.0-linux-arm64.zip', 60 | url=requests.server.url_for('electron-v18.0.0-linux-arm64.zip'), 61 | integrity=Integrity( 62 | 'sha256', 63 | 'b3c088672deab866b0da85ec02338a3e2c541332a946814b7cd09e9a16cd41d6', 64 | ), 65 | arch=ElectronBinaryManager.Arch(electron='arm64', flatpak='aarch64'), 66 | ), 67 | ElectronBinaryManager.Binary( 68 | filename='electron-v18.0.0-linux-armv7l.zip', 69 | url=requests.server.url_for('electron-v18.0.0-linux-armv7l.zip'), 70 | integrity=Integrity( 71 | 'sha256', 72 | '991c54c875102c1440f45a97509693003c8c5c3f1c69123e0913236396202dba', 73 | ), 74 | arch=ElectronBinaryManager.Arch(electron='armv7l', flatpak='arm'), 75 | ), 76 | ElectronBinaryManager.Binary( 77 | filename='electron-v18.0.0-linux-ia32.zip', 78 | url=requests.server.url_for('electron-v18.0.0-linux-ia32.zip'), 79 | integrity=Integrity( 80 | 'sha256', 81 | '995fd56b0c03abcac575b920092876b85185d3e13d63b2007e8e49f395f64061', 82 | ), 83 | arch=ElectronBinaryManager.Arch(electron='ia32', flatpak='i386'), 84 | ), 85 | ElectronBinaryManager.Binary( 86 | filename='electron-v18.0.0-linux-x64.zip', 87 | url=requests.server.url_for('electron-v18.0.0-linux-x64.zip'), 88 | integrity=Integrity( 89 | 'sha256', 90 | '115737fb1e6759bcb9822e642f9457c2ee1ae7c3413dc3f356bf5d15576fec4d', 91 | ), 92 | arch=ElectronBinaryManager.Arch(electron='x64', flatpak='x86_64'), 93 | ), 94 | ] 95 | 96 | 97 | async def test_ffmpeg_binaries(requests: RequestsController) -> None: 98 | _expect_integrity_request(requests) 99 | manager = await ElectronBinaryManager.for_version( 100 | VERSION, base_url=requests.url.rstrip('/') 101 | ) 102 | 103 | assert _list_binaries(manager, 'ffmpeg') == [ 104 | ElectronBinaryManager.Binary( 105 | filename='ffmpeg-v18.0.0-linux-arm64.zip', 106 | url=requests.server.url_for('ffmpeg-v18.0.0-linux-arm64.zip'), 107 | integrity=Integrity( 108 | 'sha256', 109 | '9d80371fc2a146f6671aeb70dea36638400569fd130f2773a2f79b83b3e2c127', 110 | ), 111 | arch=ElectronBinaryManager.Arch(electron='arm64', flatpak='aarch64'), 112 | ), 113 | ElectronBinaryManager.Binary( 114 | 
filename='ffmpeg-v18.0.0-linux-armv7l.zip', 115 | url=requests.server.url_for('ffmpeg-v18.0.0-linux-armv7l.zip'), 116 | integrity=Integrity( 117 | 'sha256', 118 | '60cf19bf219cb7ccf190f86e1bc6e8cc0ed0d7e9889faffc0138f443aaf17baf', 119 | ), 120 | arch=ElectronBinaryManager.Arch(electron='armv7l', flatpak='arm'), 121 | ), 122 | ElectronBinaryManager.Binary( 123 | filename='ffmpeg-v18.0.0-linux-ia32.zip', 124 | url=requests.server.url_for('ffmpeg-v18.0.0-linux-ia32.zip'), 125 | integrity=Integrity( 126 | 'sha256', 127 | 'af80bceef443bf01bced73eea3e8f700cca1f20560c97a0b588b49577d0a9141', 128 | ), 129 | arch=ElectronBinaryManager.Arch(electron='ia32', flatpak='i386'), 130 | ), 131 | ElectronBinaryManager.Binary( 132 | filename='ffmpeg-v18.0.0-linux-x64.zip', 133 | url=requests.server.url_for('ffmpeg-v18.0.0-linux-x64.zip'), 134 | integrity=Integrity( 135 | 'sha256', 136 | '1d7c771bdfd727b7707cf7635c8b0fad66c89d03610b9bac793429ae3ddb458c', 137 | ), 138 | arch=ElectronBinaryManager.Arch(electron='x64', flatpak='x86_64'), 139 | ), 140 | ] 141 | -------------------------------------------------------------------------------- /node/tests/test_integrity.py: -------------------------------------------------------------------------------- 1 | from flatpak_node_generator.integrity import Integrity, IntegrityBuilder 2 | 3 | TEST_STRING = 'this is a test string' 4 | TEST_SHA1 = '9a375f77abb15794900c2689812204273d757c9b' 5 | TEST_SHA256 = 'f6774519d1c7a3389ef327e9c04766b999db8cdfb85d1346c471ee86d65885bc' 6 | TEST_SHA256_B64 = '9ndFGdHHozie8yfpwEdmuZnbjN+4XRNGxHHuhtZYhbw=' 7 | 8 | 9 | def test_generate() -> None: 10 | integrity = Integrity.generate(TEST_STRING) 11 | assert integrity.algorithm == 'sha256' 12 | assert integrity.digest == TEST_SHA256 13 | 14 | integrity = Integrity.generate(TEST_STRING, algorithm='sha1') 15 | assert integrity.algorithm == 'sha1' 16 | assert integrity.digest == TEST_SHA1 17 | 18 | 19 | def test_builder() -> None: 20 | STEP_SIZE = 3 21 | 22 | builder = IntegrityBuilder() 23 | for i in range(0, len(TEST_STRING), STEP_SIZE): 24 | builder.update(TEST_STRING[i : i + STEP_SIZE]) 25 | 26 | integrity = builder.build() 27 | assert integrity.algorithm == 'sha256' 28 | assert integrity.digest == TEST_SHA256 29 | 30 | 31 | def test_parse() -> None: 32 | integrity = Integrity.parse(f'sha256-{TEST_SHA256_B64}') 33 | assert integrity.algorithm == 'sha256' 34 | assert integrity.digest == TEST_SHA256 35 | -------------------------------------------------------------------------------- /node/tests/test_manifest.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from pathlib import Path 4 | 5 | import subprocess 6 | import urllib.parse 7 | 8 | from conftest import FlatpakBuilder, RequestsController 9 | from flatpak_node_generator.integrity import Integrity 10 | from flatpak_node_generator.manifest import ManifestGenerator 11 | 12 | 13 | def test_url(requests: RequestsController, flatpak_builder: FlatpakBuilder) -> None: 14 | DATA_1 = 'abc' 15 | DATA_2 = 'def' 16 | 17 | requests.server.expect_oneshot_request('/file1', 'GET').respond_with_data(DATA_1) 18 | requests.server.expect_oneshot_request('/file2', 'GET').respond_with_data(DATA_2) 19 | 20 | with ManifestGenerator() as gen: 21 | gen.add_url_source(requests.url_for('file1'), Integrity.generate(DATA_1)) 22 | gen.add_url_source( 23 | requests.url_for('file2'), 24 | Integrity.generate(DATA_2), 25 | destination=Path('file2-renamed'), 26 | ) 27 | 28 | 
flatpak_builder.build(sources=gen.ordered_sources()) 29 | 30 | assert (flatpak_builder.module_dir / 'file1').read_text() == DATA_1 31 | assert (flatpak_builder.module_dir / 'file2-renamed').read_text() == DATA_2 32 | 33 | 34 | def test_data(flatpak_builder: FlatpakBuilder) -> None: 35 | DATA_STR = 'abc' 36 | DATA_BYTES = b'def\0ghi' 37 | 38 | with ManifestGenerator() as gen: 39 | gen.add_data_source(DATA_STR, Path('str-file')) 40 | gen.add_data_source(DATA_BYTES, Path('bytes-file')) 41 | 42 | flatpak_builder.build(sources=gen.ordered_sources()) 43 | 44 | assert (flatpak_builder.module_dir / 'str-file').read_text() == DATA_STR 45 | assert (flatpak_builder.module_dir / 'bytes-file').read_bytes() == DATA_BYTES 46 | 47 | 48 | def test_local_file(flatpak_builder: FlatpakBuilder, tmp_path: Path) -> None: 49 | DATA = 'abc' 50 | 51 | path = tmp_path / 'file' 52 | path.write_text(DATA) 53 | 54 | with ManifestGenerator() as gen: 55 | gen.add_local_file_source(path) 56 | 57 | flatpak_builder.build(sources=gen.ordered_sources()) 58 | 59 | assert (flatpak_builder.module_dir / path.name).read_text() == DATA 60 | 61 | 62 | def test_git(tmp_path: Path, flatpak_builder: FlatpakBuilder) -> None: 63 | DATA_1 = 'this is a file in a git repo' 64 | DATA_2 = 'this is a changed file' 65 | 66 | git_repo = tmp_path / 'git-repo' 67 | git_repo.mkdir() 68 | 69 | test_file = git_repo / 'test-file' 70 | test_file.write_text(DATA_1) 71 | 72 | subprocess.run(['git', 'init'], cwd=git_repo, check=True) 73 | subprocess.run( 74 | ['git', 'symbolic-ref', 'HEAD', 'refs/heads/main'], cwd=git_repo, check=True 75 | ) 76 | subprocess.run(['git', 'add', test_file.name], cwd=git_repo, check=True) 77 | subprocess.run(['git', 'commit', '-m', 'initial'], cwd=git_repo, check=True) 78 | 79 | main_ref = git_repo / '.git' / 'refs' / 'heads' / 'main' 80 | commit_1 = main_ref.read_text().strip() 81 | 82 | test_file.write_text(DATA_2) 83 | subprocess.run(['git', 'commit', '-am', 'change'], cwd=git_repo, check=True) 84 | 85 | commit_2 = main_ref.read_text().strip() 86 | 87 | with ManifestGenerator() as gen: 88 | url = f'file://{urllib.parse.quote(str(git_repo.absolute()))}' 89 | gen.add_git_source(url, commit=commit_1) 90 | gen.add_git_source(url, commit=commit_2, destination=Path('subdir')) 91 | 92 | flatpak_builder.build(sources=gen.ordered_sources()) 93 | 94 | assert (flatpak_builder.module_dir / test_file.name).read_text() == DATA_1 95 | assert ( 96 | flatpak_builder.module_dir / 'subdir' / test_file.name 97 | ).read_text() == DATA_2 98 | 99 | 100 | def test_script(flatpak_builder: FlatpakBuilder) -> None: 101 | COMMANDS = ['echo 123'] 102 | 103 | with ManifestGenerator() as gen: 104 | gen.add_script_source(COMMANDS, Path('script')) 105 | 106 | flatpak_builder.build(sources=gen.ordered_sources()) 107 | 108 | with (flatpak_builder.module_dir / 'script').open() as fp: 109 | assert fp.readline().startswith('#!') 110 | assert fp.readline().strip() == 'echo 123' 111 | 112 | 113 | def test_commands(flatpak_builder: FlatpakBuilder) -> None: 114 | COMMAND = 'echo 123 > test' 115 | 116 | with ManifestGenerator() as gen: 117 | gen.add_command(COMMAND) 118 | 119 | flatpak_builder.build(sources=gen.ordered_sources()) 120 | 121 | test_file = flatpak_builder.module_dir / 'test' 122 | assert test_file.read_text().strip() == '123' 123 | 124 | 125 | def test_ordering() -> None: 126 | URL_1 = 'abc' 127 | URL_2 = 'def' 128 | URL_3 = 'ghi' 129 | 130 | with ManifestGenerator() as gen: 131 | gen.add_archive_source(URL_3, Integrity.generate('')) 132 | 
gen.add_archive_source(URL_1, Integrity.generate('')) 133 | gen.add_archive_source(URL_2, Integrity.generate('')) 134 | 135 | sources = gen.ordered_sources() 136 | 137 | assert next(sources)['url'] == URL_1 138 | assert next(sources)['url'] == URL_2 139 | assert next(sources)['url'] == URL_3 140 | -------------------------------------------------------------------------------- /node/tests/test_package.py: -------------------------------------------------------------------------------- 1 | from re import S 2 | 3 | from flatpak_node_generator.package import SemVer 4 | 5 | 6 | def test_semver_parsing() -> None: 7 | assert SemVer.parse('1.7.2') == SemVer(major=1, minor=7, patch=2) 8 | 9 | assert SemVer.parse('1123.3213.8943') == SemVer( 10 | major=1123, minor=3213, patch=8943, prerelease=None 11 | ) 12 | 13 | assert SemVer.parse('12.34.56-alpha01') == SemVer( 14 | major=12, minor=34, patch=56, prerelease=SemVer.Prerelease(('alpha01',)) 15 | ) 16 | 17 | assert SemVer.parse('12.34.56-a.2.b+build') == SemVer( 18 | major=12, 19 | minor=34, 20 | patch=56, 21 | prerelease=SemVer.Prerelease(('a', 2, 'b')), 22 | ) 23 | 24 | 25 | def test_semver_cmp() -> None: 26 | assert SemVer.parse('1.2.3') < SemVer.parse('1.3.2') 27 | assert SemVer.parse('1.3.2') > SemVer.parse('1.2.3') 28 | assert SemVer.parse('1.2.3') == SemVer.parse('1.2.3') 29 | 30 | assert SemVer.parse('1.0.0-alpha') < SemVer.parse('1.0.0-alpha.1') 31 | assert SemVer.parse('1.0.0-alpha.1') < SemVer.parse('1.0.0-alpha.2') 32 | assert SemVer.parse('1.0.0-alpha.1') < SemVer.parse('1.0.0-alpha.x') 33 | assert SemVer.parse('1.0.0-alpha.x') < SemVer.parse('1.0.0-beta') 34 | assert SemVer.parse('1.0.0-alpha.1') < SemVer.parse('1.0.0-alpha.1.1') 35 | assert SemVer.parse('1.0.0-alpha+build1') == SemVer.parse('1.0.0-alpha+build2') 36 | -------------------------------------------------------------------------------- /node/tests/test_providers.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | import itertools 4 | import shlex 5 | 6 | import pytest 7 | 8 | from conftest import FlatpakBuilder, ProviderFactorySpec 9 | from flatpak_node_generator.manifest import ManifestGenerator 10 | 11 | 12 | async def test_minimal_git( 13 | flatpak_builder: FlatpakBuilder, 14 | provider_factory_spec: ProviderFactorySpec, 15 | node_version: int, 16 | ) -> None: 17 | if node_version >= 18: 18 | pytest.xfail(reason='Git sources not yet supported for lockfile v2 syntax') 19 | 20 | with ManifestGenerator() as gen: 21 | await provider_factory_spec.generate_modules('minimal-git', gen, node_version) 22 | 23 | flatpak_builder.build( 24 | sources=itertools.chain(gen.ordered_sources()), 25 | commands=[ 26 | provider_factory_spec.install_command, 27 | """node -e 'require("nop")'""", 28 | ], 29 | use_node=node_version, 30 | ) 31 | 32 | 33 | async def test_local( 34 | flatpak_builder: FlatpakBuilder, 35 | provider_factory_spec: ProviderFactorySpec, 36 | node_version: int, 37 | shared_datadir: Path, 38 | ) -> None: 39 | with ManifestGenerator() as gen: 40 | await provider_factory_spec.generate_modules('local', gen, node_version) 41 | 42 | flatpak_builder.build( 43 | sources=itertools.chain( 44 | gen.ordered_sources(), 45 | [ 46 | { 47 | 'type': 'dir', 48 | 'path': str(shared_datadir / 'packages' / 'local' / 'subdir'), 49 | 'dest': 'subdir', 50 | } 51 | ], 52 | ), 53 | commands=[ 54 | provider_factory_spec.install_command, 55 | """node -e 'require("subdir").sayHello()'""", 56 | ], 57 | use_node=node_version, 58 | ) 59 
| 60 | hello_txt = flatpak_builder.module_dir / 'hello.txt' 61 | assert hello_txt.read_text() == 'Hello!' 62 | 63 | 64 | async def test_local_link( 65 | flatpak_builder: FlatpakBuilder, 66 | yarn_provider_factory_spec: ProviderFactorySpec, 67 | node_version: int, 68 | shared_datadir: Path, 69 | ) -> None: 70 | with ManifestGenerator() as gen: 71 | await yarn_provider_factory_spec.generate_modules( 72 | 'local-link-yarn', gen, node_version 73 | ) 74 | 75 | flatpak_builder.build( 76 | sources=itertools.chain( 77 | gen.ordered_sources(), 78 | [ 79 | { 80 | 'type': 'dir', 81 | 'path': str( 82 | shared_datadir / 'packages' / 'local-link-yarn' / 'subdir' 83 | ), 84 | 'dest': 'subdir', 85 | } 86 | ], 87 | ), 88 | commands=[ 89 | yarn_provider_factory_spec.install_command, 90 | """node -e 'require("subdir").sayHello()'""", 91 | ], 92 | use_node=node_version, 93 | ) 94 | 95 | hello_txt = flatpak_builder.module_dir / 'hello.txt' 96 | assert hello_txt.read_text() == 'Hello!' 97 | 98 | 99 | async def test_missing_resolved_field( 100 | flatpak_builder: FlatpakBuilder, 101 | npm_provider_factory_spec: ProviderFactorySpec, 102 | node_version: int, 103 | ) -> None: 104 | if node_version < 16: 105 | pytest.skip() 106 | 107 | with ManifestGenerator() as gen: 108 | await npm_provider_factory_spec.generate_modules( 109 | 'missing-resolved-npm', gen, node_version 110 | ) 111 | 112 | flatpak_builder.build( 113 | sources=gen.ordered_sources(), 114 | commands=[ 115 | npm_provider_factory_spec.install_command, 116 | f"""node -e 'require("word-wrap")'""", 117 | ], 118 | use_node=node_version, 119 | ) 120 | 121 | word_wrap_package_json = ( 122 | flatpak_builder.module_dir / 'node_modules' / 'word-wrap' / 'package.json' 123 | ) 124 | assert word_wrap_package_json.exists() 125 | 126 | 127 | async def test_url_as_dep( 128 | flatpak_builder: FlatpakBuilder, 129 | npm_provider_factory_spec: ProviderFactorySpec, 130 | node_version: int, 131 | ) -> None: 132 | with ManifestGenerator() as gen: 133 | await npm_provider_factory_spec.generate_modules( 134 | 'url-as-dep', gen, node_version 135 | ) 136 | 137 | flatpak_builder.build( 138 | sources=gen.ordered_sources(), 139 | commands=[ 140 | npm_provider_factory_spec.install_command, 141 | f"""node -e 'require("word-wrap")'""", 142 | ], 143 | use_node=node_version, 144 | ) 145 | 146 | word_wrap_package_json = ( 147 | flatpak_builder.module_dir / 'node_modules' / 'word-wrap' / 'package.json' 148 | ) 149 | assert word_wrap_package_json.exists() 150 | 151 | 152 | async def test_special_electron( 153 | flatpak_builder: FlatpakBuilder, 154 | provider_factory_spec: ProviderFactorySpec, 155 | node_version: int, 156 | ) -> None: 157 | VERSION = '26.3.0' 158 | SCRIPT = f""" 159 | import {{download}} from '@electron/get' 160 | await download('{VERSION}') 161 | """ 162 | 163 | with ManifestGenerator() as gen: 164 | await provider_factory_spec.generate_modules('electron', gen, node_version) 165 | 166 | flatpak_builder.build( 167 | sources=itertools.chain(gen.ordered_sources()), 168 | commands=[ 169 | provider_factory_spec.install_command, 170 | f"""node --input-type=module -e {shlex.quote(SCRIPT)}""", 171 | ], 172 | use_node=node_version, 173 | ) 174 | 175 | electron_version = ( 176 | flatpak_builder.module_dir / 'node_modules' / 'electron' / 'dist' / 'version' 177 | ) 178 | assert electron_version.read_text() == '26.3.0' 179 | -------------------------------------------------------------------------------- /node/tests/test_requests.py: 
-------------------------------------------------------------------------------- 1 | from typing import AsyncIterator 2 | 3 | from pytest_httpserver import RequestHandler 4 | 5 | import pytest 6 | 7 | from conftest import RequestsController 8 | from flatpak_node_generator.requests import Requests 9 | 10 | _HELLO = '/hello' 11 | _DATA = b'1234567890' 12 | _DATA2 = _DATA + b'ABC' 13 | _PART_SIZE = 3 14 | 15 | 16 | def _expect_single_hello(requests: RequestsController) -> RequestHandler: 17 | return requests.server.expect_oneshot_request(_HELLO, 'GET') 18 | 19 | 20 | async def _read_parts(it: AsyncIterator[bytes]) -> bytes: 21 | total = b'' 22 | async for part in it: 23 | assert len(part) <= _PART_SIZE 24 | total += part 25 | 26 | return total 27 | 28 | 29 | async def test_read_all(requests: RequestsController) -> None: 30 | _expect_single_hello(requests).respond_with_data(_DATA) 31 | assert (await Requests.instance.read_all(requests.url_for(_HELLO))) == _DATA 32 | 33 | # Twice to make sure it's not cached. 34 | _expect_single_hello(requests).respond_with_data(_DATA2) 35 | assert (await Requests.instance.read_all(requests.url_for(_HELLO))) == _DATA2 36 | 37 | 38 | async def test_read_all_retries(requests: RequestsController) -> None: 39 | assert Requests.retries == 3 40 | 41 | _expect_single_hello(requests).respond_with_data(status=500) 42 | _expect_single_hello(requests).respond_with_data(status=500) 43 | _expect_single_hello(requests).respond_with_data(status=500) 44 | 45 | with pytest.raises(Exception, match=r'500'): 46 | await Requests.instance.read_all(requests.url_for(_HELLO)) 47 | 48 | _expect_single_hello(requests).respond_with_data(status=500) 49 | _expect_single_hello(requests).respond_with_data(status=500) 50 | _expect_single_hello(requests).respond_with_data(_DATA) 51 | 52 | assert (await Requests.instance.read_all(requests.url_for(_HELLO))) == _DATA 53 | 54 | 55 | async def test_read_all_cached(requests: RequestsController) -> None: 56 | _expect_single_hello(requests).respond_with_data(_DATA) 57 | 58 | assert ( 59 | await Requests.instance.read_all(requests.url_for(_HELLO), cachable=True) 60 | ) == _DATA 61 | assert ( 62 | await Requests.instance.read_all(requests.url_for(_HELLO), cachable=True) 63 | ) == _DATA 64 | 65 | 66 | async def test_read_parts(requests: RequestsController) -> None: 67 | _expect_single_hello(requests).respond_with_data(_DATA) 68 | assert ( 69 | await _read_parts( 70 | Requests.instance.read_parts(requests.url_for(_HELLO), size=_PART_SIZE) 71 | ) 72 | ) == _DATA 73 | 74 | # Twice to make sure it's not cached. 
75 | _expect_single_hello(requests).respond_with_data(_DATA2) 76 | assert ( 77 | await _read_parts( 78 | Requests.instance.read_parts(requests.url_for(_HELLO), size=_PART_SIZE) 79 | ) 80 | ) == _DATA2 81 | 82 | 83 | async def test_read_parts_retries(requests: RequestsController) -> None: 84 | assert Requests.retries == 3 85 | 86 | _expect_single_hello(requests).respond_with_data(status=500) 87 | _expect_single_hello(requests).respond_with_data(status=500) 88 | _expect_single_hello(requests).respond_with_data(_DATA) 89 | 90 | assert ( 91 | await _read_parts( 92 | Requests.instance.read_parts(requests.url_for(_HELLO), size=_PART_SIZE) 93 | ) 94 | ) == _DATA 95 | 96 | _expect_single_hello(requests).respond_with_data(status=500) 97 | _expect_single_hello(requests).respond_with_data(status=500) 98 | _expect_single_hello(requests).respond_with_data(status=500) 99 | 100 | with pytest.raises(Exception, match=r'500'): 101 | await _read_parts( 102 | Requests.instance.read_parts(requests.url_for(_HELLO), size=_PART_SIZE) 103 | ) 104 | 105 | 106 | async def test_read_parts_cached(requests: RequestsController) -> None: 107 | _expect_single_hello(requests).respond_with_data(_DATA) 108 | 109 | assert ( 110 | await _read_parts( 111 | Requests.instance.read_parts( 112 | requests.url_for(_HELLO), size=_PART_SIZE, cachable=True 113 | ) 114 | ) 115 | ) == _DATA 116 | 117 | assert ( 118 | await _read_parts( 119 | Requests.instance.read_parts( 120 | requests.url_for(_HELLO), size=_PART_SIZE, cachable=True 121 | ) 122 | ) 123 | ) == _DATA 124 | -------------------------------------------------------------------------------- /node/tests/test_yarn.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from conftest import ProviderFactorySpec 4 | from flatpak_node_generator.integrity import Integrity 5 | from flatpak_node_generator.package import GitSource, Package, ResolvedSource 6 | from flatpak_node_generator.providers.yarn import YarnLockfileProvider 7 | 8 | TEST_LOCKFILE = """ 9 | # random comment 10 | "@scope/name@^1.0.0": 11 | version "1.1.5" 12 | # random comment 13 | resolved "https://registry.yarnpkg.com/@scope/name/-/name-1.1.tgz#e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e" 14 | integrity sha512-Ortmd680rFfAylgo/ZT52IbCbOWajOYOz2d4B5Qj3M/x1vGctlWAXVYJjm04oacQ3uWVI+7XUR5ankuMyzpGhg== 15 | dependencies: 16 | thing "^2.0.0" 17 | 18 | bling "~2.2.0" 19 | 20 | # random comment 21 | 22 | thing@^2.0.0: 23 | version "2.0.1" 24 | resolved "https://codeload.github.com/flathub/thing/tar.gz/7448d8798a4380162d4b56f9b452e2f6f9e24e7a" 25 | 26 | bling@~2.2.0: 27 | "version" "2.2.0" 28 | "resolved" "https://registry.yarnpkg.com/bling/-/name-1.1.tgz#a3db5c13ff90a36963278c6a39e4ee3c22e2a436" 29 | "integrity" sha512-K1nRedmBWZT2hzg6iG6jQQmIl1bvylqycxjMZ84qISYdEvpv7muMcW9yIU6tVe4NeJ1sNc/5d9QO9XKLqRiKgA== 30 | 31 | "@scope/zing@git+https://somewhere.place/scope/zing": 32 | version "2.0.1" 33 | resolved "git+https://somewhere.place/scope/zing#9c6b057a2b9d96a4067a749ee3b3b0158d390cf1" 34 | """ 35 | 36 | 37 | def test_lockfile_parsing(tmp_path: Path) -> None: 38 | lockfile_provider = YarnLockfileProvider() 39 | 40 | yarn_lock = tmp_path / 'yarn.lock' 41 | yarn_lock.write_text(TEST_LOCKFILE) 42 | 43 | packages = list(lockfile_provider.process_lockfile(yarn_lock)) 44 | 45 | assert packages == [ 46 | Package( 47 | lockfile=yarn_lock, 48 | name='@scope/name', 49 | version='1.1.5', 50 | source=ResolvedSource( 51 | 
resolved='https://registry.yarnpkg.com/@scope/name/-/name-1.1.tgz#e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e', 52 | integrity=Integrity( 53 | 'sha512', 54 | '3abb6677af34ac57c0ca5828fd94f9d886c26ce59a8ce60ecf6778079423dccff1d6f19cb655805d56098e6d38a1a710dee59523eed7511e5a9e4b8ccb3a4686', 55 | ), 56 | ), 57 | ), 58 | Package( 59 | lockfile=yarn_lock, 60 | name='thing', 61 | version='2.0.1', 62 | source=ResolvedSource( 63 | resolved='https://codeload.github.com/flathub/thing/tar.gz/7448d8798a4380162d4b56f9b452e2f6f9e24e7a', 64 | integrity=None, 65 | ), 66 | ), 67 | Package( 68 | lockfile=yarn_lock, 69 | name='bling', 70 | version='2.2.0', 71 | source=ResolvedSource( 72 | resolved='https://registry.yarnpkg.com/bling/-/name-1.1.tgz#a3db5c13ff90a36963278c6a39e4ee3c22e2a436', 73 | integrity=Integrity( 74 | 'sha512', 75 | '2b59d179d9815994f687383a886ea34109889756efca5ab27318cc67ce2a21261d12fa6fee6b8c716f72214ead55ee0d789d6c35cff977d40ef5728ba9188a80', 76 | ), 77 | ), 78 | ), 79 | Package( 80 | lockfile=yarn_lock, 81 | name='@scope/zing', 82 | version='2.0.1', 83 | source=GitSource( 84 | original='git+https://somewhere.place/scope/zing#9c6b057a2b9d96a4067a749ee3b3b0158d390cf1', 85 | url='https://somewhere.place/scope/zing', 86 | commit='9c6b057a2b9d96a4067a749ee3b3b0158d390cf1', 87 | from_=None, 88 | ), 89 | ), 90 | ] 91 | -------------------------------------------------------------------------------- /node/tools/b64-integrity.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | SCRIPT_DIR="$(dirname "$0")" 4 | 5 | if [[ "$#" -ne 1 ]]; then 6 | echo "usage: $0 " >&2 7 | exit 1 8 | fi 9 | 10 | $1sum | cut -d' ' -f1 | "$SCRIPT_DIR/hex-to-b64.sh" 11 | -------------------------------------------------------------------------------- /node/tools/b64-to-hex.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | base64 -d | xxd -p | paste -s | tr -d '\t' 3 | -------------------------------------------------------------------------------- /node/tools/hex-to-b64.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | # Also remove slashes so we can send in _cacache paths. 
3 | tr -d '/\n' | xxd -r -p | base64 -w0 | sed 's/$/\n/' 4 | -------------------------------------------------------------------------------- /node/tools/lockfile-utils.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -e 4 | 5 | die() { 6 | echo "$@" >&2 7 | exit 1 8 | } 9 | 10 | USAGE="$0 [update-lockfile|peek-cache] [npm-14|npm-16|npm-18|yarn] " 11 | 12 | [[ "$#" -eq 3 ]] || die "$USAGE" 13 | 14 | command_arg="$1" 15 | pm_arg="$2" 16 | package_arg="$3" 17 | 18 | [[ "$command_arg" == @(update-lockfile|peek-cache) ]] || die "$USAGE" 19 | 20 | case "$pm_arg" in 21 | npm-*) 22 | case "$pm_arg" in 23 | npm-18) 24 | pm_lockfile=package-lock.v3.json 25 | pm_sdk_ext=node18 26 | ;; 27 | npm-16) 28 | pm_lockfile=package-lock.v2.json 29 | pm_sdk_ext=node16 30 | ;; 31 | npm-14) 32 | pm_lockfile=package-lock.v1.json 33 | pm_sdk_ext=node14 34 | ;; 35 | *) die "$USAGE" ;; 36 | esac 37 | 38 | pm_actual_lockfile=package-lock.json 39 | 40 | pm_cache_dir=npm-cache-$pm_sdk_ext 41 | pm_rc_contents="cache = \"$pm_cache_dir\"" 42 | pm_rc_file=.npmrc 43 | ;; 44 | yarn) 45 | pm_lockfile=yarn.lock 46 | pm_sdk_ext=node16 47 | 48 | pm_actual_lockfile=yarn.lock 49 | 50 | pm_cache_dir=yarn-mirror 51 | pm_rc_contents="yarn-offline-mirror \"./$pm_cache_dir\"" 52 | pm_rc_file=.yarnrc 53 | ;; 54 | *) die "$USAGE" ;; 55 | esac 56 | 57 | pm_name="${pm_arg%-*}" 58 | 59 | package_path="$(dirname "$0")/../tests/data/packages/$package_arg" 60 | [[ -d "$package_path" ]] || die "Unknown package: $package_arg" 61 | 62 | tmpdir=$(mktemp -d) 63 | trap 'rm -rf -- "$tmpdir"' EXIT 64 | 65 | cp "$package_path/package.json" "$tmpdir" 66 | 67 | # Special-case handling for our test of a local package. 68 | [[ -d "$package_path/subdir" ]] && cp -r "$package_path/subdir" "$tmpdir" 69 | 70 | if [[ "$command_arg" == peek-cache ]]; then 71 | cp "$package_path/$pm_lockfile" "$tmpdir" 72 | echo "$pm_rc_contents" > "$tmpdir/$pm_rc_file" 73 | fi 74 | 75 | # Workaround for https://github.com/npm/cli/issues/4896. 76 | gitconfig="$tmpdir/gitconfig" 77 | cat > "$gitconfig" <= 2.2.0. 
11 | 12 | ## Usage 13 | 14 | Usage: `flatpak-opam-generator.py [-h] [--generate PACKAGE] [--install] opam_deps` 15 | 16 | ## Example 17 | 18 | ### Sources file generation 19 | 20 | Let's generate the sources file for the `lablgtk` library: 21 | ``` 22 | $ opam --version 23 | 2.2.0~alpha2 24 | 25 | $ opam tree --json=lablgtk_deps.json lablgtk 26 | 27 | $ ./flatpak-opam-generator.py lablgtk_deps.json > lablgtk_sources.json 28 | ``` 29 | 30 | Check the resulting file: 31 | ``` 32 | $ cat lablgtk_sources.json 33 | ``` 34 | ```json 35 | [ 36 | { 37 | "type": "file", 38 | "url": "https://github.com/garrigue/lablgtk/archive/2.18.13.tar.gz", 39 | "name": "lablgtk.2.18.13", 40 | "md5": "d0a326b99475216cc22232e72c89415f", 41 | "dest": "cache/md5/d0", 42 | "dest-filename": "d0a326b99475216cc22232e72c89415f" 43 | }, 44 | { 45 | "type": "file", 46 | "url": "https://github.com/ocaml/camlp-streams/archive/v5.0.1.tar.gz", 47 | "name": "camlp-streams.5.0.1", 48 | "md5": "afc874b25f7a1f13e8f5cfc1182b51a7", 49 | "dest": "cache/md5/af", 50 | "dest-filename": "afc874b25f7a1f13e8f5cfc1182b51a7" 51 | }, 52 | { 53 | "type": "file", 54 | "url": "https://github.com/ocaml/dune/releases/download/3.11.1/dune-3.11.1.tbz", 55 | "name": "dune.3.11.1", 56 | "sha256": "866f2307adadaf7604f3bf9d98bb4098792baa046953a6726c96c40fc5ed3f71", 57 | "dest": "cache/sha256/86", 58 | "dest-filename": "866f2307adadaf7604f3bf9d98bb4098792baa046953a6726c96c40fc5ed3f71" 59 | }, 60 | { 61 | "type": "file", 62 | "url": "http://download.camlcity.org/download/findlib-1.9.6.tar.gz", 63 | "name": "ocamlfind.1.9.6", 64 | "md5": "96c6ee50a32cca9ca277321262dbec57", 65 | "dest": "cache/md5/96", 66 | "dest-filename": "96c6ee50a32cca9ca277321262dbec57" 67 | } 68 | ] 69 | ``` 70 | 71 | This sources file is intended to be used as an opam cache (see an example of the corresponding flatpak manifest code below). 72 | The cache download path is built from the base URL, followed by the archive-mirror, the hash algorithm, the first two characters of the tarball's hash, and the hex-encoded hash of the archive. 73 | 74 | ### Manifest code generation 75 | 76 | Let's generate the corresponding Flatpak manifest code: 77 | ``` 78 | $ ./flatpak-opam-generator.py --generate lablgtk lablgtk_deps.json 79 | ``` 80 | ```yaml 81 | # Manifest code generated by flatpak-opam-generator 82 | - name: lablgtk 83 | buildsystem: simple 84 | #build-options: 85 | # append-path: 86 | # env: 87 | # OPAMROOT: 88 | # OPAMSWITCH: 89 | sources: 90 | - sources/lablgtk.json 91 | - type: git 92 | branch: master 93 | url: https://github.com/ocaml/opam-repository 94 | build-commands: 95 | - ls -A --color=never | grep -Ev "cache|packages|repo" | xargs rm -rf 96 | - opam admin filter -y lablgtk.2.18.13 camlp-streams.5.0.1 dune.3.11.1 ocamlfind.1.9.6 97 | - opam admin cache 98 | - opam repo add lablgtk . 
99 | - opam install -y lablgtk.2.18.13 camlp-streams.5.0.1 dune.3.11.1 ocamlfind.1.9.6 100 | - opam repo remove --all lablgtk 101 | post-install: 102 | - opam info --field name,all-installed-versions lablgtk 103 | ``` 104 | -------------------------------------------------------------------------------- /opam/flatpak-opam-generator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import argparse 4 | import json 5 | import requests 6 | from pprint import pprint 7 | 8 | install = [] 9 | 10 | def search_dependencies (pkg): 11 | if all ([pkg["dependencies"] != [], 12 | not pkg['is_duplicate'], 13 | pkg['name'] not in ['ocaml', 'ocaml-config', 'ocaml-variants', 'base-domains']]): 14 | install.append({ 15 | 'name': pkg['name'], 16 | 'version': pkg['version'], 17 | }) 18 | for pkg in (pkg['dependencies']): 19 | search_dependencies(pkg) 20 | 21 | def parse (url_section): 22 | lines = url_section.split('\n') 23 | tar_url = None 24 | chktype = None 25 | chksum = None 26 | for i in lines: 27 | i = i.strip() 28 | if i.startswith('"http'): 29 | tar_url = i.strip().strip('"') 30 | if i.startswith('src: "http') or i.startswith('archive: "'): 31 | tar_url = i[i.index('"http'):].strip().strip('"') 32 | if i.startswith('checksum: "'): 33 | chktype = "md5" 34 | chksum = i[i.index('checksum: "')+11:].strip().strip('"') 35 | if i.startswith('"sha256') or i.startswith('checksum: "sha256'): 36 | chktype = "sha256" 37 | chksum = i[i.index('"sha256')+8:].strip().strip('"') 38 | break 39 | if i.startswith('"sha512') or i.startswith('checksum: "sha512'): 40 | chktype = "sha512" 41 | chksum = i[i.index('"sha512')+8:].strip().strip('"') 42 | break 43 | if i.startswith('"md5') or i.startswith('checksum: "md5'): 44 | chktype = "md5" 45 | chksum = i[i.index('"md5')+5:].strip().strip('"') 46 | break 47 | return (tar_url, chktype, chksum) 48 | 49 | def main(): 50 | 51 | parser = argparse.ArgumentParser() 52 | parser.add_argument('opam_deps', help='Path to the .json file generated by: opam tree --json=deps.json [PACKAGE]') 53 | parser.add_argument('--generate', help='Generates Flatpak manifest code for given package', dest='PACKAGE', action='store', required=False) 54 | parser.add_argument('--install', help='Do not construct cache file structure', action='store_true', required=False) 55 | args = parser.parse_args() 56 | 57 | with open(args.opam_deps, 'r') as f: 58 | data = json.load(f) 59 | main_pkg = data['tree'][0] 60 | install.append({ 61 | 'name': main_pkg['name'], 62 | 'version': main_pkg['version'], 63 | }) 64 | deps = main_pkg['dependencies'] 65 | for pkg in deps: 66 | search_dependencies(pkg) 67 | 68 | sources = [] 69 | for pkg in install: 70 | url = 'https://raw.githubusercontent.com/ocaml/opam-repository/master/packages/{}/{}.{}/opam'.format(pkg['name'], pkg['name'], pkg['version']) 71 | response = requests.get(url) 72 | 73 | try: 74 | url_section = response.text.split('url {', 1)[1].split('}')[0] 75 | except: 76 | continue 77 | 78 | tar_url, chktype, chksum = parse(url_section) 79 | if args.install: 80 | sources.append({ 81 | 'type': 'archive', 82 | 'url': tar_url, 83 | 'name': '{}'.format(pkg['name']), 84 | chktype: chksum, 85 | 'dest': 'packages/{}-{}'.format(pkg['name'], pkg['version']) 86 | }) 87 | else: 88 | sources.append({ 89 | 'type': 'file', 90 | 'url': tar_url, 91 | 'name': '{}.{}'.format(pkg['name'], pkg['version']), 92 | chktype: chksum, 93 | 'dest': 'cache/{}/{}'.format(chktype, chksum[:2]), 94 | 'dest-filename': chksum 95 | }) 96 
| 97 | print (json.dumps(sources, indent=2)) 98 | 99 | if args.PACKAGE: 100 | deps = "" 101 | for source in sources: 102 | deps += source['name'] + ' ' 103 | print() 104 | print('# Manifest code generated by flatpak-opam-generator') 105 | print('- name: {}'.format(args.PACKAGE)) 106 | print(' buildsystem: simple') 107 | print(' #build-options:') 108 | print(' # append-path:') 109 | print(' # env:') 110 | print(' # OPAMROOT:') 111 | print(' # OPAMSWITCH:') 112 | print(' sources:') 113 | print(' - sources/{}.json'.format(args.PACKAGE)) 114 | if args.install: 115 | print(' build-commands:') 116 | print(' - >') 117 | print(' for pkg in {}; do'.format(deps)) 118 | print(' opam pin -y --no-action --assume-depexts --ignore-pin-depends $pkg packages/$pkg-*') 119 | print(' done') 120 | print(' - opam install -y {} packages/{}-*'.format(args.PACKAGE, args.PACKAGE)) 121 | print(' post-install:') 122 | print(' - opam info --field name,all-installed-versions {}'.format(args.PACKAGE)) 123 | else: 124 | print(' - type: git') 125 | print(' branch: master') 126 | print(' url: https://github.com/ocaml/opam-repository') 127 | print(' build-commands:') 128 | print(' - ls -A --color=never | grep -Ev "cache|packages|repo" | xargs rm -rf') 129 | print(' - opam admin filter -y {}'.format(deps)) 130 | print(' - opam admin cache') 131 | print(' - opam repo add {} .'.format(args.PACKAGE)) 132 | print(' - opam install -y {}'.format(deps)) 133 | print(' - opam repo remove --all {}'.format(args.PACKAGE)) 134 | print(' post-install:') 135 | print(' - opam info --field name,all-installed-versions {}'.format(args.PACKAGE)) 136 | 137 | if __name__ == '__main__': 138 | main() 139 | -------------------------------------------------------------------------------- /pip/flatpak-pip-generator: -------------------------------------------------------------------------------- 1 | flatpak-pip-generator.py -------------------------------------------------------------------------------- /pip/pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "flatpak_pip_generator" 3 | version = "0.0.1" 4 | description = "Tool to automatically generate flatpak-builder manifest for pip modules" 5 | license = {text = "MIT"} 6 | readme = "readme.md" 7 | requires-python = "<4.0,>=3.10" 8 | dependencies = [ 9 | "requirements-parser<1.0.0,>=0.11.0", 10 | ] 11 | 12 | [project.urls] 13 | Homepage = "https://github.com/flatpak/flatpak-builder-tools/tree/master/pip" 14 | Repository = "https://github.com/flatpak/flatpak-builder-tools.git" 15 | 16 | [dependency-groups] 17 | dev = [ 18 | "mypy<2.0.0,>=1.11.2", 19 | "ruff<1.0.0,>=0.6.7", 20 | "types-pyyaml<7.0.0.0,>=6.0.12.2", 21 | "tomli<3.0.0.0,>=2.2.1", 22 | ] 23 | 24 | [ruff] 25 | line-length = 88 26 | include = ["*.py"] 27 | target-version = "py310" 28 | 29 | [ruff.lint] 30 | preview = true 31 | extend-select = [ 32 | "B", 33 | "ERA", 34 | "I", 35 | "PLE", 36 | "PLW", 37 | "W", 38 | ] 39 | 40 | [ruff.format] 41 | line-ending = "lf" 42 | quote-style = "double" 43 | 44 | [mypy] 45 | disallow_untyped_defs = true 46 | disallow_any_unimported = true 47 | no_implicit_optional = true 48 | check_untyped_defs = true 49 | warn_unused_ignores = true 50 | show_error_codes = true 51 | warn_return_any = true 52 | -------------------------------------------------------------------------------- /pip/readme.md: -------------------------------------------------------------------------------- 1 | # Flatpak PIP Generator 2 | 3 | Tool to automatically generate 
`flatpak-builder` manifest json from a `pip` 4 | package-name. 5 | 6 | This requires `requirements-parser` which can be installed on your host with `pip3 install --user requirements-parser`. 7 | 8 | ## Usage 9 | 10 | `flatpak-pip-generator --runtime='org.freedesktop.Sdk//22.08' foo` which generates `python3-foo.json` and can be included in a manifest like: 11 | 12 | ```json 13 | "modules": [ 14 | "python3-foo.json", 15 | { 16 | "name": "other-modules" 17 | } 18 | ] 19 | ``` 20 | 21 | You can also list multiple packages in a single command: 22 | ``` 23 | flatpak-pip-generator --runtime='org.freedesktop.Sdk//22.08' foo\>=1.0.0,\<2.0.0 bar 24 | ``` 25 | 26 | If your project contains a [requirements.txt file](https://pip.readthedocs.io/en/stable/user_guide/#requirements-files) with all the project dependencies, you can use 27 | ``` 28 | flatpak-pip-generator --runtime='org.freedesktop.Sdk//22.08' --requirements-file='/the/path/to/requirements.txt' --output pypi-dependencies 29 | ``` 30 | 31 | You can use that in your manifest like 32 | ```json 33 | "modules": [ 34 | "pypi-dependencies.json", 35 | { 36 | "name": "other-modules" 37 | } 38 | ] 39 | ``` 40 | 41 | ## Options 42 | 43 | * `--python2`: Build with Python 2. Note that you will have to build [the Python 2 shared-module](https://github.com/flathub/shared-modules/tree/master/python2.7) as it is not in any runtime. 44 | * `--build-isolation`: Enable build isolation with pip (recommended, but it does not always work). 45 | * `--cleanup=(scripts|all)`: Add `cleanup` to the manifest. This is used when the packages installed are only used at build time. 46 | * `--build-only`: Alias to `--cleanup=all`. 47 | * `--requirements-file=`, `-r`: Reads the list of packages from a `requirements.txt` file. Mutually exclusive with `--pyproject-file`. 48 | * `--pyproject-file=`: Reads the list of packages from a `pyproject.toml` file. Mutually exclusive with `--requirements-file` or `-r`. 49 | * `--ignore-pkg=`: Ignore a specific package name when reading a requirements file; it has no effect otherwise. 50 | * `--checker-data`: This adds `x-checker-data` to modules so you will be notified when new releases happen. See [flatpak-external-data-checker](https://github.com/flathub/flatpak-external-data-checker) for more details. 51 | * `--runtime=`: Runs `pip` inside of a specific Flatpak runtime instead of on your host. Highly recommended for reproducibility and portability. Examples would be `org.freedesktop.Sdk//22.08` or `org.gnome.Sdk/aarch64/43`. 52 | * `--ignore-errors=`: Allow the generation of empty or otherwise broken files when downloading packages fails. 53 | * `--ignore-installed=`: Comma-separated list of package names for which pip should ignore already installed packages. Useful when the package is installed in the SDK but not in the runtime. 54 | * `--output=`: Sets an output file. 55 | * `--yaml`: Outputs a YAML file. 56 | 57 | ## Development 58 | 59 | 1. Install uv https://docs.astral.sh/uv/getting-started/installation/ 60 | 2. `uv sync -v --all-groups --frozen` 61 | 3. Format and lint: `uv run ruff format && uv run ruff check --fix --exit-non-zero-on-fix` 62 | 4. 
Type check: `uv run mypy .` 63 | -------------------------------------------------------------------------------- /poetry/pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | package-mode = false 3 | 4 | [project] 5 | name = "flatpak_poetry_generator" 6 | version = "0.0.1" 7 | description = "Script to generate flatpak-builder manifest from Poetry lockfiles" 8 | license = {text = "MIT"} 9 | readme = "readme.md" 10 | requires-python = "<4.0,>=3.9" 11 | dependencies = [ 12 | "toml<1.0.0,>=0.10.2", 13 | ] 14 | 15 | [tool.poetry.group.dev.dependencies] 16 | ruff = "^0.6.7" 17 | mypy = "^1.11.2" 18 | types-toml = "^0.10.8" 19 | 20 | [tool.ruff] 21 | line-length = 88 22 | include = ["*.py"] 23 | target-version = "py39" 24 | 25 | [tool.ruff.lint] 26 | preview = true 27 | extend-select = [ 28 | "B", 29 | "ERA", 30 | "I", 31 | "PLE", 32 | "PLW", 33 | "W", 34 | ] 35 | 36 | [tool.ruff.format] 37 | line-ending = "lf" 38 | quote-style = "double" 39 | 40 | [tool.mypy] 41 | disallow_untyped_defs = true 42 | disallow_any_unimported = true 43 | no_implicit_optional = true 44 | check_untyped_defs = true 45 | warn_unused_ignores = true 46 | show_error_codes = true 47 | warn_return_any = true 48 | -------------------------------------------------------------------------------- /poetry/readme.md: -------------------------------------------------------------------------------- 1 | # Flatpak Python Poetry Lockfile Generator 2 | 3 | Tool to automatically generate `flatpak-builder` manifest json from a poetry.lock file. 4 | 5 | ## Usage 6 | 7 | `flatpak-poetry-generator poetry.lock` which generates 8 | `generated-poetry-sources.json` and can be included in a manifest like: 9 | 10 | ```json 11 | "modules": [ 12 | "generated-poetry-sources.json", 13 | { 14 | "name": "other-modules" 15 | } 16 | ] 17 | ``` 18 | 19 | ## Optional Arguments 20 | - `--production` does not include development dependencies 21 | - `-o outputfile` to output to a different filename 22 | 23 | ## Development 24 | 25 | 1. Install Poetry v2 https://python-poetry.org/docs/#installation 26 | 2. `poetry install --with dev` 27 | 3. Format and lint: `poetry run ruff format && poetry run ruff check --fix --exit-non-zero-on-fix` 28 | 4. Type check: `poetry run mypy .` 29 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | # Flatpak Builder Tools 2 | 3 | This repository contains a collection of various scripts to aid in using `flatpak-builder`. 4 | 5 | Feel free to submit your own scripts that would be useful for others. 6 | 7 | The intended usage of the generators is as a submodule used as part of your build 8 | process to generate manifests. 9 | 10 | See the sub-directories of the respective tools for more information and licenses. 11 | 12 | ## Converting manifests from JSON to YAML 13 | 14 | A simple script to help convert JSON manifests to YAML is also in this repo. 15 | 16 | After cloning the repository you can run `./flatpak-json2yaml.py /path/to/example.json --output=example.yml`. 17 | 18 | This depends on [PyYAML](https://pypi.org/project/PyYAML/) which may need to be installed. 19 | 20 | ## Flutter 21 | 22 | Please look at the [flatpak-flutter](https://github.com/TheAppgineer/flatpak-flutter) 23 | project, developed by [Jan Koudijs](https://github.com/JanKoudijs). 
24 | -------------------------------------------------------------------------------- /rubygems/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2018 Yuto Tokunaga 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 8 | -------------------------------------------------------------------------------- /rubygems/README.md: -------------------------------------------------------------------------------- 1 | Flatpak RubyGems Module Generator 2 | ============================= 3 | Tool to generate `flatpak-builder` manifest json from `Gemfile`. This tool uses bundler's `bundle package` subcommand to get gem names and version numbers. 4 | 5 | Usage 6 | ----- 7 | **REQUIREMENT** Your project's repository contains `Gemfile` (and `Gemfile.lock`) in the repository root. 8 | 9 | 1. `cd` to the repository root and checkout the correct revision. 10 | 2. `bundle install && bundle package` to copy `*.gem` files to `vendor/cache`. 11 | 3. `ruby flatpak_rubygems_generator.rb -s source.json -o rubygems.json` generates `rubygems.json` from `vendor/cache`. Part of generated `rubygems.json` is shown below. This manifest includes another source to get `Gemfile`. 12 | 13 | ```json 14 | { 15 | "name": "rubygems", 16 | "buildsystem": "simple", 17 | "build-commands": [ 18 | "bundle install --local" 19 | ], 20 | "sources": [ 21 | "source.json", 22 | { 23 | "type": "file", 24 | "url": "https://rubygems.org/gems/memoist-0.16.0.gem", 25 | "sha256": "70bd755b48477c9ef9601daa44d298e04a13c1727f8f9d38c34570043174085f", 26 | "dest": "vendor/cache" 27 | }, 28 | ... 29 | ] 30 | } 31 | ``` 32 | 4. Edit `source.json` and specify your project's repository as a source. For example, 33 | 34 | ```json 35 | { 36 | "type": "git", 37 | "url": "git://example.com/repo.git", 38 | "tag": "X.Y.Z" 39 | } 40 | ``` 41 | 5. Edit main manifest json file like below. 42 | 43 | ```json 44 | ... 
45 | "modules": [ 46 | { 47 | "name": "ruby", 48 | "config-opts": [ 49 | "--disable-install-doc" 50 | ], 51 | "sources": [ 52 | { 53 | "type": "archive", 54 | "url": "https://cache.ruby-lang.org/pub/ruby/2.3/ruby-2.3.6.tar.gz", 55 | "sha256": "8322513279f9edfa612d445bc111a87894fac1128eaa539301cebfc0dd51571e" 56 | } 57 | ] 58 | }, 59 | { 60 | "name": "bundler", 61 | "buildsystem": "simple", 62 | "build-commands": [ 63 | "gem install --local bundler-1.16.2.gem" 64 | ], 65 | "sources": [ 66 | { 67 | "type": "file", 68 | "url": "https://rubygems.org/downloads/bundler-1.16.2.gem", 69 | "sha256": "3bb53e03db0a8008161eb4c816ccd317120d3c415ba6fee6f90bbc7f7eec8690" 70 | } 71 | ] 72 | }, 73 | "rubygems.json", 74 | ... 75 | ], 76 | ... 77 | ``` 78 | 79 | -------------------------------------------------------------------------------- /rubygems/flatpak_rubygems_generator.rb: -------------------------------------------------------------------------------- 1 | # frozen_string_literal: true 2 | 3 | require 'optparse' 4 | require 'net/http' 5 | require 'uri' 6 | require 'json' 7 | 8 | # %s is 'gemname-x.y.z.gem' 9 | # ex. 'rails-3.2.1.gem' 10 | GEM_URL = 'https://rubygems.org/gems/%s' 11 | 12 | # %s is gemname 13 | GEM_VERSIONS_URL = 'https://rubygems.org/api/v1/versions/%s.json' 14 | 15 | RE = /^(.+)-(#{Gem::Version::VERSION_PATTERN}).gem$/ 16 | 17 | def split_filename(filename) 18 | gemname, version = RE.match(filename).captures 19 | [gemname, version] 20 | end 21 | 22 | def get_file_hash(gemname, version) 23 | # https://guides.rubygems.org/rubygems-org-api/#gem-version-methods 24 | uri = URI.parse(GEM_VERSIONS_URL % gemname) 25 | result = JSON.parse(Net::HTTP.get(uri)) 26 | result.select { |h| h['number'] == version && h['platform'] == 'ruby' }.first['sha'] 27 | end 28 | 29 | params = { source: nil, out: 'rubygems.json' } 30 | OptionParser.new do |opt| 31 | opt.on('-s', '--source=SOURCE') { |v| v } 32 | opt.on('-o', '--out=OUTPUT') { |v| v } 33 | opt.parse! ARGV, into: params 34 | end 35 | 36 | bundle_command = 'bundle install --local' 37 | sources = Dir.glob('*.gem', base: 'vendor/cache').map do |f| 38 | { 39 | type: 'file', 40 | url: GEM_URL % f, 41 | sha256: get_file_hash(*split_filename(f)), 42 | dest: 'vendor/cache' 43 | } 44 | end 45 | sources = [params[:source]] + sources unless params[:source].nil? 46 | main_module = { 47 | name: 'rubygems', 48 | buildsystem: 'simple', 49 | 'build-commands' => [bundle_command], 50 | sources: sources 51 | } 52 | 53 | File.write params[:out], JSON.pretty_generate(main_module) 54 | -------------------------------------------------------------------------------- /spm/README.md: -------------------------------------------------------------------------------- 1 | # flatpak-spm-generator 2 | 3 | Tool to automatically generate `flatpak-builder` manifest JSON from a Swift package. 4 | 5 | ## Requirements 6 | 7 | Swift is required to execute the script. It is not only written as a Swift script, but also uses Swift to get information about the dependencies of the package. 8 | 9 | ## Usage 10 | 11 | The first step is to convert the dependencies declared in the `Package.swift` file as well as the dependencies of those dependencies into a format flatpak-builder can understand, 12 | and generate a script that makes SPM use the offline dependencies. 13 | ``` 14 | swift flatpak-spm-generator.swift ./quickstart ./quickstart 15 | ``` 16 | 17 | The first path is the directory containing the Swift package, and the second path is the directory containing the Flatpak manifest file. 
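For example, with a hypothetical layout where the Flatpak manifest is kept in a `flatpak/` subdirectory next to the package (the `myapp` paths below are only illustrative, not part of the quickstart project), the call might look like:
```
# myapp/Package.swift                   <- Swift package
# myapp/flatpak/org.example.MyApp.json  <- Flatpak manifest
swift flatpak-spm-generator.swift ./myapp ./myapp/flatpak
```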
18 | 19 | The tool creates a `generated-sources.json` and a `setup-offline.sh` file in the directory of the Flatpak manifest. Here is a sample module of a manifest showing how to use those generated files. 20 | ```json 21 | { 22 | "name": "quickstart", 23 | "buildsystem": "simple", 24 | "sources": [ 25 | { 26 | "type": "dir", 27 | "path": "." 28 | }, 29 | "generated-sources.json" 30 | ], 31 | "build-commands": [ 32 | "./setup-offline.sh", 33 | "swift build -c release --static-swift-stdlib --skip-update", 34 | "install -Dm755 .build/release/quickstart /app/bin/quickstart" 35 | ] 36 | } 37 | ``` 38 | 39 | See the quickstart project for a complete example. 40 | 41 | -------------------------------------------------------------------------------- /spm/flatpak-spm-generator.swift: -------------------------------------------------------------------------------- 1 | import Foundation 2 | 3 | let arguments = CommandLine.arguments 4 | 5 | // The path to the directory containing the SPM manifest. 6 | let path = arguments.count > 1 ? arguments[1] : "." 7 | // The path to the directory containing the Flatpak manifest. 8 | let pathToManifest = arguments.count > 2 ? arguments[2] : "." 9 | 10 | // Build the Swift package to get a complete list of dependencies under "{path}/.build/workspace-state.json". 11 | let task = Process() 12 | task.executableURL = URL(fileURLWithPath: "/usr/bin/env") 13 | task.arguments = ["swift", "build", "-c", "release", "--package-path", path] 14 | try task.run() 15 | task.waitUntilExit() 16 | 17 | // Parse the dependencies in the workspace state file. 18 | let workspaceStatePath = path + "/.build/workspace-state.json" 19 | guard let workspaceStateData = FileManager.default.contents(atPath: workspaceStatePath) else { 20 | fatalError("The workspace state file expected at \(workspaceStatePath) could not be found.") 21 | } 22 | let workspaceState = try JSONDecoder().decode(WorkspaceState.self, from: workspaceStateData) 23 | 24 | // Copy the names of the folders under "{path}/.build/repositories". 25 | let repositoriesPath = path + "/.build/repositories" 26 | let repositoriesContent = try FileManager.default.contentsOfDirectory(atPath: repositoriesPath) 27 | 28 | // Generate the JSON file with the sources and the shell script for tweaks. 29 | var content = """ 30 | [ 31 | """ 32 | var shellContent = """ 33 | #!/usr/bin/env bash 34 | mkdir .build/repositories 35 | cd .build/repositories 36 | """ 37 | 38 | for dependency in workspaceState.object.dependencies { 39 | let subpath = dependency.subpath 40 | content.append(""" 41 | 42 | { 43 | "type": "git", 44 | "url": "\(dependency.packageRef.location)", 45 | "disable-shallow-clone": true, 46 | "commit": "\(dependency.state.checkoutState.revision)", 47 | "dest": ".build/checkouts/\(subpath)" 48 | }, 49 | """) 50 | let folders = repositoriesContent.filter { $0.hasPrefix(subpath + "-") } 51 | for folder in folders { 52 | shellContent.append(""" 53 | 54 | mkdir ./\(folder) 55 | cp -r ../checkouts/\(subpath)/.git/* ./\(folder) 56 | """) 57 | } 58 | } 59 | content.append(""" 60 | 61 | { 62 | "type": "file", 63 | "path": "setup-offline.sh" 64 | } 65 | """) 66 | content.append("\n]") 67 | 68 | // Save the files. 
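// Illustrative note (not generated output): at this point `shellContent` holds a small
// bash script that re-creates the bare repository folders under .build/repositories and
// fills them from the git checkouts, roughly one mkdir/cp pair per dependency, e.g.:
//
//   #!/usr/bin/env bash
//   mkdir .build/repositories
//   cd .build/repositories
//   mkdir ./adwaita-swift-<suffix>
//   cp -r ../checkouts/adwaita-swift/.git/* ./adwaita-swift-<suffix>
//
// This is what lets `swift build --skip-update` resolve dependencies offline in the sandbox.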
69 | let pathToSetup = "\(pathToManifest)/setup-offline.sh" 70 | 71 | let contentData = content.data(using: .utf8) 72 | let shellContentData = shellContent.data(using: .utf8) 73 | try contentData?.write(to: .init(fileURLWithPath: "\(pathToManifest)/generated-sources.json")) 74 | try shellContentData?.write(to: .init(fileURLWithPath: pathToSetup)) 75 | 76 | let executable = Process() 77 | executable.executableURL = URL(fileURLWithPath: "/usr/bin/env") 78 | executable.arguments = ["chmod", "+x", pathToSetup] 79 | try executable.run() 80 | executable.waitUntilExit() 81 | 82 | // Types for decoding workspace state file. 83 | struct Dependency: Codable { 84 | 85 | var packageRef: PackageRef 86 | var state: State 87 | var subpath: String 88 | 89 | struct PackageRef: Codable { 90 | 91 | var location: String 92 | 93 | } 94 | 95 | struct State: Codable { 96 | 97 | var checkoutState: CheckoutState 98 | 99 | } 100 | 101 | struct CheckoutState: Codable { 102 | 103 | var revision: String 104 | 105 | } 106 | 107 | } 108 | 109 | struct WorkspaceState: Codable { 110 | 111 | var object: Object 112 | 113 | struct Object: Codable { 114 | 115 | var dependencies: [Dependency] 116 | 117 | } 118 | 119 | } 120 | -------------------------------------------------------------------------------- /spm/quickstart/Package.swift: -------------------------------------------------------------------------------- 1 | // swift-tools-version: 5.8 2 | 3 | import PackageDescription 4 | 5 | let package = Package( 6 | name: "quickstart", 7 | products: [ 8 | .executable( 9 | name: "quickstart", 10 | targets: ["quickstart"] 11 | ) 12 | ], 13 | dependencies: [ 14 | .package(url: "https://github.com/AparokshaUI/adwaita-swift", from: "0.2.0") 15 | ], 16 | targets: [ 17 | .executableTarget( 18 | name: "quickstart", 19 | dependencies: [.product(name: "Adwaita", package: "adwaita-swift")] 20 | ) 21 | ] 22 | ) 23 | -------------------------------------------------------------------------------- /spm/quickstart/Sources/quickstart/main.swift: -------------------------------------------------------------------------------- 1 | // The Swift Programming Language 2 | // https://docs.swift.org/swift-book 3 | 4 | import Adwaita 5 | 6 | @main 7 | struct QuickStart: App { 8 | 9 | let id = "org.flatpak.quickstart" 10 | var app: GTUIApp! 11 | 12 | var scene: Scene { 13 | Window(id: "main") { _ in 14 | Text("Hello, world!") 15 | .padding(50) 16 | .topToolbar { 17 | HeaderBar.empty() 18 | } 19 | } 20 | .title("Demo") 21 | .resizable(false) 22 | .closeShortcut() 23 | .quitShortcut() 24 | } 25 | 26 | } 27 | -------------------------------------------------------------------------------- /spm/quickstart/org.flatpak.quickstart.json: -------------------------------------------------------------------------------- 1 | { 2 | "app-id": "org.flatpak.quickstart", 3 | "runtime": "org.gnome.Platform", 4 | "runtime-version": "45", 5 | "sdk": "org.gnome.Sdk", 6 | "sdk-extensions": [ 7 | "org.freedesktop.Sdk.Extension.swift5" 8 | ], 9 | "command": "quickstart", 10 | "finish-args": [ 11 | "--share=ipc", 12 | "--socket=fallback-x11", 13 | "--socket=wayland", 14 | "--device=dri" 15 | ], 16 | "build-options": { 17 | "append-path": "/usr/lib/sdk/swift5/bin", 18 | "prepend-ld-library-path": "/usr/lib/sdk/swift5/lib" 19 | }, 20 | "modules": [ 21 | { 22 | "name": "quickstart", 23 | "buildsystem": "simple", 24 | "sources": [ 25 | { 26 | "type": "dir", 27 | "path": "." 
28 | }, 29 | "generated-sources.json" 30 | ], 31 | "build-commands": [ 32 | "./setup-offline.sh", 33 | "swift build -c release --static-swift-stdlib --skip-update", 34 | "install -Dm755 .build/release/quickstart /app/bin/quickstart" 35 | ] 36 | } 37 | ] 38 | } 39 | -------------------------------------------------------------------------------- /yarn/README.md: -------------------------------------------------------------------------------- 1 | **NOTE:** For Node 10+, use [flatpak-node-generator](../node/README.md) instead. 2 | -------------------------------------------------------------------------------- /yarn/flatpak-yarn-generator.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | __license__ = "MIT" 4 | 5 | import argparse 6 | import sys 7 | import json 8 | import re 9 | import urllib.request 10 | import urllib.parse 11 | import hashlib 12 | 13 | electron_arches = { 14 | "armv7l": "arm", 15 | "arm64": "aarch64", 16 | "ia32": "i386", 17 | "x64": "x86_64" 18 | } 19 | 20 | def getModuleSources(lockfile, include_devel=True): 21 | sources = [] 22 | currentSource = '' 23 | currentSourceVersion = '' 24 | yarnVersion = '' 25 | for line in lockfile: 26 | if '# yarn lockfile' in line: 27 | yarnVersion = re.split('# yarn lockfile ', line)[1].strip('\n') 28 | if line.endswith(':\n') and 'dependencies' not in line and 'optionalDependencies' not in line: 29 | listOfNames = re.split(',', line[:-1]) 30 | currentSource = re.split(r'\@[\^\>\=\<\~]*[\d\s\*]', listOfNames[0])[0] 31 | currentSource = currentSource.strip('"').replace('/','-') 32 | if 'version ' in line and currentSource: 33 | currentSourceVersion = re.split('version ', line)[1].strip('\n').strip('"') 34 | if 'resolved ' in line and currentSource and currentSourceVersion: 35 | if currentSource == 'electron': 36 | shasums_url = "https://github.com/electron/electron/releases/download/v" + currentSourceVersion + "/SHASUMS256.txt" 37 | f = urllib.request.urlopen(shasums_url) 38 | shasums = {} 39 | shasums_data = f.read().decode("utf8") 40 | for cksumline in shasums_data.split('\n'): 41 | l = cksumline.split() 42 | if len(l) == 2: 43 | shasums[l[1][1:]] = l[0] 44 | 45 | mini_shasums = "" 46 | for arch in electron_arches.keys(): 47 | zipName = "electron-v" + currentSourceVersion + "-linux-" + arch + ".zip" 48 | source = {'type': 'file', 49 | 'url': 'https://github.com/electron/electron/releases/download/v' + currentSourceVersion + '/' + zipName, 50 | 'sha256': shasums[zipName], 51 | 'dest': 'electron-cache', 52 | 'only-arches': [electron_arches[arch]], 53 | 'dest-filename': currentSource + '-v' + currentSourceVersion + '-linux-' + arch + '.zip'} 54 | sources.append(source) 55 | mini_shasums = mini_shasums + shasums[zipName] + " *" + zipName + "\n" 56 | source = {"type": "file", 57 | "url": "data:" + urllib.parse.quote(mini_shasums.encode("utf8")), 58 | "dest": "electron-cache", 59 | "dest-filename": "SHASUMS256.txt-" + currentSourceVersion} 60 | sources.append(source) 61 | 62 | resolvedStrippedStr = re.split('resolved ', line)[1].strip('\n').strip('"') 63 | tempList = re.split('#', resolvedStrippedStr) 64 | if len(tempList) == 1: 65 | filename = re.split('/', tempList[0])[-1].strip('\n') 66 | shasum = hashlib.sha1() 67 | with urllib.request.urlopen(tempList[0]) as f: 68 | buf = f.read() 69 | shasum.update(buf) 70 | tempList.append(shasum.hexdigest()) 71 | source = {'type': 'file', 72 | 'url': tempList[0], 73 | 'sha1': tempList[1], 74 | 'dest': 'yarn-mirror', 75 | 
'dest-filename': filename} 76 | else: 77 | source = {'type': 'file', 78 | 'url': tempList[0], 79 | 'sha1': tempList[1], 80 | 'dest': 'yarn-mirror', 81 | 'dest-filename': currentSource + '-' + currentSourceVersion + '.tgz'} 82 | currentSource = '' 83 | sources.append(source) 84 | 85 | return sources 86 | 87 | def main(): 88 | parser = argparse.ArgumentParser(description='Flatpak Yarn generator') 89 | parser.add_argument('lockfile', type=str) 90 | parser.add_argument('-o', type=str, dest='outfile', default='generated-sources.json') 91 | parser.add_argument('--production', action='store_true', default=False) 92 | parser.add_argument('--recursive', action='store_true', default=False) 93 | args = parser.parse_args() 94 | 95 | include_devel = not args.production 96 | 97 | outfile = args.outfile 98 | 99 | if args.recursive: 100 | import glob 101 | lockfiles = glob.iglob('**/%s' % args.lockfile, recursive=True) 102 | else: 103 | lockfiles = [args.lockfile] 104 | 105 | sources = [] 106 | for lockfile in lockfiles: 107 | print('Scanning "%s" ' % lockfile, file=sys.stderr) 108 | 109 | with open(lockfile, 'r') as f: 110 | s = getModuleSources(f ,include_devel=include_devel) 111 | sources += s 112 | 113 | print(' ... %d new entries' % len(s), file=sys.stderr) 114 | 115 | sources = remove_duplicates(sources) 116 | print('%d total entries after removing duplicates' % len(sources), file=sys.stderr) 117 | 118 | print('Writing to "%s"' % outfile) 119 | with open(outfile, 'w') as f: 120 | f.write(json.dumps(sources, indent=4)) 121 | 122 | def remove_duplicates(items): 123 | new_list = [] 124 | for obj in items: 125 | if obj not in new_list: 126 | new_list.append(obj) 127 | return new_list 128 | 129 | 130 | if __name__ == '__main__': 131 | main() 132 | -------------------------------------------------------------------------------- /yarn/io.atom.electron.ElectronQuickStart.json: -------------------------------------------------------------------------------- 1 | { 2 | "app-id": "io.atom.electron.ElectronQuickStart", 3 | "runtime": "org.freedesktop.Platform", 4 | "runtime-version": "1.6", 5 | "base": "io.atom.electron.BaseApp", 6 | "base-version": "stable", 7 | "branch": "stable", 8 | "sdk": "org.freedesktop.Sdk", 9 | "command": "run.sh", 10 | "separate-locales": false, 11 | "finish-args": [ 12 | /* X11 + XShm access */ 13 | "--share=ipc", "--socket=x11", 14 | /* Sound access */ 15 | "--socket=pulseaudio", 16 | /* Network access */ 17 | "--share=network" 18 | ], 19 | "build-options" : { 20 | "cflags": "-O2 -g", 21 | "cxxflags": "-O2 -g", 22 | "env": { 23 | "NPM_CONFIG_LOGLEVEL": "info" 24 | } 25 | }, 26 | "modules": [ 27 | { 28 | "name": "nodejs", 29 | "cleanup": [ 30 | "/include", 31 | "/share", 32 | "/app/lib/node_modules/npm/changelogs", 33 | "/app/lib/node_modules/npm/doc", 34 | "/app/lib/node_modules/npm/html", 35 | "/app/lib/node_modules/npm/man", 36 | "/app/lib/node_modules/npm/scripts" 37 | ], 38 | "sources": [ 39 | { 40 | "type": "archive", 41 | "url": "https://nodejs.org/dist/v8.9.2/node-v8.9.2.tar.gz", 42 | "sha256": "8038fa61ac4562786fcc6d3229f3caa07a864b94bd5922fe75942a38ea1c3a3b" 43 | } 44 | ] 45 | }, 46 | { 47 | "name": "yarn", 48 | "buildsystem": "simple", 49 | "build-commands": [ 50 | "mkdir -p /app/share/yarn", 51 | "cp -a * /app/share/yarn/", 52 | "ln -s /app/share/yarn/bin/yarn /app/bin/yarn", 53 | "ln -s /app/share/yarn/bin/yarnpkg /app/bin/yarnpkg" 54 | ], 55 | "sources": [ 56 | { 57 | "type": "archive", 58 | "url": 
"https://github.com/yarnpkg/yarn/releases/download/v1.3.2/yarn-v1.3.2.tar.gz", 59 | "sha256": "6cfe82e530ef0837212f13e45c1565ba53f5199eec2527b85ecbcd88bf26821d" 60 | } 61 | ] 62 | }, 63 | { 64 | "name": "electron-quick-start", 65 | "build-options" : { 66 | "env": { 67 | /* Need this for electron-download to find the cached electron binary */ 68 | "electron_config_cache": "/run/build/electron-quick-start/electron-cache" 69 | } 70 | }, 71 | "buildsystem": "simple", 72 | "sources": [ 73 | { 74 | "type": "git", 75 | "url": "https://github.com/electron/electron-quick-start.git", 76 | "commit": "71199c46fc6d599ab3f29f25e837d0e1768b5ef7", 77 | "dest": "main" 78 | }, 79 | /* This file should really have been in the repo, but 80 | here is a copy created by running npm install 81 | outside the sandbox */ 82 | { 83 | "type": "file", 84 | "path": "electron-quick-start-yarn.lock", 85 | "dest-filename": "yarn.lock", 86 | "dest": "main" 87 | }, 88 | { 89 | "type": "file", 90 | "path": "yarnrc", 91 | "dest": "main", 92 | "dest-filename": ".yarnrc" 93 | }, 94 | /* This file is autogenerated by: flatpak-yarn-generator.py electron-quick-start-yarn.lock generated-sources.json */ 95 | "generated-sources.json", 96 | /* Wrapper to launch the app */ 97 | { 98 | "type": "script", 99 | "dest-filename": "run.sh", 100 | "commands": [ 101 | "cd /app/main", 102 | "yarn start" 103 | ] 104 | } 105 | ], 106 | "build-commands": [ 107 | /* Install yarn dependencies */ 108 | "yarn install --cwd /run/build/electron-quick-start/main --offline", 109 | /* Bundle app and dependencies */ 110 | "mkdir -p /app/main /app/bin", 111 | "cp -ra * /app/main/", 112 | /* Install app wrapper */ 113 | "install run.sh /app/bin/" 114 | ] 115 | } 116 | ] 117 | } 118 | -------------------------------------------------------------------------------- /yarn/yarnrc: -------------------------------------------------------------------------------- 1 | # THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY. 2 | # yarn lockfile v1 3 | 4 | 5 | yarn-offline-mirror "/run/build/electron-quick-start/yarn-mirror" 6 | yarn-offline-mirror-pruning true 7 | --------------------------------------------------------------------------------