├── .dockerignore
├── .gitattributes
├── .github
│   ├── CODEOWNERS
│   ├── FUNDING.yml
│   ├── ISSUE_TEMPLATE
│   │   ├── bug.yml
│   │   ├── config.yml
│   │   └── feat.yml
│   ├── renovate.json
│   └── workflows
│       ├── build.yml
│       ├── release.yml
│       ├── update-graphql-schema.yml
│       └── update-inno-dependencies.yml
├── .gitignore
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── Cargo.lock
├── Cargo.toml
├── Dockerfile
├── LICENSE.md
├── README.md
├── assets
│   ├── CodeDependencies.iss
│   ├── banner.svg
│   ├── demo.gif
│   ├── github.graphql
│   ├── gpl-3.0.rst
│   ├── installer.iss
│   ├── logo.ico
│   └── logo.svg
├── build.rs
├── rustfmt.toml
└── src
    ├── commands
    │   ├── analyse.rs
    │   ├── cleanup.rs
    │   ├── complete.rs
    │   ├── list_versions.rs
    │   ├── mod.rs
    │   ├── new_version.rs
    │   ├── remove_dead_versions.rs
    │   ├── remove_version.rs
    │   ├── show_version.rs
    │   ├── submit.rs
    │   ├── sync_fork.rs
    │   ├── token
    │   │   ├── commands.rs
    │   │   ├── mod.rs
    │   │   ├── remove.rs
    │   │   └── update.rs
    │   ├── update_version.rs
    │   └── utils
    │       └── mod.rs
    ├── credential.rs
    ├── download
    │   ├── downloader.rs
    │   ├── file.rs
    │   └── mod.rs
    ├── download_file.rs
    ├── editor.rs
    ├── file_analyser.rs
    ├── github
    │   ├── github_client.rs
    │   ├── graphql
    │   │   ├── create_commit.rs
    │   │   ├── create_pull_request.rs
    │   │   ├── create_ref.rs
    │   │   ├── get_all_values.rs
    │   │   ├── get_branches.rs
    │   │   ├── get_current_user_login.rs
    │   │   ├── get_directory_content.rs
    │   │   ├── get_directory_content_with_text.rs
    │   │   ├── get_existing_pull_request.rs
    │   │   ├── get_file_content.rs
    │   │   ├── get_repository_info.rs
    │   │   ├── github_schema.rs
    │   │   ├── merge_upstream.rs
    │   │   ├── mod.rs
    │   │   ├── types.rs
    │   │   └── update_refs.rs
    │   ├── mod.rs
    │   ├── rest
    │   │   ├── get_tree.rs
    │   │   └── mod.rs
    │   └── utils
    │       ├── mod.rs
    │       ├── package_path.rs
    │       └── pull_request.rs
    ├── installers
    │   ├── burn
    │   │   ├── manifest.rs
    │   │   ├── mod.rs
    │   │   └── wix_burn_stub.rs
    │   ├── inno
    │   │   ├── compression.rs
    │   │   ├── encoding.rs
    │   │   ├── entry
    │   │   │   ├── component.rs
    │   │   │   ├── condition.rs
    │   │   │   ├── directory.rs
    │   │   │   ├── file.rs
    │   │   │   ├── icon.rs
    │   │   │   ├── ini.rs
    │   │   │   ├── language.rs
    │   │   │   ├── message.rs
    │   │   │   ├── mod.rs
    │   │   │   ├── permission.rs
    │   │   │   ├── registry.rs
    │   │   │   ├── task.rs
    │   │   │   └── type.rs
    │   │   ├── enum_value.rs
    │   │   ├── flag_reader.rs
    │   │   ├── header
    │   │   │   ├── architecture.rs
    │   │   │   ├── enums.rs
    │   │   │   ├── flags.rs
    │   │   │   └── mod.rs
    │   │   ├── loader.rs
    │   │   ├── mod.rs
    │   │   ├── read
    │   │   │   ├── block.rs
    │   │   │   ├── chunk.rs
    │   │   │   ├── crc32.rs
    │   │   │   ├── decoder.rs
    │   │   │   └── mod.rs
    │   │   ├── version.rs
    │   │   ├── windows_version.rs
    │   │   └── wizard.rs
    │   ├── mod.rs
    │   ├── msi
    │   │   └── mod.rs
    │   ├── msix_family
    │   │   ├── bundle.rs
    │   │   ├── mod.rs
    │   │   └── utils.rs
    │   ├── nsis
    │   │   ├── entry
    │   │   │   ├── creation_disposition.rs
    │   │   │   ├── exec_flag.rs
    │   │   │   ├── generic_access_rights.rs
    │   │   │   ├── mod.rs
    │   │   │   ├── push_pop.rs
    │   │   │   ├── seek_from.rs
    │   │   │   ├── show_window.rs
    │   │   │   └── window_message.rs
    │   │   ├── file_system
    │   │   │   ├── item.rs
    │   │   │   └── mod.rs
    │   │   ├── first_header
    │   │   │   ├── flags.rs
    │   │   │   ├── mod.rs
    │   │   │   └── signature.rs
    │   │   ├── header
    │   │   │   ├── block.rs
    │   │   │   ├── compression.rs
    │   │   │   ├── decoder.rs
    │   │   │   ├── flags.rs
    │   │   │   └── mod.rs
    │   │   ├── language
    │   │   │   ├── mod.rs
    │   │   │   └── table.rs
    │   │   ├── mod.rs
    │   │   ├── registry.rs
    │   │   ├── section
    │   │   │   ├── flags.rs
    │   │   │   └── mod.rs
    │   │   ├── state.rs
    │   │   ├── strings
    │   │   │   ├── code.rs
    │   │   │   ├── mod.rs
    │   │   │   ├── predefined.rs
    │   │   │   ├── shell.rs
    │   │   │   └── var.rs
    │   │   └── version.rs
    │   ├── possible_installers.rs
    │   ├── utils
    │   │   ├── lzma_stream_header.rs
    │   │   ├── mod.rs
    │   │   └── registry.rs
    │   └── zip.rs
    ├── main.rs
    ├── manifests
    │   ├── manifest.rs
    │   ├── mod.rs
    │   └── url.rs
    ├── match_installers.rs
    ├── prompts
    │   ├── list.rs
    │   ├── mod.rs
    │   └── text.rs
    ├── terminal
    │   ├── hyperlink.rs
    │   └── mod.rs
    ├── traits
    │   ├── mod.rs
    │   ├── name.rs
    │   └── path.rs
    └── update_state.rs
/.dockerignore:
--------------------------------------------------------------------------------
1 | .github
2 | .gitattributes
3 | .gitignore
4 | *.md
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @russellbanks
2 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: russellbanks
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
13 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
14 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug.yml:
--------------------------------------------------------------------------------
1 | name: 'Bug Report'
2 | description: File a bug report
3 | title: '[Bug]: '
4 | labels: [bug, help wanted]
5 | assignees: russellbanks
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | Thanks for taking the time to fill out this bug report!
11 | - type: checkboxes
12 | attributes:
13 | label: Is there an existing issue for this?
14 | description: Please search to see if an issue already exists for the bug you noticed.
15 | options:
16 | - label: I have searched the existing issues
17 | required: true
18 | - type: textarea
19 | attributes:
20 | label: What happened?
21 | description: Also tell us, what did you expect to happen?
22 | placeholder: Tell us what you see!
23 | validations:
24 | required: true
25 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: true
2 | contact_links:
3 | - name: Review open issues
4 | url: https://github.com/russellbanks/Komac/issues
5 | about: Please ensure you have gone through open issues.
6 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feat.yml:
--------------------------------------------------------------------------------
1 | name: 'Feature Request'
2 | description: Request a new feature
3 | title: '[Feature/Idea]: '
4 | labels: [feat]
5 | assignees: russellbanks
6 | body:
7 | - type: markdown
8 | attributes:
9 | value: |
10 | Thanks for taking the time to request a new feature!
11 | - type: textarea
12 | attributes:
13 | label: What would you like to see changed/added?
14 | description: Try to give some examples to make it really clear!
15 | placeholder: Tell us what you would like to see! Something new and amazing!
16 | validations:
17 | required: true
18 |
--------------------------------------------------------------------------------
/.github/renovate.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://docs.renovatebot.com/renovate-schema.json",
3 | "extends": [
4 | "config:recommended",
5 | ":maintainLockFilesWeekly",
6 | ":semanticCommitsDisabled"
7 | ],
8 | "rangeStrategy": "bump"
9 | }
10 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | name: Cargo Build & Test
2 |
3 | on:
4 | push:
5 | paths:
6 | - .github/workflows/build.yml
7 | - .github/workflows/release.yml
8 | - assets/**
9 | - src/**
10 | - Cargo.toml
11 | - Cargo.lock
12 | - build.rs
13 | pull_request:
14 | paths:
15 | - .github/workflows/build.yml
16 | - .github/workflows/release.yml
17 | - assets/**
18 | - src/**
19 | - Cargo.toml
20 | - Cargo.lock
21 | - build.rs
22 |
23 | env:
24 | CARGO_TERM_COLOR: always
25 |
26 | jobs:
27 | build_and_test:
28 | name: Rust project - latest
29 | strategy:
30 | matrix:
31 | os: [ ubuntu-latest, windows-latest, macos-latest ]
32 | runs-on: ${{ matrix.os }}
33 |
34 | steps:
35 | - name: Checkout repository
36 | uses: actions/checkout@v4
37 |
38 | - name: Install Rust
39 | uses: moonrepo/setup-rust@v1
40 | with:
41 | cache: false
42 |
43 | - name: Check
44 | run: cargo check
45 |
46 | - name: Test
47 | run: cargo test
48 |
49 | - name: Publish dry run
50 | run: cargo publish --dry-run
51 |
52 | msrv:
53 | name: MSRV
54 | runs-on: ubuntu-latest
55 | steps:
56 | - name: Checkout repository
57 | uses: actions/checkout@v4
58 |
59 | - name: Install Rust
60 | uses: moonrepo/setup-rust@v1
61 | with:
62 | bins: cargo-msrv
63 |
64 | - name: Verify MSRV
65 | id: verify
66 | run: cargo msrv verify
67 |
68 | - name: Find actual MSRV
69 | if: steps.verify.outcome == 'failure'
70 | run: cargo msrv find
71 |
--------------------------------------------------------------------------------
/.github/workflows/update-graphql-schema.yml:
--------------------------------------------------------------------------------
1 | name: Update local copy of GraphQL schema
2 |
3 | on:
4 | schedule:
5 | - cron: '0 9 * * *'
6 |
7 | jobs:
8 | update-schema:
9 | runs-on: ubuntu-latest
10 | permissions:
11 | contents: write
12 | pull-requests: write
13 |
14 | steps:
15 | - name: Clone repository
16 | uses: actions/checkout@v4
17 |
18 | - name: Download latest GitHub GraphQL Schema
19 | run: curl -L https://docs.github.com/public/fpt/schema.docs.graphql -o assets/github.graphql
20 |
21 | - name: Create Pull Request
22 | uses: peter-evans/create-pull-request@v7
23 | with:
24 | commit-message: "Update GitHub GraphQL Schema"
25 | branch: update-github-graphql-schema
26 | title: "Update GitHub GraphQL Schema"
27 | body: "This is an automated pull request to update the local GitHub GraphQL schema"
28 |
--------------------------------------------------------------------------------
/.github/workflows/update-inno-dependencies.yml:
--------------------------------------------------------------------------------
1 | name: Update local copy of Inno Setup Dependency Installer script
2 |
3 | on:
4 | schedule:
5 | - cron: '0 9 1 * *'
6 |
7 | jobs:
8 | update-schema:
9 | runs-on: ubuntu-latest
10 | permissions:
11 | contents: write
12 | pull-requests: write
13 |
14 | steps:
15 | - name: Clone repository
16 | uses: actions/checkout@v4
17 |
18 | - name: Download latest Inno Setup Dependency Installer script
19 | run: curl -L https://github.com/DomGries/InnoDependencyInstaller/raw/HEAD/CodeDependencies.iss -o assets/CodeDependencies.iss
20 |
21 | - name: Create Pull Request
22 | uses: peter-evans/create-pull-request@v7
23 | with:
24 | commit-message: "Update Inno Setup Dependency Installer script"
25 | branch: update-inno-dependencies-script
26 | title: "Update Inno Setup Dependency Installer script"
27 | body: "This is an automated pull request to update the Inno Setup Dependency Installer script from [DomGries/InnoDependencyInstaller](https://github.com/DomGries/InnoDependencyInstaller)"
28 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /target
2 | .idea
3 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | ## Contributing
2 |
3 | Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.
4 |
5 | * If you have suggestions for adding or removing projects, feel free to [open an issue](https://github.com/russellbanks/Komac/issues/new) to discuss it, or directly create a pull request after you edit the *README.md* file with necessary changes.
6 | * Please make sure you check your spelling and grammar.
7 | * Create an individual PR for each suggestion.
8 | * Please also read through the [Code of Conduct](./CODE_OF_CONDUCT.md) before posting your first idea.
9 |
10 | ### Creating a Pull Request
11 |
12 | 1. Fork the Project
13 | 2. Create your Feature Branch (`git checkout -b feat/new-feature`)
14 | 3. Commit your Changes (`git commit -m 'Add some feature'`)
15 | 4. Push to the Branch (`git push origin feat/new-feature`)
16 | 5. Open a Pull Request
17 |
18 | ### Testing your changes
19 |
20 | Using Docker is the easiest way to test your code before submitting a pull request.
21 |
22 | > [!NOTE]
23 | > When using the Docker container on Windows, the WSL engine does not support the default collection for keys or tokens. This means that when testing inside the container, GitHub tokens will not be stored, even when `komac token update` is used.
24 | >
25 | > This is a [known issue](https://github.com/hwchen/keyring-rs/blob/47c8daf3e6178a2282ae3e8670d1ea7fa736b8cb/src/secret_service.rs#L73-L77) which is documented in the keyring crate.
26 | >
27 | > As a workaround, you can set the `GITHUB_TOKEN` environment variable from within the container, in the `docker run` command, or in the Dockerfile itself.
28 |
29 | 1. Ensure you have Docker installed and the Docker engine is running.
30 | 2. Run `docker build ./ --tag komac_dev:latest`.
31 | 3. Wait for the build to complete.
32 | 4. Start the container using `docker run -it komac_dev bash`.
33 | 5. Test out any commands. Use the `exit` command to quit the container.
--------------------------------------------------------------------------------
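For reference, the workaround described in the note above can be applied directly on the `docker run` line from step 4. A minimal sketch: `komac_dev` is the image tag built in step 2, and the token value is a placeholder for your own personal access token.

```bash
# Pass a GitHub token into the container so Komac can use it without the keyring
docker run -it -e GITHUB_TOKEN=<your-github-token> komac_dev bash
```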
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "komac"
3 | version = "2.12.0"
4 | authors = ["Russell Banks"]
5 | edition = "2024"
6 | rust-version = "1.85"
7 | description = "A manifest creator for winget-pkgs"
8 | license = "GPL-3.0-or-later"
9 | repository = "https://github.com/russellbanks/Komac"
10 | readme = "README.md"
11 | documentation = "https://github.com/russellbanks/Komac/blob/main/README.md"
12 | categories = ["command-line-utilities", "development-tools"]
13 | keywords = ["winget", "winget-pkgs", "winget-cli", "windows"]
14 | build = "build.rs"
15 | include = ["**/*.rs", "assets/github.graphql", "assets/logo.ico"]
16 |
17 | [[bin]]
18 | name = "komac"
19 | path = "src/main.rs"
20 |
21 | [profile.release]
22 | codegen-units = 1
23 | lto = true
24 | strip = true
25 |
26 | [dependencies]
27 | anstream = "0.6.18"
28 | base64ct = { version = "1.8.0", features = ["std"] }
29 | bit-set = "0.8.0"
30 | bitflags = "2.9.1"
31 | bon = "3.6.3"
32 | byteorder = "1.5.0"
33 | bytes = "1.10.1"
34 | bzip2 = "0.5.2"
35 | cab = "0.6.0"
36 | camino = { version = "1.1.10", features = ["serde1"] }
37 | chrono = { version = "0.4.41", features = ["serde"] }
38 | clap = { version = "4.5.39", features = ["derive", "cargo", "env"] }
39 | clap_complete = "4.5.52"
40 | codepage = "0.1.2"
41 | color-eyre = { version = "0.6.5", default-features = false }
42 | compact_str = "0.9.0"
43 | const_format = { version = "0.2.34", features = ["derive"] }
44 | crc32fast = "1.4.2"
45 | crossbeam-channel = "0.5.15"
46 | crossterm = "0.29.0"
47 | cynic = { version = "3.11.0", features = ["http-reqwest"] }
48 | derive-new = "0.7.0"
49 | derive_more = { version = "2.0.1", features = ["as_ref", "debug", "deref", "deref_mut", "display", "from_str", "into", "into_iterator"] }
50 | encoding_rs = "0.8.35"
51 | flate2 = "1.1.1"
52 | futures = "0.3.31"
53 | futures-util = "0.3.31"
54 | html2text = "0.15.1"
55 | indexmap = "2.9.0"
56 | indextree = "4.7.4"
57 | indicatif = "0.17.11"
58 | inquire = "0.7.5"
59 | itertools = "0.14.0"
60 | keyring = { version = "3.6.2", features = ["apple-native", "crypto-openssl", "sync-secret-service", "vendored", "windows-native"] }
61 | liblzma = { version = "0.4.1", features = ["static"] }
62 | memchr = "2.7.4"
63 | memmap2 = "0.9.5"
64 | msi = "0.8.0"
65 | nt-time = { version = "0.11.1", features = ["chrono"] }
66 | num_cpus = "1.17.0"
67 | open = "5.3.2"
68 | ordinal = "0.4.0"
69 | owo-colors = "4.2.1"
70 | protobuf = "3.7.2"
71 | quick-xml = { version = "0.37.5", features = ["serialize"] }
72 | rand = "0.9.1"
73 | ratatui = "0.29.0"
74 | regex = "1.11.1"
75 | reqwest = { version = "0.12.19", features = ["native-tls-vendored", "stream"] }
76 | serde = { version = "1.0.219", features = ["derive"] }
77 | serde_json = "1.0.140"
78 | serde_yaml = "0.9.34"
79 | sha2 = "0.10.9"
80 | strsim = "0.11.1"
81 | strum = { version = "0.27.1", features = ["derive"] }
82 | supports-hyperlinks = "3.1.0"
83 | tempfile = "3.20.0"
84 | thiserror = "2.0.12"
85 | tokio = { version = "1.45.1", features = ["rt-multi-thread", "macros", "fs", "parking_lot"] }
86 | tracing = { version = "0.1.41", features = ["release_max_level_warn"] }
87 | tracing-indicatif = "0.3.9"
88 | tracing-subscriber = "0.3.19"
89 | tree-sitter-highlight = "0.25.6"
90 | tree-sitter-yaml = "0.7.1"
91 | tui-textarea = { version = "0.7.0", features = ["search"] }
92 | url = { version = "2.5.4", features = ["serde"] }
93 | uuid = { version = "1.17.0", features = ["v4"] }
94 | walkdir = "2.5.0"
95 | winget-types = { version = "0.3.0", features = ["serde", "std", "chrono"] }
96 | yara-x = { version = "0.15.0", default-features = false, features = ["pe-module"] }
97 | zerocopy = { version = "0.8.25", features = ["derive", "std"] }
98 | zip = { version = "4.0.0", default-features = false, features = ["deflate"] }
99 |
100 | [build-dependencies]
101 | cynic-codegen = { version = "3.11.0", features = ["rkyv"] }
102 | windows_exe_info = { version = "0.5.2", features = ["manifest"] }
103 |
104 | [dev-dependencies]
105 | indoc = "2.0.6"
106 | rstest = "0.25.0"
107 |
108 | [package.metadata.generate-rpm]
109 | assets = [
110 | { source = "target/release/komac", dest = "/usr/bin/komac", mode = "755" },
111 | ]
112 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM rust:slim as build
2 |
3 | # Copy source code into the build container
4 | WORKDIR /usr/src
5 | COPY ./ /usr/src
6 |
7 | # Install apt packages required for building the package dependencies
8 | RUN apt-get update \
9 | && apt-get install -y --no-install-recommends --no-install-suggests \
10 | libssl-dev \
11 | perl \
12 | make
13 |
14 | # Build Komac from the source code
15 | RUN cargo build --release
16 |
17 | # Create a new container for the release image
18 | FROM debian:bookworm-slim as release
19 | RUN apt-get update \
20 | && apt-get install -y --no-install-recommends --no-install-suggests \
21 | ca-certificates \
22 | && rm -rf \
23 | /var/lib/apt/lists/* \
24 | /tmp/* \
25 | /var/tmp/*
26 |
27 | COPY --from=build /usr/src/target/release/komac /usr/local/bin/
28 | WORKDIR /root
--------------------------------------------------------------------------------
/assets/demo.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/russellbanks/Komac/5e13baafd65c5985d3603d6201c8a872d38d0610/assets/demo.gif
--------------------------------------------------------------------------------
/assets/installer.iss:
--------------------------------------------------------------------------------
1 | #define AppName "Komac"
2 | #define Version GetFileProductVersion(InputExecutable)
3 | #define Publisher "Russell Banks"
4 | #define URL "https://github.com/russellbanks/Komac"
5 | #define ExeName GetFileOriginalFilename(InputExecutable)
6 |
7 | #if Pos("x64", Architecture) > 0
8 | #define ArchAllowed "x64compatible and not arm64"
9 | #else
10 | #define ArchAllowed Architecture
11 | #endif
12 |
13 | #include "CodeDependencies.iss"
14 |
15 | [Setup]
16 | AppId={{776938BF-CF8E-488B-A3DF-8048BC64F2CD}
17 | AppName={#AppName}
18 | AppVersion={#Version}
19 | AppPublisher={#Publisher}
20 | AppPublisherURL={#URL}
21 | AppSupportURL={#URL}
22 | AppUpdatesURL={#URL}
23 | DefaultDirName={autopf}\{#AppName}
24 | DisableDirPage=yes
25 | DefaultGroupName={#AppName}
26 | DisableProgramGroupPage=yes
27 | LicenseFile=gpl-3.0.rst
28 | PrivilegesRequired=lowest
29 | PrivilegesRequiredOverridesAllowed=dialog
30 | OutputBaseFilename={#AppName}Setup-{#Version}-{#Architecture}
31 | SetupIconFile=logo.ico
32 | UninstallDisplayName={#AppName} ({#Architecture})
33 | WizardStyle=modern
34 | ChangesEnvironment=yes
35 | ArchitecturesAllowed={#ArchAllowed}
36 | ArchitecturesInstallIn64BitMode={#ArchAllowed}
37 |
38 | [Languages]
39 | Name: "english"; MessagesFile: "compiler:Default.isl"
40 |
41 | [Files]
42 | Source: "{#InputExecutable}"; DestDir: "{app}\bin"; DestName: "{#ExeName}"
43 |
44 | [Code]
45 | function InitializeSetup: Boolean;
46 | begin
47 | Dependency_AddVC2015To2022;
48 | Result := True;
49 | end;
50 |
51 | procedure EnvAddPath(Path: string);
52 | var
53 | Paths: string;
54 | RootKey: Integer;
55 | EnvironmentKey: string;
56 | begin
57 | if IsAdminInstallMode() then
58 | begin
59 | EnvironmentKey := 'SYSTEM\CurrentControlSet\Control\Session Manager\Environment';
60 | RootKey := HKEY_LOCAL_MACHINE;
61 | end
62 | else
63 | begin
64 | EnvironmentKey := 'Environment';
65 | RootKey := HKEY_CURRENT_USER;
66 | end;
67 |
68 | { Retrieve current path (use empty string if entry not exists) }
69 | if not RegQueryStringValue(RootKey, EnvironmentKey, 'Path', Paths)
70 | then Paths := '';
71 |
72 | { Skip if string already found in path }
73 | if Pos(';' + Uppercase(Path) + ';', ';' + Uppercase(Paths) + ';') > 0 then exit;
74 |
75 | { Append the string to the end of the path variable }
76 | Paths := Paths + ';'+ Path +';'
77 |
78 | { Overwrite (or create if missing) path environment variable }
79 | if RegWriteStringValue(RootKey, EnvironmentKey, 'Path', Paths)
80 | then Log(Format('The [%s] added to PATH: [%s]', [Path, Paths]))
81 | else Log(Format('Error while adding the [%s] to PATH: [%s]', [Path, Paths]));
82 | end;
83 |
84 |
85 | procedure EnvRemovePath(Path: string);
86 | var
87 | Paths: string;
88 | P: Integer;
89 | RootKey: Integer;
90 | EnvironmentKey: string;
91 | begin
92 | if Pos(ExpandConstant('{commonpf}'), ExpandConstant('{app}')) = 1 then
93 | begin
94 | EnvironmentKey := 'SYSTEM\CurrentControlSet\Control\Session Manager\Environment';
95 | RootKey := HKEY_LOCAL_MACHINE;
96 | end
97 | else
98 | begin
99 | EnvironmentKey := 'Environment';
100 | RootKey := HKEY_CURRENT_USER;
101 | end;
102 |
103 | { Skip if registry entry not exists }
104 | if not RegQueryStringValue(RootKey, EnvironmentKey, 'Path', Paths) then
105 | exit;
106 |
107 | { Skip if string not found in path }
108 | P := Pos(';' + Uppercase(Path) + ';', ';' + Uppercase(Paths) + ';');
109 | if P = 0 then exit;
110 |
111 | { Update path variable }
112 | Delete(Paths, P - 1, Length(Path) + 1);
113 |
114 | { Overwrite path environment variable }
115 | if RegWriteStringValue(RootKey, EnvironmentKey, 'Path', Paths)
116 | then Log(Format('The [%s] removed from PATH: [%s]', [Path, Paths]))
117 | else Log(Format('Error while removing the [%s] from PATH: [%s]', [Path, Paths]));
118 | end;
119 |
120 | procedure CurStepChanged(CurStep: TSetupStep);
121 | begin
122 | if CurStep = ssPostInstall
123 | then EnvAddPath(ExpandConstant('{app}') +'\bin');
124 | end;
125 |
126 | procedure CurUninstallStepChanged(CurUninstallStep: TUninstallStep);
127 | begin
128 | if CurUninstallStep = usPostUninstall
129 | then EnvRemovePath(ExpandConstant('{app}') +'\bin');
130 | end;
131 |
--------------------------------------------------------------------------------
/assets/logo.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/russellbanks/Komac/5e13baafd65c5985d3603d6201c8a872d38d0610/assets/logo.ico
--------------------------------------------------------------------------------
/assets/logo.svg:
--------------------------------------------------------------------------------
[SVG markup stripped during extraction]
--------------------------------------------------------------------------------
/build.rs:
--------------------------------------------------------------------------------
1 | extern crate windows_exe_info;
2 |
3 | use cynic_codegen::registration::SchemaRegistration;
4 |
5 | fn main() {
6 | cynic_codegen::register_schema("github")
7 | .from_sdl_file("assets/github.graphql")
8 | .and_then(SchemaRegistration::as_default)
9 | .unwrap();
10 | windows_exe_info::icon::icon_ico("assets/logo.ico");
11 | windows_exe_info::versioninfo::link_cargo_env();
12 | }
13 |
--------------------------------------------------------------------------------
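The `register_schema("github")` call in build.rs pre-registers the GraphQL SDL for cynic's derive macros. As a hedged sketch of how that registration is typically consumed in cynic 3 (the module below is an assumption about what `src/github/graphql/github_schema.rs` contains; the string must match the name passed to `register_schema`):

```rust
// Sketch: pull in the schema that build.rs registered under the name "github".
// Fragment files such as create_commit.rs then import this module as `schema`
// and derive against it with `#[derive(cynic::QueryFragment)]`.
#[cynic::schema("github")]
mod github_schema {}
```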
/rustfmt.toml:
--------------------------------------------------------------------------------
1 | newline_style = "Native"
2 | imports_granularity = "Crate"
3 | group_imports = "StdExternalCrate"
4 | unstable_features = true
5 |
--------------------------------------------------------------------------------
/src/commands/analyse.rs:
--------------------------------------------------------------------------------
1 | use std::fs::File;
2 |
3 | use anstream::stdout;
4 | use camino::{Utf8Path, Utf8PathBuf};
5 | use clap::Parser;
6 | use color_eyre::{Result, eyre::ensure};
7 | use memmap2::Mmap;
8 | use sha2::{Digest, Sha256};
9 | use winget_types::Sha256String;
10 |
11 | use crate::{file_analyser::FileAnalyser, manifests::print_manifest};
12 |
13 | /// Analyses a file and outputs information about it
14 | #[derive(Parser)]
15 | pub struct Analyse {
16 | #[arg(value_parser = is_valid_file, value_hint = clap::ValueHint::FilePath)]
17 | file_path: Utf8PathBuf,
18 |
19 | #[cfg(not(debug_assertions))]
20 | /// Hash the file and include it in the `InstallerSha256` field
21 | #[arg(long = "hash", alias = "sha256", overrides_with = "hash")]
22 | _no_hash: bool,
23 |
24 | #[cfg(not(debug_assertions))]
25 | /// Skip hashing the file
26 | #[arg(long = "no-hash", alias = "no-sha256", action = clap::ArgAction::SetFalse)]
27 | hash: bool,
28 |
29 | #[cfg(debug_assertions)]
30 | /// Hash the file and include it in the `InstallerSha256` field
31 | #[arg(long, alias = "sha256", overrides_with = "_no_hash")]
32 | hash: bool,
33 |
34 | #[cfg(debug_assertions)]
35 | /// Skip hashing the file
36 | #[arg(long = "no-hash", alias = "no-sha256")]
37 | _no_hash: bool,
38 | }
39 |
40 | impl Analyse {
41 | pub fn run(self) -> Result<()> {
42 | let file = File::open(&self.file_path)?;
43 | let mmap = unsafe { Mmap::map(&file) }?;
44 | let file_name = self
45 | .file_path
46 | .file_name()
47 | .unwrap_or_else(|| self.file_path.as_str());
48 | let mut analyser = FileAnalyser::new(&mmap, file_name)?;
49 | if self.hash {
50 | let sha_256 = Sha256String::from_digest(&Sha256::digest(&mmap));
51 | for installer in &mut analyser.installers {
52 | installer.sha_256 = sha_256.clone();
53 | }
54 | }
55 | let yaml = match analyser.installers.as_slice() {
56 | [installer] => serde_yaml::to_string(installer)?,
57 | installers => serde_yaml::to_string(installers)?,
58 | };
59 | let mut lock = stdout().lock();
60 | print_manifest(&mut lock, &yaml);
61 | Ok(())
62 | }
63 | }
64 |
65 | fn is_valid_file(path: &str) -> Result<Utf8PathBuf> {
66 | let path = Utf8Path::new(path);
67 | ensure!(path.exists(), "{path} does not exist");
68 | ensure!(path.is_file(), "{path} is not a file");
69 | Ok(path.to_path_buf())
70 | }
71 |
--------------------------------------------------------------------------------
/src/commands/cleanup.rs:
--------------------------------------------------------------------------------
1 | use std::fmt::{Display, Formatter};
2 |
3 | use anstream::println;
4 | use bitflags::bitflags;
5 | use clap::Parser;
6 | use color_eyre::Result;
7 | use futures_util::TryFutureExt;
8 | use indicatif::ProgressBar;
9 | use inquire::MultiSelect;
10 | use owo_colors::OwoColorize;
11 |
12 | use crate::{
13 | commands::utils::SPINNER_TICK_RATE, credential::handle_token, github::github_client::GitHub,
14 | prompts::handle_inquire_error,
15 | };
16 |
17 | /// Finds branches from the fork of winget-pkgs that have had a merged or closed pull request to
18 | /// microsoft/winget-pkgs from them, prompting for which ones to delete
19 | #[derive(Parser)]
20 | #[clap(visible_alias = "clean")]
21 | pub struct Cleanup {
22 | /// Only delete merged branches
23 | #[arg(long)]
24 | only_merged: bool,
25 |
26 | /// Only delete closed branches
27 | #[arg(long)]
28 | only_closed: bool,
29 |
30 | /// Automatically delete all relevant branches
31 | #[arg(short, long, env = "CI")]
32 | all: bool,
33 |
34 | /// GitHub personal access token with the `public_repo` scope
35 | #[arg(short, long, env = "GITHUB_TOKEN")]
36 | token: Option<String>,
37 | }
38 |
39 | impl Cleanup {
40 | pub async fn run(self) -> Result<()> {
41 | let token = handle_token(self.token.as_deref()).await?;
42 | let github = GitHub::new(&token)?;
43 |
44 | let merge_state = MergeState::from((self.only_merged, self.only_closed));
45 |
46 | let pb = ProgressBar::new_spinner().with_message(format!(
47 | "Retrieving branches that have a {merge_state} pull request associated with them"
48 | ));
49 | pb.enable_steady_tick(SPINNER_TICK_RATE);
50 |
51 | // Get all fork branches with an associated pull request to microsoft/winget-pkgs
52 | let (pr_branch_map, repository_id) = github
53 | .get_username()
54 | .and_then(|username| github.get_branches(username, merge_state))
55 | .await?;
56 |
57 | pb.finish_and_clear();
58 |
59 | // Exit if there are no branches to delete
60 | if pr_branch_map.is_empty() {
61 | println!(
62 | "There are no {} pull requests with branches that can be deleted",
63 | merge_state.blue()
64 | );
65 | return Ok(());
66 | }
67 |
68 | let chosen_pr_branches = if self.all {
69 | pr_branch_map.keys().collect()
70 | } else {
71 | // Show a multi-selection prompt for which branches to delete, with all options pre-selected
72 | MultiSelect::new(
73 | "Please select branches to delete",
74 | pr_branch_map.keys().collect(),
75 | )
76 | .with_all_selected_by_default()
77 | .with_page_size(10)
78 | .prompt()
79 | .map_err(handle_inquire_error)?
80 | };
81 |
82 | if chosen_pr_branches.is_empty() {
83 | println!("No branches have been deleted");
84 | return Ok(());
85 | }
86 |
87 | // Get branch names from chosen pull requests
88 | let branches_to_delete = chosen_pr_branches
89 | .into_iter()
90 | .filter_map(|pull_request| pr_branch_map.get(pull_request).map(String::as_str))
91 | .collect::<Vec<_>>();
92 |
93 | let branch_label = match branches_to_delete.len() {
94 | 1 => "branch",
95 | _ => "branches",
96 | };
97 |
98 | pb.reset();
99 | pb.set_message(format!(
100 | "Deleting {} selected {branch_label}",
101 | branches_to_delete.len(),
102 | ));
103 | pb.enable_steady_tick(SPINNER_TICK_RATE);
104 |
105 | github
106 | .delete_branches(&repository_id, &branches_to_delete)
107 | .await?;
108 |
109 | pb.finish_and_clear();
110 |
111 | println!(
112 | "{} deleted {} selected {branch_label}",
113 | "Successfully".green(),
114 | branches_to_delete.len().blue(),
115 | );
116 |
117 | Ok(())
118 | }
119 | }
120 |
121 | // Using bitflags instead of an enum to allow combining multiple states (MERGED, CLOSED)
122 | bitflags! {
123 | #[derive(Copy, Clone, PartialEq, Eq)]
124 | pub struct MergeState: u8 {
125 | const MERGED = 1 << 0;
126 | const CLOSED = 1 << 1;
127 | }
128 | }
129 |
130 | impl Display for MergeState {
131 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
132 | write!(
133 | f,
134 | "{}",
135 | match *self {
136 | Self::MERGED => "merged",
137 | Self::CLOSED => "closed",
138 | _ => "merged or closed",
139 | }
140 | )
141 | }
142 | }
143 |
144 | impl From<(bool, bool)> for MergeState {
145 | fn from((only_merged, only_closed): (bool, bool)) -> Self {
146 | match (only_merged, only_closed) {
147 | (true, false) => Self::MERGED,
148 | (false, true) => Self::CLOSED,
149 | _ => Self::all(),
150 | }
151 | }
152 | }
153 |
--------------------------------------------------------------------------------
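The comment above `MergeState` explains why it is a bitflags type rather than an enum: the two CLI switches can combine into a single "merged or closed" filter. A small standalone sketch of that behaviour (same flag layout as the file above, shown purely for illustration):

```rust
use bitflags::bitflags;

bitflags! {
    #[derive(Copy, Clone, PartialEq, Eq)]
    struct MergeState: u8 {
        const MERGED = 1 << 0;
        const CLOSED = 1 << 1;
    }
}

fn main() {
    // Neither --only-merged nor --only-closed: both bits are set, so branches
    // with either a merged or a closed pull request are candidates for deletion.
    let both = MergeState::all();
    assert!(both.contains(MergeState::MERGED) && both.contains(MergeState::CLOSED));

    // --only-merged on its own behaves like a plain enum variant.
    let merged_only = MergeState::MERGED;
    assert!(!merged_only.contains(MergeState::CLOSED));
}
```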
/src/commands/complete.rs:
--------------------------------------------------------------------------------
1 | use anstream::stdout;
2 | use clap::{CommandFactory, Parser};
3 | use clap_complete::{Shell, generate};
4 | use color_eyre::{Result, Section, eyre::eyre};
5 |
6 | use crate::Cli;
7 |
8 | /// Outputs an autocompletion script for the given shell. Example usage:
9 | ///
10 | /// Bash: echo "source <(komac complete bash)" >> ~/.bashrc
11 | /// Elvish: echo "eval (komac complete elvish | slurp)" >> ~/.elvish/rc.elv
12 | /// Fish: echo "source (komac complete fish | psub)" >> ~/.config/fish/config.fish
13 | /// Powershell: echo "komac complete powershell | Out-String | Invoke-Expression" >> $PROFILE
14 | /// Zsh: echo "source <(komac complete zsh)" >> ~/.zshrc
15 | #[derive(Parser)]
16 | #[clap(visible_alias = "autocomplete", verbatim_doc_comment)]
17 | pub struct Complete {
18 | /// Specifies the shell for which to generate the completion script.
19 | ///
20 | /// If not provided, the shell will be inferred based on the current environment.
21 | #[arg()]
22 | shell: Option<Shell>,
23 | }
24 |
25 | impl Complete {
26 | pub fn run(self) -> Result<()> {
27 | let Some(shell) = self.shell.or_else(Shell::from_env) else {
28 | return Err(
29 | eyre!("Unable to determine the current shell from the environment")
30 | .suggestion("Specify shell explicitly"),
31 | );
32 | };
33 |
34 | let mut command = Cli::command();
35 | let command_name = command.get_name().to_owned();
36 | generate(shell, &mut command, command_name, &mut stdout());
37 |
38 | Ok(())
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/src/commands/list_versions.rs:
--------------------------------------------------------------------------------
1 | use std::{io, io::Write};
2 |
3 | use clap::{Args, Parser};
4 | use color_eyre::Result;
5 | use winget_types::PackageIdentifier;
6 |
7 | use crate::{credential::handle_token, github::github_client::GitHub};
8 |
9 | /// Lists all versions for a given package
10 | #[derive(Parser)]
11 | #[clap(visible_alias = "list")]
12 | pub struct ListVersions {
13 | #[arg()]
14 | package_identifier: PackageIdentifier,
15 |
16 | #[command(flatten)]
17 | output_type: OutputType,
18 |
19 | /// GitHub personal access token with the `public_repo` scope
20 | #[arg(short, long, env = "GITHUB_TOKEN")]
21 | token: Option<String>,
22 | }
23 |
24 | #[derive(Args)]
25 | #[group(multiple = false)]
26 | struct OutputType {
27 | /// Output the versions as JSON
28 | #[arg(long)]
29 | json: bool,
30 |
31 | /// Output the versions as prettified JSON
32 | #[arg(long)]
33 | pretty_json: bool,
34 |
35 | /// Output the versions as YAML
36 | #[arg(long)]
37 | yaml: bool,
38 | }
39 |
40 | impl ListVersions {
41 | pub async fn run(self) -> Result<()> {
42 | let token = handle_token(self.token.as_deref()).await?;
43 | let github = GitHub::new(&token)?;
44 |
45 | let versions = github.get_versions(&self.package_identifier).await?;
46 |
47 | let mut stdout_lock = io::stdout().lock();
48 | match (
49 | self.output_type.json,
50 | self.output_type.pretty_json,
51 | self.output_type.yaml,
52 | ) {
53 | (true, _, _) => serde_json::to_writer(stdout_lock, &versions)?,
54 | (_, true, _) => serde_json::to_writer_pretty(stdout_lock, &versions)?,
55 | (_, _, true) => serde_yaml::to_writer(stdout_lock, &versions)?,
56 | _ => {
57 | for version in versions {
58 | writeln!(stdout_lock, "{version}")?;
59 | }
60 | }
61 | }
62 |
63 | Ok(())
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/src/commands/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod analyse;
2 | pub mod cleanup;
3 | pub mod complete;
4 | pub mod list_versions;
5 | pub mod new_version;
6 | pub mod remove_dead_versions;
7 | pub mod remove_version;
8 | pub mod show_version;
9 | pub mod submit;
10 | pub mod sync_fork;
11 | pub mod token;
12 | pub mod update_version;
13 | pub mod utils;
14 |
--------------------------------------------------------------------------------
/src/commands/remove_version.rs:
--------------------------------------------------------------------------------
1 | use std::num::NonZeroU32;
2 |
3 | use anstream::println;
4 | use clap::Parser;
5 | use color_eyre::eyre::{Result, bail};
6 | use futures_util::TryFutureExt;
7 | use inquire::{
8 | Text,
9 | validator::{MaxLengthValidator, MinLengthValidator},
10 | };
11 | use owo_colors::OwoColorize;
12 | use tokio::try_join;
13 | use winget_types::{PackageIdentifier, PackageVersion};
14 |
15 | use crate::{
16 | credential::handle_token,
17 | github::github_client::{GitHub, WINGET_PKGS_FULL_NAME},
18 | prompts::{handle_inquire_error, text::confirm_prompt},
19 | };
20 |
21 | /// Remove a version from winget-pkgs
22 | ///
23 | /// To remove a package, all versions of that package must be removed
24 | #[derive(Parser)]
25 | pub struct RemoveVersion {
26 | /// The package's unique identifier
27 | #[arg()]
28 | package_identifier: PackageIdentifier,
29 |
30 | /// The package's version
31 | #[arg(short = 'v', long = "version")]
32 | package_version: PackageVersion,
33 |
34 | #[arg(short = 'r', long = "reason")]
35 | deletion_reason: Option<String>,
36 |
37 | /// List of issues that removing this version would resolve
38 | #[arg(long)]
39 | resolves: Option<Vec<NonZeroU32>>,
40 |
41 | #[arg(short, long)]
42 | submit: bool,
43 |
44 | /// Don't show the package removal warning
45 | #[arg(long)]
46 | no_warning: bool,
47 |
48 | /// Open pull request link automatically
49 | #[arg(long, env = "OPEN_PR")]
50 | open_pr: bool,
51 |
52 | /// GitHub personal access token with the `public_repo` scope
53 | #[arg(short, long, env = "GITHUB_TOKEN")]
54 | token: Option<String>,
55 | }
56 |
57 | impl RemoveVersion {
58 | const MIN_REASON_LENGTH: usize = 4;
59 | const MAX_REASON_LENGTH: usize = 1000;
60 |
61 | pub async fn run(self) -> Result<()> {
62 | let token = handle_token(self.token.as_deref()).await?;
63 | if !self.no_warning {
64 | println!(
65 | "{}",
66 | "Packages should only be removed when necessary".yellow()
67 | );
68 | }
69 | let github = GitHub::new(&token)?;
70 |
71 | let (fork, winget_pkgs, versions) = try_join!(
72 | github
73 | .get_username()
74 | .and_then(|current_user| github.get_winget_pkgs().owner(current_user).send()),
75 | github.get_winget_pkgs().send(),
76 | github.get_versions(&self.package_identifier)
77 | )?;
78 |
79 | if !versions.contains(&self.package_version) {
80 | bail!(
81 | "{} version {} does not exist in {WINGET_PKGS_FULL_NAME}",
82 | self.package_identifier,
83 | self.package_version,
84 | );
85 | }
86 |
87 | let latest_version = versions.last().unwrap_or_else(|| unreachable!());
88 | println!(
89 | "Latest version of {}: {latest_version}",
90 | &self.package_identifier
91 | );
92 | let deletion_reason = match self.deletion_reason {
93 | Some(reason) => reason,
94 | None => Text::new(&format!(
95 | "Give a reason for removing {} version {}",
96 | &self.package_identifier, &self.package_version
97 | ))
98 | .with_validator(MinLengthValidator::new(Self::MIN_REASON_LENGTH))
99 | .with_validator(MaxLengthValidator::new(Self::MAX_REASON_LENGTH))
100 | .prompt()
101 | .map_err(handle_inquire_error)?,
102 | };
103 | let should_remove_manifest = self.submit
104 | || confirm_prompt(&format!(
105 | "Would you like to make a pull request to remove {} {}?",
106 | self.package_identifier, self.package_version
107 | ))?;
108 |
109 | if !should_remove_manifest {
110 | return Ok(());
111 | }
112 |
113 | let pull_request_url = github
114 | .remove_version()
115 | .identifier(&self.package_identifier)
116 | .version(&self.package_version)
117 | .reason(deletion_reason)
118 | .fork(&fork)
119 | .winget_pkgs(&winget_pkgs)
120 | .maybe_issue_resolves(self.resolves)
121 | .send()
122 | .await?;
123 |
124 | if self.open_pr {
125 | open::that(pull_request_url.as_str())?;
126 | }
127 |
128 | Ok(())
129 | }
130 | }
131 |
--------------------------------------------------------------------------------
/src/commands/show_version.rs:
--------------------------------------------------------------------------------
1 | use clap::Parser;
2 | use color_eyre::Result;
3 | use winget_types::{PackageIdentifier, PackageVersion};
4 |
5 | use crate::{credential::handle_token, github::github_client::GitHub, manifests::print_changes};
6 |
7 | /// Output the manifests for a given package and version
8 | #[expect(clippy::struct_excessive_bools)]
9 | #[derive(Parser)]
10 | pub struct ShowVersion {
11 | /// The package's unique identifier
12 | #[arg()]
13 | package_identifier: PackageIdentifier,
14 |
15 | /// The package's version
16 | #[arg(short = 'v', long = "version")]
17 | package_version: Option<PackageVersion>,
18 |
19 | /// Switch to display the installer manifest
20 | #[arg(short, long)]
21 | installer_manifest: bool,
22 |
23 | /// Switch to display the default locale manifest
24 | #[arg(short, long = "defaultlocale-manifest")]
25 | default_locale_manifest: bool,
26 |
27 | /// Switch to display all locale manifests
28 | #[arg(short, long)]
29 | locale_manifests: bool,
30 |
31 | /// Switch to display the version manifest
32 | #[arg(long)]
33 | version_manifest: bool,
34 |
35 | /// GitHub personal access token with the `public_repo` scope
36 | #[arg(short, long, env = "GITHUB_TOKEN")]
37 | token: Option<String>,
38 | }
39 |
40 | impl ShowVersion {
41 | pub async fn run(self) -> Result<()> {
42 | let token = handle_token(self.token.as_deref()).await?;
43 | let github = GitHub::new(&token)?;
44 |
45 | // Get a list of all versions for the given package
46 | let mut versions = github.get_versions(&self.package_identifier).await?;
47 |
48 | // Get the manifests for the latest or specified version
49 | let manifests = github
50 | .get_manifests(
51 | &self.package_identifier,
52 | &self
53 | .package_version
54 | .unwrap_or_else(|| versions.pop_last().unwrap_or_else(|| unreachable!())),
55 | )
56 | .await?;
57 |
58 | let all = matches!(
59 | (
60 | self.installer_manifest,
61 | self.default_locale_manifest,
62 | self.locale_manifests,
63 | self.version_manifest
64 | ),
65 | (false, false, false, false)
66 | );
67 |
68 | let mut contents = Vec::new();
69 | if all || self.installer_manifest {
70 | contents.push(serde_yaml::to_string(&manifests.installer)?);
71 | }
72 | if all || self.default_locale_manifest {
73 | contents.push(serde_yaml::to_string(&manifests.default_locale)?);
74 | }
75 | if all || self.locale_manifests {
76 | contents.extend(
77 | manifests
78 | .locales
79 | .into_iter()
80 | .flat_map(|locale_manifest| serde_yaml::to_string(&locale_manifest)),
81 | );
82 | }
83 | if all || self.version_manifest {
84 | contents.push(serde_yaml::to_string(&manifests.version)?);
85 | }
86 |
87 | print_changes(contents.iter().map(String::as_str));
88 |
89 | Ok(())
90 | }
91 | }
92 |
--------------------------------------------------------------------------------
/src/commands/sync_fork.rs:
--------------------------------------------------------------------------------
1 | use anstream::println;
2 | use clap::Parser;
3 | use color_eyre::Result;
4 | use futures_util::TryFutureExt;
5 | use indicatif::ProgressBar;
6 | use owo_colors::OwoColorize;
7 | use tokio::try_join;
8 |
9 | use crate::{
10 | commands::utils::SPINNER_TICK_RATE, credential::handle_token, github::github_client::GitHub,
11 | terminal::Hyperlinkable,
12 | };
13 |
14 | /// Merges changes from microsoft/winget-pkgs into the fork repository
15 | #[derive(Parser)]
16 | #[clap(visible_aliases = ["sync", "merge-upstream"])]
17 | pub struct SyncFork {
18 | /// Merges changes even if the fork's default branch is not fast-forward. This is not
19 | /// recommended as you should instead have a clean default branch that has not diverged from the
20 | /// upstream default branch
21 | #[arg(short, long)]
22 | force: bool,
23 |
24 | /// GitHub personal access token with the `public_repo` scope
25 | #[arg(short, long, env = "GITHUB_TOKEN")]
26 | token: Option<String>,
27 | }
28 |
29 | impl SyncFork {
30 | pub async fn run(self) -> Result<()> {
31 | let token = handle_token(self.token.as_deref()).await?;
32 | let github = GitHub::new(&token)?;
33 |
34 | // Fetch repository data from both upstream and fork repositories asynchronously
35 | let (winget_pkgs, fork) = try_join!(
36 | github.get_winget_pkgs().send(),
37 | github
38 | .get_username()
39 | .and_then(|username| github.get_winget_pkgs().owner(username).send()),
40 | )?;
41 |
42 | // Check whether the fork is already up-to-date with upstream by their latest commit OID's
43 | if winget_pkgs.default_branch_oid == fork.default_branch_oid {
44 | println!(
45 | "{} is already {} with {}",
46 | fork.full_name.hyperlink(&fork.url).blue(),
47 | "up-to-date".green(),
48 | winget_pkgs.full_name.hyperlink(&winget_pkgs.url).blue()
49 | );
50 | return Ok(());
51 | }
52 |
53 | // Calculate how many commits upstream is ahead of fork
54 | let new_commits_count = winget_pkgs.commit_count - fork.commit_count;
55 | let commit_label = match new_commits_count {
56 | 1 => "commit",
57 | _ => "commits",
58 | };
59 |
60 | // Show an indeterminate progress bar while upstream changes are being merged
61 | let pb = ProgressBar::new_spinner().with_message(format!(
62 | "Merging {new_commits_count} upstream {commit_label} from {} into {}",
63 | winget_pkgs.full_name.blue(),
64 | fork.full_name.blue(),
65 | ));
66 | pb.enable_steady_tick(SPINNER_TICK_RATE);
67 |
68 | github
69 | .merge_upstream(
70 | &fork.default_branch_ref_id,
71 | winget_pkgs.default_branch_oid,
72 | self.force,
73 | )
74 | .await?;
75 |
76 | pb.finish_and_clear();
77 |
78 | println!(
79 | "{} merged {new_commits_count} upstream {commit_label} from {} into {}",
80 | "Successfully".green(),
81 | winget_pkgs.full_name.hyperlink(winget_pkgs.url).blue(),
82 | fork.full_name.hyperlink(fork.url).blue()
83 | );
84 |
85 | Ok(())
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/src/commands/token/commands.rs:
--------------------------------------------------------------------------------
1 | use clap::{Args, Subcommand};
2 |
3 | use crate::commands::token::{remove::RemoveToken, update::UpdateToken};
4 |
5 | #[derive(Args)]
6 | pub struct TokenArgs {
7 | #[command(subcommand)]
8 | pub command: TokenCommands,
9 | }
10 | #[derive(Subcommand)]
11 | pub enum TokenCommands {
12 | Update(UpdateToken),
13 | Remove(RemoveToken),
14 | }
15 |
--------------------------------------------------------------------------------
/src/commands/token/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod commands;
2 | pub mod remove;
3 | pub mod update;
4 |
--------------------------------------------------------------------------------
/src/commands/token/remove.rs:
--------------------------------------------------------------------------------
1 | use anstream::println;
2 | use clap::Parser;
3 | use color_eyre::eyre::Result;
4 | use owo_colors::OwoColorize;
5 |
6 | use crate::{credential::get_komac_credential, prompts::text::confirm_prompt};
7 |
8 | /// Remove the stored token
9 | #[derive(Parser)]
10 | #[clap(visible_alias = "delete")]
11 | pub struct RemoveToken {
12 | /// Skip the confirmation prompt to delete the token
13 | #[arg(short = 'y', long = "yes")]
14 | skip_prompt: bool,
15 | }
16 |
17 | impl RemoveToken {
18 | pub fn run(self) -> Result<()> {
19 | let credential = get_komac_credential()?;
20 |
21 | if matches!(
22 | credential.get_password().err(),
23 | Some(keyring::Error::NoEntry)
24 | ) {
25 | println!("No token stored is currently stored in the platform's secure storage");
26 | }
27 |
28 | let confirm = if self.skip_prompt {
29 | true
30 | } else {
31 | confirm_prompt("Would you like to remove the currently stored token?")?
32 | };
33 |
34 | if confirm {
35 | credential.delete_credential()?;
36 | println!(
37 | "{} deleted the stored token from the platform's secure storage",
38 | "Successfully".green()
39 | );
40 | } else {
41 | println!("{}", "No token was deleted".cyan());
42 | }
43 |
44 | Ok(())
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/src/commands/token/update.rs:
--------------------------------------------------------------------------------
1 | use anstream::println;
2 | use clap::Parser;
3 | use color_eyre::eyre::Result;
4 | use owo_colors::OwoColorize;
5 | use reqwest::Client;
6 |
7 | use crate::credential::{get_default_headers, get_komac_credential, token_prompt, validate_token};
8 |
9 | /// Update the stored token
10 | #[derive(Parser)]
11 | #[clap(visible_aliases = ["new", "add"])]
12 | pub struct UpdateToken {
13 | /// The new token to store
14 | #[arg(short, long)]
15 | token: Option<String>,
16 | }
17 |
18 | impl UpdateToken {
19 | pub async fn run(self) -> Result<()> {
20 | let credential = get_komac_credential()?;
21 |
22 | let client = Client::builder()
23 | .default_headers(get_default_headers(None))
24 | .build()?;
25 |
26 | let token = match self.token {
27 | Some(token) => validate_token(&client, &token).await.map(|()| token)?,
28 | None => token_prompt(client, Some("Please enter the new token to set"))?,
29 | };
30 |
31 | if credential.set_password(&token).is_ok() {
32 | println!(
33 | "{} stored token in platform's secure storage",
34 | "Successfully".green()
35 | );
36 | }
37 |
38 | Ok(())
39 | }
40 | }
41 |
--------------------------------------------------------------------------------
/src/commands/utils/mod.rs:
--------------------------------------------------------------------------------
1 | use std::{env, time::Duration};
2 |
3 | use anstream::println;
4 | use camino::Utf8Path;
5 | use chrono::Local;
6 | use color_eyre::Result;
7 | use derive_more::Display;
8 | use futures_util::{StreamExt, TryStreamExt, stream};
9 | use inquire::{Select, error::InquireResult};
10 | use owo_colors::OwoColorize;
11 | use strum::{EnumIter, IntoEnumIterator};
12 | use tokio::{fs, fs::File, io::AsyncWriteExt};
13 | use winget_types::{PackageIdentifier, PackageVersion};
14 |
15 | use crate::{
16 | editor::Editor,
17 | github::graphql::get_existing_pull_request::PullRequest,
18 | manifests::print_changes,
19 | prompts::{handle_inquire_error, text::confirm_prompt},
20 | };
21 |
22 | pub const SPINNER_TICK_RATE: Duration = Duration::from_millis(50);
23 |
24 | pub const SPINNER_SLOW_TICK_RATE: Duration = Duration::from_millis(100);
25 |
26 | pub fn prompt_existing_pull_request(
27 | identifier: &PackageIdentifier,
28 | version: &PackageVersion,
29 | pull_request: &PullRequest,
30 | ) -> InquireResult<bool> {
31 | let created_at = pull_request.created_at.with_timezone(&Local);
32 | println!(
33 | "There is already {} pull request for {identifier} {version} that was created on {} at {}",
34 | pull_request.state,
35 | created_at.date_naive(),
36 | created_at.time()
37 | );
38 | println!("{}", pull_request.url.blue());
39 | if env::var("CI").is_ok_and(|ci| ci.parse() == Ok(true)) {
40 | // Exit instead of proceeding in CI environments
41 | Ok(false)
42 | } else {
43 | confirm_prompt("Would you like to proceed?")
44 | }
45 | }
46 |
47 | pub fn prompt_submit_option(
48 | changes: &mut [(String, String)],
49 | submit: bool,
50 | identifier: &PackageIdentifier,
51 | version: &PackageVersion,
52 | dry_run: bool,
53 | ) -> Result<SubmitOption> {
54 | let mut submit_option;
55 | loop {
56 | print_changes(changes.iter().map(|(_, content)| content.as_str()));
57 |
58 | submit_option = if dry_run {
59 | SubmitOption::Exit
60 | } else if submit {
61 | SubmitOption::Submit
62 | } else {
63 | Select::new(
64 | &format!("What would you like to do with {identifier} {version}?"),
65 | SubmitOption::iter().collect(),
66 | )
67 | .prompt()
68 | .map_err(handle_inquire_error)?
69 | };
70 |
71 | if submit_option == SubmitOption::Edit {
72 | Editor::new(changes).run()?;
73 | } else {
74 | break;
75 | }
76 | }
77 | Ok(submit_option)
78 | }
79 |
80 | #[derive(Display, EnumIter, Eq, PartialEq)]
81 | pub enum SubmitOption {
82 | Submit,
83 | Edit,
84 | Exit,
85 | }
86 |
87 | pub async fn write_changes_to_dir(changes: &[(String, String)], output: &Utf8Path) -> Result<()> {
88 | fs::create_dir_all(output).await?;
89 | stream::iter(changes.iter())
90 | .map(|(path, content)| async move {
91 | if let Some(file_name) = Utf8Path::new(path).file_name() {
92 | let mut file = File::create(output.join(file_name)).await?;
93 | file.write_all(content.as_bytes()).await?;
94 | }
95 | Ok::<(), color_eyre::eyre::Error>(())
96 | })
97 | .buffer_unordered(2)
98 | .try_collect()
99 | .await
100 | }
101 |
--------------------------------------------------------------------------------
/src/credential.rs:
--------------------------------------------------------------------------------
1 | use std::borrow::Cow;
2 |
3 | use color_eyre::eyre::{Result, bail};
4 | use inquire::{Password, error::InquireResult, validator::Validation};
5 | use keyring::Entry;
6 | use reqwest::{
7 | Client, StatusCode,
8 | header::{AUTHORIZATION, DNT, HeaderMap, HeaderValue, USER_AGENT},
9 | };
10 | use tokio::runtime::Handle;
11 |
12 | use crate::prompts::handle_inquire_error;
13 |
14 | const SERVICE: &str = "komac";
15 | const USERNAME: &str = "github-access-token";
16 | const GITHUB_API_ENDPOINT: &str = "https://api.github.com/octocat";
17 |
18 | pub fn get_komac_credential() -> keyring::Result<Entry> {
19 | Entry::new(SERVICE, USERNAME)
20 | }
21 |
22 | pub async fn handle_token(token: Option<&str>) -> Result<Cow<'_, str>> {
23 | let client = Client::builder()
24 | .default_headers(get_default_headers(None))
25 | .build()?;
26 |
27 | if let Some(token) = token {
28 | return validate_token(&client, token)
29 | .await
30 | .map(|()| Cow::Borrowed(token));
31 | }
32 |
33 | let credential_entry = get_komac_credential()?;
34 |
35 | if let Ok(stored_token) = credential_entry.get_password() {
36 | validate_token(&client, &stored_token)
37 | .await
38 | .map(|()| Cow::Owned(stored_token))
39 | } else {
40 | let token = token_prompt(client, None)?;
41 | if credential_entry.set_password(&token).is_ok() {
42 | println!("Successfully stored token in platform's secure storage");
43 | }
44 | Ok(Cow::Owned(token))
45 | }
46 | }
47 |
48 | pub fn token_prompt(client: Client, prompt: Option<&str>) -> InquireResult<String> {
49 | tokio::task::block_in_place(|| {
50 | let rt = Handle::current();
51 | let validator =
52 | move |input: &str| match rt.block_on(async { validate_token(&client, input).await }) {
53 | Ok(()) => Ok(Validation::Valid),
54 | Err(err) => Ok(Validation::Invalid(err.into())),
55 | };
56 | Password::new(prompt.unwrap_or("Enter a GitHub token"))
57 | .with_validator(validator)
58 | .without_confirmation()
59 | .prompt()
60 | .map_err(handle_inquire_error)
61 | })
62 | }
63 |
64 | pub async fn validate_token(client: &Client, token: &str) -> Result<()> {
65 | match client
66 | .get(GITHUB_API_ENDPOINT)
67 | .bearer_auth(token)
68 | .send()
69 | .await
70 | {
71 | Ok(response) => match response.status() {
72 | StatusCode::UNAUTHORIZED => bail!("GitHub token is invalid"),
73 | _ => Ok(()),
74 | },
75 | Err(error) => {
76 | if error.is_connect() {
77 | bail!("Failed to connect to GitHub. Please check your internet connection.");
78 | }
79 | Err(error.into())
80 | }
81 | }
82 | }
83 |
84 | const MICROSOFT_DELIVERY_OPTIMIZATION: HeaderValue =
85 | HeaderValue::from_static("Microsoft-Delivery-Optimization/10.1");
86 | const SEC_GPC: &str = "Sec-GPC";
87 |
88 | pub fn get_default_headers(github_token: Option<&str>) -> HeaderMap {
89 | let mut default_headers = HeaderMap::new();
90 | default_headers.insert(USER_AGENT, MICROSOFT_DELIVERY_OPTIMIZATION);
91 | default_headers.insert(DNT, HeaderValue::from(1));
92 | default_headers.insert(SEC_GPC, HeaderValue::from(1));
93 | if let Some(token) = github_token {
94 | if let Ok(bearer_auth) = HeaderValue::from_str(&format!("Bearer {token}")) {
95 | default_headers.insert(AUTHORIZATION, bearer_auth);
96 | }
97 | }
98 | default_headers
99 | }
100 |
--------------------------------------------------------------------------------
/src/download/file.rs:
--------------------------------------------------------------------------------
1 | use std::fs::File;
2 |
3 | use chrono::NaiveDate;
4 | use memmap2::Mmap;
5 | use winget_types::Sha256String;
6 |
7 | use crate::manifests::Url;
8 |
9 | pub struct DownloadedFile {
10 | // As the downloaded file is a temporary file, it's stored here so that the reference stays
11 | // alive and the file does not get deleted. This is necessary because the memory map needs the
12 | // file to remain present.
13 | #[expect(dead_code)]
14 | pub file: File,
15 | pub url: Url,
16 | pub mmap: Mmap,
17 | pub sha_256: Sha256String,
18 | pub file_name: String,
19 | pub last_modified: Option<NaiveDate>,
20 | }
21 |
--------------------------------------------------------------------------------
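The comment on the `file` field above captures an easy-to-miss invariant: the memory map is only usable while the temporary file handle is kept around. A minimal sketch of that pattern, assuming the `tempfile` and `memmap2` crates already listed in Cargo.toml (the helper name is illustrative, not part of the crate):

```rust
use std::io::{Seek, SeekFrom, Write};

use memmap2::Mmap;

// Write bytes to an anonymous temporary file and memory-map it. The File is
// returned alongside the Mmap so the caller keeps the handle alive for as long
// as the mapping is used, mirroring the reasoning in the field comment above.
fn map_temp_file(bytes: &[u8]) -> std::io::Result<(std::fs::File, Mmap)> {
    let mut file = tempfile::tempfile()?;
    file.write_all(bytes)?;
    file.seek(SeekFrom::Start(0))?;
    let mmap = unsafe { Mmap::map(&file) }?; // valid only while `file` stays open
    Ok((file, mmap))
}
```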
/src/download_file.rs:
--------------------------------------------------------------------------------
1 | use std::{collections::HashMap, mem};
2 |
3 | use color_eyre::eyre::Result;
4 | use futures_util::{StreamExt, TryStreamExt, stream};
5 | use winget_types::{installer::Architecture, url::DecodedUrl};
6 |
7 | use crate::{download::DownloadedFile, file_analyser::FileAnalyser};
8 |
9 | pub async fn process_files(
10 | files: &mut [DownloadedFile],
11 | ) -> Result<HashMap<DecodedUrl, FileAnalyser<'_>>> {
12 | stream::iter(files.iter_mut().map(
13 | |DownloadedFile {
14 | url,
15 | mmap,
16 | sha_256,
17 | file_name,
18 | last_modified,
19 | ..
20 | }| async move {
21 | let mut file_analyser = FileAnalyser::new(mmap, file_name)?;
22 | let architecture = url
23 | .override_architecture()
24 | .or_else(|| Architecture::from_url(url.as_str()));
25 | for installer in &mut file_analyser.installers {
26 | if let Some(architecture) = architecture {
27 | installer.architecture = architecture;
28 | }
29 | installer.url = url.inner().clone();
30 | installer.sha_256 = sha_256.clone();
31 | installer.release_date = *last_modified;
32 | }
33 | file_analyser.file_name = mem::take(file_name);
34 | Ok((mem::take(url.inner_mut()), file_analyser))
35 | },
36 | ))
37 | .buffer_unordered(num_cpus::get())
38 | .try_collect::<HashMap<_, _>>()
39 | .await
40 | }
41 |
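process_files fans the per-file analysis out with bounded concurrency. A stripped-down sketch of the same stream pattern, with illustrative values rather than repository types:

use futures_util::{StreamExt, TryStreamExt, stream};

// One async task per input, at most num_cpus::get() running at a time; the first
// error aborts the collection, mirroring try_collect in process_files.
async fn double_all(values: Vec<u32>) -> Result<Vec<u32>, std::io::Error> {
    stream::iter(
        values
            .into_iter()
            .map(|value| async move { Ok::<_, std::io::Error>(value * 2) }),
    )
    .buffer_unordered(num_cpus::get())
    .try_collect()
    .await
}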
--------------------------------------------------------------------------------
/src/github/graphql/create_commit.rs:
--------------------------------------------------------------------------------
1 | use std::borrow::Cow;
2 |
3 | use derive_new::new;
4 | use url::Url;
5 |
6 | use crate::github::graphql::{
7 | github_schema::github_schema as schema,
8 | types::{Base64String, GitObjectId},
9 | };
10 |
11 | #[derive(cynic::QueryVariables)]
12 | pub struct CreateCommitVariables<'a> {
13 | pub input: CreateCommitOnBranchInput<'a>,
14 | }
15 |
16 | #[derive(cynic::QueryFragment)]
17 | #[cynic(graphql_type = "Mutation", variables = "CreateCommitVariables")]
18 | pub struct CreateCommit {
19 | #[arguments(input: $input)]
20 | pub create_commit_on_branch: Option<CreateCommitOnBranchPayload>,
21 | }
22 |
23 | ///
24 | #[derive(cynic::QueryFragment)]
25 | pub struct CreateCommitOnBranchPayload {
26 | pub commit: Option<Commit>,
27 | }
28 |
29 | ///
30 | #[derive(cynic::QueryFragment)]
31 | pub struct Commit {
32 | pub url: Url,
33 | }
34 |
35 | ///
36 | #[derive(cynic::InputObject)]
37 | pub struct CreateCommitOnBranchInput<'a> {
38 | pub branch: CommittableBranch<'a>,
39 | pub expected_head_oid: GitObjectId,
40 | #[cynic(skip_serializing_if = "Option::is_none")]
41 | pub file_changes: Option<FileChanges<'a>>,
42 | pub message: CommitMessage<'a>,
43 | }
44 |
45 | ///
46 | #[derive(cynic::InputObject)]
47 | pub struct FileChanges<'a> {
48 | #[cynic(skip_serializing_if = "Option::is_none")]
49 | pub additions: Option<Vec<FileAddition<'a>>>,
50 | #[cynic(skip_serializing_if = "Option::is_none")]
51 | pub deletions: Option<Vec<FileDeletion<'a>>>,
52 | }
53 |
54 | ///
55 | #[derive(cynic::InputObject, new)]
56 | pub struct FileDeletion<'path> {
57 | #[new(into)]
58 | pub path: Cow<'path, str>,
59 | }
60 |
61 | ///
62 | #[derive(cynic::InputObject, new)]
63 | pub struct FileAddition<'path> {
64 | pub contents: Base64String,
65 | #[new(into)]
66 | pub path: Cow<'path, str>,
67 | }
68 |
69 | ///
70 | #[derive(cynic::InputObject)]
71 | pub struct CommittableBranch<'a> {
72 | pub id: &'a cynic::Id,
73 | }
74 |
75 | ///
76 | #[derive(cynic::InputObject)]
77 | pub struct CommitMessage<'a> {
78 | #[cynic(skip_serializing_if = "Option::is_none")]
79 | pub body: Option<&'a str>,
80 | pub headline: &'a str,
81 | }
82 |
83 | #[cfg(test)]
84 | mod tests {
85 | use cynic::{Id, MutationBuilder};
86 | use indoc::indoc;
87 |
88 | use crate::github::graphql::{
89 | create_commit::{
90 | CommitMessage, CommittableBranch, CreateCommit, CreateCommitOnBranchInput,
91 | CreateCommitVariables,
92 | },
93 | types::GitObjectId,
94 | };
95 |
96 | #[test]
97 | fn create_commit_output() {
98 | const CREATE_COMMIT_MUTATION: &str = indoc! {"
99 | mutation CreateCommit($input: CreateCommitOnBranchInput!) {
100 | createCommitOnBranch(input: $input) {
101 | commit {
102 | url
103 | }
104 | }
105 | }
106 | "};
107 |
108 | let id = Id::new("");
109 | let operation = CreateCommit::build(CreateCommitVariables {
110 | input: CreateCommitOnBranchInput {
111 | branch: CommittableBranch { id: &id },
112 | expected_head_oid: GitObjectId::new(""),
113 | file_changes: None,
114 | message: CommitMessage {
115 | body: None,
116 | headline: "",
117 | },
118 | },
119 | });
120 |
121 | assert_eq!(operation.query, CREATE_COMMIT_MUTATION);
122 | }
123 | }
124 |
--------------------------------------------------------------------------------
/src/github/graphql/create_pull_request.rs:
--------------------------------------------------------------------------------
1 | use url::Url;
2 |
3 | use crate::github::graphql::github_schema::github_schema as schema;
4 |
5 | #[derive(cynic::QueryVariables)]
6 | pub struct CreatePullRequestVariables<'a> {
7 | pub input: CreatePullRequestInput<'a>,
8 | }
9 |
10 | #[derive(cynic::QueryFragment)]
11 | #[cynic(graphql_type = "Mutation", variables = "CreatePullRequestVariables")]
12 | pub struct CreatePullRequest {
13 | #[arguments(input: $input)]
14 | pub create_pull_request: Option<CreatePullRequestPayload>,
15 | }
16 |
17 | ///
18 | #[derive(cynic::QueryFragment)]
19 | pub struct CreatePullRequestPayload {
20 | pub pull_request: Option<PullRequest>,
21 | }
22 |
23 | ///
24 | #[derive(cynic::QueryFragment)]
25 | pub struct PullRequest {
26 | pub url: Url,
27 | }
28 |
29 | ///
30 | #[derive(cynic::InputObject)]
31 | pub struct CreatePullRequestInput<'a> {
32 | pub base_ref_name: &'a str,
33 | #[cynic(skip_serializing_if = "Option::is_none")]
34 | pub body: Option<&'a str>,
35 | #[cynic(skip_serializing_if = "Option::is_none")]
36 | pub draft: Option<bool>,
37 | pub head_ref_name: &'a str,
38 | #[cynic(skip_serializing_if = "Option::is_none")]
39 | pub head_repository_id: Option<&'a cynic::Id>,
40 | #[cynic(skip_serializing_if = "Option::is_none")]
41 | pub maintainer_can_modify: Option<bool>,
42 | pub repository_id: &'a cynic::Id,
43 | pub title: &'a str,
44 | }
45 |
46 | #[cfg(test)]
47 | mod tests {
48 | use cynic::{Id, MutationBuilder};
49 | use indoc::indoc;
50 |
51 | use crate::github::graphql::create_pull_request::{
52 | CreatePullRequest, CreatePullRequestInput, CreatePullRequestVariables,
53 | };
54 |
55 | #[test]
56 | fn create_pull_request_output() {
57 | const CREATE_PULL_REQUEST_MUTATION: &str = indoc! {"
58 | mutation CreatePullRequest($input: CreatePullRequestInput!) {
59 | createPullRequest(input: $input) {
60 | pullRequest {
61 | url
62 | }
63 | }
64 | }
65 | "};
66 |
67 | let id = Id::new("");
68 | let operation = CreatePullRequest::build(CreatePullRequestVariables {
69 | input: CreatePullRequestInput {
70 | base_ref_name: "",
71 | body: None,
72 | draft: None,
73 | head_ref_name: "",
74 | head_repository_id: None,
75 | maintainer_can_modify: None,
76 | repository_id: &id,
77 | title: "",
78 | },
79 | });
80 |
81 | assert_eq!(operation.query, CREATE_PULL_REQUEST_MUTATION);
82 | }
83 | }
84 |
--------------------------------------------------------------------------------
/src/github/graphql/create_ref.rs:
--------------------------------------------------------------------------------
1 | use crate::github::graphql::{github_schema::github_schema as schema, types::GitObjectId};
2 |
3 | ///
4 | #[derive(cynic::QueryVariables)]
5 | pub struct CreateRefVariables<'a> {
6 | pub name: &'a str,
7 | pub oid: GitObjectId,
8 | pub repository_id: &'a cynic::Id,
9 | }
10 |
11 | #[derive(cynic::QueryFragment)]
12 | #[cynic(graphql_type = "Mutation", variables = "CreateRefVariables")]
13 | pub struct CreateRef {
14 | #[arguments(input: { name: $name, oid: $oid, repositoryId: $repository_id })]
15 | pub create_ref: Option<CreateRefPayload>,
16 | }
17 |
18 | ///
19 | #[derive(cynic::QueryFragment)]
20 | pub struct CreateRefPayload {
21 | #[cynic(rename = "ref")]
22 | pub ref_: Option<Ref>,
23 | }
24 |
25 | ///
26 | #[derive(cynic::QueryFragment)]
27 | pub struct Ref {
28 | pub id: cynic::Id,
29 | pub name: String,
30 | pub target: Option<GitObject>,
31 | }
32 |
33 | ///
34 | #[derive(cynic::QueryFragment)]
35 | pub struct GitObject {
36 | pub oid: GitObjectId,
37 | }
38 |
39 | #[cfg(test)]
40 | mod tests {
41 | use cynic::{Id, MutationBuilder};
42 | use indoc::indoc;
43 |
44 | use crate::github::graphql::{
45 | create_ref::{CreateRef, CreateRefVariables},
46 | types::GitObjectId,
47 | };
48 |
49 | #[test]
50 | fn create_ref_output() {
51 | const CREATE_REF_MUTATION: &str = indoc! {"
52 | mutation CreateRef($name: String!, $oid: GitObjectID!, $repositoryId: ID!) {
53 | createRef(input: {name: $name, oid: $oid, repositoryId: $repositoryId}) {
54 | ref {
55 | id
56 | name
57 | target {
58 | oid
59 | }
60 | }
61 | }
62 | }
63 | "};
64 |
65 | let id = Id::new("");
66 | let operation = CreateRef::build(CreateRefVariables {
67 | name: "",
68 | oid: GitObjectId::new(""),
69 | repository_id: &id,
70 | });
71 |
72 | assert_eq!(operation.query, CREATE_REF_MUTATION);
73 | }
74 | }
75 |
--------------------------------------------------------------------------------
/src/github/graphql/get_all_values.rs:
--------------------------------------------------------------------------------
1 | use url::Url;
2 |
3 | use crate::github::graphql::{github_schema::github_schema as schema, types::Html};
4 |
5 | #[derive(cynic::QueryVariables)]
6 | pub struct GetAllValuesVariables<'a> {
7 | pub owner: &'a str,
8 | pub name: &'a str,
9 | pub tag_name: &'a str,
10 | }
11 |
12 | #[derive(cynic::QueryFragment)]
13 | pub struct Tree {
14 | #[cynic(flatten)]
15 | pub entries: Vec<TreeEntry>,
16 | }
17 |
18 | #[derive(cynic::QueryFragment)]
19 | pub struct TreeEntry {
20 | pub name: String,
21 | #[cynic(rename = "type")]
22 | pub type_: String,
23 | }
24 |
25 | #[derive(cynic::QueryFragment)]
26 | #[cynic(graphql_type = "Query", variables = "GetAllValuesVariables")]
27 | pub struct GetAllValues {
28 | #[arguments(owner: $owner, name: $name)]
29 | pub repository: Option<Repository>,
30 | }
31 |
32 | ///
33 | #[derive(cynic::QueryFragment)]
34 | #[cynic(variables = "GetAllValuesVariables")]
35 | pub struct Repository {
36 | pub has_issues_enabled: bool,
37 | pub license_info: Option<License>,
38 | pub owner: RepositoryOwner,
39 | #[arguments(expression: "HEAD:")]
40 | pub object: Option<GetAllValuesGitObject>,
41 | #[arguments(tagName: $tag_name)]
42 | pub release: Option<Release>,
43 | #[cynic(rename = "repositoryTopics")]
44 | #[arguments(first: 16)]
45 | pub topics: RepositoryTopicConnection,
46 | pub url: Url,
47 | }
48 |
49 | ///
50 | #[derive(cynic::QueryFragment)]
51 | pub struct RepositoryTopicConnection {
52 | #[cynic(flatten)]
53 | pub nodes: Vec<RepositoryTopic>,
54 | }
55 |
56 | ///
57 | #[derive(cynic::QueryFragment)]
58 | pub struct RepositoryTopic {
59 | pub topic: Topic,
60 | }
61 |
62 | ///
63 | #[derive(cynic::QueryFragment)]
64 | pub struct Topic {
65 | pub name: String,
66 | }
67 |
68 | ///
69 | #[derive(cynic::QueryFragment)]
70 | pub struct Release {
71 | #[cynic(rename = "descriptionHTML")]
72 | pub description_html: Option<Html>,
73 | pub url: Url,
74 | }
75 |
76 | ///
77 | #[derive(cynic::QueryFragment)]
78 | pub struct RepositoryOwner {
79 | pub url: Url,
80 | }
81 |
82 | ///
83 | #[derive(cynic::QueryFragment)]
84 | pub struct License {
85 | pub key: String,
86 | #[cynic(rename = "pseudoLicense")]
87 | pub is_pseudo: bool,
88 | pub spdx_id: Option<String>,
89 | }
90 |
91 | #[derive(cynic::InlineFragments)]
92 | #[cynic(graphql_type = "GitObject")]
93 | pub enum GetAllValuesGitObject {
94 | Tree(Tree),
95 | #[cynic(fallback)]
96 | Unknown,
97 | }
98 |
99 | #[cfg(test)]
100 | mod tests {
101 | use cynic::QueryBuilder;
102 | use indoc::indoc;
103 |
104 | use crate::github::{
105 | github_client::{MICROSOFT, WINGET_PKGS},
106 | graphql::get_all_values::{GetAllValues, GetAllValuesVariables},
107 | };
108 |
109 | #[test]
110 | fn get_all_values_output() {
111 | const GET_ALL_VALUES_QUERY: &str = indoc! {r#"
112 | query GetAllValues($owner: String!, $name: String!, $tagName: String!) {
113 | repository(owner: $owner, name: $name) {
114 | hasIssuesEnabled
115 | licenseInfo {
116 | key
117 | pseudoLicense
118 | spdxId
119 | }
120 | owner {
121 | url
122 | }
123 | object(expression: "HEAD:") {
124 | __typename
125 | ... on Tree {
126 | entries {
127 | name
128 | type
129 | }
130 | }
131 | }
132 | release(tagName: $tagName) {
133 | descriptionHTML
134 | url
135 | }
136 | repositoryTopics(first: 16) {
137 | nodes {
138 | topic {
139 | name
140 | }
141 | }
142 | }
143 | url
144 | }
145 | }
146 | "#};
147 |
148 | let operation = GetAllValues::build(GetAllValuesVariables {
149 | owner: MICROSOFT,
150 | name: WINGET_PKGS,
151 | tag_name: "",
152 | });
153 |
154 | assert_eq!(operation.query, GET_ALL_VALUES_QUERY);
155 | }
156 | }
157 |
--------------------------------------------------------------------------------
/src/github/graphql/get_branches.rs:
--------------------------------------------------------------------------------
1 | use std::fmt::{Display, Formatter};
2 |
3 | use url::Url;
4 |
5 | use crate::github::graphql::github_schema::github_schema as schema;
6 |
7 | #[derive(cynic::QueryVariables)]
8 | pub struct GetBranchesVariables<'a> {
9 | pub owner: &'a str,
10 | pub name: &'a str,
11 | pub cursor: Option<&'a str>,
12 | }
13 |
14 | #[derive(cynic::QueryFragment)]
15 | #[cynic(graphql_type = "Query", variables = "GetBranchesVariables")]
16 | pub struct GetBranches {
17 | #[arguments(owner: $owner, name: $name)]
18 | pub repository: Option<Repository>,
19 | }
20 |
21 | ///
22 | #[derive(cynic::QueryFragment)]
23 | #[cynic(variables = "GetBranchesVariables")]
24 | pub struct Repository {
25 | pub id: cynic::Id,
26 | pub default_branch_ref: Option<DefaultBranchRef>,
27 | #[arguments(first: 100, after: $cursor, refPrefix: "refs/heads/")]
28 | pub refs: Option<RefConnection>,
29 | }
30 |
31 | ///
32 | #[derive(cynic::QueryFragment)]
33 | pub struct RefConnection {
34 | #[cynic(rename = "nodes", flatten)]
35 | pub branches: Vec<PullRequestBranchRef>,
36 | pub page_info: PageInfo,
37 | }
38 |
39 | #[derive(cynic::QueryFragment)]
40 | pub struct PageInfo {
41 | pub end_cursor: Option<String>,
42 | pub has_next_page: bool,
43 | }
44 |
45 | ///
46 | #[derive(cynic::QueryFragment, Hash, PartialEq, Eq)]
47 | #[cynic(graphql_type = "Ref")]
48 | pub struct PullRequestBranchRef {
49 | pub name: String,
50 | #[arguments(first: 5)]
51 | pub associated_pull_requests: PullRequestConnection,
52 | }
53 |
54 | ///
55 | #[derive(cynic::QueryFragment)]
56 | #[cynic(graphql_type = "Ref")]
57 | pub struct DefaultBranchRef {
58 | pub name: String,
59 | }
60 |
61 | ///
62 | #[derive(cynic::QueryFragment, Hash, PartialEq, Eq)]
63 | pub struct PullRequestConnection {
64 | #[cynic(rename = "nodes", flatten)]
65 | pub pull_requests: Vec<PullRequest>,
66 | }
67 |
68 | ///
69 | #[derive(cynic::QueryFragment, Hash, PartialEq, Eq)]
70 | pub struct PullRequest {
71 | pub title: String,
72 | pub url: Url,
73 | pub state: PullRequestState,
74 | pub repository: PullRequestRepository,
75 | }
76 |
77 | impl Display for PullRequest {
78 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
79 | write!(f, "{}", self.title)
80 | }
81 | }
82 |
83 | ///
84 | #[derive(cynic::QueryFragment, Hash, PartialEq, Eq)]
85 | #[cynic(graphql_type = "Repository")]
86 | pub struct PullRequestRepository {
87 | pub name_with_owner: String,
88 | }
89 |
90 | ///
91 | #[derive(cynic::Enum, Clone, Copy, Hash, PartialEq, Eq)]
92 | pub enum PullRequestState {
93 | Closed,
94 | Merged,
95 | Open,
96 | }
97 |
98 | impl Display for PullRequestState {
99 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
100 | write!(
101 | f,
102 | "{}",
103 | match self {
104 | Self::Merged => "a merged",
105 | Self::Open => "an open",
106 | Self::Closed => "a closed",
107 | }
108 | )
109 | }
110 | }
111 |
112 | #[cfg(test)]
113 | mod tests {
114 | use cynic::QueryBuilder;
115 | use indoc::indoc;
116 |
117 | use crate::github::{
118 | github_client::{MICROSOFT, WINGET_PKGS},
119 | graphql::get_branches::{GetBranches, GetBranchesVariables},
120 | };
121 |
122 | #[test]
123 | fn get_branches_query_output() {
124 | const GET_BRANCHES_QUERY: &str = indoc! {r#"
125 | query GetBranches($owner: String!, $name: String!, $cursor: String) {
126 | repository(owner: $owner, name: $name) {
127 | id
128 | defaultBranchRef {
129 | name
130 | }
131 | refs(first: 100, after: $cursor, refPrefix: "refs/heads/") {
132 | nodes {
133 | name
134 | associatedPullRequests(first: 5) {
135 | nodes {
136 | title
137 | url
138 | state
139 | repository {
140 | nameWithOwner
141 | }
142 | }
143 | }
144 | }
145 | pageInfo {
146 | endCursor
147 | hasNextPage
148 | }
149 | }
150 | }
151 | }
152 | "#};
153 |
154 | let operation = GetBranches::build(GetBranchesVariables {
155 | owner: MICROSOFT,
156 | name: WINGET_PKGS,
157 | cursor: None,
158 | });
159 |
160 | assert_eq!(operation.query, GET_BRANCHES_QUERY);
161 | }
162 | }
163 |
--------------------------------------------------------------------------------
/src/github/graphql/get_current_user_login.rs:
--------------------------------------------------------------------------------
1 | use crate::github::graphql::github_schema::github_schema as schema;
2 |
3 | ///
4 | #[derive(cynic::QueryFragment)]
5 | #[cynic(graphql_type = "Query")]
6 | pub struct GetCurrentUserLogin {
7 | pub viewer: User,
8 | }
9 |
10 | ///
11 | #[derive(cynic::QueryFragment)]
12 | pub struct User {
13 | pub login: String,
14 | }
15 |
16 | #[cfg(test)]
17 | mod tests {
18 | use cynic::QueryBuilder;
19 | use indoc::indoc;
20 |
21 | use crate::github::graphql::get_current_user_login::GetCurrentUserLogin;
22 |
23 | #[test]
24 | fn get_current_user_login_output() {
25 | const GET_CURRENT_USER_LOGIN_QUERY: &str = indoc! {r#"
26 | query GetCurrentUserLogin {
27 | viewer {
28 | login
29 | }
30 | }
31 | "#};
32 |
33 | let operation = GetCurrentUserLogin::build(());
34 |
35 | assert_eq!(operation.query, GET_CURRENT_USER_LOGIN_QUERY);
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/src/github/graphql/get_directory_content.rs:
--------------------------------------------------------------------------------
1 | use crate::github::graphql::github_schema::github_schema as schema;
2 |
3 | #[derive(cynic::QueryVariables)]
4 | pub struct GetDirectoryContentVariables<'a> {
5 | pub owner: &'a str,
6 | pub name: &'a str,
7 | pub expression: &'a str,
8 | }
9 |
10 | #[derive(cynic::QueryFragment)]
11 | pub struct Tree {
12 | #[cynic(flatten)]
13 | pub entries: Vec<TreeEntry>,
14 | }
15 |
16 | #[derive(cynic::QueryFragment)]
17 | pub struct TreeEntry {
18 | pub path: Option<String>,
19 | }
20 |
21 | #[derive(cynic::QueryFragment)]
22 | #[cynic(graphql_type = "Query", variables = "GetDirectoryContentVariables")]
23 | pub struct GetDirectoryContent {
24 | #[arguments(owner: $owner, name: $name)]
25 | pub repository: Option<Repository>,
26 | }
27 |
28 | #[derive(cynic::QueryFragment)]
29 | #[cynic(variables = "GetDirectoryContentVariables")]
30 | pub struct Repository {
31 | #[arguments(expression: $expression)]
32 | pub object: Option<TreeGitObject>,
33 | }
34 |
35 | #[derive(cynic::InlineFragments)]
36 | #[cynic(graphql_type = "GitObject")]
37 | pub enum TreeGitObject {
38 | Tree(Tree),
39 | #[cynic(fallback)]
40 | Unknown,
41 | }
42 |
43 | impl TreeGitObject {
44 | pub fn into_entries(self) -> Option<Vec<TreeEntry>> {
45 | match self {
46 | Self::Tree(tree) => Some(tree.entries),
47 | Self::Unknown => None,
48 | }
49 | }
50 | }
51 |
52 | #[cfg(test)]
53 | mod tests {
54 | use cynic::QueryBuilder;
55 | use indoc::indoc;
56 |
57 | use crate::github::{
58 | github_client::{MICROSOFT, WINGET_PKGS},
59 | graphql::get_directory_content::{GetDirectoryContent, GetDirectoryContentVariables},
60 | };
61 |
62 | #[test]
63 | fn get_directory_content_output() {
64 | const GET_DIRECTORY_CONTENT_QUERY: &str = indoc! {r#"
65 | query GetDirectoryContent($owner: String!, $name: String!, $expression: String!) {
66 | repository(owner: $owner, name: $name) {
67 | object(expression: $expression) {
68 | __typename
69 | ... on Tree {
70 | entries {
71 | path
72 | }
73 | }
74 | }
75 | }
76 | }
77 | "#};
78 |
79 | let operation = GetDirectoryContent::build(GetDirectoryContentVariables {
80 | owner: MICROSOFT,
81 | name: WINGET_PKGS,
82 | expression: "",
83 | });
84 |
85 | assert_eq!(operation.query, GET_DIRECTORY_CONTENT_QUERY);
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/src/github/graphql/get_directory_content_with_text.rs:
--------------------------------------------------------------------------------
1 | use crate::github::graphql::{
2 | get_directory_content::GetDirectoryContentVariablesFields,
3 | github_schema::github_schema as schema,
4 | };
5 |
6 | #[derive(cynic::QueryFragment)]
7 | pub struct Tree {
8 | #[cynic(flatten)]
9 | pub entries: Vec<TreeEntry>,
10 | }
11 |
12 | #[derive(cynic::QueryFragment)]
13 | pub struct TreeEntry {
14 | pub name: String,
15 | pub object: Option<BlobObject>,
16 | }
17 |
18 | #[derive(cynic::QueryFragment)]
19 | #[cynic(graphql_type = "Query", variables = "GetDirectoryContentVariables")]
20 | pub struct GetDirectoryContentWithText {
21 | #[arguments(owner: $owner, name: $name)]
22 | pub repository: Option<Repository>,
23 | }
24 |
25 | #[derive(cynic::QueryFragment)]
26 | #[cynic(variables = "GetDirectoryContentVariables")]
27 | pub struct Repository {
28 | #[arguments(expression: $expression)]
29 | pub object: Option<TreeObject>,
30 | }
31 |
32 | #[derive(cynic::QueryFragment)]
33 | pub struct Blob {
34 | pub text: Option<String>,
35 | }
36 |
37 | #[derive(cynic::InlineFragments)]
38 | #[cynic(graphql_type = "GitObject")]
39 | pub enum BlobObject {
40 | Blob(Blob),
41 | #[cynic(fallback)]
42 | Unknown,
43 | }
44 |
45 | impl BlobObject {
46 | pub fn into_blob_text(self) -> Option<String> {
47 | match self {
48 | Self::Blob(blob) => blob.text,
49 | Self::Unknown => None,
50 | }
51 | }
52 | }
53 |
54 | #[derive(cynic::InlineFragments)]
55 | #[cynic(graphql_type = "GitObject")]
56 | pub enum TreeObject {
57 | Tree(Tree),
58 | #[cynic(fallback)]
59 | Unknown,
60 | }
61 |
62 | impl TreeObject {
63 | pub fn into_tree_entries(self) -> Option<Vec<TreeEntry>> {
64 | match self {
65 | Self::Tree(tree) => Some(tree.entries),
66 | Self::Unknown => None,
67 | }
68 | }
69 | }
70 |
71 | #[cfg(test)]
72 | mod tests {
73 | use cynic::QueryBuilder;
74 | use indoc::indoc;
75 |
76 | use crate::github::{
77 | github_client::{MICROSOFT, WINGET_PKGS},
78 | graphql::{
79 | get_directory_content::GetDirectoryContentVariables,
80 | get_directory_content_with_text::GetDirectoryContentWithText,
81 | },
82 | };
83 |
84 | #[test]
85 | fn get_directory_content_with_text_output() {
86 | const GET_DIRECTORY_CONTENT_WITH_TEXT_QUERY: &str = indoc! {r#"
87 | query GetDirectoryContentWithText($owner: String!, $name: String!, $expression: String!) {
88 | repository(owner: $owner, name: $name) {
89 | object(expression: $expression) {
90 | __typename
91 | ... on Tree {
92 | entries {
93 | name
94 | object {
95 | __typename
96 | ... on Blob {
97 | text
98 | }
99 | }
100 | }
101 | }
102 | }
103 | }
104 | }
105 | "#};
106 |
107 | let operation = GetDirectoryContentWithText::build(GetDirectoryContentVariables {
108 | owner: MICROSOFT,
109 | name: WINGET_PKGS,
110 | expression: "",
111 | });
112 |
113 | assert_eq!(operation.query, GET_DIRECTORY_CONTENT_WITH_TEXT_QUERY);
114 | }
115 | }
116 |
--------------------------------------------------------------------------------
/src/github/graphql/get_existing_pull_request.rs:
--------------------------------------------------------------------------------
1 | use chrono::{DateTime, Utc};
2 | use url::Url;
3 |
4 | use crate::github::graphql::{
5 | get_branches::PullRequestState, github_schema::github_schema as schema,
6 | };
7 |
8 | #[derive(cynic::QueryVariables)]
9 | pub struct GetExistingPullRequestVariables<'a> {
10 | pub query: &'a str,
11 | }
12 |
13 | #[derive(cynic::QueryFragment)]
14 | #[cynic(graphql_type = "Query", variables = "GetExistingPullRequestVariables")]
15 | pub struct GetExistingPullRequest {
16 | #[arguments(first: 1, type: ISSUE, query: $query)]
17 | pub search: SearchResultItemConnection,
18 | }
19 |
20 | #[derive(cynic::QueryFragment)]
21 | pub struct SearchResultItemConnection {
22 | #[cynic(flatten)]
23 | pub edges: Vec<SearchResultItemEdge>,
24 | }
25 |
26 | #[derive(cynic::QueryFragment)]
27 | pub struct SearchResultItemEdge {
28 | pub node: Option<SearchResultItem>,
29 | }
30 |
31 | #[derive(cynic::QueryFragment)]
32 | pub struct PullRequest {
33 | pub url: Url,
34 | pub state: PullRequestState,
35 | pub created_at: DateTime<Utc>,
36 | }
37 |
38 | #[derive(cynic::InlineFragments)]
39 | pub enum SearchResultItem {
40 | PullRequest(PullRequest),
41 | #[cynic(fallback)]
42 | Unknown,
43 | }
44 |
45 | impl SearchResultItem {
46 | pub fn into_pull_request(self) -> Option<PullRequest> {
47 | match self {
48 | Self::PullRequest(pull_request) => Some(pull_request),
49 | Self::Unknown => None,
50 | }
51 | }
52 | }
53 |
54 | #[cfg(test)]
55 | mod tests {
56 | use cynic::QueryBuilder;
57 | use indoc::indoc;
58 |
59 | use crate::github::graphql::get_existing_pull_request::{
60 | GetExistingPullRequest, GetExistingPullRequestVariables,
61 | };
62 |
63 | #[test]
64 | fn get_existing_pull_request_output() {
65 | const GET_EXISTING_PULL_REQUEST_QUERY: &str = indoc! {r#"
66 | query GetExistingPullRequest($query: String!) {
67 | search(first: 1, type: ISSUE, query: $query) {
68 | edges {
69 | node {
70 | __typename
71 | ... on PullRequest {
72 | url
73 | state
74 | createdAt
75 | }
76 | }
77 | }
78 | }
79 | }
80 | "#};
81 |
82 | let operation =
83 | GetExistingPullRequest::build(GetExistingPullRequestVariables { query: "" });
84 |
85 | assert_eq!(operation.query, GET_EXISTING_PULL_REQUEST_QUERY);
86 | }
87 | }
88 |
--------------------------------------------------------------------------------
/src/github/graphql/get_file_content.rs:
--------------------------------------------------------------------------------
1 | use crate::github::graphql::{
2 | get_directory_content::GetDirectoryContentVariablesFields,
3 | get_directory_content_with_text::BlobObject, github_schema::github_schema as schema,
4 | };
5 |
6 | #[derive(cynic::QueryFragment)]
7 | #[cynic(graphql_type = "Query", variables = "GetDirectoryContentVariables")]
8 | pub struct GetFileContent {
9 | #[arguments(owner: $owner, name: $name)]
10 | pub repository: Option<Repository>,
11 | }
12 |
13 | #[derive(cynic::QueryFragment)]
14 | #[cynic(variables = "GetDirectoryContentVariables")]
15 | pub struct Repository {
16 | #[arguments(expression: $expression)]
17 | pub object: Option<BlobObject>,
18 | }
19 |
20 | #[cfg(test)]
21 | mod tests {
22 | use cynic::QueryBuilder;
23 | use indoc::indoc;
24 |
25 | use crate::github::{
26 | github_client::{MICROSOFT, WINGET_PKGS},
27 | graphql::{
28 | get_directory_content::GetDirectoryContentVariables, get_file_content::GetFileContent,
29 | },
30 | };
31 |
32 | #[test]
33 | fn get_file_content_output() {
34 | const GET_FILE_CONTENT_QUERY: &str = indoc! {r#"
35 | query GetFileContent($owner: String!, $name: String!, $expression: String!) {
36 | repository(owner: $owner, name: $name) {
37 | object(expression: $expression) {
38 | __typename
39 | ... on Blob {
40 | text
41 | }
42 | }
43 | }
44 | }
45 | "#};
46 |
47 | let operation = GetFileContent::build(GetDirectoryContentVariables {
48 | owner: MICROSOFT,
49 | name: WINGET_PKGS,
50 | expression: "",
51 | });
52 |
53 | assert_eq!(operation.query, GET_FILE_CONTENT_QUERY);
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/src/github/graphql/get_repository_info.rs:
--------------------------------------------------------------------------------
1 | use url::Url;
2 |
3 | use crate::github::graphql::{github_schema::github_schema as schema, types::GitObjectId};
4 |
5 | #[derive(cynic::QueryVariables)]
6 | pub struct RepositoryVariables<'a> {
7 | pub owner: &'a str,
8 | pub name: &'a str,
9 | }
10 |
11 | #[derive(cynic::QueryFragment)]
12 | #[cynic(graphql_type = "Query", variables = "RepositoryVariables")]
13 | pub struct GetRepositoryInfo {
14 | #[arguments(owner: $owner, name: $name)]
15 | pub repository: Option<Repository>,
16 | }
17 |
18 | #[derive(cynic::QueryFragment)]
19 | pub struct Repository {
20 | pub id: cynic::Id,
21 | pub owner: RepositoryOwner,
22 | pub name_with_owner: String,
23 | pub url: Url,
24 | pub default_branch_ref: Option<Ref>,
25 | }
26 |
27 | #[derive(cynic::QueryFragment)]
28 | pub struct Ref {
29 | pub name: String,
30 | pub id: cynic::Id,
31 | pub target: Option<TargetGitObject>,
32 | }
33 |
34 | #[derive(cynic::QueryFragment)]
35 | pub struct RepositoryOwner {
36 | pub login: String,
37 | }
38 |
39 | #[derive(cynic::QueryFragment)]
40 | pub struct Commit {
41 | pub oid: GitObjectId,
42 | pub history: CommitHistoryConnection,
43 | }
44 |
45 | #[derive(cynic::QueryFragment)]
46 | pub struct CommitHistoryConnection {
47 | pub total_count: i32,
48 | }
49 |
50 | #[derive(cynic::InlineFragments)]
51 | #[cynic(graphql_type = "GitObject")]
52 | pub enum TargetGitObject {
53 | Commit(Commit),
54 | #[cynic(fallback)]
55 | Unknown,
56 | }
57 |
58 | impl TargetGitObject {
59 | pub fn into_commit(self) -> Option {
60 | match self {
61 | Self::Commit(commit) => Some(commit),
62 | Self::Unknown => None,
63 | }
64 | }
65 | }
66 |
67 | #[cfg(test)]
68 | mod tests {
69 | use cynic::QueryBuilder;
70 | use indoc::indoc;
71 |
72 | use crate::github::{
73 | github_client::{MICROSOFT, WINGET_PKGS},
74 | graphql::get_repository_info::{GetRepositoryInfo, RepositoryVariables},
75 | };
76 |
77 | #[test]
78 | fn get_repository_info_output() {
79 | const GET_REPOSITORY_INFO_QUERY: &str = indoc! {r#"
80 | query GetRepositoryInfo($owner: String!, $name: String!) {
81 | repository(owner: $owner, name: $name) {
82 | id
83 | owner {
84 | login
85 | }
86 | nameWithOwner
87 | url
88 | defaultBranchRef {
89 | name
90 | id
91 | target {
92 | __typename
93 | ... on Commit {
94 | oid
95 | history {
96 | totalCount
97 | }
98 | }
99 | }
100 | }
101 | }
102 | }
103 | "#};
104 |
105 | let operation = GetRepositoryInfo::build(RepositoryVariables {
106 | owner: MICROSOFT,
107 | name: WINGET_PKGS,
108 | });
109 |
110 | assert_eq!(operation.query, GET_REPOSITORY_INFO_QUERY);
111 | }
112 | }
113 |
--------------------------------------------------------------------------------
/src/github/graphql/github_schema.rs:
--------------------------------------------------------------------------------
1 | use chrono::{DateTime, Utc};
2 | use cynic::impl_scalar;
3 | use url::Url;
4 |
5 | #[cynic::schema("github")]
6 | pub mod github_schema {}
7 |
8 | impl_scalar!(Url, github_schema::URI);
9 | impl_scalar!(DateTime<Utc>, github_schema::DateTime);
10 |
--------------------------------------------------------------------------------
/src/github/graphql/merge_upstream.rs:
--------------------------------------------------------------------------------
1 | use crate::github::graphql::{github_schema::github_schema as schema, types::GitObjectId};
2 |
3 | #[derive(cynic::QueryVariables)]
4 | pub struct MergeUpstreamVariables<'id> {
5 | pub branch_ref_id: &'id cynic::Id,
6 | pub upstream_target_oid: GitObjectId,
7 | pub force: bool,
8 | }
9 |
10 | #[derive(cynic::QueryFragment)]
11 | #[cynic(graphql_type = "Mutation", variables = "MergeUpstreamVariables")]
12 | pub struct MergeUpstream {
13 | #[expect(dead_code)]
14 | #[arguments(input: { oid: $upstream_target_oid, refId: $branch_ref_id, force: $force })]
15 | pub update_ref: Option<UpdateRefPayload>,
16 | }
17 |
18 | #[derive(cynic::QueryFragment)]
19 | pub struct UpdateRefPayload {
20 | #[expect(dead_code)]
21 | pub client_mutation_id: Option<String>,
22 | }
23 |
24 | #[cfg(test)]
25 | mod tests {
26 | use cynic::{Id, MutationBuilder};
27 | use indoc::indoc;
28 |
29 | use crate::github::graphql::{
30 | merge_upstream::{MergeUpstream, MergeUpstreamVariables},
31 | types::GitObjectId,
32 | };
33 |
34 | #[test]
35 | fn merge_upstream_output() {
36 | const MERGE_UPSTREAM_MUTATION: &str = indoc! {"
37 | mutation MergeUpstream($branchRefId: ID!, $upstreamTargetOid: GitObjectID!, $force: Boolean!) {
38 | updateRef(input: {oid: $upstreamTargetOid, refId: $branchRefId, force: $force}) {
39 | clientMutationId
40 | }
41 | }
42 | "};
43 |
44 | let id = Id::new("");
45 | let operation = MergeUpstream::build(MergeUpstreamVariables {
46 | branch_ref_id: &id,
47 | upstream_target_oid: GitObjectId::new(""),
48 | force: false,
49 | });
50 |
51 | assert_eq!(operation.query, MERGE_UPSTREAM_MUTATION);
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/src/github/graphql/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod create_commit;
2 | pub mod create_pull_request;
3 | pub mod create_ref;
4 | pub mod get_all_values;
5 | pub mod get_branches;
6 | pub mod get_current_user_login;
7 | pub mod get_directory_content;
8 | pub mod get_directory_content_with_text;
9 | pub mod get_existing_pull_request;
10 | pub mod get_file_content;
11 | pub mod get_repository_info;
12 | pub mod github_schema;
13 | pub mod merge_upstream;
14 | pub mod types;
15 | pub mod update_refs;
16 |
--------------------------------------------------------------------------------
/src/github/graphql/types.rs:
--------------------------------------------------------------------------------
1 | use derive_more::Deref;
2 | use derive_new::new;
3 |
4 | use crate::github::graphql::github_schema::github_schema as schema;
5 |
6 | ///
7 | #[derive(cynic::Scalar, new)]
8 | pub struct Base64String(#[new(into)] String);
9 |
10 | ///
11 | #[derive(cynic::Scalar, PartialEq, Eq, Clone, new)]
12 | #[cynic(graphql_type = "GitObjectID")]
13 | pub struct GitObjectId(#[new(into)] String);
14 |
15 | ///
16 | #[derive(cynic::Scalar, new)]
17 | #[cynic(graphql_type = "GitRefname")]
18 | pub struct GitRefName(#[new(into)] String);
19 |
20 | ///
21 | #[derive(cynic::Scalar, Deref)]
22 | #[cynic(graphql_type = "HTML")]
23 | pub struct Html(String);
24 |
--------------------------------------------------------------------------------
/src/github/graphql/update_refs.rs:
--------------------------------------------------------------------------------
1 | use crate::github::graphql::{
2 | github_schema::github_schema as schema,
3 | types::{GitObjectId, GitRefName},
4 | };
5 |
6 | ///
7 | #[derive(cynic::QueryVariables)]
8 | pub struct UpdateRefsVariables<'id> {
9 | pub ref_updates: Vec<RefUpdate>,
10 | pub repository_id: &'id cynic::Id,
11 | }
12 |
13 | #[derive(cynic::QueryFragment)]
14 | #[cynic(graphql_type = "Mutation", variables = "UpdateRefsVariables")]
15 | pub struct UpdateRefs {
16 | #[expect(dead_code)]
17 | #[arguments(input: { refUpdates: $ref_updates, repositoryId: $repository_id })]
18 | pub update_refs: Option<UpdateRefsPayload>,
19 | }
20 |
21 | #[derive(cynic::QueryFragment)]
22 | pub struct UpdateRefsPayload {
23 | #[expect(dead_code)]
24 | pub client_mutation_id: Option<String>,
25 | }
26 |
27 | ///
28 | #[derive(cynic::InputObject)]
29 | pub struct RefUpdate {
30 | pub after_oid: GitObjectId,
31 | #[cynic(skip_serializing_if = "Option::is_none")]
32 | pub before_oid: Option<GitObjectId>,
33 | #[cynic(skip_serializing_if = "Option::is_none")]
34 | pub force: Option<bool>,
35 | pub name: GitRefName,
36 | }
37 |
38 | #[cfg(test)]
39 | mod tests {
40 | use cynic::{Id, MutationBuilder};
41 | use indoc::indoc;
42 |
43 | use crate::github::graphql::update_refs::{UpdateRefs, UpdateRefsVariables};
44 |
45 | #[test]
46 | fn update_refs_output() {
47 | const UPDATE_REFS_MUTATION: &str = indoc! {"
48 | mutation UpdateRefs($refUpdates: [RefUpdate!]!, $repositoryId: ID!) {
49 | updateRefs(input: {refUpdates: $refUpdates, repositoryId: $repositoryId}) {
50 | clientMutationId
51 | }
52 | }
53 | "};
54 |
55 | let id = Id::new("");
56 | let operation = UpdateRefs::build(UpdateRefsVariables {
57 | repository_id: &id,
58 | ref_updates: vec![],
59 | });
60 |
61 | assert_eq!(operation.query, UPDATE_REFS_MUTATION);
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/src/github/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod github_client;
2 | pub mod graphql;
3 | mod rest;
4 | pub mod utils;
5 |
--------------------------------------------------------------------------------
/src/github/rest/get_tree.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 |
3 | #[derive(Serialize, Deserialize)]
4 | pub struct GitTree {
5 | pub sha: String,
6 | pub url: String,
7 | pub truncated: bool,
8 | pub tree: Vec<TreeObject>,
9 | }
10 |
11 | #[derive(Serialize, Deserialize)]
12 | pub struct TreeObject {
13 | pub path: String,
14 | pub mode: String,
15 | pub r#type: String,
16 | pub sha: String,
17 | pub size: Option<u64>,
18 | pub url: String,
19 | }
20 |
--------------------------------------------------------------------------------
/src/github/rest/mod.rs:
--------------------------------------------------------------------------------
1 | use reqwest::header::HeaderValue;
2 |
3 | pub mod get_tree;
4 |
5 | pub const GITHUB_JSON_MIME: HeaderValue = HeaderValue::from_static("application/vnd.github+json");
6 |
--------------------------------------------------------------------------------
/src/github/utils/package_path.rs:
--------------------------------------------------------------------------------
1 | use std::fmt::{Display, Formatter, Write};
2 |
3 | use winget_types::{ManifestTypeWithLocale, PackageIdentifier, PackageVersion};
4 |
5 | use super::{INSTALLER_PART, LOCALE_PART, YAML_EXTENSION};
6 |
7 | #[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
8 | #[repr(transparent)]
9 | pub struct PackagePath(String);
10 |
11 | impl PackagePath {
12 | pub fn new(
13 | identifier: &PackageIdentifier,
14 | version: Option<&PackageVersion>,
15 | manifest_type: Option<&ManifestTypeWithLocale>,
16 | ) -> Self {
17 | let first_character = identifier.as_str().chars().next().map_or_else(
18 | || unreachable!("Package identifiers cannot be empty"),
19 | |mut first| {
20 | first.make_ascii_lowercase();
21 | first
22 | },
23 | );
24 |
25 | // manifests/p
26 | let mut result = format!("manifests/{first_character}");
27 |
28 | // manifests/p/Package/Identifier
29 | for part in identifier.as_str().split('.') {
30 | let _ = write!(result, "/{part}");
31 | }
32 |
33 | // manifests/p/Package/Identifier/1.2.3
34 | if let Some(version) = version {
35 | let _ = write!(result, "/{version}");
36 |
37 | // The full manifest file path should only be included if a version was passed in too
38 | if let Some(manifest_type) = manifest_type {
39 | let _ = write!(result, "/{identifier}");
40 | if matches!(manifest_type, ManifestTypeWithLocale::Installer) {
41 | // manifests/p/Package/Identifier/1.2.3/Package.Identifier.installer.yaml
42 | result.push_str(INSTALLER_PART);
43 | } else if let ManifestTypeWithLocale::Locale(tag) = manifest_type {
44 | let _ = write!(result, "{LOCALE_PART}{tag}");
45 | }
46 | result.push_str(YAML_EXTENSION);
47 | }
48 | }
49 |
50 | Self(result)
51 | }
52 |
53 | pub fn as_str(&self) -> &str {
54 | self.0.as_str()
55 | }
56 | }
57 |
58 | impl Display for PackagePath {
59 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
60 | self.0.fmt(f)
61 | }
62 | }
63 |
64 | #[cfg(test)]
65 | mod tests {
66 | use rstest::rstest;
67 | use winget_types::{ManifestTypeWithLocale, PackageIdentifier, icu_locid::langid};
68 |
69 | use super::PackagePath;
70 |
71 | #[rstest]
72 | #[case("Package.Identifier", None, None, "manifests/p/Package/Identifier")]
73 | #[case(
74 | "Package.Identifier",
75 | Some("1.2.3"),
76 | None,
77 | "manifests/p/Package/Identifier/1.2.3"
78 | )]
79 | #[case(
80 | "Package.Identifier",
81 | Some("1.2.3"),
82 | Some(ManifestTypeWithLocale::Installer),
83 | "manifests/p/Package/Identifier/1.2.3/Package.Identifier.installer.yaml"
84 | )]
85 | #[case(
86 | "Package.Identifier",
87 | Some("1.2.3"),
88 | Some(ManifestTypeWithLocale::Locale(langid!("en-US"))),
89 | "manifests/p/Package/Identifier/1.2.3/Package.Identifier.locale.en-US.yaml"
90 | )]
91 | #[case(
92 | "Package.Identifier",
93 | Some("1.2.3"),
94 | Some(ManifestTypeWithLocale::Locale(langid!("zh-CN"))),
95 | "manifests/p/Package/Identifier/1.2.3/Package.Identifier.locale.zh-CN.yaml"
96 | )]
97 | #[case(
98 | "Package.Identifier",
99 | Some("1.2.3"),
100 | Some(ManifestTypeWithLocale::Version),
101 | "manifests/p/Package/Identifier/1.2.3/Package.Identifier.yaml"
102 | )]
103 | fn package_paths(
104 | #[case] identifier: &str,
105 | #[case] version: Option<&str>,
106 | #[case] manifest_type: Option<ManifestTypeWithLocale>,
107 | #[case] expected: &str,
108 | ) {
109 | let identifier = identifier.parse::<PackageIdentifier>().unwrap();
110 | let version = version.and_then(|version| version.parse().ok());
111 | assert_eq!(
112 | PackagePath::new(&identifier, version.as_ref(), manifest_type.as_ref()).as_str(),
113 | expected
114 | )
115 | }
116 | }
117 |
--------------------------------------------------------------------------------
/src/github/utils/pull_request.rs:
--------------------------------------------------------------------------------
1 | use bon::builder;
2 | use color_eyre::Result;
3 | use winget_types::PackageIdentifier;
4 |
5 | use crate::{
6 | github::utils::PackagePath,
7 | manifests::{Manifests, build_manifest_string},
8 | };
9 |
10 | #[builder(finish_fn = create)]
11 | pub fn pr_changes(
12 | package_identifier: &PackageIdentifier,
13 | manifests: &Manifests,
14 | package_path: &PackagePath,
15 | created_with: Option<&str>,
16 | ) -> Result<Vec<(String, String)>> {
17 | let mut path_content_map = vec![
18 | (
19 | format!("{package_path}/{package_identifier}.installer.yaml"),
20 | build_manifest_string(&manifests.installer, created_with)?,
21 | ),
22 | (
23 | format!(
24 | "{}/{}.locale.{}.yaml",
25 | package_path, package_identifier, manifests.version.default_locale
26 | ),
27 | build_manifest_string(&manifests.default_locale, created_with)?,
28 | ),
29 | ];
30 | for locale_manifest in &manifests.locales {
31 | path_content_map.push((
32 | format!(
33 | "{package_path}/{package_identifier}.locale.{}.yaml",
34 | locale_manifest.package_locale
35 | ),
36 | build_manifest_string(locale_manifest, created_with)?,
37 | ));
38 | }
39 | path_content_map.push((
40 | format!("{package_path}/{package_identifier}.yaml"),
41 | build_manifest_string(&manifests.version, created_with)?,
42 | ));
43 | Ok(path_content_map)
44 | }
45 |
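Because of #[builder(finish_fn = create)], callers go through a generated builder instead of positional arguments. The sketch below assumes bon's default setter naming (one setter per parameter, with the optional created_with taking the inner &str); the surrounding function is purely illustrative.

// Hypothetical call site in this module; types come from the imports above.
fn example_changes(
    identifier: &PackageIdentifier,
    manifests: &Manifests,
    path: &PackagePath,
) -> Result<Vec<(String, String)>> {
    pr_changes()
        .package_identifier(identifier)
        .manifests(manifests)
        .package_path(path)
        .created_with("Komac") // optional; bon also generates maybe_created_with(Option<_>)
        .create()
}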
--------------------------------------------------------------------------------
/src/installers/burn/wix_burn_stub.rs:
--------------------------------------------------------------------------------
1 | use std::ops::Range;
2 |
3 | use zerocopy::{Immutable, KnownLayout, TryFromBytes, little_endian::U32};
4 |
5 | #[expect(dead_code)]
6 | #[derive(Debug, TryFromBytes, KnownLayout, Immutable)]
7 | #[repr(u32)]
8 | enum WixBurnStubMagic {
9 | F14300 = 0x00F1_4300_u32.to_le(),
10 | }
11 |
12 | ///
13 | #[derive(Debug, TryFromBytes, KnownLayout, Immutable)]
14 | #[repr(C)]
15 | pub struct WixBurnStub {
16 | magic: WixBurnStubMagic,
17 | version: U32,
18 | guid: uuid::Bytes,
19 | stub_size: U32,
20 | original_checksum: U32,
21 | original_signature_offset: U32,
22 | original_signature_size: U32,
23 | container_format: U32,
24 | container_count: U32,
25 | bootstrapper_application_container_size: U32,
26 | // (512 (minimum section size) - 52 (size of above data)) / 4 (size of DWORD)
27 | attached_container_sizes: [U32; 115],
28 | }
29 |
30 | impl WixBurnStub {
31 | pub const fn ux_container_slice_range(&self) -> Range<usize> {
32 | let stub_size = self.stub_size.get() as usize;
33 | stub_size..stub_size + self.bootstrapper_application_container_size.get() as usize
34 | }
35 | }
36 |
37 | #[cfg(test)]
38 | mod tests {
39 | use crate::installers::burn::wix_burn_stub::WixBurnStub;
40 |
41 | #[test]
42 | fn wix_burn_stub_size() {
43 | const MINIMUM_PE_SECTION_SIZE: usize = 512;
44 |
45 | assert_eq!(size_of::<WixBurnStub>(), MINIMUM_PE_SECTION_SIZE)
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/src/installers/inno/compression.rs:
--------------------------------------------------------------------------------
1 | use std::ops::{Deref, DerefMut};
2 |
3 | #[derive(Debug)]
4 | pub enum Compression {
5 | Stored(u32),
6 | Zlib(u32),
7 | LZMA1(u32),
8 | }
9 |
10 | impl Deref for Compression {
11 | type Target = u32;
12 |
13 | fn deref(&self) -> &Self::Target {
14 | match self {
15 | Self::Stored(size) | Self::Zlib(size) | Self::LZMA1(size) => size,
16 | }
17 | }
18 | }
19 |
20 | impl DerefMut for Compression {
21 | fn deref_mut(&mut self) -> &mut Self::Target {
22 | match self {
23 | Self::Stored(size) | Self::Zlib(size) | Self::LZMA1(size) => size,
24 | }
25 | }
26 | }
27 |
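The Deref/DerefMut impls let callers read or adjust the stored size without matching on the variant; a small illustrative helper (not in the source):

// Whatever the compression kind, dereferencing yields the wrapped u32 size.
fn chunk_size(compression: &Compression) -> u32 {
    **compression
}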
--------------------------------------------------------------------------------
/src/installers/inno/encoding.rs:
--------------------------------------------------------------------------------
1 | use std::io::{Read, Result};
2 |
3 | use byteorder::{LE, ReadBytesExt};
4 | use encoding_rs::Encoding;
5 |
6 | #[derive(Debug, Default)]
7 | pub struct InnoValue(Vec<u8>);
8 |
9 | impl InnoValue {
10 | pub fn new_raw<R: Read>(reader: &mut R) -> Result<Option<Vec<u8>>> {