├── public
└── .gitkeep
├── src-tauri
├── libs
│ ├── hdiff-sys
│ │ ├── .gitignore
│ │ ├── .gitattributes
│ │ ├── Cargo.toml
│ │ ├── HDiff
│ │ │ └── private_diff
│ │ │ │ ├── libdivsufsort
│ │ │ │ ├── divsufsort.cpp
│ │ │ │ ├── divsufsort64.cpp
│ │ │ │ ├── config.h
│ │ │ │ ├── divsufsort.h
│ │ │ │ ├── divsufsort64.h
│ │ │ │ └── divsufsort_private.h
│ │ │ │ ├── mem_buf.h
│ │ │ │ ├── compress_detect.h
│ │ │ │ ├── bytes_rle.h
│ │ │ │ ├── limit_mem_diff
│ │ │ │ ├── digest_matcher.h
│ │ │ │ ├── covers.h
│ │ │ │ └── bloom_filter.h
│ │ │ │ ├── suffix_string.h
│ │ │ │ ├── pack_uint.h
│ │ │ │ └── qsort_parallel.h
│ │ ├── Cargo.lock
│ │ ├── build.rs
│ │ ├── wrapper.h
│ │ ├── src
│ │ │ └── lib.rs
│ │ └── libParallel
│ │ │ ├── parallel_import.h
│ │ │ ├── parallel_channel.h
│ │ │ └── parallel_channel.cpp
│ └── hpatch-sys
│ │ ├── .gitignore
│ │ ├── wrapper.h
│ │ ├── build.rs
│ │ ├── .gitattributes
│ │ ├── Cargo.toml
│ │ └── HPatch
│ │ └── checksum_plugin.h
├── src
│ ├── module
│ │ └── mod.rs
│ ├── thirdparty
│ │ └── mod.rs
│ ├── ipc
│ │ ├── mod.rs
│ │ └── operation.rs
│ ├── builder
│ │ ├── utils
│ │ │ └── mod.rs
│ │ ├── main.rs
│ │ ├── append.rs
│ │ ├── cli
│ │ │ └── mod.rs
│ │ └── metadata.rs
│ ├── utils
│ │ ├── gui.rs
│ │ ├── mod.rs
│ │ ├── acl.rs
│ │ ├── progressed_read.rs
│ │ ├── hash.rs
│ │ ├── metadata.rs
│ │ ├── dir.rs
│ │ ├── uac.rs
│ │ ├── wincred.rs
│ │ ├── error.rs
│ │ ├── icon.rs
│ │ └── url.rs
│ ├── cli
│ │ ├── arg.rs
│ │ └── mod.rs
│ └── installer
│ │ ├── lnk.rs
│ │ └── registry.rs
├── rust-toolchain.toml
├── build.rs
├── icons
│ └── icon.ico
├── .gitignore
├── capabilities
│ └── default.json
├── .cargo
│ └── config.toml
├── tauri.conf.json
└── Cargo.toml
├── .prettierrc
├── .prettierignore
├── pnpm-workspace.yaml
├── src
├── left.webp
├── IconMinimize.vue
├── consts.ts
├── plugins
│ ├── registry.ts
│ ├── types.ts
│ └── index.ts
├── IconSheild.vue
├── networkInsights.ts
├── IconEdit.vue
├── IconClose.vue
├── CircleSuccess.vue
├── env.d.ts
├── Dialog.vue
├── tauri.ts
├── index.ts
├── Feedback.vue
├── components
│ └── SafeIcon.vue
├── Checkbox.vue
├── Cloud.vue
├── CloudPaid.vue
├── FInput.vue
├── utils
│ ├── friendlyError.ts
│ └── svgSanitizer.ts
├── mirrorc-errors.ts
├── api
│ └── installFile.ts
└── types.ts
├── .vscode
└── extensions.json
├── eslint.config.mjs
├── .gitignore
├── tsconfig.json
├── tests
├── server.mjs
├── offline-install.mjs
├── online-install.mjs
├── offline-update.mjs
├── utils.mjs
└── online-update.mjs
├── rsbuild.config.ts
├── package.json
├── README.md
└── .github
└── workflows
└── build.yml

/public/.gitkeep:
--------------------------------------------------------------------------------
1 | 
--------------------------------------------------------------------------------
/src-tauri/libs/hdiff-sys/.gitignore:
--------------------------------------------------------------------------------
1 | target/
--------------------------------------------------------------------------------
/src-tauri/libs/hpatch-sys/.gitignore:
--------------------------------------------------------------------------------
1 | target/
--------------------------------------------------------------------------------
/src-tauri/src/module/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod wv2;
2 | 
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 |   "singleQuote": true
3 | }
4 | 
--------------------------------------------------------------------------------
/src-tauri/src/thirdparty/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod mirrorc;
2 | 
--------------------------------------------------------------------------------
/src-tauri/rust-toolchain.toml:
--------------------------------------------------------------------------------
1 | [toolchain]
2 | channel = "nightly"
--------------------------------------------------------------------------------
/src-tauri/build.rs:
--------------------------------------------------------------------------------
1 | fn main() {
2 |     tauri_build::build()
3 | }
4 | 
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | # Lock files
2 | package-lock.json
3 | pnpm-lock.yaml
4 | yarn.lock
5 | 
--------------------------------------------------------------------------------
/pnpm-workspace.yaml:
--------------------------------------------------------------------------------
1 | onlyBuiltDependencies:
2 |   - '@sentry/cli'
3 |   - core-js
4 | 
--------------------------------------------------------------------------------
/src/left.webp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/YuehaiTeam/kachina-installer/HEAD/src/left.webp
--------------------------------------------------------------------------------
/src-tauri/src/ipc/mod.rs:
--------------------------------------------------------------------------------
1 | pub mod install_file;
2 | pub mod manager;
3 | pub mod operation;
4 | 
--------------------------------------------------------------------------------
/src-tauri/libs/hpatch-sys/wrapper.h:
--------------------------------------------------------------------------------
1 | #include "HPatch/patch.h"
2 | #include "HPatch/patch_types.h"
3 | 
--------------------------------------------------------------------------------
/src-tauri/icons/icon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/YuehaiTeam/kachina-installer/HEAD/src-tauri/icons/icon.ico
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 |   "recommendations": ["tauri-apps.tauri-vscode", "rust-lang.rust-analyzer"]
3 | }
4 | 
--------------------------------------------------------------------------------
/src-tauri/libs/hpatch-sys/build.rs:
--------------------------------------------------------------------------------
1 | fn main() {
2 |     cc::Build::new().file("HPatch/patch.c").compile("hpatch");
3 | }
4 | 
--------------------------------------------------------------------------------
/src-tauri/libs/hpatch-sys/.gitattributes:
--------------------------------------------------------------------------------
1 | HPatch/** linguist-vendored
2 | binding.rs linguist-vendored
3 | wrapper.h linguist-vendored
--------------------------------------------------------------------------------
/src-tauri/libs/hdiff-sys/.gitattributes:
--------------------------------------------------------------------------------
1 | HDiff/** linguist-vendored
2 | libParallel/** linguist-vendored
3 | binding.rs linguist-vendored
4 | wrapper.h linguist-vendored
5 | 
-------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "hdiff-sys" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | [build-dependencies] 8 | cc = "1.0" -------------------------------------------------------------------------------- /src-tauri/libs/hpatch-sys/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "hpatch-sys" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [dependencies] 7 | [build-dependencies] 8 | cc = "1.0" 9 | -------------------------------------------------------------------------------- /src/IconMinimize.vue: -------------------------------------------------------------------------------- 1 | 6 | -------------------------------------------------------------------------------- /src/consts.ts: -------------------------------------------------------------------------------- 1 | export const getRuntimeName = (tag: string): string => { 2 | if (tag.startsWith('Microsoft.DotNet')) { 3 | return 'Microsoft .NET Runtime'; 4 | } 5 | return tag; 6 | }; 7 | -------------------------------------------------------------------------------- /src/plugins/registry.ts: -------------------------------------------------------------------------------- 1 | import { pluginManager } from './index'; 2 | import { GitHubPlugin } from './github'; 3 | 4 | export function registerAllPlugins() { 5 | pluginManager.register(new GitHubPlugin()); 6 | } -------------------------------------------------------------------------------- /src-tauri/.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | /target/ 4 | 5 | # Generated by Tauri 6 | # will have schema files for capabilities auto-completion 7 | /gen/schemas 8 | -------------------------------------------------------------------------------- /src-tauri/src/builder/utils/mod.rs: -------------------------------------------------------------------------------- 1 | #[path = "../../utils/hash.rs"] 2 | pub mod hash; 3 | #[path = "../../utils/progressed_read.rs"] 4 | pub mod progressed_read; 5 | 6 | #[path = "../../utils/metadata.rs"] 7 | pub mod metadata; 8 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | import js from '@eslint/js'; 2 | import globals from 'globals'; 3 | import ts from 'typescript-eslint'; 4 | 5 | export default [ 6 | { languageOptions: { globals: globals.browser } }, 7 | js.configs.recommended, 8 | ...ts.configs.recommended, 9 | { ignores: ['dist/'] }, 10 | ]; 11 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/HDiff/private_diff/libdivsufsort/divsufsort.cpp: -------------------------------------------------------------------------------- 1 | #define HAVE_CONFIG_H 1 2 | # include "divsufsort.h" 3 | typedef saidx32_t saidx_t; 4 | typedef saidx_t sastore_t; 5 | 6 | #include "divsufsort_private.h" 7 | #include "divsufsort.c.inc.h" 8 | #include "sssort.c.inc.h" 9 | #include "trsort.c.inc.h" 10 | 11 | -------------------------------------------------------------------------------- /src/IconSheild.vue: -------------------------------------------------------------------------------- 1 
| 9 | -------------------------------------------------------------------------------- /src/networkInsights.ts: -------------------------------------------------------------------------------- 1 | import { InsightItem } from './types'; 2 | 3 | // 全局统计数据数组 4 | export const networkInsights: InsightItem[] = []; 5 | 6 | // 添加统计数据 7 | export function addNetworkInsight(insight: InsightItem) { 8 | networkInsights.push(insight); 9 | } 10 | 11 | // 清空统计数据 12 | export function clearNetworkInsights() { 13 | networkInsights.length = 0; 14 | } 15 | -------------------------------------------------------------------------------- /src/IconEdit.vue: -------------------------------------------------------------------------------- 1 | 10 | -------------------------------------------------------------------------------- /src-tauri/src/utils/gui.rs: -------------------------------------------------------------------------------- 1 | const SUBKEY: &str = "Software\\Microsoft\\Windows\\CurrentVersion\\Themes\\Personalize"; 2 | const VALUE: &str = "AppsUseLightTheme"; 3 | 4 | pub fn is_dark_mode() -> windows_registry::Result { 5 | let hkcu = windows_registry::CURRENT_USER; 6 | let subkey = hkcu.options().read().open(SUBKEY)?; 7 | let dword: u32 = subkey.get_u32(VALUE)?; 8 | Ok(dword == 0) 9 | } 10 | -------------------------------------------------------------------------------- /src/IconClose.vue: -------------------------------------------------------------------------------- 1 | 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | dist 12 | dist-ssr 13 | *.local 14 | 15 | # Editor directories and files 16 | .vscode/* 17 | !.vscode/extensions.json 18 | .idea 19 | .DS_Store 20 | *.suo 21 | *.ntvs* 22 | *.njsproj 23 | *.sln 24 | *.sw? 
25 | 26 | .env 27 | 28 | tests/fixtures 29 | 30 | rustc-ice* -------------------------------------------------------------------------------- /src/CircleSuccess.vue: -------------------------------------------------------------------------------- 1 | 13 | -------------------------------------------------------------------------------- /src/env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | 3 | declare module '*.vue' { 4 | import type { DefineComponent } from 'vue'; 5 | 6 | // biome-ignore lint/complexity/noBannedTypes: reason 7 | const component: DefineComponent<{}, {}, any>; 8 | export default component; 9 | } 10 | 11 | // process.env.NODE_ENV is defined by the environment 12 | declare const process: { 13 | env: { 14 | NODE_ENV: 'development' | 'production'; 15 | }; 16 | }; 17 | -------------------------------------------------------------------------------- /src-tauri/capabilities/default.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "../gen/schemas/desktop-schema.json", 3 | "identifier": "default", 4 | "description": "Capability for the main window", 5 | "windows": [ 6 | "main" 7 | ], 8 | "permissions": [ 9 | "core:window:allow-set-title", 10 | "core:window:allow-show", 11 | "core:window:allow-close", 12 | "core:window:allow-minimize", 13 | "core:event:default", 14 | "core:window:allow-set-decorations" 15 | ] 16 | } -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/HDiff/private_diff/libdivsufsort/divsufsort64.cpp: -------------------------------------------------------------------------------- 1 | #define BUILD_DIVSUFSORT64 2 | #define HAVE_CONFIG_H 1 3 | # include "divsufsort64.h" 4 | typedef saidx64_t saidx_t; 5 | typedef saidx_t sastore_t; 6 | # define divsufsort divsufsort64 7 | # define divsufsort_version divsufsort64_version 8 | # define sssort sssort64 9 | # define trsort trsort64 10 | 11 | #include "divsufsort_private.h" 12 | #include "divsufsort.c.inc.h" 13 | #include "sssort.c.inc.h" 14 | #include "trsort.c.inc.h" 15 | -------------------------------------------------------------------------------- /src-tauri/.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [target.'cfg(all(windows, target_env = "msvc"))'] 2 | rustflags = [ 3 | "-C", 4 | "target-feature=+crt-static", 5 | "-C", 6 | "link-args=/NODEFAULTLIB:ucrt.lib /NODEFAULTLIB:libucrtd.lib /NODEFAULTLIB:ucrtd.lib /NODEFAULTLIB:libcmtd.lib /NODEFAULTLIB:msvcrt.lib /NODEFAULTLIB:msvcrtd.lib /NODEFAULTLIB:libvcruntimed.lib /NODEFAULTLIB:vcruntime.lib /NODEFAULTLIB:vcruntimed.lib /DEFAULTLIB:libucrt.lib /DEFAULTLIB:libvcruntime.lib /DEFAULTLIB:libcmt.lib /DEFAULTLIB:msvcrt.lib /DEFAULTLIB:ucrt.lib /DEFAULTLIB:oldnames.lib /DEFAULTLIB:legacy_stdio_definitions.lib", 7 | ] 8 | -------------------------------------------------------------------------------- /src/plugins/types.ts: -------------------------------------------------------------------------------- 1 | import type { Dfs2Data, InvokeGetDfsMetadataRes, Embedded } from '../types'; 2 | 3 | export interface KachinaInstallSource { 4 | name: string; 5 | matchUrl: (url: string) => boolean; 6 | 7 | // 可选:自定义元数据获取,返回完整的DFS2数据结构 8 | getMetadata?: (url: string) => Promise; 9 | 10 | // 可选:会话管理(插件自己管理sessionId) 11 | createSession?: (url: string, diffchunks: string[]) => Promise; 12 | endSession?: (url: string, insights: any) => Promise; 13 | 14 | // 必需:获取文件块URL 
15 | getChunkUrl: (url: string, range: string) => Promise<{url: string, range: string}>; 16 | } -------------------------------------------------------------------------------- /src/Dialog.vue: -------------------------------------------------------------------------------- 1 | 17 | 18 | 19 | 20 | 29 | -------------------------------------------------------------------------------- /src/tauri.ts: -------------------------------------------------------------------------------- 1 | import type { invoke as invokeType } from '@tauri-apps/api/core'; 2 | import type { listen as listenType } from '@tauri-apps/api/event'; 3 | import type { sep as sepType } from '@tauri-apps/api/path'; 4 | import type { getCurrentWindow as getCurrentWindowType } from '@tauri-apps/api/window'; 5 | const TAURI = (window as any).__TAURI__; 6 | export const invoke = TAURI.core.invoke as typeof invokeType; 7 | export const listen = TAURI.event.listen as typeof listenType; 8 | export const sep = TAURI.path.sep as typeof sepType; 9 | export const getCurrentWindow = TAURI.window 10 | .getCurrentWindow as typeof getCurrentWindowType; 11 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "lib": ["DOM", "ES2020"], 4 | "jsx": "preserve", 5 | "target": "ES2020", 6 | "noEmit": true, 7 | "skipLibCheck": true, 8 | "jsxImportSource": "vue", 9 | "useDefineForClassFields": true, 10 | 11 | /* modules */ 12 | "module": "ESNext", 13 | "isolatedModules": true, 14 | "resolveJsonModule": true, 15 | "moduleResolution": "Bundler", 16 | "allowImportingTsExtensions": true, 17 | 18 | /* type checking */ 19 | "strict": true, 20 | "noUnusedLocals": true, 21 | "noUnusedParameters": true 22 | }, 23 | "include": ["src"] 24 | } 25 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import { createApp } from 'vue'; 2 | import App from './App.vue'; 3 | import './index.css'; 4 | 5 | createApp(App).mount('#root'); 6 | 7 | if (process.env.NODE_ENV !== 'development') { 8 | window.addEventListener('contextmenu', (e) => { 9 | e.preventDefault(); 10 | }); 11 | document.addEventListener('keydown', function (event) { 12 | // Prevent F5 or Ctrl+R (Windows/Linux) and Command+R (Mac) from refreshing the page 13 | if ( 14 | event.key === 'F5' || 15 | (event.ctrlKey && event.key === 'r') || 16 | (event.metaKey && event.key === 'r') 17 | ) { 18 | event.preventDefault(); 19 | } 20 | }); 21 | } 22 | -------------------------------------------------------------------------------- /src/Feedback.vue: -------------------------------------------------------------------------------- 1 | 9 | -------------------------------------------------------------------------------- /src-tauri/src/utils/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod acl; 2 | pub mod dir; 3 | pub mod error; 4 | pub mod gui; 5 | pub mod hash; 6 | pub mod icon; 7 | pub mod metadata; 8 | pub mod progressed_read; 9 | pub mod sentry; 10 | pub mod uac; 11 | pub mod url; 12 | pub mod wincred; 13 | 14 | pub fn get_device_id() -> anyhow::Result { 15 | let username = whoami::username(); 16 | let key = windows_registry::LOCAL_MACHINE 17 | .options() 18 | .read() 19 | .open(r#"SOFTWARE\Microsoft\Cryptography"#)?; 20 | 21 | let guid: String = key.get_string("MachineGuid")?; 
22 | let raw_device_id = format!("{username}{guid}"); 23 | Ok(chksum_md5::hash(raw_device_id).to_hex_uppercase()) 24 | } 25 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 3 | version = 4 4 | 5 | [[package]] 6 | name = "cc" 7 | version = "1.2.9" 8 | source = "registry+https://github.com/rust-lang/crates.io-index" 9 | checksum = "c8293772165d9345bdaaa39b45b2109591e63fe5e6fbc23c6ff930a048aa310b" 10 | dependencies = [ 11 | "shlex", 12 | ] 13 | 14 | [[package]] 15 | name = "hdiff-sys" 16 | version = "0.1.0" 17 | dependencies = [ 18 | "cc", 19 | ] 20 | 21 | [[package]] 22 | name = "shlex" 23 | version = "1.3.0" 24 | source = "registry+https://github.com/rust-lang/crates.io-index" 25 | checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" 26 | -------------------------------------------------------------------------------- /src/components/SafeIcon.vue: -------------------------------------------------------------------------------- 1 | 9 | 10 | 25 | 26 | -------------------------------------------------------------------------------- /src/Checkbox.vue: -------------------------------------------------------------------------------- 1 | 25 | 26 | 29 | -------------------------------------------------------------------------------- /src/Cloud.vue: -------------------------------------------------------------------------------- 1 | 9 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/build.rs: -------------------------------------------------------------------------------- 1 | fn main() { 2 | cc::Build::new() 3 | .cpp(true) 4 | .cargo_output(true) 5 | .file("HDiff/diff.cpp") 6 | .file("HDiff/match_block.cpp") 7 | .file("HDiff/private_diff/bytes_rle.cpp") 8 | .file("HDiff/private_diff/compress_detect.cpp") 9 | .file("HDiff/private_diff/suffix_string.cpp") 10 | .file("HDiff/private_diff/limit_mem_diff/adler_roll.c") 11 | .file("HDiff/private_diff/limit_mem_diff/digest_matcher.cpp") 12 | .file("HDiff/private_diff/limit_mem_diff/stream_serialize.cpp") 13 | .file("HDiff/private_diff/libdivsufsort/divsufsort.cpp") 14 | .file("HDiff/private_diff/libdivsufsort/divsufsort64.cpp") 15 | .file("libParallel/parallel_channel.cpp") 16 | .file("libParallel/parallel_import.cpp") 17 | .file("../hpatch-sys/HPatch/patch.c") 18 | .compile("hdiff"); 19 | } 20 | -------------------------------------------------------------------------------- /src/CloudPaid.vue: -------------------------------------------------------------------------------- 1 | 9 | -------------------------------------------------------------------------------- /src-tauri/src/utils/acl.rs: -------------------------------------------------------------------------------- 1 | use windows::{ 2 | core::w, 3 | Win32::Security::{ 4 | Authorization::{ConvertStringSecurityDescriptorToSecurityDescriptorW, SDDL_REVISION}, 5 | PSECURITY_DESCRIPTOR, SECURITY_ATTRIBUTES, 6 | }, 7 | }; 8 | 9 | pub fn create_security_attributes() -> SECURITY_ATTRIBUTES { 10 | let mut security_descriptor = PSECURITY_DESCRIPTOR::default(); 11 | unsafe { 12 | ConvertStringSecurityDescriptorToSecurityDescriptorW( 13 | w!("D:(A;;GA;;;AC)(A;;GA;;;RC)(A;;GA;;;SY)(A;;GA;;;BA)(A;;GA;;;BU)S:(ML;;NW;;;LW)"), 14 | SDDL_REVISION, 15 | &mut security_descriptor, 16 | None, 
17 |         )
18 |         .unwrap();
19 | 
20 |         SECURITY_ATTRIBUTES {
21 |             nLength: size_of::<SECURITY_ATTRIBUTES>() as u32,
22 |             lpSecurityDescriptor: security_descriptor.0,
23 |             bInheritHandle: false.into(),
24 |         }
25 |     }
26 | }
27 | 
--------------------------------------------------------------------------------
/src-tauri/src/utils/progressed_read.rs:
--------------------------------------------------------------------------------
1 | use pin_project::pin_project;
2 | use std::{
3 |     pin::Pin,
4 |     task::{Context, Poll},
5 | };
6 | use tokio::io::{AsyncRead, ReadBuf};
7 | 
8 | #[pin_project]
9 | pub struct ReadWithCallback<R, F>
10 | where
11 |     R: AsyncRead,
12 |     F: FnMut(usize),
13 | {
14 |     #[pin]
15 |     pub reader: R,
16 |     pub callback: F,
17 | }
18 | 
19 | impl<R, F> AsyncRead for ReadWithCallback<R, F>
20 | where
21 |     R: AsyncRead,
22 |     F: FnMut(usize),
23 | {
24 |     fn poll_read(
25 |         self: Pin<&mut Self>,
26 |         cx: &mut Context<'_>,
27 |         buf: &mut ReadBuf<'_>,
28 |     ) -> Poll<std::io::Result<()>> {
29 |         let this = self.project();
30 |         let res = this.reader.poll_read(cx, buf);
31 |         if let Poll::Ready(Ok(())) = res {
32 |             if !buf.filled().is_empty() {
33 |                 (this.callback)(buf.filled().len());
34 |             }
35 |         }
36 |         res
37 |     }
38 | }
39 | 
--------------------------------------------------------------------------------
/src-tauri/tauri.conf.json:
--------------------------------------------------------------------------------
1 | {
2 |   "$schema": "https://schema.tauri.app/config/2",
3 |   "productName": "Kachina Installer",
4 |   "version": "0.1.0",
5 |   "identifier": "click.kachina",
6 |   "build": {
7 |     "beforeDevCommand": "rsbuild dev",
8 |     "devUrl": "http://localhost:1420",
9 |     "beforeBuildCommand": "rsbuild build",
10 |     "frontendDist": "../dist"
11 |   },
12 |   "app": {
13 |     "withGlobalTauri": true,
14 |     "windows": [],
15 |     "security": {
16 |       "csp": {
17 |         "default-src": "'self' customprotocol: asset:",
18 |         "connect-src": "*",
19 |         "img-src": "'self' asset: http://asset.localhost blob: data:",
20 |         "style-src": "'unsafe-inline' 'self'",
21 |         "script-src": "'self' 'unsafe-eval' 'unsafe-inline'"
22 |       }
23 |     }
24 |   },
25 |   "bundle": {
26 |     "active": false,
27 |     "targets": "all",
28 |     "copyright": "Built by Kachina Installer",
29 |     "icon": ["icons/icon.ico"]
30 |   }
31 | }
32 | 
--------------------------------------------------------------------------------
/src/FInput.vue:
--------------------------------------------------------------------------------
1 | 
20 | 
21 | 
48 | 
49 | 
50 | 
--------------------------------------------------------------------------------
/tests/server.mjs:
--------------------------------------------------------------------------------
1 | import express from 'express';
2 | import path from 'path';
3 | import { chalk } from 'zx';
4 | const PORT = process.env.PORT || 8080;
5 | const FIXTURES_DIR = './fixtures';
6 | 
7 | function createServer() {
8 |   const app = express();
9 | 
10 |   // 启用Range请求支持
11 |   app.use(express.static(path.resolve(FIXTURES_DIR), {
12 |     acceptRanges: true,
13 |     lastModified: true,
14 |     etag: true
15 |   }));
16 | 
17 |   // 日志中间件
18 |   app.use((req, res, next) => {
19 |     console.log(`${req.method} ${req.url}`);
20 |     next();
21 |   });
22 | 
23 |   return app;
24 | }
25 | 
26 | async function startServer() {
27 |   const app = createServer();
28 | 
29 |   return new Promise((resolve) => {
30 |     const server = app.listen(PORT, () => {
31 |       console.log(chalk.green(`Express server listening on port ${PORT}`));
32 |       console.log(chalk.gray(`Serving files from: ${path.resolve(FIXTURES_DIR)}`));
33 |       resolve(server);
34 |     });
35 | 
36 |     // 优雅关闭
37 |     process.on('SIGINT', () => {
38 | 
console.log('\\nShutting down server...'); 39 | server.close(() => process.exit(0)); 40 | }); 41 | }); 42 | } 43 | 44 | if (import.meta.url === `file://${process.argv[1]}`) { 45 | startServer().catch(console.error); 46 | } 47 | 48 | export { startServer, createServer }; -------------------------------------------------------------------------------- /src-tauri/src/cli/arg.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use clap::Subcommand; 4 | 5 | #[derive(Debug, Clone, clap::Args, serde::Serialize)] 6 | pub struct InstallArgs { 7 | #[clap(short = 'D', help = "Install directory")] 8 | pub target: Option, 9 | #[clap(short = 'I', help = "Non-interactive install")] 10 | pub non_interactive: bool, 11 | #[clap(short = 'S', help = "Silent install")] 12 | pub silent: bool, 13 | #[clap(short = 'O', help = "Force online install")] 14 | pub online: bool, 15 | #[clap(short = 'U', help = "Uninstall")] 16 | pub uninstall: bool, 17 | // override install source 18 | #[clap(long, hide = true)] 19 | pub source: Option, 20 | // dfs extra data 21 | #[clap(long, hide = true)] 22 | pub dfs_extras: Option, 23 | // override mirrorc cdk 24 | #[clap(long, hide = true)] 25 | pub mirrorc_cdk: Option, 26 | } 27 | 28 | #[derive(Debug, Clone, clap::Args)] 29 | pub struct UacArgs { 30 | pub pipe_id: String, 31 | } 32 | 33 | #[derive(Subcommand, Clone, Debug)] 34 | pub enum Command { 35 | #[clap(hide = true)] 36 | Install(InstallArgs), 37 | #[clap(hide = true)] 38 | InstallWebview2, 39 | #[clap(hide = true)] 40 | HeadlessUac(UacArgs), 41 | #[clap(external_subcommand)] 42 | Other(Vec), 43 | } 44 | -------------------------------------------------------------------------------- /src-tauri/src/builder/main.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use cli::Command; 3 | 4 | mod append; 5 | mod cli; 6 | mod extract; 7 | mod gen; 8 | mod local; 9 | mod metadata; 10 | mod pack; 11 | mod replace_bin; 12 | mod utils; 13 | 14 | pub fn main() { 15 | tokio::runtime::Builder::new_multi_thread() 16 | .enable_all() 17 | .build() 18 | .unwrap() 19 | .block_on(async_main()); 20 | } 21 | 22 | async fn async_main() { 23 | println!("Kachina Builder v{}", env!("CARGO_PKG_VERSION")); 24 | let now = std::time::Instant::now(); 25 | let cli = cli::Cli::parse(); 26 | let mut command = cli.command; 27 | if command.is_none() { 28 | panic!("No command provided"); 29 | } 30 | let command = command.take().unwrap(); 31 | match command { 32 | Command::Pack(args) => pack::pack_cli(args).await, 33 | Command::Gen(args) => gen::gen_cli(args).await, 34 | Command::Append(args) => append::append_cli(args).await, 35 | Command::Extract(args) => extract::extract_cli(args).await, 36 | Command::ReplaceBin(args) => { 37 | if let Err(e) = replace_bin::replace_bin_cli(args).await { 38 | eprintln!("Replace-bin failed: {}", e); 39 | } 40 | } 41 | } 42 | let duration = now.elapsed(); 43 | println!("Finished in {duration:?}"); 44 | } 45 | -------------------------------------------------------------------------------- /src/utils/friendlyError.ts: -------------------------------------------------------------------------------- 1 | export const friendlyError = ( 2 | error: string | { message: string } | unknown, 3 | ): string => { 4 | const errStr = 5 | typeof error === 'string' 6 | ? error 7 | : error && typeof error === 'object' && 'message' in error 8 | ? 
(error as { message: string }).message 9 | : JSON.stringify(error); 10 | // 空格,换行符,制表符,右括号,逗号都是url结束 11 | const firstUrlInstr = errStr.match(/https?:\/\/[^\s),]+/); 12 | // 替换url时保留url结束标志字符,避免把右括号等也替换掉 13 | const errStrWithoutUrl = errStr.replace(/https?:\/\/[^\s),]+/g, '[url]'); 14 | let friendlyStr = ''; 15 | const checkStr = errStrWithoutUrl.toLowerCase(); 16 | if (errStr.includes('operation timed out')) { 17 | friendlyStr = '连接下载服务器超时,请检查你的网络连接或更换下载源'; 18 | } else if (checkStr.includes('connection refused')) { 19 | friendlyStr = '下载服务器出现问题,请重试或更换下载源'; 20 | } else if (checkStr.includes('connection reset')) { 21 | friendlyStr = '连接下载服务器失败,请重试或更换下载源'; 22 | } else if (checkStr.includes('too_slow') || checkStr.includes('stalled')) { 23 | friendlyStr = '检测到下载速度异常,请检查你的网络连接或更换下载源'; 24 | } 25 | 26 | if (friendlyStr) { 27 | return `${friendlyStr}\n\n原始错误:${errStrWithoutUrl}${firstUrlInstr ? `\n\n下载服务器:${firstUrlInstr[0]}` : ''}`; 28 | } else { 29 | return `${errStrWithoutUrl}${firstUrlInstr ? `\n\n下载服务器:${firstUrlInstr[0]}` : ''}`; 30 | } 31 | }; 32 | -------------------------------------------------------------------------------- /src-tauri/src/utils/hash.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{Context, Result}; 2 | use std::{io::Read, path::Path}; 3 | 4 | pub async fn run_hash(hash_algorithm: &str, path: &str) -> Result { 5 | if hash_algorithm == "md5" { 6 | let md5 = chksum_md5::async_chksum(Path::new(path)) 7 | .await 8 | .context("HASH_COMPLETE_ERR")?; 9 | Ok(md5.to_hex_lowercase()) 10 | } else if hash_algorithm == "xxh" { 11 | let path = path.to_string(); 12 | let res = tokio::task::spawn_blocking(move || { 13 | use twox_hash::XxHash3_128; 14 | let mut hasher = XxHash3_128::new(); 15 | let mut file = std::fs::OpenOptions::new() 16 | .read(true) 17 | .write(false) 18 | .open(&path) 19 | .context("OPEN_TARGET_ERR")?; 20 | 21 | let mut buffer = [0u8; 1024]; 22 | loop { 23 | let read = file.read(&mut buffer).context("READ_FILE_ERR")?; 24 | if read == 0 { 25 | break; 26 | } 27 | hasher.write(&buffer[..read]); 28 | } 29 | let hash = hasher.finish_128(); 30 | Ok::(format!("{hash:x}")) 31 | }) 32 | .await 33 | .context("HASH_THREAD_ERR")? 
34 | .context("HASH_COMPLETE_ERR")?; 35 | Ok(res) 36 | } else { 37 | Err(anyhow::anyhow!("NO_HASH_ALGO_ERR")) 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /rsbuild.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from '@rsbuild/core'; 2 | import { pluginVue } from '@rsbuild/plugin-vue'; 3 | import { purgeCSSPlugin } from '@fullhuman/postcss-purgecss'; 4 | 5 | export default defineConfig({ 6 | server: { 7 | port: 1420, 8 | }, 9 | source: { 10 | define: { 11 | 'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV), 12 | }, 13 | }, 14 | output: { 15 | overrideBrowserslist: ['edge >= 100'], 16 | }, 17 | performance: { 18 | chunkSplit: { 19 | strategy: 'single-vendor', 20 | }, 21 | }, 22 | plugins: [pluginVue()], 23 | tools: { 24 | bundlerChain: (chain) => { 25 | // if (process.env.NODE_ENV !== 'development') { 26 | // chain.plugin('compress').use(CompressionPlugin, [ 27 | // { 28 | // test: /\.(js|css|svg)$/, 29 | // filename: '[path][base].gz', 30 | // algorithm: 'gzip', 31 | // threshold: 1024, 32 | // minRatio: 0.8, 33 | // deleteOriginalAssets: true, 34 | // }, 35 | // ]); 36 | // } 37 | }, 38 | rspack: { 39 | experiments: { 40 | rspackFuture: { 41 | bundlerInfo: { force: false }, 42 | }, 43 | }, 44 | }, 45 | // @ts-expect-error -- postcss type not compatible 46 | postcss: { 47 | postcssOptions: { 48 | plugins: [ 49 | purgeCSSPlugin({ 50 | safelist: [/^(?!h[1-6]).*$/], 51 | variables: true, 52 | }), 53 | ], 54 | }, 55 | }, 56 | }, 57 | }); 58 | -------------------------------------------------------------------------------- /src-tauri/src/installer/lnk.rs: -------------------------------------------------------------------------------- 1 | use anyhow::{anyhow, Context, Result}; 2 | use std::path::Path; 3 | use windows::Win32::UI::Shell::{ 4 | FOLDERID_CommonPrograms, FOLDERID_Desktop, FOLDERID_Programs, FOLDERID_PublicDesktop, 5 | }; 6 | 7 | use crate::utils::{ 8 | dir::get_dir, 9 | error::{IntoAnyhow, TAResult}, 10 | }; 11 | 12 | #[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] 13 | pub struct CreateLnkArgs { 14 | pub target: String, 15 | pub lnk: String, 16 | } 17 | pub async fn create_lnk_with_args(args: CreateLnkArgs) -> Result<()> { 18 | create_lnk(args.target, args.lnk).await.into_anyhow() 19 | } 20 | 21 | #[tauri::command] 22 | pub async fn create_lnk(target: String, lnk: String) -> TAResult<()> { 23 | let target = Path::new(&target); 24 | let lnk = Path::new(&lnk); 25 | let lnk_dir = lnk.parent(); 26 | if lnk_dir.is_none() { 27 | return Err(anyhow!("Failed to get lnk parent dir") 28 | .context("CREATE_LNK_ERR") 29 | .into()); 30 | } 31 | let lnk_dir = lnk_dir.unwrap(); 32 | tokio::fs::create_dir_all(lnk_dir) 33 | .await 34 | .context("CREATE_LNK_ERR")?; 35 | let sl = mslnk::ShellLink::new(target).context("CREATE_LNK_ERR")?; 36 | sl.create_lnk(lnk).context("CREATE_LNK_ERR")?; 37 | Ok(()) 38 | } 39 | 40 | #[tauri::command] 41 | pub async fn get_dirs(elevated: bool) -> TAResult<(String, String)> { 42 | if elevated { 43 | Ok(( 44 | get_dir(&FOLDERID_CommonPrograms)?, 45 | get_dir(&FOLDERID_PublicDesktop)?, 46 | )) 47 | } else { 48 | Ok((get_dir(&FOLDERID_Programs)?, get_dir(&FOLDERID_Desktop)?)) 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /src-tauri/src/utils/metadata.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, 
Serialize}; 2 | 3 | #[derive(Serialize, Deserialize, Debug, Clone)] 4 | pub struct Metadata { 5 | pub file_name: String, 6 | pub size: u64, 7 | #[serde(skip_serializing_if = "Option::is_none")] 8 | pub md5: Option, 9 | #[serde(skip_serializing_if = "Option::is_none")] 10 | pub xxh: Option, 11 | } 12 | 13 | #[derive(Serialize, Deserialize, Debug)] 14 | pub struct PatchItem { 15 | pub size: u64, 16 | #[serde(skip_serializing_if = "Option::is_none")] 17 | pub md5: Option, 18 | #[serde(skip_serializing_if = "Option::is_none")] 19 | pub xxh: Option, 20 | } 21 | 22 | #[derive(Serialize, Deserialize, Debug)] 23 | pub struct PatchInfo { 24 | pub file_name: String, 25 | pub size: u64, 26 | pub from: PatchItem, 27 | pub to: PatchItem, 28 | } 29 | 30 | #[derive(Serialize, Deserialize, Debug)] 31 | pub struct InstallerInfo { 32 | pub size: u64, 33 | pub md5: Option, 34 | pub xxh: Option, 35 | } 36 | 37 | #[derive(Serialize, Deserialize, Debug)] 38 | pub struct RepoMetadata { 39 | pub repo_name: String, 40 | pub tag_name: String, 41 | #[serde(skip_serializing_if = "Option::is_none")] 42 | pub assets: Option>, 43 | #[serde(skip_serializing_if = "Option::is_none")] 44 | pub hashed: Option>, 45 | #[serde(skip_serializing_if = "Option::is_none")] 46 | pub patches: Option>, 47 | #[serde(skip_serializing_if = "Option::is_none")] 48 | pub installer: Option, 49 | #[serde(skip_serializing_if = "Option::is_none")] 50 | pub deletes: Option>, 51 | #[serde(skip_serializing_if = "Option::is_none")] 52 | pub packing_info: Option>>, 53 | } 54 | -------------------------------------------------------------------------------- /src-tauri/src/builder/append.rs: -------------------------------------------------------------------------------- 1 | use tokio::io::AsyncSeekExt; 2 | 3 | use crate::{ 4 | cli::AppendArgs, 5 | pack::{write_file, PackFile}, 6 | }; 7 | 8 | pub async fn append_cli(args: AppendArgs) { 9 | // files len should equals to names len, or names len should be 0 10 | if args.file.len() != args.name.len() && !args.name.is_empty() { 11 | panic!("Files length must equal to names length, or names length must be 0"); 12 | } 13 | // open file as append mode 14 | let mut output = tokio::fs::OpenOptions::new() 15 | .append(true) 16 | .open(&args.output) 17 | .await 18 | .expect("Failed to open output file"); 19 | // seek to the end of the file 20 | output 21 | .seek(std::io::SeekFrom::End(0)) 22 | .await 23 | .expect("Failed to seek to the end of the file"); 24 | // loop through input files, get corresponding name or dafault to the file name 25 | for (i, file) in args.file.iter().enumerate() { 26 | let name = if !args.name.is_empty() { 27 | &args.name[i] 28 | } else { 29 | file.file_name().and_then(|s| s.to_str()).unwrap() 30 | }; 31 | let input_stream = tokio::fs::File::open(file) 32 | .await 33 | .expect("Failed to open input file"); 34 | let input_length = input_stream 35 | .metadata() 36 | .await 37 | .expect("Failed to get input file metadata") 38 | .len(); 39 | // write file to output 40 | write_file( 41 | &mut output, 42 | &mut PackFile { 43 | name: name.to_string(), 44 | data: Box::new(input_stream), 45 | size: input_length.try_into().expect("File size too large"), 46 | }, 47 | ) 48 | .await 49 | .expect("Failed to write file"); 50 | println!("Appended file: {name} ({input_length} bytes)"); 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src-tauri/src/utils/dir.rs: -------------------------------------------------------------------------------- 1 | use 
anyhow::{Context, Result}; 2 | use std::path::Path; 3 | use windows::{ 4 | core::{GUID, PWSTR}, 5 | Win32::{ 6 | Foundation::HANDLE, 7 | UI::Shell::{ 8 | FOLDERID_Desktop, FOLDERID_Documents, FOLDERID_Downloads, FOLDERID_LocalAppData, 9 | FOLDERID_LocalAppDataLow, FOLDERID_RoamingAppData, GetUserProfileDirectoryW, 10 | SHGetKnownFolderPath, KF_FLAG_DEFAULT, 11 | }, 12 | }, 13 | }; 14 | 15 | pub fn get_dir(dir: &GUID) -> Result { 16 | let pwstr = unsafe { 17 | SHGetKnownFolderPath(dir, KF_FLAG_DEFAULT, None) 18 | .map(|pwstr| pwstr.to_string().context("INTERNAL_ERROR")) 19 | .context("GET_KNOWNFOLDER_ERR")?? 20 | }; 21 | Ok(pwstr) 22 | } 23 | 24 | pub fn get_userprofile() -> Result { 25 | let mut buffer = [0u16; 1024]; 26 | let pwstr = PWSTR::from_raw(buffer.as_mut_ptr()); 27 | let mut size = buffer.len() as u32; 28 | unsafe { GetUserProfileDirectoryW(HANDLE::default(), Some(pwstr), &mut size) } 29 | .context("GET_KNOWNFOLDER_ERR")?; 30 | Ok(unsafe { pwstr.to_string().context("INTERNAL_ERROR")? }) 31 | } 32 | 33 | pub fn in_private_folder(path: &Path) -> bool { 34 | let path_ids = vec![ 35 | FOLDERID_LocalAppData, 36 | FOLDERID_LocalAppDataLow, 37 | FOLDERID_RoamingAppData, 38 | FOLDERID_Desktop, 39 | FOLDERID_Documents, 40 | FOLDERID_Downloads, 41 | ]; 42 | // first check userprofile 43 | let userprofile = get_userprofile(); 44 | if let Ok(userprofile) = userprofile { 45 | if path.starts_with(userprofile) { 46 | return true; 47 | } 48 | } 49 | // then check known folders 50 | for id in path_ids { 51 | let known_folder = get_dir(&id); 52 | if let Ok(known_folder) = known_folder { 53 | if path.starts_with(known_folder) { 54 | return true; 55 | } 56 | } 57 | } 58 | false 59 | } 60 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kachina-installer", 3 | "private": true, 4 | "version": "0.1.0", 5 | "type": "module", 6 | "scripts": { 7 | "dev": "tauri dev --exit-on-panic", 8 | "build": "cross-env STATIC_VCRUNTIME=false tauri build -- --target x86_64-win7-windows-msvc -Z build-std=std,panic_abort -Z build-std-features=\"optimize_for_size\" && cd src-tauri/target/x86_64-win7-windows-msvc/release && ren kachina-builder.exe kachina-builder-standalone.exe && del kachina-builder.exe && copy /b kachina-builder-standalone.exe+kachina-installer.exe kachina-builder.exe", 9 | "debug": "tauri build --debug && pnpm dev:uac", 10 | "preview": "rsbuild preview", 11 | "tauri": "tauri", 12 | "test:prepare": "cd tests && node prepare.mjs", 13 | "test:offline-install": "cd tests && node offline-install.mjs", 14 | "test:online-install": "cd tests && node online-install.mjs", 15 | "test:offline-update": "cd tests && node offline-update.mjs", 16 | "test:online-update": "cd tests && node online-update.mjs", 17 | "test:all": "npm run test:prepare && npm run test:offline-install && npm run test:online-install && npm run test:offline-update && npm run test:online-update" 18 | }, 19 | "dependencies": { 20 | "@sentry/cli": "^2.46.0", 21 | "@tauri-apps/api": "^2.5.0", 22 | "async": "^3.2.6", 23 | "compare-versions": "^6.1.1", 24 | "dompurify": "^3.2.6", 25 | "uuid": "^11.1.0", 26 | "vue": "^3.5.16" 27 | }, 28 | "devDependencies": { 29 | "@eslint/js": "^9.29.0", 30 | "@fullhuman/postcss-purgecss": "^7.0.2", 31 | "@rsbuild/core": "^1.5.10", 32 | "@rsbuild/plugin-vue": "^1.1.2", 33 | "@tauri-apps/cli": "^2.5.0", 34 | "@types/async": "^3.2.24", 35 | "@types/uuid": "^10.0.0", 36 | 
"compression-webpack-plugin": "^11.1.0", 37 | "cross-env": "^7.0.3", 38 | "eslint": "^9.29.0", 39 | "express": "^4.18.2", 40 | "fs-extra": "^11.2.0", 41 | "globals": "^15.15.0", 42 | "prettier": "^3.5.3", 43 | "typescript": "^5.8.3", 44 | "typescript-eslint": "^8.34.0", 45 | "zx": "^8.8.2" 46 | }, 47 | "packageManager": "pnpm@10.17.0" 48 | } 49 | -------------------------------------------------------------------------------- /tests/offline-install.mjs: -------------------------------------------------------------------------------- 1 | import { verifyFiles, cleanupTestDir, getTestDir, printLogFileIfExists, FLAGS } from './utils.mjs'; 2 | import 'zx/globals'; 3 | import { $, usePwsh } from 'zx'; 4 | usePwsh(); 5 | 6 | async function test() { 7 | const testDir = getTestDir('offline-install'); 8 | const installerPath = './fixtures/test-app-v1.exe'; 9 | 10 | console.log(chalk.blue('=== Offline Installation Test ===')); 11 | console.log(`Test directory: ${testDir}`); 12 | console.log(`Installer: ${installerPath}`); 13 | 14 | try { 15 | // 执行离线安装 16 | console.log('Running offline installation...'); 17 | let result; 18 | try { 19 | result = await $`${installerPath} ${FLAGS} -D ${testDir}`.timeout('3m').quiet(); 20 | } catch (error) { 21 | if (error.message && error.message.includes('timed out')) { 22 | console.error(chalk.red('Offline installation timed out after 3 minutes')); 23 | await printLogFileIfExists(); 24 | } 25 | throw error; 26 | } 27 | 28 | if (result.exitCode !== 0) { 29 | throw new Error(`Installation failed with exit code ${result.exitCode}`); 30 | } 31 | 32 | // 验证安装的文件 33 | const expectedFiles = [ 34 | { path: 'app.exe', contains: 'APP_V1' }, 35 | { path: 'config.json', contains: '"version": "1.0.0"' }, 36 | { path: 'readme.txt', contains: 'v1.0.0' }, 37 | { path: 'data/assets.dat', size: 10240 }, 38 | { path: 'updater.exe' }, // v1更新器 39 | ]; 40 | 41 | console.log('Verifying installed files...'); 42 | const verification = await verifyFiles(testDir, expectedFiles); 43 | 44 | // 输出结果 45 | if (verification.failed.length === 0) { 46 | console.log(chalk.green('✓ All files installed correctly')); 47 | console.log(chalk.gray(` Verified: ${verification.passed.join(', ')}`)); 48 | } else { 49 | console.error(chalk.red('✗ Verification failed:')); 50 | verification.failed.forEach((msg) => 51 | console.error(chalk.red(` - ${msg}`)), 52 | ); 53 | process.exit(1); 54 | } 55 | } catch (error) { 56 | console.error(chalk.red('Test failed:'), error.message); 57 | process.exit(1); 58 | } finally { 59 | await cleanupTestDir(testDir); 60 | } 61 | } 62 | 63 | test(); 64 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/wrapper.h: -------------------------------------------------------------------------------- 1 | #include "../hpatch-sys/HPatch/patch_types.h" 2 | #include 3 | 4 | typedef hpatch_TStreamOutput hdiff_TStreamOutput; 5 | typedef hpatch_TStreamInput hdiff_TStreamInput; 6 | // compress plugin 7 | typedef struct hdiff_TCompress { 8 | // return type tag; strlen(result)<=hpatch_kMaxPluginTypeLength; (Note:result lifetime) 9 | const char *(*compressType)(void); // ascii cstring,cannot contain '&' 10 | // return the max compressed size, if input dataSize data; 11 | hpatch_StreamPos_t (*maxCompressedSize)(hpatch_StreamPos_t dataSize); 12 | // return support threadNumber 13 | int (*setParallelThreadNumber)(struct hdiff_TCompress *compressPlugin, int threadNum); 14 | // compress data to out_code; return compressed size, if error or not need 
compress then return 0; 15 | // if out_code->write() return hdiff_stream_kCancelCompress(error) then return 0; 16 | // if memory I/O can use hdiff_compress_mem() 17 | hpatch_StreamPos_t (*compress)(const struct hdiff_TCompress *compressPlugin, 18 | const hpatch_TStreamOutput *out_code, 19 | const hpatch_TStreamInput *in_data); 20 | const char *(*compressTypeForDisplay)(void); // like compressType but just for display,can NULL 21 | } hdiff_TCompress; 22 | 23 | // create a diff data between oldData and newData, the diffData saved as single compressed stream 24 | // kMinSingleMatchScore: default 6, bin: 0--4 text: 4--9 25 | // patchStepMemSize>=hpatch_kStreamCacheSize, default 256k, recommended 64k,2m etc... 26 | // isUseBigCacheMatch: big cache max used O(oldSize) memory, match speed faster, but build big cache slow 27 | void create_single_compressed_diff(const unsigned char *newData, const unsigned char *newData_end, 28 | const unsigned char *oldData, const unsigned char *oldData_end, 29 | const hpatch_TStreamOutput *out_diff, const hdiff_TCompress *compressPlugin, 30 | int kMinSingleMatchScore, 31 | size_t patchStepMemSize, 32 | bool isUseBigCacheMatch, 33 | void *listener, size_t threadNum); -------------------------------------------------------------------------------- /src-tauri/src/cli/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod arg; 2 | use arg::{Command, InstallArgs}; 3 | use clap::Parser; 4 | 5 | use crate::{utils::url::HttpContextExt, REQUEST_CLIENT}; 6 | 7 | #[derive(Parser)] 8 | #[command(args_conflicts_with_subcommands = true)] 9 | pub struct Cli { 10 | #[command(subcommand)] 11 | command: Option, 12 | #[clap(flatten)] 13 | pub install: InstallArgs, 14 | } 15 | impl Cli { 16 | pub fn command(&self) -> Command { 17 | self.command 18 | .clone() 19 | .unwrap_or(Command::Install(self.install.clone())) 20 | } 21 | } 22 | 23 | pub async fn install_webview2() { 24 | println!("安装程序缺少必要的运行环境"); 25 | println!("当前系统未安装 WebView2 运行时,正在下载并安装..."); 26 | // use reqwest to download the installer 27 | let wv2_url = "https://go.microsoft.com/fwlink/p/?LinkId=2124703"; 28 | let res = REQUEST_CLIENT 29 | .get(wv2_url) 30 | .send() 31 | .await 32 | .with_http_context("install_webview2", wv2_url) 33 | .expect("Failed to download WebView2 installer"); 34 | let wv2_installer_blob = res 35 | .bytes() 36 | .await 37 | .with_http_context("install_webview2", wv2_url) 38 | .expect("Failed to read WebView2 installer data"); 39 | let temp_dir = std::env::temp_dir(); 40 | let installer_path = temp_dir 41 | .as_path() 42 | .join("kachina.MicrosoftEdgeWebview2Setup.exe"); 43 | tokio::fs::write(&installer_path, wv2_installer_blob) 44 | .await 45 | .expect("failed to write installer to temp dir"); 46 | // run the installer 47 | let status = tokio::process::Command::new(installer_path.clone()) 48 | .arg("/install") 49 | .status() 50 | .await 51 | .expect("failed to run installer"); 52 | let _ = tokio::fs::remove_file(installer_path).await; 53 | if status.success() { 54 | println!("WebView2 运行时安装成功"); 55 | println!("正在重新启动安装程序..."); 56 | // exec self and detatch 57 | let _ = tokio::process::Command::new(std::env::current_exe().unwrap()).spawn(); 58 | // delete the installer 59 | } else { 60 | println!("WebView2 运行时安装失败"); 61 | println!("按任意键退出..."); 62 | let _ = tokio::io::AsyncReadExt::read(&mut tokio::io::stdin(), &mut [0u8]).await; 63 | std::process::exit(0); 64 | } 65 | } 66 | 
-------------------------------------------------------------------------------- /src-tauri/libs/hpatch-sys/HPatch/checksum_plugin.h: -------------------------------------------------------------------------------- 1 | //checksum_plugin.h 2 | // checksum plugin type 3 | /* 4 | The MIT License (MIT) 5 | Copyright (c) 2018-2019 HouSisong 6 | 7 | Permission is hereby granted, free of charge, to any person 8 | obtaining a copy of this software and associated documentation 9 | files (the "Software"), to deal in the Software without 10 | restriction, including without limitation the rights to use, 11 | copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | copies of the Software, and to permit persons to whom the 13 | Software is furnished to do so, subject to the following 14 | conditions: 15 | 16 | The above copyright notice and this permission notice shall be 17 | included in all copies of the Software. 18 | 19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 20 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 21 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 22 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 23 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 24 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 25 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 26 | OTHER DEALINGS IN THE SOFTWARE. 27 | */ 28 | #ifndef HPatch_checksum_plugin_h 29 | #define HPatch_checksum_plugin_h 30 | #include "patch_types.h" 31 | #ifdef __cplusplus 32 | extern "C" { 33 | #endif 34 | 35 | typedef void* hpatch_checksumHandle; 36 | typedef struct hpatch_TChecksum{ 37 | //return type tag; strlen(result)<=hpatch_kMaxPluginTypeLength; (Note:result lifetime) 38 | const char* (*checksumType)(void); //ascii cstring,cannot contain '&' 39 | hpatch_size_t (*checksumByteSize)(void); //result<=hpatch_kStreamCacheSize 40 | hpatch_checksumHandle (*open)(struct hpatch_TChecksum* plugin); 41 | void (*close)(struct hpatch_TChecksum* plugin,hpatch_checksumHandle handle); 42 | void (*begin)(hpatch_checksumHandle handle); 43 | void (*append)(hpatch_checksumHandle handle, 44 | const unsigned char* part_data,const unsigned char* part_data_end); 45 | void (*end)(hpatch_checksumHandle handle, 46 | unsigned char* checksum,unsigned char* checksum_end); 47 | } hpatch_TChecksum; 48 | 49 | #ifdef __cplusplus 50 | } 51 | #endif 52 | #endif 53 | -------------------------------------------------------------------------------- /src-tauri/src/utils/uac.rs: -------------------------------------------------------------------------------- 1 | use std::ffi::{c_void, OsStr}; 2 | use std::mem::{size_of, zeroed}; 3 | use std::ptr::null_mut; 4 | use windows::core::{w, HSTRING, PCWSTR}; 5 | use windows::Win32::Foundation::HANDLE; 6 | use windows::Win32::Security::{GetTokenInformation, TokenElevation, TOKEN_ELEVATION, TOKEN_QUERY}; 7 | use windows::Win32::System::Threading::{GetCurrentProcess, OpenProcessToken}; 8 | use windows::Win32::UI::Shell::{ 9 | ShellExecuteExW, SEE_MASK_NOASYNC, SEE_MASK_NOCLOSEPROCESS, SHELLEXECUTEINFOW, 10 | }; 11 | #[derive(Debug)] 12 | pub struct SendableHandle(pub HANDLE); 13 | unsafe impl Send for SendableHandle {} 14 | unsafe impl Sync for SendableHandle {} 15 | 16 | pub fn check_elevated() -> windows::core::Result { 17 | unsafe { 18 | let h_process = GetCurrentProcess(); 19 | let mut h_token = HANDLE(null_mut()); 20 | let open_result = OpenProcessToken(h_process, TOKEN_QUERY, &mut h_token); 21 | let mut 
ret_len: u32 = 0; 22 | let mut token_info: TOKEN_ELEVATION = zeroed(); 23 | 24 | if let Err(e) = open_result { 25 | println!("OpenProcessToken {e:?}"); 26 | return Err(e); 27 | } 28 | 29 | if let Err(e) = GetTokenInformation( 30 | h_token, 31 | TokenElevation, 32 | Some(std::ptr::addr_of_mut!(token_info).cast::()), 33 | size_of::() as u32, 34 | &mut ret_len, 35 | ) { 36 | println!("GetTokenInformation {e:?}"); 37 | 38 | return Err(e); 39 | } 40 | 41 | Ok(token_info.TokenIsElevated != 0) 42 | } 43 | } 44 | 45 | pub fn run_elevated, T: AsRef>( 46 | program_path: S, 47 | args: T, 48 | ) -> std::io::Result { 49 | let file = HSTRING::from(program_path.as_ref()); 50 | let par = HSTRING::from(args.as_ref()); 51 | 52 | let mut sei = SHELLEXECUTEINFOW { 53 | cbSize: std::mem::size_of::() as u32, 54 | fMask: SEE_MASK_NOASYNC | SEE_MASK_NOCLOSEPROCESS, 55 | lpVerb: w!("runas"), 56 | lpFile: PCWSTR(file.as_ptr()), 57 | lpParameters: PCWSTR(par.as_ptr()), 58 | nShow: 1, 59 | ..Default::default() 60 | }; 61 | unsafe { 62 | ShellExecuteExW(&mut sei)?; 63 | let process = { sei.hProcess }; 64 | if process.is_invalid() { 65 | return Err(std::io::Error::last_os_error()); 66 | }; 67 | Ok(SendableHandle(process)) 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/plugins/index.ts: -------------------------------------------------------------------------------- 1 | import type { KachinaInstallSource } from './types'; 2 | 3 | export class PluginManager { 4 | private plugins: KachinaInstallSource[] = []; 5 | 6 | register(plugin: KachinaInstallSource): void { 7 | this.plugins.push(plugin); 8 | } 9 | 10 | private parseUrl(url: string): { cleanUrl: string | null; forcedPlugin: string | null } { 11 | // 检查是否包含dfs+或dfs2+,如果有则不匹配插件 12 | if (url.includes('dfs+') || url.includes('dfs2+')) { 13 | return { cleanUrl: null, forcedPlugin: null }; 14 | } 15 | 16 | // 找到://前的内容进行分析 17 | const protocolIndex = url.indexOf('://'); 18 | if (protocolIndex === -1) return { cleanUrl: url, forcedPlugin: null }; 19 | 20 | const beforeProtocol = url.substring(0, protocolIndex); 21 | const afterProtocol = url.substring(protocolIndex); 22 | 23 | // 检查是否有plugin-强制指定格式 24 | const pluginMatch = beforeProtocol.match(/plugin-([^+]+)\+(.*)$/); 25 | if (pluginMatch) { 26 | const [, pluginName, remainingPrefix] = pluginMatch; 27 | // 重新组装URL,移除plugin-xxx+部分 28 | const cleanUrl = remainingPrefix ? `${remainingPrefix}${afterProtocol}` : `https${afterProtocol}`; 29 | return { cleanUrl, forcedPlugin: pluginName }; 30 | } 31 | 32 | // 普通处理:找到最后一个+,过滤前面的内容 33 | const lastPlusIndex = beforeProtocol.lastIndexOf('+'); 34 | const cleanUrl = lastPlusIndex === -1 35 | ? 
url 36 | : beforeProtocol.substring(lastPlusIndex + 1) + afterProtocol; 37 | 38 | return { cleanUrl, forcedPlugin: null }; 39 | } 40 | 41 | private extractCleanUrl(url: string): string | null { 42 | return this.parseUrl(url).cleanUrl; 43 | } 44 | 45 | findPlugin(url: string): KachinaInstallSource | null { 46 | const { cleanUrl, forcedPlugin } = this.parseUrl(url); 47 | if (!cleanUrl) return null; 48 | 49 | // 如果指定了强制插件,直接按名称查找 50 | if (forcedPlugin) { 51 | const plugin = this.plugins.find(p => p.name === forcedPlugin); 52 | if (!plugin) { 53 | throw new Error(`Plugin "${forcedPlugin}" not found`); 54 | } 55 | return plugin; 56 | } 57 | 58 | // 否则按URL匹配 59 | return this.plugins.find(plugin => plugin.matchUrl(cleanUrl)) || null; 60 | } 61 | 62 | isPluginSource(url: string): boolean { 63 | return this.findPlugin(url) !== null; 64 | } 65 | 66 | getCleanUrl(url: string): string | null { 67 | return this.extractCleanUrl(url); 68 | } 69 | } 70 | 71 | export const pluginManager = new PluginManager(); 72 | 73 | // 导出类型供其他模块使用 74 | export type { KachinaInstallSource } from './types'; -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/src/lib.rs: -------------------------------------------------------------------------------- 1 | #![allow(non_upper_case_globals)] 2 | #![allow(non_camel_case_types)] 3 | #![allow(non_snake_case)] 4 | 5 | use std::ffi::c_void; 6 | 7 | include!("../binding.rs"); 8 | 9 | trait WriteSeek: std::io::Write + std::io::Seek {} 10 | 11 | impl WriteSeek for T {} 12 | 13 | struct WriteStreamWrapper<'a> { 14 | stream: &'a mut dyn WriteSeek, 15 | } 16 | 17 | extern "C" fn write_seek_callback( 18 | stream: *const hpatch_TStreamOutput, 19 | write_to: u64, 20 | out_data: *const u8, 21 | out_data_end: *const u8, 22 | ) -> i32 { 23 | let write_size = unsafe { out_data_end.offset_from(out_data) }; 24 | let stream: &hpatch_TStreamOutput = unsafe { &*stream }; 25 | let input_wrapper = unsafe { &mut *(stream.streamImport as *mut WriteStreamWrapper) }; 26 | // seek 27 | if let Err(err) = input_wrapper 28 | .stream 29 | .seek(std::io::SeekFrom::Start(write_to)) 30 | { 31 | println!("Error in read_seek: {:?}", err); 32 | return 0; 33 | } 34 | // buffer: out_data to out_data_end 35 | let buffer = unsafe { std::slice::from_raw_parts(out_data, write_size as usize) }; 36 | // read exact, return 0 if failed 37 | let res = input_wrapper.stream.write_all(buffer); 38 | if let Err(err) = res { 39 | println!("Error in write_seq_callback: {:?}", err); 40 | return 0; 41 | } 42 | write_size as i32 43 | } 44 | 45 | pub fn safe_create_single_patch( 46 | new_data: &[u8], 47 | old_data: &[u8], 48 | mut output: impl std::io::Write + std::io::Seek, 49 | level: u8, 50 | ) -> Result<(), String> { 51 | let new_start_ptr = new_data.as_ptr(); 52 | let new_end_ptr = unsafe { new_start_ptr.add(new_data.len()) }; 53 | let old_start_ptr = old_data.as_ptr(); 54 | let old_end_ptr = unsafe { old_start_ptr.add(old_data.len()) }; 55 | let mut output_wrapper = WriteStreamWrapper { 56 | stream: &mut output, 57 | }; 58 | let mut stream_output = hpatch_TStreamOutput { 59 | // 1G 60 | streamSize: 1 << 30, 61 | streamImport: &mut output_wrapper as *mut WriteStreamWrapper as *mut c_void, 62 | write: Some(write_seek_callback), 63 | read_writed: None, 64 | }; 65 | unsafe { 66 | create_single_compressed_diff( 67 | new_start_ptr, 68 | new_end_ptr, 69 | old_start_ptr, 70 | old_end_ptr, 71 | &mut stream_output, 72 | std::ptr::null_mut(), 73 | level as i32, 74 | 1024 * 256, 75 | true, 
76 | std::ptr::null_mut(), 77 | 1, 78 | ); 79 | } 80 | Ok(()) 81 | } 82 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/HDiff/private_diff/libdivsufsort/config.h: -------------------------------------------------------------------------------- 1 | /* 2 | * config.h for libdivsufsort 3 | * Copyright (c) 2003-2008 Yuta Mori All Rights Reserved. 4 | * 5 | * Permission is hereby granted, free of charge, to any person 6 | * obtaining a copy of this software and associated documentation 7 | * files (the "Software"), to deal in the Software without 8 | * restriction, including without limitation the rights to use, 9 | * copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the 11 | * Software is furnished to do so, subject to the following 12 | * conditions: 13 | * 14 | * The above copyright notice and this permission notice shall be 15 | * included in all copies or substantial portions of the Software. 16 | * 17 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 18 | * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 19 | * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 20 | * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 21 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 22 | * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 23 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 24 | * OTHER DEALINGS IN THE SOFTWARE. 25 | */ 26 | 27 | #ifndef _CONFIG_H 28 | #define _CONFIG_H 1 29 | 30 | #ifdef __cplusplus 31 | extern "C" { 32 | #endif /* __cplusplus */ 33 | 34 | /** Define to the version of this package. **/ 35 | #define PROJECT_VERSION_FULL "2.0.1-14-g5f60d6f" 36 | 37 | /** Define to 1 if you have the header files. 
**/ 38 | #define HAVE_INTTYPES_H 0 39 | #define HAVE_STDDEF_H 1 40 | #if defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) 41 | # define HAVE_STDINT_H 1 42 | #else 43 | # define HAVE_STDINT_H 0 44 | #endif 45 | #define HAVE_STDLIB_H 1 46 | #define HAVE_STRING_H 1 47 | #define HAVE_STRINGS_H 0 48 | #define HAVE_MEMORY_H 1 49 | #define HAVE_SYS_TYPES_H 0 50 | 51 | /** for WinIO **/ 52 | /* #undef HAVE_IO_H */ 53 | /* #undef HAVE_FCNTL_H */ 54 | /* #undef HAVE__SETMODE */ 55 | /* #undef HAVE_SETMODE */ 56 | /* #undef HAVE__FILENO */ 57 | /* #undef HAVE_FOPEN_S */ 58 | /* #undef HAVE__O_BINARY */ 59 | #ifndef HAVE__SETMODE 60 | # if HAVE_SETMODE 61 | # define _setmode setmode 62 | # define HAVE__SETMODE 1 63 | # endif 64 | # if HAVE__SETMODE && !HAVE__O_BINARY 65 | # define _O_BINARY 0 66 | # define HAVE__O_BINARY 1 67 | # endif 68 | #endif 69 | 70 | /** for inline **/ 71 | #ifndef INLINE 72 | # ifdef _MSC_VER 73 | # define INLINE __inline 74 | # else 75 | # define INLINE inline 76 | # endif 77 | #endif 78 | 79 | /** for VC++ warning **/ 80 | #ifdef _MSC_VER 81 | #pragma warning(disable: 4127) 82 | #endif 83 | 84 | 85 | #ifdef __cplusplus 86 | } /* extern "C" */ 87 | #endif /* __cplusplus */ 88 | 89 | #endif /* _CONFIG_H */ 90 | -------------------------------------------------------------------------------- /src-tauri/src/builder/cli/mod.rs: -------------------------------------------------------------------------------- 1 | #[path = "../../cli/arg.rs"] 2 | pub mod arg; 3 | 4 | use std::path::PathBuf; 5 | 6 | use clap::{Parser, Subcommand}; 7 | 8 | #[derive(Debug, Clone, clap::Args)] 9 | pub struct PackArgs { 10 | #[clap(long, short = 'o', default_value = "output.exe")] 11 | pub output: PathBuf, 12 | #[clap(long, short = 'c', default_value = ".config.json")] 13 | pub config: PathBuf, 14 | #[clap(long, short = 't')] 15 | pub image: Option, 16 | #[clap(long, short = 'm')] 17 | pub metadata: Option, 18 | #[clap(long, short = 'd')] 19 | pub data_dir: Option, 20 | #[clap(long)] 21 | pub icon: Option, 22 | } 23 | 24 | #[derive(Debug, Clone, clap::Args)] 25 | pub struct GenArgs { 26 | #[clap(long, short = 'i')] 27 | pub input_dir: PathBuf, 28 | #[clap(long, short = 'm')] 29 | pub output_metadata: PathBuf, 30 | #[clap(long, short = 'o')] 31 | pub output_dir: PathBuf, 32 | #[clap(long, short = 'r')] 33 | pub repo: String, 34 | #[clap(long, short = 't')] 35 | pub tag: String, 36 | #[clap(long, short = 'd')] 37 | pub diff_vers: Option>, 38 | #[clap(long, short = 'x')] 39 | pub diff_ignore: Option>, 40 | #[clap(long, short = 'u')] 41 | pub updater: Option, 42 | #[clap(long, short = 'p')] 43 | pub updater_name: Option, 44 | #[clap(long, short = 'j', default_value = "2")] 45 | pub zstd_concurrency: usize, 46 | } 47 | 48 | #[derive(Debug, Clone, clap::Args)] 49 | pub struct AppendArgs { 50 | #[clap(long, short = 'o', default_value = "output.exe")] 51 | pub output: PathBuf, 52 | #[clap(long, short = 'f')] 53 | pub file: Vec, 54 | #[clap(long, short = 'n')] 55 | pub name: Vec, 56 | } 57 | 58 | #[derive(Debug, Clone, clap::Args)] 59 | pub struct ExtractArgs { 60 | #[clap(long, short = 'i', default_value = "output.exe")] 61 | pub input: PathBuf, 62 | 63 | // 原有参数保持不变 64 | #[clap(long, short = 'f')] 65 | pub file: Vec, 66 | #[clap(long, short = 'n')] 67 | pub name: Vec, 68 | 69 | // 新增参数 70 | #[clap(long)] 71 | pub meta_name: Vec, 72 | #[clap(long)] 73 | pub all: Option, 74 | #[clap(long)] 75 | pub list: bool, 76 | } 77 | 78 | #[derive(Debug, Clone, clap::Args)] 79 
| pub struct ReplaceBinArgs { 80 | /// 输入的安装包文件 81 | pub input: PathBuf, 82 | /// 输出的新安装包文件 83 | #[clap(long, short = 'o')] 84 | pub output: PathBuf, 85 | } 86 | 87 | #[derive(Subcommand, Clone, Debug)] 88 | pub enum Command { 89 | Pack(PackArgs), 90 | Append(AppendArgs), 91 | Extract(ExtractArgs), 92 | Gen(GenArgs), 93 | ReplaceBin(ReplaceBinArgs), 94 | } 95 | 96 | #[derive(Parser)] 97 | #[command(args_conflicts_with_subcommands = true, arg_required_else_help = true)] 98 | pub struct Cli { 99 | #[command(subcommand)] 100 | pub command: Option, 101 | } 102 | -------------------------------------------------------------------------------- /tests/online-install.mjs: -------------------------------------------------------------------------------- 1 | import { 2 | verifyFiles, 3 | cleanupTestDir, 4 | getTestDir, 5 | waitForServer, 6 | printLogFileIfExists, 7 | FLAGS, 8 | } from './utils.mjs'; 9 | import { startServer } from './server.mjs'; 10 | import 'zx/globals'; 11 | import { $, usePwsh } from 'zx'; 12 | usePwsh(); 13 | 14 | async function test() { 15 | const testDir = getTestDir('online-install'); 16 | const installerPath = './fixtures/test-app-v1.exe'; 17 | 18 | console.log(chalk.blue('=== Online Installation Test ===')); 19 | console.log(`Test directory: ${testDir}`); 20 | 21 | // 启动HTTP服务器 22 | console.log('Starting HTTP server...'); 23 | const server = await startServer(); 24 | 25 | try { 26 | // 等待服务器启动 27 | await waitForServer('http://localhost:8080/test-app-v1.exe'); 28 | 29 | // 删除日志文件 %temp%/KachinaInstaller.log 30 | const logFile = os.tmpdir() + '/KachinaInstaller.log'; 31 | if (await fs.pathExists(logFile)) { 32 | await fs.remove(logFile); 33 | } 34 | 35 | // 执行在线安装 36 | console.log('Running online installation...'); 37 | let result; 38 | try { 39 | result = 40 | await $`${installerPath} ${FLAGS} -O -D ${testDir} --source local-v1`.timeout('3m').quiet(); 41 | } catch (error) { 42 | if (error.message && error.message.includes('timed out')) { 43 | console.error(chalk.red('Installation process timed out after 3 minutes')); 44 | await printLogFileIfExists(); 45 | } 46 | throw error; 47 | } 48 | 49 | if (result.exitCode !== 0) { 50 | throw new Error(`Installation failed with exit code ${result.exitCode}`); 51 | } 52 | 53 | // check if fail in logs 54 | if (await fs.pathExists(logFile)) { 55 | const logs = await fs.readFile(logFile, 'utf-8'); 56 | console.log(logs); 57 | // 验证日志文件是否有错误 58 | if (logs.includes('ERROR kachina_installer::installer')) { 59 | throw new Error('Updater log contains errors'); 60 | } 61 | } 62 | 63 | // 验证安装的文件 64 | const expectedFiles = [ 65 | { path: 'app.exe', contains: 'APP_V1' }, 66 | { path: 'config.json', contains: '"version": "1.0.0"' }, 67 | { path: 'readme.txt', contains: 'v1.0.0' }, 68 | { path: 'data/assets.dat', size: 10240 }, 69 | { path: 'updater.exe' }, 70 | ]; 71 | 72 | console.log('Verifying installed files...'); 73 | const verification = await verifyFiles(testDir, expectedFiles); 74 | 75 | if (verification.failed.length === 0) { 76 | console.log(chalk.green('✓ All files installed correctly via HTTP')); 77 | console.log(chalk.gray(` Verified: ${verification.passed.join(', ')}`)); 78 | } else { 79 | console.error(chalk.red('✗ Verification failed:')); 80 | verification.failed.forEach((msg) => 81 | console.error(chalk.red(` - ${msg}`)), 82 | ); 83 | process.exit(1); 84 | } 85 | } catch (error) { 86 | console.error(chalk.red('Test failed:'), error.message); 87 | process.exit(1); 88 | } finally { 89 | // 停止服务器 90 | server?.close(); 91 | await 
cleanupTestDir(testDir); 92 | } 93 | } 94 | 95 | test(); 96 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/HDiff/private_diff/libdivsufsort/divsufsort.h: -------------------------------------------------------------------------------- 1 | /* 2 | * divsufsort.h for libdivsufsort 3 | * Copyright (c) 2003-2008 Yuta Mori All Rights Reserved. 4 | * 5 | * Permission is hereby granted, free of charge, to any person 6 | * obtaining a copy of this software and associated documentation 7 | * files (the "Software"), to deal in the Software without 8 | * restriction, including without limitation the rights to use, 9 | * copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the 11 | * Software is furnished to do so, subject to the following 12 | * conditions: 13 | * 14 | * The above copyright notice and this permission notice shall be 15 | * included in all copies or substantial portions of the Software. 16 | * 17 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 18 | * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 19 | * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 20 | * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 21 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 22 | * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 23 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 24 | * OTHER DEALINGS IN THE SOFTWARE. 25 | */ 26 | 27 | #ifndef _DIVSUFSORT_H 28 | #define _DIVSUFSORT_H 1 29 | 30 | #if defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) 31 | # include //for uint8_t,int32_t 32 | #else 33 | # if (_MSC_VER >= 1300) 34 | typedef unsigned __int8 uint8_t; 35 | typedef signed __int32 int32_t; 36 | # else 37 | typedef unsigned char uint8_t; 38 | typedef signed int int32_t; 39 | # endif 40 | #endif 41 | 42 | #ifdef __cplusplus 43 | extern "C" { 44 | #endif /* __cplusplus */ 45 | 46 | #ifndef PRId32 47 | # define PRId32 "d" 48 | #endif 49 | 50 | #ifndef DIVSUFSORT_API 51 | # ifdef DIVSUFSORT_BUILD_DLL 52 | # define DIVSUFSORT_API 53 | # else 54 | # define DIVSUFSORT_API 55 | # endif 56 | #endif 57 | 58 | /*- Datatypes -*/ 59 | #ifndef SAUCHAR_T 60 | #define SAUCHAR_T 61 | typedef uint8_t sauchar_t; 62 | #endif /* SAUCHAR_T */ 63 | #ifndef SAINT_T 64 | #define SAINT_T 65 | typedef int32_t saint_t; 66 | #endif /* SAINT_T */ 67 | #ifndef SAIDX32_T 68 | #define SAIDX32_T 69 | typedef int32_t saidx32_t; 70 | #endif /* SAIDX32_T */ 71 | 72 | /*- Prototypes -*/ 73 | 74 | /** 75 | * Constructs the suffix array of a given string. 76 | * @param T[0..n-1] The input string. 77 | * @param SA[0..n-1] The output array of suffixes. 78 | * @param n The length of the given string. 79 | * @return 0 if no error occurred, -1 or -2 otherwise. 80 | */ 81 | DIVSUFSORT_API 82 | saint_t 83 | divsufsort(const sauchar_t *T,saidx32_t *SA,saidx32_t n,int threadNum); 84 | 85 | /** 86 | * Returns the version of the divsufsort library. 87 | * @return The version number string. 
88 | */ 89 | DIVSUFSORT_API 90 | const char * 91 | divsufsort_version(void); 92 | 93 | #ifdef __cplusplus 94 | } /* extern "C" */ 95 | #endif /* __cplusplus */ 96 | 97 | #endif /* _DIVSUFSORT_H */ 98 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/HDiff/private_diff/mem_buf.h: -------------------------------------------------------------------------------- 1 | // mem_buf.h 2 | // 3 | /* 4 | The MIT License (MIT) 5 | Copyright (c) 2012-2019 HouSisong 6 | 7 | Permission is hereby granted, free of charge, to any person 8 | obtaining a copy of this software and associated documentation 9 | files (the "Software"), to deal in the Software without 10 | restriction, including without limitation the rights to use, 11 | copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | copies of the Software, and to permit persons to whom the 13 | Software is furnished to do so, subject to the following 14 | conditions: 15 | 16 | The above copyright notice and this permission notice shall be 17 | included in all copies or substantial portions of the Software. 18 | 19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 20 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 21 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 22 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 23 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 24 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 25 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 26 | OTHER DEALINGS IN THE SOFTWARE. 27 | */ 28 | 29 | #ifndef __mem_buf_h 30 | #define __mem_buf_h 31 | #include //malloc free 32 | #include //size_t 33 | #include //std::runtime_error 34 | #include 35 | namespace hdiff_private{ 36 | 37 | struct TAutoMem{ 38 | inline explicit TAutoMem(size_t size=0) :_data(0),_data_end(0),_capacity_end(0){ realloc(size); } 39 | inline ~TAutoMem(){ clear(); } 40 | inline unsigned char* data(){ return _data; } 41 | inline const unsigned char* data()const{ return _data; } 42 | inline unsigned char* data_end(){ return _data_end; } 43 | inline const unsigned char* data_end()const{ return _data_end; } 44 | inline size_t size()const{ return (size_t)(_data_end-_data); } 45 | inline size_t capacity()const{ return (size_t)(_capacity_end-_data); } 46 | inline bool empty()const{ return (_data_end==_data); } 47 | inline void clear(){ if (_data) { free(_data); _data=0; _data_end=0; _capacity_end=0; } } 48 | inline void realloc(size_t newSize){ 49 | if (newSize<=capacity()){ 50 | _data_end=_data+newSize; 51 | }else{ 52 | unsigned char* _new_data=(unsigned char*)::realloc(_data,newSize); 53 | if (_new_data==0) throw std::runtime_error("TAutoMem::TAutoMem() realloc() error!"); 54 | _data=_new_data; 55 | _data_end=_new_data+newSize; 56 | _capacity_end=_data_end; 57 | } 58 | } 59 | inline void reduceSize(size_t reserveSize){ 60 | if (reserveSize<=capacity()) 61 | _data_end=_data+reserveSize; 62 | else 63 | throw std::runtime_error("TAutoMem::reduceSize() error!"); 64 | } 65 | private: 66 | unsigned char* _data; 67 | unsigned char* _data_end; 68 | unsigned char* _capacity_end; 69 | }; 70 | 71 | }//namespace hdiff_private 72 | #endif //__mem_buf_h 73 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/HDiff/private_diff/compress_detect.h: -------------------------------------------------------------------------------- 1 | //compress_detect.h 2 | 
//粗略估算数据的可压缩性 for diff. 3 | /* 4 | The MIT License (MIT) 5 | Copyright (c) 2012-2017 HouSisong 6 | 7 | Permission is hereby granted, free of charge, to any person 8 | obtaining a copy of this software and associated documentation 9 | files (the "Software"), to deal in the Software without 10 | restriction, including without limitation the rights to use, 11 | copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | copies of the Software, and to permit persons to whom the 13 | Software is furnished to do so, subject to the following 14 | conditions: 15 | 16 | The above copyright notice and this permission notice shall be 17 | included in all copies of the Software. 18 | 19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 20 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 21 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 22 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 23 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 24 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 25 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 26 | OTHER DEALINGS IN THE SOFTWARE. 27 | */ 28 | #ifdef _MSC_VER 29 | #pragma warning( disable : 4706) 30 | #endif 31 | 32 | #ifndef compress_detect_h 33 | #define compress_detect_h 34 | #include //for size_t 35 | #include "../../../hpatch-sys/HPatch/patch_types.h" //for hpatch_uint32_t 36 | #include "mem_buf.h" 37 | namespace hdiff_private{ 38 | 39 | template 40 | static unsigned int _getUIntCost(_UInt v){ 41 | if ((sizeof(_UInt)<8)||((v>>28)>>28)==0) { 42 | int cost=1; 43 | _UInt t; 44 | if ((t=(v>>28))) { v=t; cost+=4; } 45 | if ((t=(v>>14))) { v=t; cost+=2; } 46 | if ((t=(v>> 7))) { v=t; ++cost; } 47 | return cost; 48 | }else{ 49 | return 9; 50 | } 51 | } 52 | 53 | template 54 | inline static unsigned _getIntCost(_Int v){ 55 | return _getUIntCost((_UInt)(2*((v>=0)?(_UInt)v:(_UInt)(-v)))); 56 | } 57 | 58 | //粗略估算该区域存储成本. 59 | size_t getRegionRleCost(const unsigned char* d,size_t n,const unsigned char* sub=0, 60 | unsigned char* out_nocompress=0,size_t* nocompressSize=0); 61 | 62 | class TCompressDetect{ 63 | public: 64 | TCompressDetect(); 65 | ~TCompressDetect(); 66 | void add_chars(const unsigned char* d,size_t n,const unsigned char* sub=0); 67 | size_t cost(const unsigned char* d,size_t n,const unsigned char* sub=0)const; 68 | private: 69 | struct TCharConvTable{ 70 | hpatch_uint32_t sum; 71 | hpatch_uint32_t sum1char[256]; 72 | hpatch_uint32_t sum2char[256*256];//用相邻字符转换几率来近似估计数据的可压缩性. 73 | unsigned char cache[1];//实际大小为kCacheSize,超出该距离的旧数据会被清除. 
74 | }; 75 | TAutoMem m_mem; 76 | TCharConvTable* m_table; 77 | int m_lastChar; 78 | int m_lastPopChar; 79 | hpatch_uint32_t m_cacheBegin; 80 | hpatch_uint32_t m_cacheEnd; 81 | void clear(); 82 | void _add_rle(const unsigned char* d,size_t n); 83 | size_t _cost_rle(const unsigned char* d,size_t n)const; 84 | }; 85 | 86 | }//namespace hdiff_private 87 | 88 | #endif 89 | -------------------------------------------------------------------------------- /src/mirrorc-errors.ts: -------------------------------------------------------------------------------- 1 | import { error } from './api/ipc'; 2 | 3 | /** 4 | * Mirror酱错误码对应表 5 | */ 6 | export interface MirrorcErrorInfo { 7 | code: number; 8 | message: string; 9 | showSourceDialog?: boolean; 10 | } 11 | 12 | export const MIRRORC_ERROR_CODES: Record = { 13 | 1001: { 14 | code: 1001, 15 | message: 'Mirror酱参数错误,请检查打包配置', 16 | }, 17 | 7001: { 18 | code: 7001, 19 | message: 'Mirror酱 CDK 已过期', 20 | showSourceDialog: true, 21 | }, 22 | 7002: { 23 | code: 7002, 24 | message: 'Mirror酱 CDK 错误,请检查设置的 CDK 是否正确', 25 | showSourceDialog: true, 26 | }, 27 | 7003: { 28 | code: 7003, 29 | message: 'Mirror酱 CDK 今日下载次数已达上限,请更换 CDK 或明天再试', 30 | }, 31 | 7004: { 32 | code: 7004, 33 | message: 'Mirror酱 CDK 类型和待下载的资源不匹配,请检查设置的 CDK 是否正确', 34 | showSourceDialog: true, 35 | }, 36 | 7005: { 37 | code: 7005, 38 | message: 'Mirror酱 CDK 已被封禁,请更换 CDK', 39 | showSourceDialog: true, 40 | }, 41 | 8001: { 42 | code: 8001, 43 | message: '从Mirror酱获取更新失败,请检查打包配置', 44 | }, 45 | 8002: { 46 | code: 8002, 47 | message: 'Mirror酱参数错误,请检查打包配置', 48 | }, 49 | 8003: { 50 | code: 8003, 51 | message: 'Mirror酱参数错误,请检查打包配置', 52 | }, 53 | 8004: { 54 | code: 8004, 55 | message: 'Mirror酱参数错误,请检查打包配置', 56 | }, 57 | }; 58 | 59 | /** 60 | * 获取Mirror酱错误信息 61 | * @param code 错误码 62 | * @returns 错误信息,如果不是已知错误码则返回null 63 | */ 64 | export function getMirrorcErrorInfo(code: number): MirrorcErrorInfo | null { 65 | return MIRRORC_ERROR_CODES[code] || null; 66 | } 67 | 68 | /** 69 | * 处理Mirror酱错误并记录日志 70 | * @param mirrorcStatus Mirror酱状态响应 71 | * @param contextType 错误上下文类型(用于日志区分) 72 | * @returns 处理后的错误信息 73 | */ 74 | export function processMirrorcError( 75 | mirrorcStatus: { code: number; msg?: string }, 76 | contextType: 'install' | 'cdk-validation' = 'install' 77 | ): { 78 | isError: boolean; 79 | errorInfo: MirrorcErrorInfo; 80 | message: string; 81 | showSourceDialog: boolean; 82 | } | null { 83 | if (mirrorcStatus.code === 0) { 84 | return null; 85 | } 86 | 87 | const errorInfo = getMirrorcErrorInfo(mirrorcStatus.code); 88 | 89 | if (errorInfo) { 90 | // 记录已知错误码 91 | error(`Mirror酱${contextType === 'cdk-validation' ? 'CDK验证' : ''}错误 [${mirrorcStatus.code}]: ${errorInfo.message}`); 92 | 93 | return { 94 | isError: true, 95 | errorInfo, 96 | message: errorInfo.message, 97 | showSourceDialog: errorInfo.showSourceDialog || false 98 | }; 99 | } else { 100 | // 处理未知错误码 101 | const unknownMessage = contextType === 'cdk-validation' 102 | ? `从Mirror酱获取CDK状态失败: ${mirrorcStatus.msg || '未知错误'},请联系Mirror酱客服` 103 | : `从Mirror酱获取更新失败: ${mirrorcStatus.msg || '未知错误'},请联系Mirror酱客服`; 104 | 105 | // 记录未知错误码 106 | error(`Mirror酱${contextType === 'cdk-validation' ? 
'CDK验证' : ''}未知错误 [${mirrorcStatus.code}]: ${mirrorcStatus.msg || '无详细信息'}`); 107 | 108 | return { 109 | isError: true, 110 | errorInfo: { 111 | code: mirrorcStatus.code, 112 | message: unknownMessage 113 | }, 114 | message: unknownMessage, 115 | showSourceDialog: false 116 | }; 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /src/utils/svgSanitizer.ts: -------------------------------------------------------------------------------- 1 | import DOMPurify from 'dompurify'; 2 | 3 | // 安全的CSS属性白名单(SVG图形属性 + 基础布局样式) 4 | const SAFE_CSS_PROPERTIES = [ 5 | // SVG专用属性 6 | 'fill', 'stroke', 'stroke-width', 'stroke-dasharray', 'stroke-dashoffset', 7 | 'opacity', 'fill-opacity', 'stroke-opacity', 'visibility', 8 | 'transform', 'transform-origin', 'clip-path', 'mask', 9 | 10 | // 基础布局和间距 11 | 'margin', 'margin-top', 'margin-right', 'margin-bottom', 'margin-left', 12 | 'padding', 'padding-top', 'padding-right', 'padding-bottom', 'padding-left', 13 | 'width', 'height', 'max-width', 'max-height', 'min-width', 'min-height', 14 | 15 | // 显示和定位(限制范围) 16 | 'display', 'overflow', 'box-sizing', 17 | 18 | // 颜色和文本 19 | 'color', 'background-color', 'border-color', 20 | 'font-size', 'font-weight', 'font-family', 'text-align', 21 | 22 | // 边框 23 | 'border', 'border-width', 'border-style', 'border-radius', 24 | 'border-top', 'border-right', 'border-bottom', 'border-left' 25 | ]; 26 | 27 | function sanitizeCssStyle(styleValue: string): string { 28 | if (!styleValue) return ''; 29 | 30 | // 移除危险的CSS函数和关键字 31 | const dangerousPatterns = [ 32 | /javascript:/gi, 33 | /expression\s*\(/gi, 34 | /url\s*\(/gi, 35 | /import/gi, 36 | /@/gi, // 移除CSS at-rules 37 | /behaviour:/gi, 38 | /-moz-binding:/gi 39 | ]; 40 | 41 | for (const pattern of dangerousPatterns) { 42 | if (pattern.test(styleValue)) { 43 | return ''; // 发现危险内容,返回空字符串 44 | } 45 | } 46 | 47 | // 解析CSS属性并过滤 48 | const declarations = styleValue.split(';') 49 | .map(decl => decl.trim()) 50 | .filter(decl => decl) 51 | .filter(decl => { 52 | const [property] = decl.split(':').map(part => part.trim()); 53 | return SAFE_CSS_PROPERTIES.includes(property.toLowerCase()); 54 | }); 55 | 56 | return declarations.join('; '); 57 | } 58 | 59 | export function sanitizeSvg(svgContent: string): string | null { 60 | if ( 61 | !svgContent?.trim().startsWith('') 63 | ) { 64 | return null; 65 | } 66 | 67 | try { 68 | // 首先用DOMPurify进行基础清理 69 | let cleaned = DOMPurify.sanitize(svgContent, { 70 | USE_PROFILES: { svg: true }, 71 | ALLOWED_TAGS: [ 72 | 'svg', 73 | 'path', 74 | 'g', 75 | 'rect', 76 | 'circle', 77 | 'ellipse', 78 | 'line', 79 | 'polyline', 80 | 'polygon', 81 | ], 82 | ALLOWED_ATTR: [ 83 | 'viewBox', 84 | 'width', 85 | 'height', 86 | 'fill', 87 | 'stroke', 88 | 'stroke-width', 89 | 'd', 90 | 'x', 91 | 'y', 92 | 'cx', 93 | 'cy', 94 | 'r', 95 | 'rx', 96 | 'ry', 97 | 'transform', 98 | 'style', 99 | 'class', 100 | ], 101 | FORBID_TAGS: ['script', 'iframe', 'object', 'embed'], 102 | FORBID_ATTR: ['onload', 'onclick', 'onmouseover'], 103 | }); 104 | 105 | if (!cleaned) return null; 106 | 107 | // 进一步净化style属性 108 | cleaned = cleaned.replace(/style\s*=\s*["']([^"']*)["']/gi, (match, styleValue) => { 109 | const safeCss = sanitizeCssStyle(styleValue); 110 | return safeCss ? 
`style="${safeCss}"` : ''; 111 | }); 112 | 113 | return cleaned; 114 | } catch { 115 | return null; 116 | } 117 | } 118 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/libParallel/parallel_import.h: -------------------------------------------------------------------------------- 1 | // parallel_import.h 2 | /* 3 | The MIT License (MIT) 4 | Copyright (c) 2018 HouSisong 5 | 6 | Permission is hereby granted, free of charge, to any person 7 | obtaining a copy of this software and associated documentation 8 | files (the "Software"), to deal in the Software without 9 | restriction, including without limitation the rights to use, 10 | copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | copies of the Software, and to permit persons to whom the 12 | Software is furnished to do so, subject to the following 13 | conditions: 14 | 15 | The above copyright notice and this permission notice shall be 16 | included in all copies of the Software. 17 | 18 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 19 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 20 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 21 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 22 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 23 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 24 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 25 | OTHER DEALINGS IN THE SOFTWARE. 26 | */ 27 | 28 | #ifndef parallel_import_h 29 | #define parallel_import_h 30 | 31 | //select define one for support parallel 32 | //#define _IS_USED_PTHREAD 1 33 | //#define _IS_USED_CPP11THREAD 1 34 | //#define _IS_USED_WIN32THREAD 1 35 | 36 | #ifndef _IS_USED_MULTITHREAD 37 | # define _IS_USED_MULTITHREAD 1 38 | #endif 39 | 40 | #if ((_IS_USED_PTHREAD>0) || (_IS_USED_CPP11THREAD>0) || (_IS_USED_WIN32THREAD>0)) 41 | # //ok have one 42 | # define _IS_USED_MULTITHREAD 1 43 | #else 44 | # if (_IS_USED_MULTITHREAD>0) 45 | # if ( (!(defined _IS_USED_WIN32THREAD)) && (defined _WIN32) ) 46 | # define _IS_USED_WIN32THREAD 1 47 | # else 48 | # if ( (!(defined _IS_USED_CPP11THREAD)) && (__cplusplus >= 201103L) ) 49 | # define _IS_USED_CPP11THREAD 1 50 | # else 51 | # if (!(defined _IS_USED_PTHREAD)) 52 | # define _IS_USED_PTHREAD 1 53 | # endif 54 | # endif 55 | # endif 56 | # endif 57 | #endif 58 | 59 | #if (_IS_USED_MULTITHREAD) 60 | 61 | //并行临界区锁; 62 | typedef void* HLocker; 63 | HLocker locker_new(void); 64 | void locker_delete(HLocker locker); 65 | void locker_enter(HLocker locker); 66 | void locker_leave(HLocker locker); 67 | 68 | //同步变量; 69 | typedef void* HCondvar; 70 | #if (_IS_USED_CPP11THREAD) 71 | # define TLockerBox void /* used std::unique_lock */ 72 | # define _TLockerBox_name std::unique_lock 73 | #else 74 | typedef struct{ 75 | HLocker locker; 76 | } TLockerBox; 77 | #endif 78 | HCondvar condvar_new(void); 79 | void condvar_delete(HCondvar cond); 80 | void condvar_wait(HCondvar cond,TLockerBox* lockerBox); 81 | void condvar_signal(HCondvar cond); 82 | void condvar_broadcast(HCondvar cond); 83 | 84 | void this_thread_yield(void); 85 | 86 | //parallel run 87 | typedef void (*TThreadRunCallBackProc)(int threadIndex,void* workData); 88 | void thread_parallel(int threadCount,TThreadRunCallBackProc threadProc,void* workData, 89 | int isUseThisThread,int threadIndexOffset); 90 | 91 | #endif //_IS_USED_MULTITHREAD 92 | #endif //parallel_import_h 93 | 
--------------------------------------------------------------------------------
/src-tauri/src/utils/wincred.rs:
--------------------------------------------------------------------------------
1 | use anyhow::Context;
2 | use windows::{
3 |     core::PWSTR,
4 |     Win32::{
5 |         Foundation::FILETIME,
6 |         Security::Credentials::{
7 |             CredDeleteW, CredFree, CredReadW, CredWriteW, CREDENTIALW, CRED_FLAGS,
8 |             CRED_PERSIST_LOCAL_MACHINE, CRED_TYPE_GENERIC,
9 |         },
10 |     },
11 | };
12 | 
13 | use super::error::TAResult;
14 | #[tauri::command]
15 | pub fn wincred_write(target: &str, token: &str, comment: &str) -> TAResult<()> {
16 |     let mut comment = comment.encode_utf16().collect::<Vec<u16>>();
17 |     comment.push(0); // Null-terminate the string
18 |     let mut target_name = target.encode_utf16().collect::<Vec<u16>>();
19 |     let token_utf16 = token.encode_utf16().collect::<Vec<u16>>();
20 |     let token_bytes = token_utf16
21 |         .iter()
22 |         .flat_map(|c| {
23 |             let bytes = c.to_ne_bytes();
24 |             [bytes[0], bytes[1]]
25 |         })
26 |         .collect::<Vec<u8>>();
27 |     target_name.push(0); // Null-terminate the string
28 |     let credential = CREDENTIALW {
29 |         Flags: CRED_FLAGS(0),
30 |         Type: CRED_TYPE_GENERIC,
31 |         TargetName: PWSTR(target_name.as_mut_ptr()),
32 |         Comment: PWSTR(comment.as_mut_ptr()),
33 |         LastWritten: FILETIME {
34 |             dwLowDateTime: 0,
35 |             dwHighDateTime: 0,
36 |         },
37 |         CredentialBlobSize: token_bytes.len() as u32,
38 |         CredentialBlob: token_bytes.as_ptr() as *mut u8,
39 |         Persist: CRED_PERSIST_LOCAL_MACHINE,
40 |         AttributeCount: 0,
41 |         Attributes: std::ptr::null_mut(),
42 |         TargetAlias: PWSTR(std::ptr::null_mut()),
43 |         UserName: PWSTR(target_name.as_mut_ptr()),
44 |     };
45 |     unsafe { CredWriteW(&credential, 0) }
46 |         .map_err(|e| anyhow::anyhow!(e))
47 |         .context("WRITE_CRED_ERR")?;
48 |     Ok(())
49 | }
50 | 
51 | #[tauri::command]
52 | pub fn wincred_read(target: &str) -> TAResult<String> {
53 |     let mut target_name = target.encode_utf16().collect::<Vec<u16>>();
54 |     target_name.push(0); // Null-terminate the string
55 |     let mut credential_ptr: *mut CREDENTIALW = std::ptr::null_mut();
56 |     unsafe {
57 |         CredReadW(
58 |             PWSTR(target_name.as_mut_ptr()),
59 |             CRED_TYPE_GENERIC,
60 |             None,
61 |             &mut credential_ptr,
62 |         )
63 |     }
64 |     .map_err(|e| anyhow::anyhow!(e))
65 |     .context("READ_CRED_ERR")?;
66 |     let credential = unsafe { &*credential_ptr };
67 |     let token = unsafe {
68 |         std::slice::from_raw_parts(
69 |             credential.CredentialBlob,
70 |             credential.CredentialBlobSize as usize,
71 |         )
72 |         .to_vec()
73 |     };
74 |     let token_16 = token
75 |         .chunks(2)
76 |         .map(|chunk| u16::from_ne_bytes([chunk[0], chunk.get(1).copied().unwrap_or(0)]))
77 |         .collect::<Vec<u16>>();
78 |     unsafe { CredFree(credential_ptr as *const std::ffi::c_void) };
79 |     Ok(String::from_utf16(&token_16)
80 |         .map_err(|e| anyhow::anyhow!(e))
81 |         .context("READ_CRED_ERR")?)
82 | }
83 | 
84 | #[tauri::command]
85 | pub fn wincred_delete(target: &str) -> TAResult<()> {
86 |     let mut target_name = target.encode_utf16().collect::<Vec<u16>>();
87 |     target_name.push(0); // Null-terminate the string
88 |     unsafe { CredDeleteW(PWSTR(target_name.as_mut_ptr()), CRED_TYPE_GENERIC, None) }
89 |         .map_err(|e| anyhow::anyhow!(e))
90 |         .context("DELETE_CRED_ERR")?;
91 |     Ok(())
92 | }
93 | 
--------------------------------------------------------------------------------
/src-tauri/libs/hdiff-sys/HDiff/private_diff/libdivsufsort/divsufsort64.h:
--------------------------------------------------------------------------------
1 | /*
2 |  * divsufsort64.h for libdivsufsort64
3 |  * Copyright (c) 2003-2008 Yuta Mori All Rights Reserved.
4 | * 5 | * Permission is hereby granted, free of charge, to any person 6 | * obtaining a copy of this software and associated documentation 7 | * files (the "Software"), to deal in the Software without 8 | * restriction, including without limitation the rights to use, 9 | * copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | * copies of the Software, and to permit persons to whom the 11 | * Software is furnished to do so, subject to the following 12 | * conditions: 13 | * 14 | * The above copyright notice and this permission notice shall be 15 | * included in all copies or substantial portions of the Software. 16 | * 17 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 18 | * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 19 | * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 20 | * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 21 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 22 | * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 23 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 24 | * OTHER DEALINGS IN THE SOFTWARE. 25 | */ 26 | 27 | #ifndef _DIVSUFSORT64_H 28 | #define _DIVSUFSORT64_H 1 29 | 30 | #if defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) 31 | # include //for uint8_t,int32_t 32 | #else 33 | # if (_MSC_VER >= 1300) 34 | typedef unsigned __int8 uint8_t; 35 | typedef signed __int32 int32_t; 36 | # else 37 | typedef unsigned char uint8_t; 38 | typedef signed int int32_t; 39 | # endif 40 | #endif 41 | 42 | #ifdef __cplusplus 43 | extern "C" { 44 | #endif /* __cplusplus */ 45 | #ifndef PRId32 46 | # define PRId32 "d" 47 | #endif 48 | #ifdef _MSC_VER 49 | typedef signed __int64 llong_t; 50 | # ifndef PRId64 51 | # define PRId64 "I64d" 52 | # endif 53 | #else 54 | typedef signed long long llong_t; 55 | # ifndef PRId64 56 | # define PRId64 "lld" 57 | # endif 58 | #endif 59 | 60 | #ifndef DIVSUFSORT_API 61 | # ifdef DIVSUFSORT_BUILD_DLL 62 | # define DIVSUFSORT_API 63 | # else 64 | # define DIVSUFSORT_API 65 | # endif 66 | #endif 67 | 68 | /*- Datatypes -*/ 69 | #ifndef SAUCHAR_T 70 | #define SAUCHAR_T 71 | typedef uint8_t sauchar_t; 72 | #endif /* SAUCHAR_T */ 73 | #ifndef SAINT_T 74 | #define SAINT_T 75 | typedef int32_t saint_t; 76 | #endif /* SAINT_T */ 77 | #ifndef SAIDX64_T 78 | #define SAIDX64_T 79 | typedef llong_t saidx64_t; 80 | #endif /* SAIDX64_T */ 81 | 82 | 83 | /*- Prototypes -*/ 84 | 85 | /** 86 | * Constructs the suffix array of a given string. 87 | * @param T[0..n-1] The input string. 88 | * @param SA[0..n-1] The output array of suffixes. 89 | * @param n The length of the given string. 90 | * @return 0 if no error occurred, -1 or -2 otherwise. 91 | */ 92 | DIVSUFSORT_API 93 | saint_t 94 | divsufsort64(const sauchar_t *T, saidx64_t *SA, saidx64_t n,int threadNum); 95 | 96 | /** 97 | * Returns the version of the divsufsort library. 98 | * @return The version number string. 
99 | */ 100 | DIVSUFSORT_API 101 | const char * 102 | divsufsort64_version(void); 103 | 104 | #ifdef __cplusplus 105 | } /* extern "C" */ 106 | #endif /* __cplusplus */ 107 | 108 | #endif /* _DIVSUFSORT64_H */ 109 | -------------------------------------------------------------------------------- /tests/offline-update.mjs: -------------------------------------------------------------------------------- 1 | import { 2 | verifyFiles, 3 | verifyFilesRemoved, 4 | cleanupTestDir, 5 | getTestDir, 6 | printLogFileIfExists, 7 | FLAGS, 8 | } from './utils.mjs'; 9 | import 'zx/globals'; 10 | import { $, usePwsh } from 'zx'; 11 | usePwsh(); 12 | 13 | async function test() { 14 | const testDir = getTestDir('offline-update'); 15 | const installerV1 = './fixtures/test-app-v1.exe'; 16 | const installerV2 = './fixtures/test-app-v2.exe'; 17 | 18 | console.log(chalk.blue('=== Offline Update Test ===')); 19 | console.log(`Test directory: ${testDir}`); 20 | 21 | try { 22 | // 步骤1: 安装v1 23 | console.log('Installing v1...'); 24 | let result; 25 | try { 26 | result = await $`${installerV1} ${FLAGS} -D ${testDir}`.timeout('3m').quiet(); 27 | } catch (error) { 28 | if (error.message && error.message.includes('timed out')) { 29 | console.error(chalk.red('V1 installation timed out after 3 minutes')); 30 | await printLogFileIfExists(); 31 | } 32 | throw error; 33 | } 34 | if (result.exitCode !== 0) { 35 | throw new Error( 36 | `V1 installation failed with exit code ${result.exitCode}`, 37 | ); 38 | } 39 | 40 | // 步骤2: 使用v2包进行更新 41 | console.log('Updating to v2...'); 42 | try { 43 | result = await $`${installerV2} ${FLAGS} -D ${testDir}`.timeout('3m').quiet(); 44 | } catch (error) { 45 | if (error.message && error.message.includes('timed out')) { 46 | console.error(chalk.red('Update to v2 timed out after 3 minutes')); 47 | await printLogFileIfExists(); 48 | } 49 | throw error; 50 | } 51 | if (result.exitCode !== 0) { 52 | throw new Error(`Update to v2 failed with exit code ${result.exitCode}`); 53 | } 54 | 55 | // 验证v2文件存在 56 | const expectedFiles = [ 57 | { path: 'app.exe', contains: 'APP_V2' }, 58 | { path: 'config.json', contains: '"version": "2.0.0"' }, 59 | { path: 'feature.dll', size: 30720 }, 60 | { path: 'data/assets.dat', size: 15360 }, 61 | { path: 'data/new-assets.dat', size: 5120 }, 62 | { path: 'updater.exe' }, 63 | ]; 64 | 65 | console.log('Verifying v2 files...'); 66 | const verification = await verifyFiles(testDir, expectedFiles); 67 | 68 | // 验证v1独有文件被删除 69 | const removedFiles = ['readme.txt']; 70 | const removalVerification = await verifyFilesRemoved(testDir, removedFiles); 71 | 72 | // 输出结果 73 | const allPassed = 74 | verification.failed.length === 0 && 75 | removalVerification.failed.length === 0; 76 | 77 | if (allPassed) { 78 | console.log(chalk.green('✓ Update completed successfully')); 79 | console.log( 80 | chalk.gray(` New/Updated files: ${verification.passed.join(', ')}`), 81 | ); 82 | console.log( 83 | chalk.gray(` Removed files: ${removalVerification.passed.join(', ')}`), 84 | ); 85 | } else { 86 | console.error(chalk.red('✗ Update verification failed:')); 87 | verification.failed.forEach((msg) => 88 | console.error(chalk.red(` - ${msg}`)), 89 | ); 90 | removalVerification.failed.forEach((msg) => 91 | console.error(chalk.red(` - ${msg}`)), 92 | ); 93 | process.exit(1); 94 | } 95 | } catch (error) { 96 | console.error(chalk.red('Test failed:'), error.message); 97 | process.exit(1); 98 | } finally { 99 | await cleanupTestDir(testDir); 100 | } 101 | } 102 | 103 | test(); 104 | 
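For reference, a minimal sketch of calling the `divsufsort64` prototype declared in `divsufsort64.h` further above. This is hypothetical illustration code, not part of the repository; the include path is an assumption, `threadNum = 1` is taken to mean a single worker thread in this vendored copy, and the return-code check follows the header's documented contract of `0` on success.

```cpp
// Hypothetical example of building a suffix array with the vendored divsufsort64.
#include <cstdio>
#include <cstring>
#include <vector>
#include "divsufsort64.h" // adjust the include path as needed

int main() {
    const char* text = "mississippi";
    saidx64_t n = (saidx64_t)std::strlen(text);

    // One suffix-array slot per input byte.
    std::vector<saidx64_t> sa((size_t)n);

    // The trailing argument is the worker-thread count used by this vendored copy.
    saint_t rc = divsufsort64((const sauchar_t*)text, sa.data(), n, 1);
    if (rc != 0) {
        std::fprintf(stderr, "divsufsort64 failed: %d\n", (int)rc);
        return 1;
    }
    // SA[i] is the starting position of the i-th smallest suffix of `text`.
    for (saidx64_t i = 0; i < n; ++i) {
        std::printf("SA[%lld] = %lld\n", (long long)i, (long long)sa[(size_t)i]);
    }
    return 0;
}
```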
-------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/HDiff/private_diff/bytes_rle.h: -------------------------------------------------------------------------------- 1 | //bytes_rle.h 2 | //快速解压的一个通用字节流压缩rle算法. 3 | // 4 | /* 5 | The MIT License (MIT) 6 | Copyright (c) 2012-2021 HouSisong 7 | 8 | Permission is hereby granted, free of charge, to any person 9 | obtaining a copy of this software and associated documentation 10 | files (the "Software"), to deal in the Software without 11 | restriction, including without limitation the rights to use, 12 | copy, modify, merge, publish, distribute, sublicense, and/or sell 13 | copies of the Software, and to permit persons to whom the 14 | Software is furnished to do so, subject to the following 15 | conditions: 16 | 17 | The above copyright notice and this permission notice shall be 18 | included in all copies of the Software. 19 | 20 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 21 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 22 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 23 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 24 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 25 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 26 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 27 | OTHER DEALINGS IN THE SOFTWARE. 28 | */ 29 | 30 | #ifndef __BYTES_RLE_H_ 31 | #define __BYTES_RLE_H_ 32 | #include 33 | #include 34 | #include "../../../hpatch-sys/HPatch/patch_types.h" //hpatch_TStreamInput 35 | namespace hdiff_private{ 36 | 37 | #ifndef kMaxBytesRleLen 38 | static const size_t kMaxBytesRleLen =(size_t)(((size_t)1<<31)-1); 39 | #endif 40 | 41 | enum TRleParameter{ kRle_bestSize=1, kRle_default=3, kRle_bestUnRleSpeed=32 }; 42 | 43 | void bytesRLE_save(std::vector& out_code, 44 | const unsigned char* src,const unsigned char* src_end, 45 | int rle_parameter=kRle_default); 46 | void bytesRLE_save(std::vector& out_code, 47 | const hpatch_TStreamInput* src,//sequential read 48 | int rle_parameter=kRle_default); 49 | 50 | void bytesRLE_save(std::vector& out_ctrlBuf,std::vector& out_codeBuf, 51 | const unsigned char* src,const unsigned char* src_end,int rle_parameter); 52 | void bytesRLE_save(std::vector& out_ctrlBuf,std::vector& out_codeBuf, 53 | const hpatch_TStreamInput* src,//sequential read 54 | int rle_parameter); 55 | 56 | struct TSingleStreamRLE0{ 57 | std::vector fixed_code; 58 | std::vector uncompressData; 59 | hpatch_StreamPos_t len0; 60 | inline TSingleStreamRLE0():len0(0){} 61 | inline hpatch_StreamPos_t curCodeSize() const { return maxCodeSize(0,0); } 62 | hpatch_StreamPos_t maxCodeSize(const unsigned char* appendData,const unsigned char* appendData_end) const; 63 | hpatch_StreamPos_t maxCodeSize(const hpatch_TStreamInput* appendData) const;//sequential read 64 | hpatch_StreamPos_t maxCodeSizeByZeroLen(hpatch_StreamPos_t appendZeroLen) const; 65 | void append(const unsigned char* appendData,const unsigned char* appendData_end); 66 | void append(const hpatch_TStreamInput* appendData);//sequential read 67 | void appendByZeroLen(hpatch_StreamPos_t appendZeroLen); 68 | void finishAppend(); 69 | inline void clear() { fixed_code.clear(); uncompressData.clear(); len0=0; } 70 | }; 71 | 72 | }//namespace hdiff_private 73 | #endif //__BYTES_RLE_H_ 74 | -------------------------------------------------------------------------------- /src-tauri/src/installer/registry.rs: 
-------------------------------------------------------------------------------- 1 | use crate::utils::{ 2 | error::{IntoTAResult, TAResult}, 3 | uac::check_elevated, 4 | }; 5 | use anyhow::{Context, Result}; 6 | use serde_json::Value; 7 | 8 | #[derive(serde::Deserialize, serde::Serialize, Debug, Clone)] 9 | pub struct WriteRegistryParams { 10 | pub reg_name: String, 11 | pub name: String, 12 | pub version: String, 13 | pub exe: String, 14 | pub source: String, 15 | pub uninstaller: String, 16 | pub metadata: String, 17 | pub size: u64, 18 | pub publisher: String, 19 | } 20 | 21 | pub async fn write_registry_with_params(params: WriteRegistryParams) -> TAResult<()> { 22 | write_registry( 23 | params.reg_name, 24 | params.name, 25 | params.version, 26 | params.exe, 27 | params.source, 28 | params.uninstaller, 29 | params.metadata, 30 | params.size, 31 | params.publisher, 32 | ) 33 | .await 34 | } 35 | 36 | #[tauri::command] 37 | pub async fn write_registry( 38 | reg_name: String, 39 | name: String, 40 | version: String, 41 | exe: String, 42 | source: String, 43 | uninstaller: String, 44 | metadata: String, 45 | size: u64, 46 | publisher: String, 47 | ) -> TAResult<()> { 48 | write_registry_raw( 49 | reg_name, 50 | name, 51 | version, 52 | exe, 53 | source, 54 | uninstaller, 55 | metadata, 56 | size, 57 | publisher, 58 | ) 59 | .await 60 | .into_ta_result() 61 | } 62 | pub async fn write_registry_raw( 63 | reg_name: String, 64 | name: String, 65 | version: String, 66 | exe: String, 67 | source: String, 68 | uninstaller: String, 69 | metadata: String, 70 | size: u64, 71 | publisher: String, 72 | ) -> Result<()> { 73 | let elevated = check_elevated().unwrap_or(false); 74 | let hive = if elevated { 75 | windows_registry::LOCAL_MACHINE 76 | } else { 77 | windows_registry::CURRENT_USER 78 | }; 79 | 80 | let key_path = format!("SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{reg_name}"); 81 | 82 | let key = hive.create(&key_path).context("OPEN_REG_ERR")?; 83 | { 84 | key.set_string("DisplayName", &name)?; 85 | key.set_string("DisplayVersion", &version)?; 86 | key.set_string("UninstallString", &uninstaller)?; 87 | key.set_string("InstallLocation", &source)?; 88 | key.set_string("DisplayIcon", &exe)?; 89 | key.set_string("Publisher", &publisher)?; 90 | key.set_u32("EstimatedSize", (size as u32) / 1024)?; 91 | key.set_u32("NoModify", 1u32)?; 92 | key.set_u32("NoRepair", 1u32)?; 93 | key.set_string("InstallerMeta", &metadata)?; 94 | Ok::<(), anyhow::Error>(()) 95 | } 96 | .context("WRITE_REG_ERR") 97 | } 98 | 99 | #[tauri::command] 100 | pub async fn read_uninstall_metadata(reg_name: String) -> TAResult { 101 | let key_path = format!("SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{reg_name}"); 102 | 103 | // First try HKLM, if not exist, try HKCU 104 | let key = windows_registry::LOCAL_MACHINE 105 | .options() 106 | .read() 107 | .open(&key_path) 108 | .or_else(|_| { 109 | windows_registry::CURRENT_USER 110 | .options() 111 | .read() 112 | .open(&key_path) 113 | }) 114 | .context("GET_INSTALLMETA_ERR")?; 115 | 116 | let metadata: String = key 117 | .get_string("InstallerMeta") 118 | .context("GET_INSTALLMETA_ERR")?; 119 | 120 | let metadata: Value = serde_json::from_str(&metadata).context("GET_INSTALLMETA_ERR")?; 121 | Ok(metadata) 122 | } 123 | -------------------------------------------------------------------------------- /src-tauri/Cargo.toml: -------------------------------------------------------------------------------- 1 | cargo-features = ["profile-rustflags", 
"trim-paths"] 2 | [package] 3 | name = "kachina-installer" 4 | version = "0.1.0" 5 | description = "Unified installer and upgrader" 6 | authors = ["YuehaiTeam"] 7 | edition = "2021" 8 | default-run = "kachina-installer" 9 | 10 | [build-dependencies] 11 | tauri-build = { version = "2", features = [] } 12 | 13 | [dependencies] 14 | tauri = { version = "2", default-features = false, features = [ 15 | "wry", 16 | "devtools", 17 | ] } 18 | tauri-utils = "2" 19 | serde = { version = "1", features = ["derive"] } 20 | serde_json = { version = "1", features = ["preserve_order"] } 21 | async-compression = { version = "0.4", features = ["tokio", "zstd", "zstdmt"] } 22 | tokio = { version = "1", features = [ 23 | "rt", 24 | "rt-multi-thread", 25 | "sync", 26 | "fs", 27 | "io-util", 28 | "process", 29 | "macros", 30 | "net", 31 | ] } 32 | chksum-md5 = { version = "0.1", features = ["async-runtime-tokio"] } 33 | sha2 = "0.10" 34 | reqwest = { version = "0.12", default-features = false, features = [ 35 | "http2", 36 | "json", 37 | "native-tls", 38 | "native-tls-alpn", 39 | "charset", 40 | "gzip", 41 | "zstd", 42 | "stream", 43 | "system-proxy" 44 | ] } 45 | futures = "0.3" 46 | tokio-util = { version = "0.7", features = ["io-util"] } 47 | async-walkdir = "2.0" 48 | windows = { version = "0.61.1", features = [ 49 | "Win32_System_Console", 50 | "Win32_Security", 51 | "Win32_System_Registry", 52 | "Win32_Security_Authorization", 53 | "Win32_Security_Credentials", 54 | "Security", 55 | "Win32_System_ProcessStatus", 56 | "Win32_System_Diagnostics_ToolHelp", 57 | "Win32_UI_WindowsAndMessaging", 58 | "Win32_Graphics_Gdi", 59 | "Win32_UI_Shell", 60 | ] } 61 | rfd = { version = "0.15", default-features = false, features = [ 62 | "tokio", 63 | "common-controls-v6", 64 | ] } 65 | nt_version = "0.1" 66 | lazy_static = "1.5" 67 | open = { version = "5.3.1", features = ["shellexecute-on-windows"] } 68 | mslnk = "=0.1" 69 | twox-hash = "2.1.0" 70 | pin-project = "1.1.7" 71 | fmmap = { version = "0.4", features = ["tokio"] } 72 | clap = { version = "4.5", features = ["derive"] } 73 | chrono = "0.4.39" 74 | uuid = { version = "1.16", features = ["v4"] } 75 | hpatch-sys = { path = "./libs/hpatch-sys" } 76 | hdiff-sys = { path = "./libs/hdiff-sys" } 77 | ignore = "0.4.23" 78 | num_cpus = "1.16.0" 79 | indicatif = { version = "0.17.9", features = ["tokio"] } 80 | console = "0.15.10" 81 | rcedit = { version = "0.1.0", git = "https://github.com/Devolutions/rcedit-rs.git" } 82 | windows-registry = "0.5" 83 | sentry = { version = "0.37.0", features = [ 84 | "contexts", 85 | "contexts", 86 | "transport", 87 | "reqwest", 88 | "native-tls", 89 | ], default-features = false } 90 | tracing-subscriber = { version = "0.3", features = ["fmt", "registry"] } 91 | sentry-tracing = "0.37" 92 | zip = { git = "https://github.com/xytoki/zip2.git", default-features = false, features = [ 93 | "deflate", 94 | "deflate64", 95 | "zstd", 96 | ] } 97 | anyhow = "1.0" 98 | reqwest-middleware = "0.4" 99 | tracing = "0.1" 100 | whoami = "1.6.0" 101 | win32-version-info = "0.2.0" 102 | multer = "3.1.0" 103 | bytes = "1.10.1" 104 | url = "2.5" 105 | base64 = "0.22.1" 106 | 107 | [[bin]] 108 | name = "kachina-installer" 109 | path = "src/main.rs" 110 | 111 | [[bin]] 112 | name = "kachina-builder" 113 | path = "src/builder/main.rs" 114 | 115 | [profile.dev] 116 | incremental = true # Compile your binary in smaller steps. 117 | rustflags = ["-Zthreads=16"] # Better compile performance. 
118 | 
119 | [profile.release]
120 | debug = true
121 | split-debuginfo = "packed"
122 | codegen-units = 1 # Allows LLVM to perform better optimization.
123 | lto = true # Enables link-time-optimizations.
124 | opt-level = "s" # Prioritizes small binary size. Use `3` if you prefer speed.
125 | panic = "abort" # Higher performance by disabling panic handlers.
126 | trim-paths = "all" # Removes potentially privileged information from your binaries.
127 | rustflags = ["-Zthreads=16"] # Better compile performance.
128 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Kachina Installer
2 | 
3 | A fast, general-purpose installer and updater.
4 | 
5 | - Offline installation
6 |   - Multi-threaded installation for speed
7 |   - Post-install verification to avoid corrupted installs
8 | - Online installation
9 |   - Chunked downloads, decompressing while downloading
10 | - Online updates
11 |   - Automatic file diffing for incremental updates
12 |   - File-level differential updates powered by `HDiffPatch`
13 |   - Detects files in use and can terminate the owning processes
14 |   - All of the above only requires a single offline-package URL; no extra deployment needed
15 | - Runtime installation
16 |   - Automatically installs .NET Runtime/Desktop and VCRedist
17 |   - Automatically installs WebView2 so that `Tauri` can run
18 | - Hybrid installation
19 |   - Installs the latest version directly from an old offline package plus online updates
20 | - Uninstallation
21 |   - Can remove only the files shipped in the package; user data is kept by default
22 | 
23 | 
24 | #### Usage
25 | 1. Write `kachina.config.json` as the installer's configuration file
26 | ```jsonc
27 | {
28 |   // Download URL of the offline package; must stay fixed
29 |   "source": "https://example.com/Kachina.Install.exe",
30 |   // Application name shown in the registry
31 |   "appName": "Kachina Installer",
32 |   // Publisher shown in the registry
33 |   "publisher": "YuehaiTeam",
34 |   // Application ID used in the registry
35 |   "regName": "Kachina",
36 |   // Main executable name
37 |   "exeName": "Kachina.exe",
38 |   // Uninstaller executable name
39 |   "uninstallName": "Kachina.uninst.exe",
40 |   // Updater executable name
41 |   "updaterName": "Kachina.update.exe",
42 |   // Default install path, relative to Program Files
43 |   "programFilesPath": "KachinaInstaller",
44 |   // Title shown in the GUI
45 |   "title": "Kachina Installer",
46 |   // Subtitle shown in the GUI
47 |   "description": "快速多功能的安装器",
48 |   // Window title
49 |   "windowTitle": "Kachina Installer 安装程序",
50 |   // User-data directories or files to remove on uninstall
51 |   "userDataPath": ["${INSTALL_PATH}/User"],
52 |   // Directories to skip during updates if they already exist and are not empty
53 |   "ignoreFolderPath": ["${INSTALL_PATH}/cache"],
54 |   // Additional directories or files to remove on uninstall
55 |   "extraUninstallPath": ["${INSTALL_PATH}/log"],
56 |   // UAC strategy
57 |   // prefer-admin: request UAC unless installing under %User%, %AppData%, %Documents%, %Desktop% or %Downloads%
58 |   // prefer-user: request UAC only when the user cannot write to the target directory
59 |   // force: always request UAC
60 |   "uacStrategy": "prefer-admin",
61 |   // Runtimes to install; the list below is what is currently supported
62 |   "runtimes": [
63 |     // .NET version numbers may be written as 8 / 8.0 / 8.0.13
64 |     "Microsoft.DotNet.DesktopRuntime.8",
65 |     "Microsoft.DotNet.Runtime.8",
66 |     // Only the following two VCRedist identifiers are supported
67 |     "Microsoft.VCRedist.2015+.x64",
68 |     "Microsoft.VCRedist.2015+.x86"
69 |   ]
70 | }
71 | ```
72 | 2. Build the updater, e.g. for bundling inside a portable build. The updater does not need to be packed into the offline package.
73 | ```bat
74 | kachina-builder.exe pack -c kachina.config.json -o Kachina.update.exe
75 | ```
76 | 3. Generate the metadata and compress the application files
77 | ```bat
78 | kachina-builder.exe gen -j 8 -i {AppDir} -m metadata.json -o hashed -r {AppId} -t {Version} -u Kachina.update.exe
79 | ```
80 | 4. Build the offline package
81 | ```bat
82 | kachina-builder.exe pack -c kachina.config.json -m metadata.json -d hashed -o Kachina.Install.exe
83 | ```
84 | 5. Deploy the offline package to your server and make sure it can be downloaded from the URL in the config. In the current version you do not need to deploy the generated `hashed` folder or the metadata file; they are only used temporarily during the build.
85 | 6. The updater produced in step 2 can now be used directly as an online installer.
86 | 
87 | #### Multiple installation sources
88 | If you want users to choose the installation source themselves, you can specify multiple sources; when the installer is opened manually, a source selector will then be shown above the path selection.
89 | 
90 | Example configuration:
91 | ```
92 | {
93 |   "source": [
94 |     {
95 |       "id": "stable",
96 |       "name": "正式版",
97 |       "uri": "https://example.com/Kachina.Install.exe"
98 |     },
99 |     {
100 |       "id": "beta",
101 |       "name": "测试版",
102 |       "uri": "https://example.com/Kachina.Install.Beta.exe"
103 |     }
104 |   ]
105 | }
106 | ```
107 | 
108 | #### Mirror酱 (MirrorChyan) support
109 | [Mirror酱](https://mirrorchyan.com) is an independent third-party software distribution platform that provides paid download acceleration. `kachina-installer` integrates Mirror酱's API, so users can update the software through Mirror酱. For example, combined with the source selection above, users can choose between updating from your own server or through Mirror酱.
110 | 
111 | 
112 | To use it, set `source` to `mirrorc://{rid}?channel={stable|beta|alpha}`. You also need to place the previously generated `.metadata.json` into the archive uploaded to Mirror酱. Example upload layout:
113 | ```
114 | upload_to_mirrorc.zip
115 | - .metadata.json
116 | - Main.exe
117 | - Main.update.exe
118 | ```
119 | This layout is also supported:
120 | ```
121 | upload_to_mirrorc.zip
122 | - App/.metadata.json
123 | - App/Main.exe
124 | - App/Main.update.exe
125 | ```
126 | 
127 | Tip: Mirror酱 uses its own file-level incremental update mechanism, so when it is selected as the update source, `kachina-installer`'s built-in version comparison and binary patch-level incremental updates are not available.
128 | 
129 | 
130 | ## Technical details
131 | The offline package is an addressable file that contains the installer binary, an index, the configuration, the metadata, the program files and the patch files. When the installer runs without embedded resources, it addresses the offline package at the configured URL remotely, reads the index from the file header, and fetches only the required content via HTTP 206 partial downloads. If the installer does have embedded resources, it compares the online and local versions, prefers the local resources, and, where possible, extracts the local resources first and then applies the newer patches from the server to reduce bandwidth usage.
132 | 
133 | The installer is not tightly coupled to the dfs server: any server that can serve the offline package over HTTP can act as an update server. In this project, dfs is only used as an API for obtaining download URLs.
134 | 
135 | For more technical details, see [![Ask DeepWiki](https://deepwiki.com/badge.svg)](https://deepwiki.com/YuehaiTeam/kachina-installer), whose write-up we think is quite good.
--------------------------------------------------------------------------------
/src-tauri/libs/hdiff-sys/HDiff/private_diff/limit_mem_diff/digest_matcher.h:
--------------------------------------------------------------------------------
1 | //digest_matcher.h
2 | //用摘要匹配的办法代替后缀数组的匹配,匹配效果比后缀数差,但内存占用少;
3 | //用adler计算数据的摘要信息,以便于滚动匹配.
4 | //
5 | /*
6 |  The MIT License (MIT)
7 |  Copyright (c) 2012-2017 HouSisong
8 | 
9 |  Permission is hereby granted, free of charge, to any person
10 |  obtaining a copy of this software and associated documentation
11 |  files (the "Software"), to deal in the Software without
12 |  restriction, including without limitation the rights to use,
13 |  copy, modify, merge, publish, distribute, sublicense, and/or sell
14 |  copies of the Software, and to permit persons to whom the
15 |  Software is furnished to do so, subject to the following
16 |  conditions:
17 | 
18 |  The above copyright notice and this permission notice shall be
19 |  included in all copies of the Software.
20 | 
21 |  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
22 |  EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
23 |  OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
24 |  NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
25 |  HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
26 |  WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
27 |  FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
28 |  OTHER DEALINGS IN THE SOFTWARE.
29 | */ 30 | 31 | #ifndef digest_matcher_h 32 | #define digest_matcher_h 33 | #include "bloom_filter.h" 34 | #include "covers.h" 35 | #include "adler_roll.h" 36 | #include "../mem_buf.h" 37 | namespace hdiff_private{ 38 | 39 | typedef uint64_t adler_uint_t; 40 | #define adler_start fast_adler64_start 41 | #define adler_roll fast_adler64_roll 42 | typedef uint64_t adler_hash_t; 43 | static inline adler_hash_t adler_to_hash(const uint64_t x){ return x; } 44 | 45 | //typedef adler128_t adler_uint_t; 46 | //#define adler_start fast_adler128_start 47 | //#define adler_roll fast_adler128_roll 48 | //typedef uint64_t adler_hash_t; 49 | //static inline adler_hash_t adler_to_hash(const adler128_t& x){ return x.adler^x.sum; } 50 | //static inline bool operator !=(const adler128_t& x,const adler128_t& y){ 51 | // return (x.adler!=y.adler)||(x.sum!=y.sum);} 52 | //static inline bool operator <(const adler128_t& x,const adler128_t& y){ 53 | // if (x.adler!=y.adler) return (x.adlerread error or kMatchBlockSize error; 64 | TDigestMatcher(const hpatch_TStreamInput* oldData,const hpatch_TStreamInput* newData, 65 | size_t kMatchBlockSize,const hdiff_TMTSets_s& mtsets); 66 | void search_cover(hpatch_TOutputCovers* out_covers); 67 | ~TDigestMatcher(); 68 | private: 69 | TDigestMatcher(const TDigestMatcher &); //empty 70 | TDigestMatcher &operator=(const TDigestMatcher &); //empty 71 | private: 72 | const hpatch_TStreamInput* m_oldData; 73 | const hpatch_TStreamInput* m_newData; 74 | std::vector m_blocks; 75 | TBloomFilter m_filter; 76 | std::vector m_sorted_limit; 77 | std::vector m_sorted_larger; 78 | bool m_isUseLargeSorted; 79 | const hdiff_TMTSets_s m_mtsets; 80 | TAutoMem m_mem; 81 | size_t m_newCacheSize; 82 | size_t m_oldCacheSize; 83 | size_t m_oldMinCacheSize; 84 | size_t m_backupCacheSize; 85 | size_t m_kMatchBlockSize; 86 | 87 | void getDigests(); 88 | size_t getSearchThreadNum()const; 89 | void _search_cover(const hpatch_TStreamInput* newData,hpatch_StreamPos_t newOffset, 90 | hpatch_TOutputCovers* out_covers,unsigned char* pmem, 91 | void* dataLocker=0,void* newDataLocker=0); 92 | public: //private for multi-thread 93 | void _search_cover_thread(hpatch_TOutputCovers* out_covers,unsigned char* pmem,void* mt_data); 94 | }; 95 | 96 | }//namespace hdiff_private 97 | #endif 98 | -------------------------------------------------------------------------------- /src-tauri/src/builder/metadata.rs: -------------------------------------------------------------------------------- 1 | use std::path::{Path, PathBuf}; 2 | 3 | use futures::StreamExt; 4 | 5 | use crate::utils::{hash::run_hash, metadata::Metadata}; 6 | 7 | pub async fn deep_generate_metadata(source: &PathBuf) -> Result, String> { 8 | let path = Path::new(&source); 9 | if !path.exists() { 10 | return Ok(Vec::new()); 11 | } 12 | let mut entries = async_walkdir::WalkDir::new(source); 13 | let mut files = Vec::new(); 14 | loop { 15 | match entries.next().await { 16 | Some(Ok(entry)) => { 17 | let f = entry.file_type().await; 18 | if f.is_err() { 19 | return Err(format!("Failed to get file type: {:?}", f.err())); 20 | } 21 | let f = f.unwrap(); 22 | if f.is_file() { 23 | let path = entry.path(); 24 | let path = path.to_str(); 25 | if path.is_none() { 26 | return Err("Failed to convert path to string".to_string()); 27 | } 28 | let path = path.unwrap(); 29 | let fin_path = path.replace("\\", "/").replacen( 30 | format!("{}/", source.to_str().unwrap().replace("\\", "/")).as_str(), 31 | "", 32 | 1, 33 | ); 34 | let size = 
entry.metadata().await.unwrap().len(); 35 | files.push(Metadata { 36 | file_name: fin_path, 37 | md5: None, 38 | xxh: None, 39 | size, 40 | }); 41 | } 42 | } 43 | Some(Err(e)) => { 44 | return Err(format!("Failed to read entry: {e:?}")); 45 | } 46 | None => break, 47 | } 48 | } 49 | 50 | let mut joinset = tokio::task::JoinSet::new(); 51 | 52 | for file in files.iter() { 53 | let source = source.clone(); 54 | let mut file = file.clone(); 55 | joinset.spawn(async move { 56 | let real_path = source.join(&file.file_name); 57 | let hash = run_hash("xxh", real_path.to_str().unwrap()).await; 58 | if hash.is_err() { 59 | return Err(hash.err().unwrap()); 60 | } 61 | let hash = hash.unwrap(); 62 | file.xxh = Some(hash); 63 | println!("Hashed: {:?}", file.file_name); 64 | Ok(file) 65 | }); 66 | } 67 | let mut finished_hashes = Vec::new(); 68 | while let Some(res) = joinset.join_next().await { 69 | if let Err(e) = res { 70 | return Err(format!("Failed to run hashing thread: {e:?}")); 71 | } 72 | let res = res.unwrap(); 73 | if let Err(e) = res { 74 | return Err(format!("Failed to finish hashing: {e:?}")); 75 | } 76 | let res = res.unwrap(); 77 | finished_hashes.push(res); 78 | } 79 | Ok(finished_hashes) 80 | } 81 | 82 | pub async fn deep_get_filelist(source: &PathBuf) -> Result, String> { 83 | let path = Path::new(&source); 84 | if !path.exists() { 85 | return Ok(Vec::new()); 86 | } 87 | let mut entries = async_walkdir::WalkDir::new(source); 88 | let mut files = Vec::new(); 89 | loop { 90 | match entries.next().await { 91 | Some(Ok(entry)) => { 92 | let f = entry.file_type().await; 93 | if f.is_err() { 94 | return Err(format!("Failed to get file type: {:?}", f.err())); 95 | } 96 | let f = f.unwrap(); 97 | if f.is_file() { 98 | let path = entry.path(); 99 | let path = path.to_str(); 100 | if path.is_none() { 101 | return Err("Failed to convert path to string".to_string()); 102 | } 103 | let path = path.unwrap(); 104 | let fin_path = path.replace("\\", "/").replacen( 105 | format!("{}/", source.to_str().unwrap().replace("\\", "/")).as_str(), 106 | "", 107 | 1, 108 | ); 109 | files.push(fin_path); 110 | } 111 | } 112 | Some(Err(e)) => { 113 | return Err(format!("Failed to read entry: {e:?}")); 114 | } 115 | None => break, 116 | } 117 | } 118 | Ok(files) 119 | } 120 | -------------------------------------------------------------------------------- /src/api/installFile.ts: -------------------------------------------------------------------------------- 1 | type InstallFileSource = 2 | | { url: string; offset: number; size: number; skip_decompress?: boolean } 3 | | { offset: number; size: number; skip_decompress?: boolean }; 4 | 5 | type InstallFileMode = 6 | | { type: 'Direct'; source: InstallFileSource } 7 | | { type: 'Patch'; source: InstallFileSource; diff_size: number } 8 | | { 9 | type: 'HybridPatch'; 10 | diff: InstallFileSource; 11 | source: InstallFileSource; 12 | }; 13 | 14 | interface InstallFileArgs { 15 | mode: InstallFileMode; 16 | target: string; 17 | xxh?: string; 18 | md5?: string; 19 | clear_installer_index_mark?: boolean; 20 | type: 'InstallFile'; 21 | } 22 | 23 | /** 24 | * @param source - 文件来源(Url 字符串或 Local 对象) 25 | * @param target - 目标路径 26 | * @param diff_size - Patch 模式需要的 diff_size 27 | */ 28 | export function InstallFile( 29 | source: InstallFileSource & { skip_hash?: boolean }, 30 | target: string, 31 | hash: { 32 | xxh?: string; 33 | md5?: string; 34 | }, 35 | diff_size?: number, 36 | clearInstallerIndexMark?: boolean, 37 | ): InstallFileArgs { 38 | let mode: InstallFileMode; 39 
| if (!diff_size) { 40 | mode = { type: 'Direct', source }; 41 | } else { 42 | mode = { type: 'Patch', source, diff_size }; 43 | } 44 | if (source.skip_hash) { 45 | delete hash.xxh; 46 | delete hash.md5; 47 | } 48 | return { 49 | mode, 50 | target, 51 | type: 'InstallFile', 52 | ...hash, 53 | clear_installer_index_mark: clearInstallerIndexMark, 54 | }; 55 | } 56 | 57 | export function hybridPatch( 58 | source: { offset: number; size: number }, 59 | diff: { url: string; offset: number; size: number }, 60 | target: string, 61 | hash: { 62 | xxh?: string; 63 | md5?: string; 64 | }, 65 | ): InstallFileArgs { 66 | const mode: InstallFileMode = { 67 | type: 'HybridPatch', 68 | diff, 69 | source, 70 | }; 71 | 72 | return { mode, target, type: 'InstallFile', ...hash }; 73 | } 74 | 75 | interface InstallMultipartStreamArgs { 76 | url: string; 77 | range: string; 78 | chunks: InstallFileArgs[]; 79 | type: 'InstallMultipartStream'; 80 | } 81 | 82 | interface InstallMultichunkStreamArgs { 83 | url: string; 84 | range: string; 85 | chunks: InstallFileArgs[]; 86 | type: 'InstallMultichunkStream'; 87 | } 88 | 89 | export function getRangeParams(chunks: InstallFileArgs[]): { 90 | total_start: number; 91 | total_end: number; 92 | multipart: string; 93 | ranges: [number, number][]; 94 | } { 95 | let total_start = Infinity; 96 | let total_end = -1; 97 | const ranges: [number, number][] = []; 98 | const multipart: string[] = []; 99 | 100 | for (const chunk of chunks) { 101 | const { offset, size } = chunk.mode.source; 102 | if (offset < total_start) { 103 | total_start = offset; 104 | } 105 | if (offset + size > total_end) { 106 | total_end = offset + size; 107 | } 108 | ranges.push([offset, offset + size - 1]); 109 | multipart.push(`${offset}-${offset + size - 1}`); 110 | } 111 | 112 | return { 113 | total_start, 114 | total_end, 115 | multipart: multipart.join(','), 116 | ranges, 117 | }; 118 | } 119 | 120 | /** 121 | * 安装多部分流文件 - 用于处理服务器支持 multipart/byteranges 的情况 122 | * @param url - 文件 URL 123 | * @param range - HTTP Range 范围,如 "100-200,300-400,500-600" 124 | * @param chunks - 要安装的文件块列表 125 | */ 126 | export function InstallMultipartStream( 127 | url: string, 128 | range: string, 129 | chunks: InstallFileArgs[], 130 | ): InstallMultipartStreamArgs { 131 | return { 132 | url, 133 | range, 134 | chunks, 135 | type: 'InstallMultipartStream', 136 | }; 137 | } 138 | 139 | /** 140 | * 安装多块流文件 - 用于处理非连续块的单一 HTTP Range 请求 141 | * @param url - 文件 URL 142 | * @param range - 总的 HTTP Range 范围,如 "0-1024" 143 | * @param chunks - 要安装的文件块列表,每个块的 offset 字段指定在流中的位置 144 | */ 145 | export function InstallMultichunkStream( 146 | url: string, 147 | range: string, 148 | chunks: InstallFileArgs[], 149 | ): InstallMultichunkStreamArgs { 150 | return { 151 | url, 152 | range, 153 | chunks, 154 | type: 'InstallMultichunkStream', 155 | }; 156 | } 157 | 158 | export async function createMultiInstall( 159 | chunks: InstallFileArgs[], 160 | multipart = true, 161 | getUrl: (ranges: ReturnType) => Promise, 162 | ) { 163 | const ranges = getRangeParams(chunks); 164 | if (multipart) { 165 | return InstallMultipartStream( 166 | await getUrl(ranges), 167 | ranges.multipart, 168 | chunks, 169 | ); 170 | } else { 171 | return InstallMultichunkStream( 172 | await getUrl(ranges), 173 | `${ranges.total_start}-${ranges.total_end}`, 174 | chunks, 175 | ); 176 | } 177 | } 178 | -------------------------------------------------------------------------------- /tests/utils.mjs: 
-------------------------------------------------------------------------------- 1 | import crypto from 'crypto'; 2 | import fs from 'fs-extra'; 3 | import path from 'path'; 4 | import os from 'os'; 5 | 6 | export const dev = !!process.env.DEV; 7 | export const FLAGS = dev ? '-I' : '-S'; 8 | 9 | export function getTestDir(name) { 10 | return path.join(os.tmpdir(), `kachina-test-${name}-${Date.now()}`); 11 | } 12 | 13 | export async function getFileHash(filePath) { 14 | const hash = crypto.createHash('sha256'); 15 | const stream = fs.createReadStream(filePath); 16 | 17 | return new Promise((resolve, reject) => { 18 | stream.on('data', (chunk) => hash.update(chunk)); 19 | stream.on('end', () => resolve(hash.digest('hex'))); 20 | stream.on('error', reject); 21 | }); 22 | } 23 | 24 | export async function verifyFiles(installDir, expectedFiles) { 25 | const results = { passed: [], failed: [] }; 26 | 27 | for (const file of expectedFiles) { 28 | const fullPath = path.join(installDir, file.path); 29 | 30 | // 检查文件是否存在 31 | if (!(await fs.pathExists(fullPath))) { 32 | results.failed.push(`Missing file: ${file.path}`); 33 | continue; 34 | } 35 | 36 | // 验证文件大小(可选) 37 | if (file.size) { 38 | const stats = await fs.stat(fullPath); 39 | if (stats.size !== file.size) { 40 | results.failed.push( 41 | `Size mismatch for ${file.path}: expected ${file.size}, got ${stats.size}`, 42 | ); 43 | continue; 44 | } 45 | } 46 | 47 | // 验证文件内容包含特定字符串(可选) 48 | if (file.contains) { 49 | const content = await fs.readFile(fullPath); 50 | if (!content.includes(file.contains)) { 51 | results.failed.push( 52 | `Content mismatch for ${file.path}: missing "${file.contains}"`, 53 | ); 54 | continue; 55 | } 56 | } 57 | 58 | // 验证文件hash(可选) 59 | if (file.hash) { 60 | const actualHash = await getFileHash(fullPath); 61 | if (actualHash !== file.hash) { 62 | results.failed.push( 63 | `Hash mismatch for ${file.path}: expected ${file.hash}, got ${actualHash}`, 64 | ); 65 | continue; 66 | } 67 | } 68 | 69 | results.passed.push(file.path); 70 | } 71 | 72 | return results; 73 | } 74 | 75 | export async function verifyFilesRemoved(installDir, removedFiles) { 76 | const results = { passed: [], failed: [] }; 77 | 78 | for (const file of removedFiles) { 79 | const fullPath = path.join(installDir, file); 80 | const exists = await fs.pathExists(fullPath); 81 | 82 | if (exists) { 83 | results.failed.push(`File should be removed but exists: ${file}`); 84 | } else { 85 | results.passed.push(file); 86 | } 87 | } 88 | 89 | return results; 90 | } 91 | 92 | export async function verifyUpdaterReplaced(installDir, expectedV2Hash) { 93 | const updaterPath = path.join(installDir, 'updater.exe'); 94 | 95 | if (!(await fs.pathExists(updaterPath))) { 96 | return { success: false, message: 'Updater file not found' }; 97 | } 98 | 99 | // 通过hash比对验证更新器是否为v2版本 100 | const actualHash = await getFileHash(updaterPath); 101 | 102 | if (actualHash === expectedV2Hash) { 103 | return { 104 | success: true, 105 | message: 'Updater successfully self-updated to v2', 106 | }; 107 | } 108 | 109 | return { 110 | success: false, 111 | message: `Updater was not updated to v2. 
Expected hash: ${expectedV2Hash}, got: ${actualHash}`, 112 | }; 113 | } 114 | 115 | export async function cleanupTestDir(testDir) { 116 | try { 117 | await fs.remove(testDir); 118 | console.log(chalk.gray(`Cleaned up: ${testDir}`)); 119 | } catch (error) { 120 | console.warn( 121 | chalk.yellow(`Failed to cleanup ${testDir}: ${error.message}`), 122 | ); 123 | } 124 | } 125 | 126 | export async function waitForServer(url, maxAttempts = 10, interval = 1000) { 127 | for (let i = 0; i < maxAttempts; i++) { 128 | try { 129 | const response = await fetch(url, { method: 'HEAD' }); 130 | if (response.ok) { 131 | return true; 132 | } 133 | } catch (error) { 134 | // 继续尝试 135 | } 136 | await new Promise((resolve) => setTimeout(resolve, interval)); 137 | } 138 | throw new Error( 139 | `Server at ${url} did not respond after ${maxAttempts} attempts`, 140 | ); 141 | } 142 | 143 | export async function printLogFileIfExists() { 144 | const logFile = path.join(os.tmpdir(), 'KachinaInstaller.log'); 145 | if (await fs.pathExists(logFile)) { 146 | console.log(chalk.yellow('\n=== Installer Log File Contents ===')); 147 | const logs = await fs.readFile(logFile, 'utf-8'); 148 | console.log(logs); 149 | console.log(chalk.yellow('=== End of Log File ===\n')); 150 | } else { 151 | console.log(chalk.yellow('Log file not found at: ' + logFile)); 152 | } 153 | } 154 | -------------------------------------------------------------------------------- /tests/online-update.mjs: -------------------------------------------------------------------------------- 1 | import { 2 | verifyFiles, 3 | verifyFilesRemoved, 4 | verifyUpdaterReplaced, 5 | cleanupTestDir, 6 | getTestDir, 7 | waitForServer, 8 | getFileHash, 9 | printLogFileIfExists, 10 | FLAGS, 11 | } from './utils.mjs'; 12 | import { startServer } from './server.mjs'; 13 | import path from 'path'; 14 | import { usePwsh, $ } from 'zx'; 15 | import 'zx/globals'; 16 | usePwsh(); 17 | $.verbose = true; 18 | 19 | async function test() { 20 | const testDir = getTestDir('online-update'); 21 | const installerV1 = './fixtures/test-app-v1.exe'; 22 | 23 | console.log( 24 | chalk.blue('=== Online Update Test (with updater self-patch) ==='), 25 | ); 26 | console.log(`Test directory: ${testDir}`); 27 | 28 | // 获取v2更新器的hash用于验证 29 | const v2UpdaterHash = await getFileHash('./fixtures/test-app-v2/updater.exe'); 30 | console.log(chalk.gray(`Expected v2 updater hash: ${v2UpdaterHash}`)); 31 | 32 | // 启动HTTP服务器 33 | console.log('Starting HTTP server...'); 34 | const server = await startServer(); 35 | 36 | try { 37 | // 等待服务器启动 38 | await waitForServer('http://localhost:8080/test-app-v2.exe'); 39 | 40 | // 步骤1: 安装v1 41 | console.log('Installing v1...'); 42 | let result; 43 | try { 44 | result = await $`${installerV1} -S -D ${testDir}`.timeout('3m').quiet(); 45 | } catch (error) { 46 | if (error.message && error.message.includes('timed out')) { 47 | console.error(chalk.red('V1 installation timed out after 3 minutes')); 48 | await printLogFileIfExists(); 49 | } 50 | throw error; 51 | } 52 | if (result.exitCode !== 0) { 53 | throw new Error( 54 | `V1 installation failed with exit code ${result.exitCode}`, 55 | ); 56 | } 57 | 58 | // 步骤2: 从服务器获取v2进行更新 59 | // 删除日志文件 %temp%/KachinaInstaller.log 60 | const logFile = os.tmpdir() + '/KachinaInstaller.log'; 61 | if (await fs.pathExists(logFile)) { 62 | await fs.remove(logFile); 63 | } 64 | console.log('Updating to v2 from server...'); 65 | const updaterPath = path.join(testDir, 'updater.exe'); 66 | try { 67 | result = await $`& ${updaterPath} 
${FLAGS} -D ${testDir} --source local-v2`.timeout('3m'); 68 | } catch (error) { 69 | if (error.message && error.message.includes('timed out')) { 70 | console.error(chalk.red('Update to v2 timed out after 3 minutes')); 71 | await printLogFileIfExists(); 72 | } 73 | throw error; 74 | } 75 | if (result.exitCode !== 0) { 76 | throw new Error(`Update to v2 failed with exit code ${result.exitCode}`); 77 | } 78 | 79 | // check if fail in logs 80 | if (await fs.pathExists(logFile)) { 81 | const logs = await fs.readFile(logFile, 'utf-8'); 82 | console.log(logs); 83 | // 验证日志文件是否有错误 84 | if (logs.includes('ERROR kachina_installer::installer')) { 85 | throw new Error('Updater log contains errors'); 86 | } 87 | } 88 | 89 | // 验证v2文件 90 | const expectedFiles = [ 91 | { path: 'app.exe', contains: 'APP_V2' }, 92 | { path: 'config.json', contains: '"version": "2.0.0"' }, 93 | { path: 'feature.dll', size: 30720 }, 94 | { path: 'data/assets.dat', size: 15360 }, 95 | { path: 'data/new-assets.dat', size: 5120 }, 96 | { path: 'updater.exe' }, // 更新器本身 97 | ]; 98 | 99 | console.log('Verifying v2 files...'); 100 | const verification = await verifyFiles(testDir, expectedFiles); 101 | 102 | // 验证更新器自我更新(通过hash比对) 103 | console.log('Verifying updater self-patch...'); 104 | const updaterCheck = await verifyUpdaterReplaced(testDir, v2UpdaterHash); 105 | 106 | // 验证文件删除 107 | const removedFiles = ['readme.txt']; 108 | const removalVerification = await verifyFilesRemoved(testDir, removedFiles); 109 | 110 | // 输出结果 111 | const allPassed = 112 | verification.failed.length === 0 && 113 | removalVerification.failed.length === 0 && 114 | updaterCheck.success; 115 | 116 | if (allPassed) { 117 | console.log(chalk.green('✓ Online update completed successfully')); 118 | console.log(chalk.green(`✓ ${updaterCheck.message}`)); 119 | console.log( 120 | chalk.gray(` Updated files: ${verification.passed.join(', ')}`), 121 | ); 122 | console.log( 123 | chalk.gray(` Removed files: ${removalVerification.passed.join(', ')}`), 124 | ); 125 | } else { 126 | console.error(chalk.red('✗ Update verification failed:')); 127 | if (!updaterCheck.success) { 128 | console.error(chalk.red(` - ${updaterCheck.message}`)); 129 | } 130 | verification.failed.forEach((msg) => 131 | console.error(chalk.red(` - ${msg}`)), 132 | ); 133 | removalVerification.failed.forEach((msg) => 134 | console.error(chalk.red(` - ${msg}`)), 135 | ); 136 | process.exit(1); 137 | } 138 | } catch (error) { 139 | console.error(chalk.red('Test failed:'), error.message); 140 | process.exit(1); 141 | } finally { 142 | // 停止服务器 143 | server?.close(); 144 | await cleanupTestDir(testDir); 145 | } 146 | } 147 | 148 | test(); 149 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/HDiff/private_diff/suffix_string.h: -------------------------------------------------------------------------------- 1 | //suffix_string.h 2 | //后缀字符串的一个实现. 
3 | // 4 | /* 5 | The MIT License (MIT) 6 | Copyright (c) 2012-2017 HouSisong 7 | 8 | Permission is hereby granted, free of charge, to any person 9 | obtaining a copy of this software and associated documentation 10 | files (the "Software"), to deal in the Software without 11 | restriction, including without limitation the rights to use, 12 | copy, modify, merge, publish, distribute, sublicense, and/or sell 13 | copies of the Software, and to permit persons to whom the 14 | Software is furnished to do so, subject to the following 15 | conditions: 16 | 17 | The above copyright notice and this permission notice shall be 18 | included in all copies of the Software. 19 | 20 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 21 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 22 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 23 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 24 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 25 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 26 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 27 | OTHER DEALINGS IN THE SOFTWARE. 28 | */ 29 | 30 | #ifndef __SUFFIX_STRING_H_ 31 | #define __SUFFIX_STRING_H_ 32 | #include 33 | #include //for ptrdiff_t,size_t 34 | #ifndef _SSTRING_FAST_MATCH 35 | # define _SSTRING_FAST_MATCH 5 36 | #endif 37 | #if (_SSTRING_FAST_MATCH>0) 38 | # if (_SSTRING_FAST_MATCH<2) 39 | # error must _SSTRING_FAST_MATCH>=2! 40 | # endif 41 | # include "limit_mem_diff/bloom_filter.h" 42 | # include "limit_mem_diff/adler_roll.h" 43 | #endif 44 | 45 | #if defined (__cplusplus) || (defined (__STDC_VERSION__) && (__STDC_VERSION__ >= 199901L) /* C99 */) 46 | # include //for int32_t 47 | namespace hdiff_private{ 48 | #else 49 | namespace hdiff_private{ 50 | # if (_MSC_VER >= 1300) 51 | typedef signed __int32 int32_t; 52 | # else 53 | typedef signed int int32_t; 54 | # endif 55 | #endif 56 | 57 | #if (_SSTRING_FAST_MATCH>0) 58 | class TFastMatchForSString{ 59 | public: 60 | typedef uint32_t THash; 61 | typedef unsigned char TChar; 62 | enum { kFMMinStrSize=_SSTRING_FAST_MATCH }; 63 | 64 | inline TFastMatchForSString(){} 65 | inline void clear(){ bf.clear(); } 66 | void buildMatchCache(const TChar* src_begin,const TChar* src_end,size_t threadNum); 67 | 68 | static inline THash getHash(const TChar* datas) { return fast_adler32_start(datas,kFMMinStrSize); } 69 | static inline THash rollHash(THash h,const TChar* cur) { return fast_adler32_roll(h,kFMMinStrSize,cur[-kFMMinStrSize],cur[0]); } 70 | 71 | inline bool isHit(THash h) const { return bf.is_hit(h); } 72 | private: 73 | TBloomFilter bf; 74 | }; 75 | #endif 76 | 77 | class TSuffixString{ 78 | public: 79 | typedef ptrdiff_t TInt; 80 | typedef int32_t TInt32; 81 | typedef unsigned char TChar; 82 | explicit TSuffixString(bool isUsedFastMatch=false); 83 | ~TSuffixString(); 84 | 85 | //throw std::runtime_error when create SA error 86 | TSuffixString(const TChar* src_begin,const TChar* src_end,bool isUsedFastMatch=false,size_t threadNum=1); 87 | void resetSuffixString(const TChar* src_begin,const TChar* src_end,size_t threadNum=1); 88 | 89 | inline const TChar* src_begin()const{ return m_src_begin; } 90 | inline const TChar* src_end()const{ return m_src_end; } 91 | inline size_t SASize()const{ return (size_t)(m_src_end-m_src_begin); } 92 | void clear(); 93 | 94 | inline TInt SA(TInt i)const{//return m_SA[i];//排好序的后缀字符串数组. 
95 | if (isUseLargeSA()) 96 | return m_SA_large[i]; 97 | else 98 | return (TInt)m_SA_limit[i]; 99 | } 100 | TInt lower_bound(const TChar* str,const TChar* str_end)const;//return index in SA; must str_end-str>=2 ! 101 | private: 102 | TSuffixString(const TSuffixString &); //empty 103 | TSuffixString &operator=(const TSuffixString &); //empty 104 | private: 105 | const TChar* m_src_begin;//原字符串. 106 | const TChar* m_src_end; 107 | std::vector m_SA_limit; 108 | std::vector m_SA_large; 109 | enum{ kLimitSASize= (1<<30)-1 + (1<<30) };//2G-1 110 | inline bool isUseLargeSA()const{ 111 | return (sizeof(TInt)>sizeof(TInt32)) && (SASize()>kLimitSASize); 112 | } 113 | private: 114 | // all cache for lower_bound speed 115 | const bool m_isUsedFastMatch; 116 | #if (_SSTRING_FAST_MATCH>0) 117 | TFastMatchForSString m_fastMatch; //a big memory cache & build slow 118 | #endif 119 | const void* m_cached_SA_begin; 120 | const void* m_cached_SA_end; 121 | const void* m_cached1char_range[256+1]; 122 | void* m_cached2char_range;//[256*256+1] 123 | typedef TInt (*t_lower_bound_func)(const void* rbegin,const void* rend, 124 | const TChar* str,const TChar* str_end, 125 | const TChar* src_begin,const TChar* src_end, 126 | const void* SA_begin,size_t min_eq); 127 | t_lower_bound_func m_lower_bound; 128 | void build_cache(size_t threadNum); 129 | void clear_cache(); 130 | }; 131 | 132 | }//namespace hdiff_private 133 | #endif //__SUFFIX_STRING_H_ 134 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/libParallel/parallel_channel.h: -------------------------------------------------------------------------------- 1 | // parallel_channel.h 2 | /* 3 | The MIT License (MIT) 4 | Copyright (c) 2018 HouSisong 5 | 6 | Permission is hereby granted, free of charge, to any person 7 | obtaining a copy of this software and associated documentation 8 | files (the "Software"), to deal in the Software without 9 | restriction, including without limitation the rights to use, 10 | copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | copies of the Software, and to permit persons to whom the 12 | Software is furnished to do so, subject to the following 13 | conditions: 14 | 15 | The above copyright notice and this permission notice shall be 16 | included in all copies of the Software. 17 | 18 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 19 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 20 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 21 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 22 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 23 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 24 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 25 | OTHER DEALINGS IN THE SOFTWARE. 
26 | */ 27 | 28 | #ifndef parallel_channel_h 29 | #define parallel_channel_h 30 | #include "parallel_import.h" 31 | #if (_IS_USED_MULTITHREAD) 32 | #include //uint32 33 | #include 34 | #include //for size_t ptrdiff_t 35 | 36 | struct CHLocker{ 37 | HLocker locker; 38 | inline CHLocker():locker(0) { locker=locker_new(); } 39 | inline ~CHLocker() { locker_delete(locker); } 40 | }; 41 | 42 | #if (_IS_USED_CPP11THREAD) 43 | # include 44 | struct CAutoLocker:public _TLockerBox_name { 45 | inline CAutoLocker(HLocker _locker) 46 | :_TLockerBox_name(){ if (_locker) { _TLockerBox_name _t(*(std::mutex*)_locker); _t.swap(*this); } } 47 | inline ~CAutoLocker(){ } 48 | }; 49 | #else 50 | struct CAutoLocker:public TLockerBox { 51 | inline CAutoLocker(HLocker _locker){ locker=_locker; if (locker) locker_enter(locker); } 52 | inline ~CAutoLocker(){ if (locker) locker_leave(locker); } 53 | }; 54 | #endif 55 | 56 | //通道交互数据; 57 | typedef void* TChanData; 58 | 59 | class _CChannel_import; 60 | //通道; 61 | class CChannel{ 62 | public: 63 | explicit CChannel(ptrdiff_t maxDataCount=-1); 64 | ~CChannel(); 65 | void close(); 66 | bool is_can_fast_send(bool isWait); //mybe not need wait when send 67 | bool send(TChanData data,bool isWait); //can't send null 68 | TChanData accept(bool isWait); //result can null; 69 | private: 70 | _CChannel_import* _import; 71 | }; 72 | 73 | //用通道传递来共享数据; 74 | struct TMtByChannel { 75 | CChannel read_chan; 76 | CChannel work_chan; 77 | CChannel data_chan; 78 | 79 | void on_error(){ 80 | { 81 | CAutoLocker _auto_locker(_locker.locker); 82 | if (_is_on_error) return; 83 | _is_on_error=true; 84 | } 85 | closeAndClear(); 86 | } 87 | 88 | bool start_threads(int threadCount,TThreadRunCallBackProc threadProc,void* workData,bool isUseThisThread){ 89 | for (int i=0;i 33 | #include "../../../hpatch-sys/HPatch/patch_types.h" //hpatch_packUIntWithTag 34 | #include //std::runtime_error 35 | #include 36 | namespace hdiff_private{ 37 | 38 | template 39 | inline static void packUIntWithTag(std::vector& out_code,_UInt uValue, 40 | int highTag,const int kTagBit){ 41 | unsigned char codeBuf[hpatch_kMaxPackedUIntBytes]; 42 | unsigned char* codeEnd=codeBuf; 43 | if (!hpatch_packUIntWithTag(&codeEnd,codeBuf+hpatch_kMaxPackedUIntBytes,uValue,highTag,kTagBit)) 44 | throw std::runtime_error("packUIntWithTag<_UInt>() hpatch_packUIntWithTag() error!"); 45 | out_code.insert(out_code.end(),codeBuf,codeEnd); 46 | } 47 | 48 | template 49 | inline static void packUInt(std::vector& out_code,_UInt uValue){ 50 | packUIntWithTag(out_code,uValue,0,0); 51 | } 52 | 53 | inline static void pushBack(std::vector& out_buf, 54 | const unsigned char* data,const unsigned char* data_end){ 55 | out_buf.insert(out_buf.end(),data,data_end); 56 | } 57 | inline static void pushBack(std::vector& out_buf, 58 | const unsigned char* data,size_t dataSize){ 59 | pushBack(out_buf,data,data+dataSize); 60 | } 61 | inline static void pushBack(std::vector& out_buf,const std::vector& data){ 62 | out_buf.insert(out_buf.end(),data.begin(),data.end()); 63 | } 64 | 65 | static void pushBack(std::vector& out_buf,const hpatch_TStreamInput* data){ 66 | const size_t kStepSize=hpatch_kStreamCacheSize*2; 67 | unsigned char buf[kStepSize]; 68 | out_buf.reserve(out_buf.size()+(size_t)data->streamSize); 69 | hpatch_StreamPos_t curPos=0; 70 | while (curPosstreamSize){ 71 | size_t len=kStepSize; 72 | if (curPos+len>data->streamSize) 73 | len=(size_t)(data->streamSize-curPos); 74 | if (!data->read(data,curPos,buf,buf+len)) 75 | throw 
std::runtime_error("pushBack() data->read() error!"); 76 | pushBack(out_buf,buf,len); 77 | curPos+=len; 78 | } 79 | } 80 | 81 | 82 | template inline static 83 | void pushUInt(std::vector& out_buf,TUInt v){ 84 | unsigned char buf[sizeof(TUInt)]; 85 | for (size_t i=0; i1) v>>=8; 88 | } 89 | pushBack(out_buf,buf,sizeof(TUInt)); 90 | } 91 | 92 | inline static 93 | void pushUInt(std::vector& out_buf,unsigned char v){ 94 | out_buf.insert(out_buf.end(),v); 95 | } 96 | 97 | 98 | inline static void pushCStr(std::vector& out_buf,const char* cstr){ 99 | const unsigned char* data=(const unsigned char*)cstr; 100 | pushBack(out_buf,data,data+strlen(cstr)); 101 | } 102 | inline static void pushString(std::vector& out_buf,const std::string& str){ 103 | const unsigned char* data=(const unsigned char*)str.c_str(); 104 | pushBack(out_buf,data,data+str.size()); 105 | } 106 | 107 | struct TPlaceholder{ 108 | hpatch_StreamPos_t pos; 109 | hpatch_StreamPos_t pos_end; 110 | inline TPlaceholder(hpatch_StreamPos_t _pos,hpatch_StreamPos_t _pos_end) 111 | :pos(_pos),pos_end(_pos_end){ assert(_pos<=_pos_end); } 112 | inline hpatch_StreamPos_t size()const{ return pos_end-pos; } 113 | inline bool isNullPos()const{ return (pos_end==0)&&(pos_end==pos); } 114 | }; 115 | 116 | hpatch_inline static 117 | void packUInt_fixSize(unsigned char* out_code,unsigned char* out_code_fixEnd, 118 | hpatch_StreamPos_t uValue){ 119 | if (out_code>=out_code_fixEnd) 120 | throw std::runtime_error("packUInt_fixSize() out_code size error!"); 121 | --out_code_fixEnd; 122 | *out_code_fixEnd=uValue&((1<<7)-1); uValue>>=7; 123 | while (out_code>=7; 126 | } 127 | if (uValue!=0) 128 | throw std::runtime_error("packUInt_fixSize() out_code too small error!"); 129 | } 130 | 131 | }//namespace hdiff_private 132 | #endif //__PACK_UINT_H_ 133 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/HDiff/private_diff/qsort_parallel.h: -------------------------------------------------------------------------------- 1 | // qsort_parallel.h 2 | // parallel sort for HDiffz 3 | /* 4 | The MIT License (MIT) 5 | Copyright (c) 2022 HouSisong 6 | 7 | Permission is hereby granted, free of charge, to any person 8 | obtaining a copy of this software and associated documentation 9 | files (the "Software"), to deal in the Software without 10 | restriction, including without limitation the rights to use, 11 | copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | copies of the Software, and to permit persons to whom the 13 | Software is furnished to do so, subject to the following 14 | conditions: 15 | 16 | The above copyright notice and this permission notice shall be 17 | included in all copies of the Software. 18 | 19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 20 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 21 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 22 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 23 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 24 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 25 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 26 | OTHER DEALINGS IN THE SOFTWARE. 
27 | */ 28 | 29 | #ifndef HDiff_qsort_parallel_h 30 | #define HDiff_qsort_parallel_h 31 | #include 32 | #include "../../../libParallel/parallel_import.h" 33 | #if (_IS_USED_MULTITHREAD) 34 | #include //if used vc++, need >= vc2012 35 | #endif 36 | 37 | #if (_IS_USED_MULTITHREAD) 38 | inline static size_t __index_by_ratio(size_t size,size_t ratio,size_t ratio_base){ 39 | return (size_t)(((hpatch_uint64_t)size)*ratio/ratio_base); 40 | } 41 | 42 | template 43 | struct _sort_parallel_TCmpi{ 44 | inline _sort_parallel_TCmpi(const TValue* begin,TCmp& cmp):_begin(begin),_cmp(cmp){} 45 | inline bool operator()(const size_t& x,const size_t& y) const{ 46 | return _cmp(_begin[x],_begin[y]); 47 | } 48 | const TValue* _begin; 49 | TCmp& _cmp; 50 | }; 51 | 52 | template 53 | static TValue* _sort_parallel_partition(TValue* begin,TValue* end,TCmp cmp, 54 | size_t leftWeight=1,size_t rightWeight=1){ 55 | const size_t size=end-begin; 56 | size_t samples[kSampleSize]; 57 | const size_t _kIndexStep=size/kSampleSize+1; 58 | size_t curIndex=0; 59 | for(size_t i=0;i(begin,cmp)); 65 | size_t _pivot_i=__index_by_ratio(kSampleSize,leftWeight,(leftWeight+rightWeight)); 66 | if (!_kIsSortNotNth) std::nth_element(samples,samples+_pivot_i,samples+kSampleSize,_sort_parallel_TCmpi(begin,cmp)); 67 | size_t pivot=samples[_pivot_i]; 68 | std::swap(begin[0],begin[pivot]); 69 | TValue x(begin[0]); 70 | size_t mid=0; 71 | for (size_t i=mid+1;i 80 | static void _sort_parallel_thread(TValue* begin,TValue* end,TCmp cmp,size_t threadNum){ 81 | if (threadNum>1){ 82 | const size_t rightWeight=(threadNum>>1); 83 | const size_t leftWeight=threadNum-rightWeight; 84 | TValue* mid; 85 | const bool _kIsPartitionNotMerge=true; 86 | if (_kIsPartitionNotMerge){ // partition 87 | //mid=begin+__index_by_ratio(size,leftWeight,threadNum); std::nth_element(begin,mid,end,cmp); //for test 88 | //mid=std::_Partition_by_median_guess_unchecked(begin, end, cmp).first; //for test by vc 89 | mid=_sort_parallel_partition(begin,end,cmp,leftWeight,rightWeight); 90 | }else{ 91 | mid=begin+__index_by_ratio(end-begin,leftWeight,threadNum); 92 | } 93 | 94 | std::thread threadRight(_sort_parallel_thread, 95 | mid,end,cmp,rightWeight); 96 | _sort_parallel_thread(begin,mid,cmp,leftWeight); 97 | threadRight.join(); 98 | 99 | if (!_kIsPartitionNotMerge){ //merge 100 | std::inplace_merge(begin,mid,end,cmp); 101 | } 102 | }else{ 103 | std::sort(begin,end,cmp); 104 | //printf("parallel sort size: %" PRIu64 " \n",(hpatch_StreamPos_t)(end-begin)); 105 | } 106 | } 107 | #endif 108 | 109 | template 110 | static void sort_parallel(TValue* begin,TValue* end,TCmp cmp,size_t threadNum){ 111 | #if (_IS_USED_MULTITHREAD) 112 | const size_t size=end-begin; 113 | if ((threadNum>1)&&(size>=kMinQSortParallelSize)){ 114 | const size_t maxThreanNum=size/(kMinQSortParallelSize/2); 115 | threadNum=(threadNum<=maxThreanNum)?threadNum:maxThreanNum; 116 | //std::random_shuffle(begin,end); //test shuffle befor parallel sort? 
117 | _sort_parallel_thread(begin,end,cmp,threadNum); 118 | }else 119 | #endif 120 | { 121 | std::sort(begin,end,cmp); 122 | } 123 | } 124 | 125 | #endif 126 | -------------------------------------------------------------------------------- /src-tauri/src/ipc/operation.rs: -------------------------------------------------------------------------------- 1 | use crate::utils::error::TAResult; 2 | #[derive(serde::Deserialize, serde::Serialize, Clone, Debug)] 3 | #[serde(tag = "type")] 4 | pub enum IpcOperation { 5 | Ping, 6 | InstallFile(super::install_file::InstallFileArgs), 7 | InstallMultipartStream(super::install_file::InstallMultiStreamArgs), 8 | InstallMultichunkStream(super::install_file::InstallMultiStreamArgs), 9 | CreateLnk(crate::installer::lnk::CreateLnkArgs), 10 | WriteRegistry(crate::installer::registry::WriteRegistryParams), 11 | CreateUninstaller(crate::installer::uninstall::CreateUninstallerArgs), 12 | RunUninstall(crate::installer::uninstall::RunUninstallArgs), 13 | FindProcessByName { 14 | name: String, 15 | }, 16 | KillProcess { 17 | pid: u32, 18 | }, 19 | RmList { 20 | list: Vec, 21 | }, 22 | InstallRuntime { 23 | tag: String, 24 | offset: Option, 25 | size: Option, 26 | }, 27 | CheckLocalFiles { 28 | source: String, 29 | hash_algorithm: String, 30 | file_list: Vec, 31 | }, 32 | RunMirrorcDownload { 33 | zip_path: String, 34 | url: String, 35 | }, 36 | RunMirrorcInstall { 37 | zip_path: String, 38 | target_path: String, 39 | }, 40 | } 41 | 42 | pub async fn run_opr( 43 | op: IpcOperation, 44 | notify: impl Fn(serde_json::Value) + std::marker::Send + 'static + Clone, 45 | context: Vec<(String, String)>, 46 | ) -> TAResult { 47 | let op_name = match &op { 48 | IpcOperation::Ping => "Ping", 49 | IpcOperation::InstallFile(_) => "InstallFile", 50 | IpcOperation::InstallMultipartStream(_) => "InstallMultipartStream", 51 | IpcOperation::InstallMultichunkStream(_) => "InstallMultichunkStream", 52 | IpcOperation::CreateLnk(_) => "CreateLnk", 53 | IpcOperation::WriteRegistry(_) => "WriteRegistry", 54 | IpcOperation::CreateUninstaller(_) => "CreateUninstaller", 55 | IpcOperation::RunUninstall(_) => "RunUninstall", 56 | IpcOperation::FindProcessByName { .. } => "FindProcessByName", 57 | IpcOperation::KillProcess { .. } => "KillProcess", 58 | IpcOperation::RmList { .. } => "RmList", 59 | IpcOperation::InstallRuntime { .. } => "InstallRuntime", 60 | IpcOperation::CheckLocalFiles { .. } => "CheckLocalFiles", 61 | IpcOperation::RunMirrorcDownload { .. } => "RunMirrorcDownload", 62 | IpcOperation::RunMirrorcInstall { .. 
} => "RunMirrorcInstall", 63 | }; 64 | tracing::info!("IPC operation: {}", op_name); 65 | let ctx_str = context 66 | .iter() 67 | .map(|(k, v)| (k.as_str(), v.as_str())) 68 | .collect::>(); 69 | let tx_ctx = sentry::TransactionContext::continue_from_headers(op_name, op_name, ctx_str); 70 | let transaction = sentry::start_transaction(tx_ctx); 71 | let ret = match op { 72 | IpcOperation::Ping => Ok(serde_json::value::Value::Null), 73 | IpcOperation::InstallFile(args) => { 74 | super::install_file::ipc_install_file(args, notify).await 75 | } 76 | IpcOperation::InstallMultipartStream(args) => { 77 | super::install_file::ipc_install_multipart_stream(args, notify).await 78 | } 79 | IpcOperation::InstallMultichunkStream(args) => { 80 | super::install_file::ipc_install_multichunk_stream(args, notify).await 81 | } 82 | IpcOperation::WriteRegistry(params) => { 83 | crate::installer::registry::write_registry_with_params(params).await?; 84 | Ok(serde_json::Value::Null) 85 | } 86 | IpcOperation::CreateUninstaller(args) => { 87 | crate::installer::uninstall::create_uninstaller_with_args(args).await?; 88 | Ok(serde_json::Value::Null) 89 | } 90 | IpcOperation::RunUninstall(args) => Ok(serde_json::json!( 91 | crate::installer::uninstall::run_uninstall_with_args(args).await? 92 | )), 93 | IpcOperation::CreateLnk(args) => { 94 | crate::installer::lnk::create_lnk_with_args(args).await?; 95 | Ok(serde_json::Value::Null) 96 | } 97 | IpcOperation::FindProcessByName { name } => Ok(serde_json::json!( 98 | crate::installer::find_process_by_name(name).await? 99 | )), 100 | IpcOperation::KillProcess { pid } => Ok(serde_json::json!( 101 | crate::installer::kill_process(pid).await? 102 | )), 103 | IpcOperation::RmList { list } => { 104 | let list = list.into_iter().map(std::path::PathBuf::from).collect(); 105 | Ok(serde_json::json!( 106 | crate::installer::uninstall::rm_list(list).await 107 | )) 108 | } 109 | IpcOperation::InstallRuntime { tag, offset, size } => Ok(serde_json::json!( 110 | crate::installer::runtimes::install_runtime(tag, offset, size, notify).await? 111 | )), 112 | IpcOperation::CheckLocalFiles { 113 | source, 114 | hash_algorithm, 115 | file_list, 116 | } => Ok(serde_json::json!( 117 | crate::fs::check_local_files(source, hash_algorithm, file_list, notify).await? 118 | )), 119 | IpcOperation::RunMirrorcDownload { zip_path, url } => { 120 | crate::thirdparty::mirrorc::run_mirrorc_download(&zip_path, &url, notify).await?; 121 | Ok(serde_json::Value::Null) 122 | } 123 | IpcOperation::RunMirrorcInstall { 124 | zip_path, 125 | target_path, 126 | } => Ok(serde_json::json!( 127 | crate::thirdparty::mirrorc::run_mirrorc_install(&zip_path, &target_path, notify) 128 | .await? 
129 | )), 130 | }; 131 | transaction.finish(); 132 | ret 133 | } 134 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/HDiff/private_diff/limit_mem_diff/covers.h: -------------------------------------------------------------------------------- 1 | // covers.h 2 | /* 3 | The MIT License (MIT) 4 | Copyright (c) 2012-2017 HouSisong 5 | 6 | Permission is hereby granted, free of charge, to any person 7 | obtaining a copy of this software and associated documentation 8 | files (the "Software"), to deal in the Software without 9 | restriction, including without limitation the rights to use, 10 | copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | copies of the Software, and to permit persons to whom the 12 | Software is furnished to do so, subject to the following 13 | conditions: 14 | 15 | The above copyright notice and this permission notice shall be 16 | included in all copies of the Software. 17 | 18 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 19 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 20 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 21 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 22 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 23 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 24 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 25 | OTHER DEALINGS IN THE SOFTWARE. 26 | */ 27 | #ifndef covers_h 28 | #define covers_h 29 | #include 30 | #include //std::sort 31 | #include "../../../HDiff/diff_types.h" 32 | namespace hdiff_private{ 33 | typedef hpatch_TCover TCover; 34 | 35 | static inline void setCover(TCover& cover,hpatch_StreamPos_t oldPos,hpatch_StreamPos_t newPos,hpatch_StreamPos_t length) { 36 | cover.oldPos=oldPos; cover.newPos=newPos; cover.length=length; } 37 | 38 | // input & output covers 39 | // must overwrite push_cover for output covers 40 | struct TCovers:public hpatch_TOutputCovers{ 41 | void* _covers; 42 | size_t _coverCount; 43 | bool _isCover32; 44 | inline TCovers(void* covers,size_t coverCount,bool isCover32) 45 | :_covers(covers),_coverCount(coverCount),_isCover32(isCover32) 46 | { push_cover=0; } //default unsupport push 47 | inline size_t coverCount()const{ return _coverCount; } 48 | inline void covers(size_t index,TCover* out_cover)const{ 49 | if (_isCover32) { 50 | const hpatch_TCover32& c32=((const hpatch_TCover32*)_covers)[index]; 51 | out_cover->oldPos=c32.oldPos; 52 | out_cover->newPos=c32.newPos; 53 | out_cover->length=c32.length; 54 | }else{ 55 | *out_cover=((const hpatch_TCover*)_covers)[index]; 56 | } 57 | } 58 | }; 59 | 60 | template 61 | static void tm_collate_covers(std::vector<_TCover>& covers){ 62 | if (covers.size()<=1) return; 63 | std::sort(covers.begin(),covers.end(),cover_cmp_by_new_t<_TCover>()); 64 | size_t backi=0; 65 | for (size_t i=1;icovers[backi].newPos+covers[backi].length){ 68 | if (cover_is_collinear(covers[i],covers[backi])){//insert i part to backi,del i 69 | covers[backi].length=covers[i].newPos+covers[i].length-covers[backi].newPos; 70 | }else{//del backi part, save i 71 | covers[backi].length=covers[i].newPos-covers[backi].newPos; 72 | if (covers[backi].length>=kCoverMinMatchLen) 73 | ++backi; 74 | covers[backi]=covers[i]; 75 | } 76 | } //else del i 77 | }else if ((covers[i].newPos==covers[backi].newPos+covers[backi].length) 78 | &&(covers[i].oldPos==covers[backi].oldPos+covers[backi].length)){ 79 | covers[backi].length+=covers[i].length; 
//insert i all to backi,del i 80 | }else{ //save i 81 | ++backi; 82 | covers[backi]=covers[i]; 83 | } 84 | } 85 | covers.resize(backi+1); 86 | } 87 | 88 | class TCoversBuf:public TCovers{ 89 | public: 90 | inline TCoversBuf(hpatch_StreamPos_t dataSize0,hpatch_StreamPos_t dataSize1) 91 | :TCovers(0,0,(dataSize0|dataSize1)<((hpatch_StreamPos_t)1<<32)){ 92 | push_cover=_push_cover; 93 | collate_covers=_collate_covers; 94 | } 95 | private: 96 | template 97 | inline void _update(std::vector<_TCover>& covers){ 98 | _covers=covers.data(); 99 | _coverCount=covers.size(); 100 | } 101 | static hpatch_BOOL _push_cover(struct hpatch_TOutputCovers* out_covers,const TCover* cover){ 102 | TCoversBuf* self=(TCoversBuf*)out_covers; 103 | if (self->_isCover32) { 104 | hpatch_TCover32 c32; 105 | c32.oldPos=(hpatch_uint32_t)cover->oldPos; 106 | c32.newPos=(hpatch_uint32_t)cover->newPos; 107 | c32.length=(hpatch_uint32_t)cover->length; 108 | self->m_covers_limit.push_back(c32); 109 | self->_update(self->m_covers_limit); 110 | }else{ 111 | self->m_covers_larger.push_back(*cover); 112 | self->_update(self->m_covers_larger); 113 | } 114 | return hpatch_TRUE; 115 | } 116 | static void _collate_covers(struct hpatch_TOutputCovers* out_covers){ 117 | TCoversBuf* self=(TCoversBuf*)out_covers; 118 | if (self->_isCover32){ 119 | tm_collate_covers(self->m_covers_limit); 120 | self->_update(self->m_covers_limit); 121 | }else{ 122 | tm_collate_covers(self->m_covers_larger); 123 | self->_update(self->m_covers_larger); 124 | } 125 | } 126 | public: 127 | std::vector m_covers_limit; 128 | std::vector m_covers_larger; 129 | inline void update(){ 130 | if (_isCover32) 131 | _update(m_covers_limit); 132 | else 133 | _update(m_covers_larger); 134 | } 135 | }; 136 | 137 | }//namespace hdiff_private 138 | #endif /* icover_h */ 139 | -------------------------------------------------------------------------------- /.github/workflows/build.yml: -------------------------------------------------------------------------------- 1 | name: Build 2 | on: 3 | push: 4 | pull_request: 5 | env: 6 | CARGO_TERM_COLOR: always 7 | RUST_NIGHTLY_VERSION: nightly 8 | jobs: 9 | build: 10 | runs-on: windows-latest 11 | outputs: 12 | version: ${{ steps.update_meta.outputs.version }} 13 | steps: 14 | - uses: actions/checkout@v4 15 | with: 16 | fetch-depth: 0 17 | fetch-tags: true 18 | - name: Override rust-toolchain for CI 19 | run: | 20 | (Get-Content src-tauri/rust-toolchain.toml) -replace 'channel = "nightly"', 'channel = "${{ env.RUST_NIGHTLY_VERSION }}"' | Set-Content src-tauri/rust-toolchain.toml 21 | - run: rustup toolchain install ${{ env.RUST_NIGHTLY_VERSION }} --profile minimal && rustup component add rust-src --toolchain ${{ env.RUST_NIGHTLY_VERSION }} 22 | - uses: Swatinem/rust-cache@v2 23 | with: 24 | cache-targets: false 25 | workspaces: 'src-tauri' 26 | - uses: pnpm/action-setup@v4.0.0 27 | - uses: actions/setup-node@v4 28 | with: 29 | node-version: 22 30 | cache: 'pnpm' 31 | - name: Install NPM Dependencies 32 | run: pnpm i 33 | - name: Update Meta 34 | id: update_meta 35 | run: | 36 | $tag = git describe --tags --abbrev=0 2>$null 37 | if (-not $tag) { $tag = "0.0.0" } 38 | $TAG = $tag -replace '^v', '' 39 | $PKGVER = "r$(git rev-list --count HEAD).$(git rev-parse --short HEAD)" 40 | $cstTimeZone = [System.TimeZoneInfo]::FindSystemTimeZoneById("China Standard Time") 41 | $utcNow = [System.DateTime]::UtcNow 42 | $cstTime = [System.TimeZoneInfo]::ConvertTimeFromUtc($utcNow, $cstTimeZone) 43 | $BUILDMETA = $cstTime.ToString("yyMMddHHmm") 44 | 
(Get-Content ./src-tauri/Cargo.toml -Raw) -replace '(?m)^(version = "\d+\.\d+\.\d+)', "version = `"$TAG-$PKGVER+$BUILDMETA" | Set-Content ./src-tauri/Cargo.toml -Encoding utf8 45 | echo "version=$TAG-$PKGVER+$BUILDMETA" >> $env:GITHUB_OUTPUT 46 | - name: Build 47 | run: pnpm build 48 | - name: Copy Binary 49 | run: copy src-tauri/target/x86_64-win7-windows-msvc/release/kachina-builder.exe kachina-builder.exe 50 | - name: Artifact 51 | uses: actions/upload-artifact@v4.6.0 52 | with: 53 | path: | 54 | src-tauri/target/x86_64-win7-windows-msvc/release/*.exe 55 | src-tauri/target/x86_64-win7-windows-msvc/release/*.pdb 56 | release: 57 | needs: [build, test] 58 | runs-on: windows-latest 59 | if: startsWith(github.ref, 'refs/tags/') 60 | steps: 61 | - uses: actions/checkout@v4 62 | - uses: pnpm/action-setup@v4.0.0 63 | - uses: actions/setup-node@v4 64 | with: 65 | node-version: 22 66 | cache: 'pnpm' 67 | - name: Install NPM Dependencies 68 | run: pnpm i 69 | - name: Download build artifacts 70 | uses: actions/download-artifact@v4 71 | with: 72 | path: artifacts 73 | - name: Copy Binary 74 | run: Copy-Item "artifacts/artifact/kachina-builder.exe" "kachina-builder.exe" -Force 75 | - name: Sentry upload 76 | continue-on-error: true 77 | env: 78 | SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} 79 | SENTRY_ORG: yuehaiteam 80 | SENTRY_PROJECT: kachina-installer 81 | SENTRY_URL: ${{ secrets.SENTRY_URL }} 82 | run: | 83 | $ErrorActionPreference = "Stop" 84 | $env:SENTRY_LOG_LEVEL = "info" 85 | echo "Creating Sentry release for version ${{ needs.build.outputs.version }}" 86 | pnpm exec sentry-cli releases new "${{ needs.build.outputs.version }}" 87 | echo "Setting release metadata for version ${{ needs.build.outputs.version }}" 88 | pnpm exec sentry-cli releases set-commits --auto "${{ needs.build.outputs.version }}" 89 | echo "Uploading debug files for version ${{ needs.build.outputs.version }}" 90 | pnpm exec sentry-cli debug-files upload ./artifacts/artifact/kachina_installer.pdb 91 | echo "Finalizing release for version ${{ needs.build.outputs.version }}" 92 | pnpm exec sentry-cli releases finalize "${{ needs.build.outputs.version }}" 93 | - name: Release 94 | uses: softprops/action-gh-release@v2 95 | with: 96 | files: kachina-builder.exe 97 | make_latest: true 98 | body: "${{ needs.build.outputs.version }}" 99 | 100 | test: 101 | needs: build 102 | runs-on: windows-latest 103 | strategy: 104 | matrix: 105 | test: [offline-install, online-install, offline-update, online-update] 106 | fail-fast: false 107 | steps: 108 | - uses: actions/checkout@v4 109 | - uses: pnpm/action-setup@v4.0.0 110 | - uses: actions/setup-node@v4 111 | with: 112 | node-version: 22 113 | cache: 'pnpm' 114 | - name: Install NPM Dependencies 115 | run: pnpm i 116 | - name: Download build artifacts 117 | uses: actions/download-artifact@v4 118 | with: 119 | path: artifacts 120 | - name: Setup test environment 121 | run: | 122 | # 复制构建的二进制文件到正确位置 123 | New-Item -ItemType Directory -Force -Path "src-tauri/target/release" 124 | New-Item -ItemType Directory -Force -Path "src-tauri/target/x86_64-win7-windows-msvc/release" 125 | Copy-Item "artifacts/artifact/*.exe" "src-tauri/target/x86_64-win7-windows-msvc/release/" -Force 126 | Copy-Item "artifacts/artifact/*.pdb" "src-tauri/target/x86_64-win7-windows-msvc/release/" -Force 127 | Copy-Item "artifacts/artifact/*.exe" "src-tauri/target/release/" -Force 128 | Copy-Item "artifacts/artifact/*.pdb" "src-tauri/target/release/" -Force 129 | - name: Prepare test fixtures 130 | run: npm run 
test:prepare 131 | continue-on-error: true 132 | - name: Run ${{ matrix.test }} test 133 | run: npm run test:${{ matrix.test }} 134 | timeout-minutes: 10 135 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/libParallel/parallel_channel.cpp: -------------------------------------------------------------------------------- 1 | // parallel_channel.cpp 2 | /* 3 | The MIT License (MIT) 4 | Copyright (c) 2018 HouSisong 5 | 6 | Permission is hereby granted, free of charge, to any person 7 | obtaining a copy of this software and associated documentation 8 | files (the "Software"), to deal in the Software without 9 | restriction, including without limitation the rights to use, 10 | copy, modify, merge, publish, distribute, sublicense, and/or sell 11 | copies of the Software, and to permit persons to whom the 12 | Software is furnished to do so, subject to the following 13 | conditions: 14 | 15 | The above copyright notice and this permission notice shall be 16 | included in all copies of the Software. 17 | 18 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 19 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 20 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 21 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 22 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 23 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 24 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 25 | OTHER DEALINGS IN THE SOFTWARE. 26 | */ 27 | #include "parallel_channel.h" 28 | 29 | #if (_IS_USED_MULTITHREAD) 30 | #include 31 | #include 32 | 33 | class _CChannel_import{ 34 | public: 35 | explicit _CChannel_import(ptrdiff_t maxDataCount) 36 | :_locker(0),_sendCond(0),_acceptCond(0), 37 | _maxDataCount(maxDataCount),_waitingCount(0),_isClosed(false){ 38 | _locker=locker_new(); 39 | _sendCond=condvar_new(); 40 | _acceptCond=condvar_new(); 41 | } 42 | ~_CChannel_import(){ 43 | close(); 44 | while (true) { //wait all thread exit 45 | { 46 | CAutoLocker locker(_locker); 47 | if (_waitingCount==0) break; 48 | } 49 | this_thread_yield(); //todo:优化? 50 | } 51 | locker_delete(_locker); 52 | assert(_dataList.empty()); // why? if saved resource then leaks 53 | condvar_delete(_acceptCond); 54 | condvar_delete(_sendCond); 55 | } 56 | void close(){ 57 | if (_isClosed) return; 58 | { 59 | CAutoLocker locker(_locker); 60 | if (_isClosed) return; 61 | _isClosed=true; 62 | condvar_broadcast(_sendCond); 63 | condvar_broadcast(_acceptCond); 64 | } 65 | } 66 | bool is_can_fast_send(bool isWait){ 67 | if (_maxDataCount<0) return true; 68 | if (_maxDataCount==0) return false; 69 | 70 | CAutoLocker locker(_locker); 71 | while (true) { 72 | if (_isClosed) return false; 73 | if (_dataList.size()<(size_t)_maxDataCount) { 74 | return true; 75 | }else if(!isWait){ 76 | return false; 77 | }//else wait 78 | ++_waitingCount; 79 | condvar_wait(_sendCond,&locker); 80 | --_waitingCount; 81 | } 82 | } 83 | 84 | bool send(TChanData data,bool isWait){ 85 | assert(data!=0); 86 | { 87 | CAutoLocker locker(_locker); 88 | while (true) { 89 | if (_isClosed) return false; 90 | if ((_maxDataCount<=0)||(_dataList.size()<(size_t)_maxDataCount)) { 91 | try{ 92 | _dataList.push_back(data); 93 | }catch(...){ 94 | return false; 95 | } 96 | condvar_signal(_acceptCond); 97 | if (_maxDataCount==0) //must wait accepted? 
98 | break;// to wait accepted 99 | else 100 | return true; //ok 101 | }else if(!isWait){ 102 | return false; 103 | }//else wait 104 | ++_waitingCount; 105 | condvar_wait(_sendCond,&locker); 106 | --_waitingCount; 107 | } 108 | } 109 | 110 | //wait accepted 111 | while (true) { //wait _dataList empty 112 | { 113 | CAutoLocker locker(_locker); 114 | if (_isClosed) break; 115 | if (_dataList.empty()) break; 116 | } 117 | this_thread_yield(); //todo:优化; 118 | } 119 | return true; 120 | } 121 | TChanData accept(bool isWait){ 122 | CAutoLocker locker(_locker); 123 | while (true) { 124 | if (!_dataList.empty()) { 125 | TChanData result=_dataList.front(); 126 | _dataList.pop_front(); 127 | if (!_isClosed) 128 | condvar_signal(_sendCond); 129 | return result; //ok 130 | }else if(_isClosed){ 131 | return 0; 132 | }else if(!isWait){ 133 | return 0; 134 | }//else wait 135 | ++_waitingCount; 136 | condvar_wait(_acceptCond,&locker); 137 | --_waitingCount; 138 | } 139 | } 140 | private: 141 | HLocker _locker; 142 | HCondvar _sendCond; 143 | HCondvar _acceptCond; 144 | std::deque _dataList; 145 | const ptrdiff_t _maxDataCount; 146 | volatile size_t _waitingCount; 147 | volatile bool _isClosed; 148 | }; 149 | 150 | 151 | CChannel::CChannel(ptrdiff_t maxDataCount):_import(0){ 152 | _import=new _CChannel_import(maxDataCount); 153 | } 154 | CChannel::~CChannel(){ 155 | delete (_CChannel_import*)_import; 156 | } 157 | void CChannel::close(){ 158 | _import->close(); 159 | } 160 | bool CChannel::is_can_fast_send(bool isWait){ 161 | return _import->is_can_fast_send(isWait); 162 | } 163 | bool CChannel::send(TChanData data,bool isWait){ 164 | return _import->send(data,isWait); 165 | } 166 | TChanData CChannel::accept(bool isWait){ 167 | return _import->accept(isWait); 168 | } 169 | 170 | #endif //_IS_USED_MULTITHREAD 171 | -------------------------------------------------------------------------------- /src-tauri/src/utils/error.rs: -------------------------------------------------------------------------------- 1 | // This file is part of the `anyhow-tauri` library. 2 | 3 | use crate::dfs::InsightItem; 4 | use serde::Serialize; 5 | use std::sync::{Arc, Mutex}; 6 | 7 | // Download error constants 8 | pub const DOWNLOAD_STALLED: &str = "DOWNLOAD_STALLED"; 9 | pub const DOWNLOAD_TOO_SLOW: &str = "DOWNLOAD_TOO_SLOW"; 10 | 11 | // Just extending the `anyhow::Error` 12 | #[derive(Debug)] 13 | pub struct TACommandError { 14 | pub error: anyhow::Error, 15 | pub insight: Option, 16 | } 17 | impl std::error::Error for TACommandError {} 18 | impl std::fmt::Display for TACommandError { 19 | fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { 20 | write!(f, "{:#}", self.error) 21 | } 22 | } 23 | 24 | // Every "renspose" from a tauri command needs to be serializeable into json with serde. 25 | // This is why we cannot use `anyhow` directly. This piece of code fixes that. 
26 | impl Serialize for TACommandError { 27 | fn serialize(&self, serializer: S) -> std::result::Result 28 | where 29 | S: serde::Serializer, 30 | { 31 | #[derive(Serialize)] 32 | struct ErrorWithInsight { 33 | message: String, 34 | insight: Option, 35 | } 36 | 37 | let response = ErrorWithInsight { 38 | message: format!("{:#}", self.error), 39 | insight: self.insight.clone(), 40 | }; 41 | 42 | super::sentry::capture_anyhow(&self.error); 43 | response.serialize(serializer) 44 | } 45 | } 46 | 47 | // Ability to convert between `anyhow::Error` and `TACommandError` 48 | impl From for TACommandError { 49 | fn from(error: anyhow::Error) -> Self { 50 | Self { 51 | error, 52 | insight: None, 53 | } 54 | } 55 | } 56 | 57 | /// Use this as your command's return type. 58 | /// 59 | /// Example usage: 60 | /// ``` 61 | /// #[tauri::command] 62 | /// fn test() -> anyhow_tauri::TAResult { 63 | /// Ok("No error thrown.".into()) 64 | /// } 65 | /// ``` 66 | /// 67 | /// You can find more examples inside the library's repo at `/demo/src-tauri/src/main.rs` 68 | pub type TAResult = std::result::Result; 69 | 70 | pub trait IntoTAResult { 71 | fn into_ta_result(self) -> TAResult; 72 | } 73 | 74 | impl IntoTAResult for std::result::Result 75 | where 76 | E: Into, 77 | { 78 | /// Maps errors, which can be converted into `anyhow`'s error type, into `TACommandError` which can be returned from command call. 79 | /// This is a "quality of life" improvement. 80 | /// 81 | /// Example usage: 82 | /// ``` 83 | /// #[tauri::command] 84 | /// fn test_into_ta_result() -> anyhow_tauri::TAResult { 85 | /// function_that_succeeds().into_ta_result() 86 | /// // could also be written as: 87 | /// // Ok(function_that_succeeds()?) 88 | /// } 89 | /// ``` 90 | fn into_ta_result(self) -> TAResult { 91 | self.map_err(|e| TACommandError { 92 | error: e.into(), 93 | insight: None, 94 | }) 95 | } 96 | } 97 | impl IntoTAResult for anyhow::Error { 98 | /// Maps `anyhow`'s error type into `TACommandError` which can be returned from a command call. 99 | /// This is a "quality of life" improvement. 100 | /// 101 | /// Example usage: 102 | /// ``` 103 | /// #[tauri::command] 104 | /// fn test_into_ta_result() -> anyhow_tauri::TAResult { 105 | /// function_that_succeeds().into_ta_result() 106 | /// // could also be written as: 107 | /// // Ok(function_that_succeeds()?) 
108 | /// }
109 | /// ```
110 | fn into_ta_result(self) -> TAResult<T> {
111 | Err(TACommandError {
112 | error: self,
113 | insight: None,
114 | })
115 | }
116 | }
117 | 
118 | pub trait IntoEmptyTAResult<T> {
119 | /// Useful whenever you want to create `Result<(), TACommandError>` (or `TAResult<()>`)
120 | ///
121 | /// Example usage:
122 | /// ```
123 | /// #[tauri::command]
124 | /// fn test_into_ta_empty_result() -> anyhow_tauri::TAResult<()> {
125 | /// anyhow::anyhow!("Showcase of the .into_ta_empty_result()").into_ta_empty_result()
126 | /// }
127 | /// ```
128 | fn into_ta_empty_result(self) -> TAResult<T>;
129 | }
130 | impl IntoEmptyTAResult<()> for anyhow::Error {
131 | fn into_ta_empty_result(self) -> TAResult<()> {
132 | Err(TACommandError {
133 | error: self,
134 | insight: None,
135 | })
136 | }
137 | }
138 | 
139 | pub trait IntoAnyhow<T> {
140 | // convert TAResult into anyhow::Result
141 | fn into_anyhow(self) -> std::result::Result<T, anyhow::Error>;
142 | }
143 | impl<T> IntoAnyhow<T> for TAResult<T> {
144 | fn into_anyhow(self) -> std::result::Result<T, anyhow::Error> {
145 | self.map_err(|e| e.error)
146 | }
147 | }
148 | 
149 | pub fn return_ta_result<T>(msg: String, ctx: &str) -> TAResult<T> {
150 | Err(TACommandError {
151 | error: anyhow::anyhow!(msg).context(ctx.to_string()),
152 | insight: None,
153 | })
154 | }
155 | 
156 | pub fn return_anyhow_result<T>(msg: String, ctx: &str) -> anyhow::Result<T> {
157 | Err(anyhow::anyhow!(msg).context(ctx.to_string()))
158 | }
159 | 
160 | impl TACommandError {
161 | pub fn new(error: anyhow::Error) -> Self {
162 | Self {
163 | error,
164 | insight: None,
165 | }
166 | }
167 | 
168 | pub fn with_insight(error: anyhow::Error, insight: InsightItem) -> Self {
169 | Self {
170 | error,
171 | insight: Some(insight),
172 | }
173 | }
174 | 
175 | pub fn with_insight_handle(
176 | error: anyhow::Error,
177 | insight_handle: Arc<Mutex<InsightItem>>,
178 | ) -> Self {
179 | let insight = if let Ok(insight) = insight_handle.lock() {
180 | Some(insight.clone())
181 | } else {
182 | None
183 | };
184 | 
185 | Self { error, insight }
186 | }
187 | }
188 | -------------------------------------------------------------------------------- /src-tauri/src/utils/icon.rs: --------------------------------------------------------------------------------
1 | use std::os::windows::ffi::OsStrExt;
2 | use std::path::Path;
3 | use windows::core::PCWSTR;
4 | use windows::Win32::Graphics::Gdi::{
5 | CreateCompatibleDC, DeleteDC, DeleteObject, GetDIBits, GetObjectW, SelectObject, BITMAP,
6 | BITMAPINFO, BITMAPINFOHEADER, BI_RGB, DIB_RGB_COLORS,
7 | };
8 | use windows::Win32::UI::Shell::ExtractIconExW;
9 | use windows::Win32::UI::WindowsAndMessaging::{DestroyIcon, GetIconInfo, HICON, ICONINFO};
10 | 
11 | /// Extracts the icon from an exe and converts it to an RGBA byte array.
12 | /// Returns (rgba_bytes, width, height) or None.
13 | pub fn extract_icon_from_exe(exe_path: &Path) -> Option<(Vec<u8>, u32, u32)> {
14 | unsafe {
15 | // 1.
Extract the icon
16 | let path_wide: Vec<u16> = exe_path
17 | .as_os_str()
18 | .encode_wide()
19 | .chain(std::iter::once(0))
20 | .collect();
21 | 
22 | let mut large_icon = HICON::default();
23 | let mut small_icon = HICON::default();
24 | 
25 | // Extract both the large and small sizes of the first icon
26 | let result = ExtractIconExW(
27 | PCWSTR(path_wide.as_ptr()),
28 | 0, // index 0 = first icon
29 | Some(&mut large_icon), // large icon (48x48 or bigger)
30 | Some(&mut small_icon), // small icon (16x16)
31 | 1, // extract 1 icon
32 | );
33 | 
34 | if result == 0 {
35 | tracing::warn!("ExtractIconExW failed to extract icon");
36 | return None;
37 | }
38 | 
39 | // Prefer the large icon; fall back to the small icon if it does not exist
40 | let hicon = if !large_icon.is_invalid() {
41 | // The small icon, if any, must be cleaned up as well
42 | if !small_icon.is_invalid() {
43 | let _ = DestroyIcon(small_icon);
44 | }
45 | large_icon
46 | } else if !small_icon.is_invalid() {
47 | small_icon
48 | } else {
49 | tracing::warn!("No valid icon extracted");
50 | return None;
51 | };
52 | 
53 | // 2. Get the icon info
54 | let mut icon_info = ICONINFO::default();
55 | if GetIconInfo(hicon, &mut icon_info).is_err() {
56 | let _ = DestroyIcon(hicon);
57 | return None;
58 | }
59 | 
60 | let hbm_color = icon_info.hbmColor;
61 | if hbm_color.is_invalid() {
62 | tracing::warn!("Icon has no color bitmap");
63 | let _ = DeleteObject(icon_info.hbmMask.into());
64 | let _ = DestroyIcon(hicon);
65 | return None;
66 | }
67 | 
68 | // 3. Get the bitmap dimensions
69 | let mut bitmap = BITMAP::default();
70 | if GetObjectW(
71 | hbm_color.into(),
72 | std::mem::size_of::<BITMAP>() as i32,
73 | Some(&mut bitmap as *mut BITMAP as *mut _),
74 | ) == 0
75 | {
76 | // Clean up resources
77 | let _ = DeleteObject(hbm_color.into());
78 | let _ = DeleteObject(icon_info.hbmMask.into());
79 | let _ = DestroyIcon(hicon);
80 | return None;
81 | }
82 | 
83 | let width = bitmap.bmWidth as u32;
84 | let height = bitmap.bmHeight as u32;
85 | 
86 | // 4. Create a device context
87 | let hdc = CreateCompatibleDC(None);
88 | if hdc.is_invalid() {
89 | let _ = DeleteObject(hbm_color.into());
90 | let _ = DeleteObject(icon_info.hbmMask.into());
91 | let _ = DestroyIcon(hicon);
92 | return None;
93 | }
94 | 
95 | let old_bitmap = SelectObject(hdc, hbm_color.into());
96 | 
97 | // 5. Prepare the BITMAPINFO
98 | let mut bmi = BITMAPINFO {
99 | bmiHeader: BITMAPINFOHEADER {
100 | biSize: std::mem::size_of::<BITMAPINFOHEADER>() as u32,
101 | biWidth: width as i32,
102 | biHeight: -(height as i32), // negative height = top-down rows
103 | biPlanes: 1,
104 | biBitCount: 32, // 32-bit BGRA
105 | biCompression: BI_RGB.0,
106 | biSizeImage: 0,
107 | biXPelsPerMeter: 0,
108 | biYPelsPerMeter: 0,
109 | biClrUsed: 0,
110 | biClrImportant: 0,
111 | },
112 | bmiColors: [Default::default(); 1],
113 | };
114 | 
115 | // 6. Extract the BGRA data
116 | let pixel_count = (width * height) as usize;
117 | let mut bgra_data: Vec<u8> = vec![0; pixel_count * 4];
118 | 
119 | let result = GetDIBits(
120 | hdc,
121 | hbm_color,
122 | 0,
123 | height,
124 | Some(bgra_data.as_mut_ptr() as *mut _),
125 | &mut bmi,
126 | DIB_RGB_COLORS,
127 | );
128 | 
129 | // 7. Clean up resources
130 | SelectObject(hdc, old_bitmap);
131 | let _ = DeleteDC(hdc);
132 | let _ = DeleteObject(hbm_color.into());
133 | let _ = DeleteObject(icon_info.hbmMask.into());
134 | let _ = DestroyIcon(hicon);
135 | 
136 | if result == 0 {
137 | tracing::warn!("GetDIBits failed");
138 | return None;
139 | }
140 | 
141 | // 8.
Convert BGRA to RGBA
142 | let rgba_data = bgra_to_rgba(bgra_data);
143 | 
144 | Some((rgba_data, width, height))
145 | }
146 | }
147 | 
148 | /// Converts BGRA bytes to RGBA.
149 | fn bgra_to_rgba(bgra: Vec<u8>) -> Vec<u8> {
150 | let mut rgba = Vec::with_capacity(bgra.len());
151 | 
152 | for chunk in bgra.chunks_exact(4) {
153 | rgba.push(chunk[2]); // R (taken from index 2)
154 | rgba.push(chunk[1]); // G (unchanged)
155 | rgba.push(chunk[0]); // B (taken from index 0)
156 | rgba.push(chunk[3]); // A (unchanged)
157 | }
158 | 
159 | rgba
160 | }
161 | 
162 | /// Extracts the current exe's icon and returns it as a Tauri Image.
163 | /// Returns None on failure (a warning is logged).
164 | pub fn get_exe_icon_for_tauri() -> Option<tauri::image::Image<'static>> {
165 | let exe_path = std::env::current_exe().ok()?;
166 | 
167 | match extract_icon_from_exe(&exe_path) {
168 | Some((rgba_data, width, height)) => {
169 | tracing::info!("Successfully extracted icon: {}x{}", width, height);
170 | // Leak the data to get 'static lifetime
171 | let rgba_static: &'static [u8] = Box::leak(rgba_data.into_boxed_slice());
172 | Some(tauri::image::Image::new(rgba_static, width, height))
173 | }
174 | None => {
175 | tracing::warn!("Failed to extract icon from exe, using default");
176 | None
177 | }
178 | }
179 | }
180 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/HDiff/private_diff/libdivsufsort/divsufsort_private.h: --------------------------------------------------------------------------------
1 | /*
2 | * divsufsort_private.h for libdivsufsort
3 | * Copyright (c) 2003-2008 Yuta Mori All Rights Reserved.
4 | *
5 | * Permission is hereby granted, free of charge, to any person
6 | * obtaining a copy of this software and associated documentation
7 | * files (the "Software"), to deal in the Software without
8 | * restriction, including without limitation the rights to use,
9 | * copy, modify, merge, publish, distribute, sublicense, and/or sell
10 | * copies of the Software, and to permit persons to whom the
11 | * Software is furnished to do so, subject to the following
12 | * conditions:
13 | *
14 | * The above copyright notice and this permission notice shall be
15 | * included in all copies or substantial portions of the Software.
16 | *
17 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
18 | * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
19 | * OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
20 | * NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
21 | * HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
22 | * WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
23 | * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
24 | * OTHER DEALINGS IN THE SOFTWARE.
25 | */ 26 | 27 | #ifndef _DIVSUFSORT_PRIVATE_H 28 | #define _DIVSUFSORT_PRIVATE_H 1 29 | 30 | #if HAVE_CONFIG_H 31 | # include "config.h" 32 | #endif 33 | #include 34 | #include 35 | #if HAVE_STRING_H 36 | # include 37 | #endif 38 | #if HAVE_STDLIB_H 39 | # include 40 | #endif 41 | #if HAVE_MEMORY_H 42 | # include 43 | #endif 44 | #if HAVE_STDDEF_H 45 | # include 46 | #endif 47 | #if HAVE_STRINGS_H 48 | # include 49 | #endif 50 | #if HAVE_INTTYPES_H 51 | # include 52 | #else 53 | # if HAVE_STDINT_H 54 | # include 55 | # endif 56 | #endif 57 | 58 | /*- Constants -*/ 59 | #if !defined(UINT8_MAX) 60 | # define UINT8_MAX (255) 61 | #endif /* UINT8_MAX */ 62 | #if defined(ALPHABET_SIZE) && (ALPHABET_SIZE < 1) 63 | # undef ALPHABET_SIZE 64 | #endif 65 | #if !defined(ALPHABET_SIZE) 66 | # define ALPHABET_SIZE (UINT8_MAX + 1) 67 | #endif 68 | /* for divsufsort.c */ 69 | #define BUCKET_A_SIZE (ALPHABET_SIZE) 70 | #define BUCKET_B_SIZE (ALPHABET_SIZE * ALPHABET_SIZE) 71 | /* for sssort.c */ 72 | #if defined(SS_INSERTIONSORT_THRESHOLD) 73 | # if SS_INSERTIONSORT_THRESHOLD < 1 74 | # undef SS_INSERTIONSORT_THRESHOLD 75 | # define SS_INSERTIONSORT_THRESHOLD (1) 76 | # endif 77 | #else 78 | # define SS_INSERTIONSORT_THRESHOLD (8) 79 | #endif 80 | #if defined(SS_BLOCKSIZE) 81 | # if SS_BLOCKSIZE < 0 82 | # undef SS_BLOCKSIZE 83 | # define SS_BLOCKSIZE (0) 84 | # elif 32768 <= SS_BLOCKSIZE 85 | # undef SS_BLOCKSIZE 86 | # define SS_BLOCKSIZE (32767) 87 | # endif 88 | #else 89 | # define SS_BLOCKSIZE (1024) 90 | #endif 91 | /* minstacksize = log(SS_BLOCKSIZE) / log(3) * 2 */ 92 | #if SS_BLOCKSIZE == 0 93 | # if !defined(BUILD_DIVSUFSORT64) 94 | # define SS_MISORT_STACKSIZE (64) 95 | # else 96 | # define SS_MISORT_STACKSIZE (96) 97 | # endif 98 | #elif SS_BLOCKSIZE <= 4096 99 | # define SS_MISORT_STACKSIZE (16) 100 | #else 101 | # define SS_MISORT_STACKSIZE (24) 102 | #endif 103 | #if !defined(BUILD_DIVSUFSORT64) 104 | # define SS_SMERGE_STACKSIZE (32) 105 | #else 106 | # define SS_SMERGE_STACKSIZE (64) 107 | #endif 108 | /* for trsort.c */ 109 | #define TR_INSERTIONSORT_THRESHOLD (8) 110 | #if !defined(BUILD_DIVSUFSORT64) 111 | # define TR_STACKSIZE (64) 112 | #else 113 | # define TR_STACKSIZE (96) 114 | #endif 115 | 116 | 117 | /*- Macros -*/ 118 | #ifndef SWAP 119 | # define SWAP(_a, _b) do { t = (_a); (_a) = (_b); (_b) = t; } while(0) 120 | #endif /* SWAP */ 121 | #ifndef MIN 122 | # define MIN(_a, _b) (((_a) < (_b)) ? (_a) : (_b)) 123 | #endif /* MIN */ 124 | #ifndef MAX 125 | # define MAX(_a, _b) (((_a) > (_b)) ? 
(_a) : (_b)) 126 | #endif /* MAX */ 127 | #define STACK_PUSH(_a, _b, _c, _d)\ 128 | do {\ 129 | assert(ssize < STACK_SIZE);\ 130 | stack[ssize].a = (_a), stack[ssize].b = (_b),\ 131 | stack[ssize].c = (_c), stack[ssize++].d = (_d);\ 132 | } while(0) 133 | #define STACK_PUSH5(_a, _b, _c, _d, _e)\ 134 | do {\ 135 | assert(ssize < STACK_SIZE);\ 136 | stack[ssize].a = (_a), stack[ssize].b = (_b),\ 137 | stack[ssize].c = (_c), stack[ssize].d = (_d), stack[ssize++].e = (_e);\ 138 | } while(0) 139 | #define STACK_POP(_a, _b, _c, _d)\ 140 | do {\ 141 | assert(0 <= ssize);\ 142 | if(ssize == 0) { return; }\ 143 | (_a) = stack[--ssize].a, (_b) = stack[ssize].b,\ 144 | (_c) = stack[ssize].c, (_d) = stack[ssize].d;\ 145 | } while(0) 146 | #define STACK_POP5(_a, _b, _c, _d, _e)\ 147 | do {\ 148 | assert(0 <= ssize);\ 149 | if(ssize == 0) { return; }\ 150 | (_a) = stack[--ssize].a, (_b) = stack[ssize].b,\ 151 | (_c) = stack[ssize].c, (_d) = stack[ssize].d, (_e) = stack[ssize].e;\ 152 | } while(0) 153 | /* for divsufsort.c */ 154 | #define BUCKET_A(_c0) bucket_A[(_c0)] 155 | #if ALPHABET_SIZE == 256 156 | #define BUCKET_B(_c0, _c1) (bucket_B[((_c1) << 8) | (_c0)]) 157 | #define BUCKET_BSTAR(_c0, _c1) (bucket_B[((_c0) << 8) | (_c1)]) 158 | #else 159 | #define BUCKET_B(_c0, _c1) (bucket_B[(_c1) * ALPHABET_SIZE + (_c0)]) 160 | #define BUCKET_BSTAR(_c0, _c1) (bucket_B[(_c0) * ALPHABET_SIZE + (_c1)]) 161 | #endif 162 | 163 | 164 | /*- Private Prototypes -*/ 165 | /* sssort.c */ 166 | void 167 | sssort(const sauchar_t *Td, const sastore_t *PA, 168 | sastore_t *first, sastore_t *last, 169 | sastore_t *buf, saidx_t bufsize, 170 | saidx_t depth, saidx_t n, saint_t lastsuffix); 171 | /* trsort.c */ 172 | void 173 | trsort(sastore_t *ISA, sastore_t* SA, saidx_t n, saidx_t depth); 174 | 175 | static const int lg_table[256]= { 176 | -1,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4, 177 | 5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5, 178 | 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6, 179 | 6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6, 180 | 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 181 | 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 182 | 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, 183 | 7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7 184 | }; 185 | 186 | #endif /* _DIVSUFSORT_PRIVATE_H */ 187 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | // ^(?:(dfs)\+)?(?:(hashed|packed|auto)\+)?(http(?:s)?:\/\/(?:.*?))$ 2 | export interface SourceItem { 3 | uri: string; 4 | id: string; 5 | name: string; 6 | hidden: boolean; 7 | icon?: string; // 可选的SVG图标字符串 8 | } 9 | export type ProjectConfig = { 10 | source: string | SourceItem[]; 11 | appName: string; 12 | publisher: string; 13 | regName: string; 14 | exeName: string; 15 | uninstallName: string; 16 | updaterName: string; 17 | programFilesPath: string; 18 | userDataPath: string[]; 19 | ignoreFolderPath?: string[]; 20 | extraUninstallPath: string[]; 21 | title: string; 22 | description: string; 23 | windowTitle: string; 24 | // UAC 策略 25 | // prefer-admin: 除非用户安装在%User%、%AppData%、%Documents%、%Desktop%、%Downloads%目录,都请求UAC 26 | // prefer-user: 只在用户没有权限写入的目录请求UAC 27 | // force: 强制请求UAC 28 | uacStrategy: 'prefer-admin' | 'prefer-user' | 'force'; 29 | runtimes?: string[]; 30 | windowBorderless?: 
boolean;
31 | };
32 | 
33 | export type InstallStat = {
34 | speedLastSize: number;
35 | lastTime: DOMHighResTimeStamp;
36 | speed: number;
37 | };
38 | 
39 | export type DfsMetadataHashType = 'md5' | 'xxh';
40 | 
41 | export type DfsMetadataHashInfo = {
42 | file_name: string;
43 | size: number;
44 | md5?: string;
45 | xxh?: string;
46 | installer?: true;
47 | };
48 | 
49 | export type DfsMetadataPatchInfo = {
50 | file_name: string;
51 | size: number;
52 | from: Omit<DfsMetadataHashInfo, 'file_name'>;
53 | to: Omit<DfsMetadataHashInfo, 'file_name'>;
54 | };
55 | 
56 | export interface DfsUpdateTask extends DfsMetadataHashInfo {
57 | patch?: DfsMetadataPatchInfo;
58 | lpatch?: DfsMetadataPatchInfo;
59 | downloaded: number;
60 | running: boolean;
61 | old_hash?: string;
62 | unwritable: boolean;
63 | failed?: true;
64 | errorMessage?: string; // stores the per-file error message from a merged download
65 | }
66 | 
67 | // Types for merged downloads
68 | export interface FileWithPosition extends DfsUpdateTask {
69 | dfsOffset: number;
70 | dfsSize: number;
71 | }
72 | 
73 | export interface MergedGroupInfo {
74 | files: DfsUpdateTask[];
75 | mergedRange: string;
76 | totalDownloadSize: number;
77 | totalEffectiveSize: number;
78 | wasteRatio: number;
79 | gaps: Array<{start: number, end: number}>;
80 | }
81 | 
82 | export interface VirtualMergedFile extends DfsUpdateTask {
83 | _isMergedGroup: true;
84 | _mergedInfo: MergedGroupInfo;
85 | _fallbackFiles: DfsUpdateTask[];
86 | }
87 | 
88 | export type InvokeGetDfsMetadataRes = {
89 | tag_name: string;
90 | hashed: Array<DfsMetadataHashInfo>;
91 | patches?: Array<DfsMetadataPatchInfo>;
92 | installer?: {
93 | size: number;
94 | md5?: string;
95 | xxh?: string;
96 | };
97 | deletes?: string[];
98 | };
99 | 
100 | export type InvokeDeepReaddirWithMetadataRes = Array<{
101 | file_name: string;
102 | size: number;
103 | hash: string;
104 | unwritable: boolean;
105 | }>;
106 | 
107 | export type InvokeGetDfsRes = {
108 | url?: string;
109 | tests?: Array<[string, string]>;
110 | source: string;
111 | };
112 | 
113 | // DFS2 types
114 | export type Dfs2Metadata = {
115 | resource_version: string;
116 | name: string;
117 | data: Dfs2Data | null;
118 | };
119 | 
120 | export type Dfs2Data = {
121 | index: Record<string, Dfs2FileInfo>;
122 | metadata: InvokeGetDfsMetadataRes;
123 | installer_end: number;
124 | };
125 | 
126 | export type Dfs2FileInfo = {
127 | name: string;
128 | offset: number;
129 | raw_offset: number;
130 | size: number;
131 | };
132 | 
133 | export type Dfs2SessionResponse = {
134 | tries?: string[];
135 | sid?: string;
136 | challenge?: string;
137 | data?: string;
138 | };
139 | 
140 | export type Dfs2ChunkResponse = {
141 | url: string;
142 | };
143 | 
144 | export type Dfs2BatchChunkRequest = {
145 | chunks: string[];
146 | };
147 | 
148 | export type Dfs2ChunkUrlResult = {
149 | url?: string;
150 | error?: string;
151 | };
152 | 
153 | export type Dfs2BatchChunkResponse = {
154 | urls: Record<string, Dfs2ChunkUrlResult>;
155 | };
156 | 
157 | export interface InsightItem {
158 | url: string;
159 | ttfb: number; // time to first byte (ms)
160 | time: number; // pure download time (ms) = total time - TTFB
161 | size: number; // actual bytes downloaded
162 | error?: string;
163 | range?: [number, number][]; // HTTP Range request ranges
164 | mode?: string; // install mode
165 | }
166 | 
167 | export interface InstallResult {
168 | bytes_transferred: number;
169 | insight?: InsightItem;
170 | }
171 | 
172 | export type Dfs2SessionInsights = {
173 | servers: InsightItem[];
174 | };
175 | 
176 | export interface TAErrorData {
177 | message: string;
178 | insight?: InsightItem;
179 | }
180 | 
181 | export class TAError extends Error {
182 | public readonly insight?: InsightItem;
183 | 
184 | constructor(data: TAErrorData | string) { 185
| if (typeof data === 'string') { 186 | super(data); 187 | } else { 188 | super(data.message); 189 | this.insight = data.insight; 190 | } 191 | } 192 | 193 | static fromErrorData(data: TAErrorData): TAError { 194 | return new TAError(data); 195 | } 196 | } 197 | 198 | export type InvokeGetDirsRes = [string, string]; 199 | 200 | export type InvokeSelectDirRes = { 201 | path: string; 202 | state: 'Unwritable' | 'Writable' | 'Private'; 203 | empty: boolean; 204 | upgrade: boolean; 205 | } | null; 206 | 207 | export interface Embedded { 208 | name: string; 209 | offset: number; 210 | raw_offset: number; 211 | size: number; 212 | } 213 | 214 | export interface InstallerConfig { 215 | install_path: string; 216 | install_path_exists: boolean; 217 | install_path_source: 218 | | 'CURRENT_DIR' 219 | | 'PARENT_DIR' 220 | | 'REG' 221 | | 'REG_FOLDED' 222 | | 'DEFAULT'; 223 | is_uninstall: boolean; 224 | embedded_files: Embedded[] | null; 225 | embedded_index: Embedded[] | null; 226 | embedded_config: ProjectConfig | null; 227 | enbedded_metadata: InvokeGetDfsMetadataRes | null; 228 | embedded_image: string | null; 229 | exe_path: string; 230 | args: { 231 | target: string | null; 232 | non_interactive: boolean; 233 | silent: boolean; 234 | online: boolean; 235 | uninstall: boolean; 236 | source?: string; 237 | dfs_extras?: string; 238 | mirrorc_cdk?: string; 239 | }; 240 | elevated: boolean; 241 | } 242 | 243 | export interface HttpGetResponse { 244 | status_code: number; 245 | headers: Record; 246 | body: string; 247 | final_url: string; 248 | } 249 | -------------------------------------------------------------------------------- /src-tauri/libs/hdiff-sys/HDiff/private_diff/limit_mem_diff/bloom_filter.h: -------------------------------------------------------------------------------- 1 | //bloom_filter.h 2 | //bloom过滤的一个定制实现. 3 | /* 4 | The MIT License (MIT) 5 | Copyright (c) 2017 HouSisong 6 | 7 | Permission is hereby granted, free of charge, to any person 8 | obtaining a copy of this software and associated documentation 9 | files (the "Software"), to deal in the Software without 10 | restriction, including without limitation the rights to use, 11 | copy, modify, merge, publish, distribute, sublicense, and/or sell 12 | copies of the Software, and to permit persons to whom the 13 | Software is furnished to do so, subject to the following 14 | conditions: 15 | 16 | The above copyright notice and this permission notice shall be 17 | included in all copies of the Software. 18 | 19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 20 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 21 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 22 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 23 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 24 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 25 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 26 | OTHER DEALINGS IN THE SOFTWARE. 
27 | */ 28 | 29 | #ifndef bloom_filter_h 30 | #define bloom_filter_h 31 | #include //memset 32 | #include 33 | #include //std::runtime_error 34 | #include "../../../libParallel/parallel_channel.h" 35 | #if (_IS_USED_MULTITHREAD) 36 | # if defined(ANDROID) && (defined(__GNUC__) || defined(__clang__)) 37 | # define _IS_USED__sync_fetch_and_or 1 38 | # else 39 | # include //need c++11, vc version need vc2012 40 | # endif 41 | #endif 42 | 43 | namespace hdiff_private{ 44 | 45 | class TBitSet{ 46 | public: 47 | inline TBitSet():m_bits(0),m_bitSize(0){} 48 | inline ~TBitSet(){ clear(0); } 49 | 50 | inline void set(size_t bitIndex){ 51 | //assert(bitIndex>kBaseShr] |= ((base_t)1<<(bitIndex&kBaseMask)); 53 | } 54 | #if (_IS_USED_MULTITHREAD) 55 | inline void set_MT(size_t bitIndex){ 56 | //assert(bitIndex>kBaseShr],((base_t)1<<(bitIndex&kBaseMask))); 59 | #else 60 | ((std::atomic*)&m_bits[bitIndex>>kBaseShr])->fetch_or(((base_t)1<<(bitIndex&kBaseMask))); 61 | #endif 62 | } 63 | #endif 64 | inline bool is_hit(size_t bitIndex)const{ 65 | //assert(bitIndex>kBaseShr] & ((base_t)1<<(bitIndex&kBaseMask))); 67 | } 68 | 69 | inline size_t bitSize()const{return m_bitSize; } 70 | 71 | void clear(size_t newBitSize){ 72 | size_t count=bitSizeToCount(newBitSize); 73 | if (newBitSize==m_bitSize){ 74 | if (count>0) memset(m_bits,0,count*sizeof(base_t)); 75 | }else{ 76 | m_bitSize=newBitSize; 77 | if (m_bits) { delete [] m_bits; m_bits=0; } 78 | if (count>0){ 79 | m_bits=new base_t[count]; 80 | memset(m_bits,0,count*sizeof(base_t)); 81 | } 82 | } 83 | } 84 | private: 85 | inline static size_t bitSizeToCount(size_t bitSize){ return (bitSize+(kBaseTBits-1))/kBaseTBits; } 86 | typedef uint32_t base_t; 87 | enum { 88 | kBaseShr=(sizeof(base_t)==8)?6:((sizeof(base_t)==4)?5:0), 89 | kBaseTBits=(1< 99 | class TBloomFilter{ 100 | public: 101 | enum { kZoomMin=3, kZoomBig=32 }; 102 | 103 | inline TBloomFilter():m_bitSetMask(0){} 104 | inline void clear(){ m_bitSet.clear(0); } 105 | void init(size_t dataCount,size_t zoom = kZoomBig){ 106 | m_bitSetMask=getMask(dataCount,zoom);//mask is 2^N-1 107 | m_bitSet.clear(m_bitSetMask+1); 108 | } 109 | inline size_t bitSize()const{ return m_bitSet.bitSize(); } 110 | inline void insert(T data){ 111 | m_bitSet.set(hash0(data)); 112 | m_bitSet.set(hash1(data)); 113 | m_bitSet.set(hash2(data)); 114 | } 115 | #if (_IS_USED_MULTITHREAD) 116 | inline void insert_MT(T data){ 117 | m_bitSet.set_MT(hash0(data)); 118 | m_bitSet.set_MT(hash1(data)); 119 | m_bitSet.set_MT(hash2(data)); 120 | } 121 | #endif 122 | inline bool is_hit(T data)const{ 123 | return m_bitSet.is_hit(hash0(data)) 124 | && m_bitSet.is_hit(hash1(data)) 125 | && m_bitSet.is_hit(hash2(data)); 126 | } 127 | private: 128 | TBitSet m_bitSet; 129 | size_t m_bitSetMask; 130 | static size_t getMask(size_t dataCount,size_t zoom){ 131 | if (zoom>(sizeof(T)*4)))&m_bitSetMask; } 143 | inline size_t hash1(T key)const { return ((~key)+(key << (sizeof(T)*2+1))+1)%m_bitSetMask; } 144 | inline size_t hash2(T key)const { 145 | size_t h=(sizeof(T)>4)?_hash2_64(key):_hash2_32((size_t)key); return h%(m_bitSetMask-1); } 146 | static size_t _hash2_32(size_t key){//from: https://gist.github.com/badboy/6267743 147 | const size_t c2=0x27d4eb2d; // a prime or an odd constant 148 | key = (key ^ 61) ^ (key >> 16); 149 | key = key + (key << 3); 150 | key = key ^ (key >> 4); 151 | key = key * c2; 152 | key = key ^ (key >> 15); 153 | return key; 154 | } 155 | static size_t _hash2_64(T key){ 156 | key = (~key) + (key << 18); // key = (key << 18) - key - 1; 
157 | key = key ^ (key >> 31);
158 | key = key * 21; // key = (key + (key << 2)) + (key << 4);
159 | key = key ^ (key >> 11);
160 | key = key + (key << 6);
161 | key = key ^ (key >> 22);
162 | return (size_t)key;
163 | }
164 | };
165 | 
166 | }//namespace hdiff_private
167 | #endif /* bloom_filter_h */
168 | -------------------------------------------------------------------------------- /src-tauri/src/utils/url.rs: --------------------------------------------------------------------------------
1 | use anyhow::Context;
2 | use url::Url;
3 | 
4 | /// Sanitizes a URL for logging by removing query parameters and fragments
5 | /// to prevent sensitive data (tokens, session IDs, etc.) from appearing in logs.
6 | ///
7 | /// # Arguments
8 | /// * `url` - The URL to sanitize
9 | ///
10 | /// # Returns
11 | /// A sanitized URL containing only protocol, host, and path
12 | ///
13 | /// # Examples
14 | /// ```
15 | /// let sanitized = sanitize_url_for_logging("https://api.example.com/data?token=secret&id=123#section");
16 | /// assert_eq!(sanitized, "https://api.example.com/data");
17 | /// ```
18 | pub fn sanitize_url_for_logging(url: &str) -> String {
19 | match Url::parse(url) {
20 | Ok(parsed) => {
21 | let mut sanitized = String::new();
22 | 
23 | // Add scheme
24 | sanitized.push_str(parsed.scheme());
25 | sanitized.push_str("://");
26 | 
27 | // Add host
28 | if let Some(host) = parsed.host_str() {
29 | sanitized.push_str(host);
30 | 
31 | // Add port if present and not default
32 | if let Some(port) = parsed.port() {
33 | sanitized.push(':');
34 | sanitized.push_str(&port.to_string());
35 | }
36 | }
37 | 
38 | // Add path
39 | sanitized.push_str(parsed.path());
40 | 
41 | sanitized
42 | }
43 | Err(_) => {
44 | // If URL parsing fails, try to extract basic components manually
45 | if let Some(query_start) = url.find('?') {
46 | url[..query_start].to_string()
47 | } else if let Some(fragment_start) = url.find('#') {
48 | url[..fragment_start].to_string()
49 | } else {
50 | url.to_string()
51 | }
52 | }
53 | }
54 | }
55 | 
56 | /// Creates a standardized error context for reqwest HTTP requests
57 | ///
58 | /// # Arguments
59 | /// * `function_name` - Name of the function where the error occurred
60 | /// * `url` - The URL that was being requested (will be sanitized)
61 | /// * `error_type` - Type of error (e.g., "HTTP_REQUEST_ERR", "HTTP_STATUS_ERR")
62 | ///
63 | /// # Returns
64 | /// A formatted error context string
65 | pub fn create_reqwest_context(function_name: &str, url: &str, error_type: &str) -> String {
66 | let sanitized_url = sanitize_url_for_logging(url);
67 | format!("{} in {}: {}", error_type, function_name, sanitized_url)
68 | }
69 | 
70 | /// Extension trait for adding HTTP context to anyhow errors
71 | pub trait HttpContextExt<T> {
72 | /// Adds HTTP request context to an anyhow Result
73 | fn with_http_context(self, function_name: &str, url: &str) -> anyhow::Result<T>;
74 | 
75 | /// Adds HTTP status context to an anyhow Result
76 | fn with_http_status_context(self, function_name: &str, url: &str) -> anyhow::Result<T>;
77 | 
78 | /// Adds generic HTTP error context to an anyhow Result
79 | fn with_http_error_context(
80 | self,
81 | function_name: &str,
82 | url: &str,
83 | error_type: &str,
84 | ) -> anyhow::Result<T>;
85 | }
86 | 
87 | impl<T> HttpContextExt<T> for Result<T, reqwest::Error> {
88 | fn with_http_context(self, function_name: &str, url: &str) -> anyhow::Result<T> {
89 | self.context(create_reqwest_context(
90 | function_name,
91 | url,
92 | "HTTP_REQUEST_ERR",
93 | ))
94 | }
95 | 
96 | fn
with_http_status_context(self, function_name: &str, url: &str) -> anyhow::Result<T> {
97 | self.context(create_reqwest_context(
98 | function_name,
99 | url,
100 | "HTTP_STATUS_ERR",
101 | ))
102 | }
103 | 
104 | fn with_http_error_context(
105 | self,
106 | function_name: &str,
107 | url: &str,
108 | error_type: &str,
109 | ) -> anyhow::Result<T> {
110 | self.context(create_reqwest_context(function_name, url, error_type))
111 | }
112 | }
113 | 
114 | impl<T> HttpContextExt<T> for anyhow::Result<T> {
115 | fn with_http_context(self, function_name: &str, url: &str) -> anyhow::Result<T> {
116 | self.context(create_reqwest_context(
117 | function_name,
118 | url,
119 | "HTTP_REQUEST_ERR",
120 | ))
121 | }
122 | 
123 | fn with_http_status_context(self, function_name: &str, url: &str) -> anyhow::Result<T> {
124 | self.context(create_reqwest_context(
125 | function_name,
126 | url,
127 | "HTTP_STATUS_ERR",
128 | ))
129 | }
130 | 
131 | fn with_http_error_context(
132 | self,
133 | function_name: &str,
134 | url: &str,
135 | error_type: &str,
136 | ) -> anyhow::Result<T> {
137 | self.context(create_reqwest_context(function_name, url, error_type))
138 | }
139 | }
140 | 
141 | #[cfg(test)]
142 | mod tests {
143 | use super::*;
144 | 
145 | #[test]
146 | fn test_sanitize_url_for_logging() {
147 | // Test with query parameters
148 | assert_eq!(
149 | sanitize_url_for_logging("https://api.example.com/data?token=secret&id=123"),
150 | "https://api.example.com/data"
151 | );
152 | 
153 | // Test with fragment
154 | assert_eq!(
155 | sanitize_url_for_logging("https://example.com/page#section"),
156 | "https://example.com/page"
157 | );
158 | 
159 | // Test with both query and fragment
160 | assert_eq!(
161 | sanitize_url_for_logging("https://api.example.com/data?key=value#top"),
162 | "https://api.example.com/data"
163 | );
164 | 
165 | // Test with port
166 | assert_eq!(
167 | sanitize_url_for_logging("https://api.example.com:8080/data?token=secret"),
168 | "https://api.example.com:8080/data"
169 | );
170 | 
171 | // Test clean URL (no changes needed)
172 | assert_eq!(
173 | sanitize_url_for_logging("https://api.example.com/data"),
174 | "https://api.example.com/data"
175 | );
176 | 
177 | // Test with path containing sensitive info (should be preserved as it's part of the path)
178 | assert_eq!(
179 | sanitize_url_for_logging("https://api.example.com/users/123/profile?token=secret"),
180 | "https://api.example.com/users/123/profile"
181 | );
182 | }
183 | 
184 | #[test]
185 | fn test_create_reqwest_context() {
186 | let context = create_reqwest_context(
187 | "get_user_data",
188 | "https://api.example.com/users?token=secret",
189 | "HTTP_REQUEST_ERR",
190 | );
191 | assert_eq!(
192 | context,
193 | "HTTP_REQUEST_ERR in get_user_data: https://api.example.com/users"
194 | );
195 | }
196 | }
197 | --------------------------------------------------------------------------------
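A minimal usage sketch for the `HttpContextExt` helpers in `src-tauri/src/utils/url.rs`, assuming the reqwest-flavoured impl targets `Result<T, reqwest::Error>` as reconstructed above; the client, function name, and endpoint are hypothetical, not taken from the repository:

```rust
use crate::utils::url::HttpContextExt;

// Hypothetical caller: fetch a metadata document and attach sanitized-URL
// context to every failure so that query-string tokens never reach the logs.
async fn fetch_metadata(client: &reqwest::Client, url: &str) -> anyhow::Result<String> {
    let resp = client
        .get(url)
        .send()
        .await
        // error context becomes "HTTP_REQUEST_ERR in fetch_metadata: <scheme://host/path>"
        .with_http_context("fetch_metadata", url)?;
    let resp = resp
        .error_for_status()
        // error context becomes "HTTP_STATUS_ERR in fetch_metadata: <scheme://host/path>"
        .with_http_status_context("fetch_metadata", url)?;
    resp.text().await.with_http_context("fetch_metadata", url)
}
```

Because the context string is built through `sanitize_url_for_logging`, only the scheme, host, port, and path survive into the error chain; query parameters and fragments are dropped before anything is logged or reported.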