├── .gitignore ├── server ├── .gitignore ├── package.json ├── tsconfig.json ├── bun.lock ├── src │ ├── index.ts │ ├── git-http-sqlite.ts │ ├── git-storage.ts │ └── git-pack.ts ├── README.md └── test │ └── server.test.ts ├── test ├── globalSetup.ts ├── globalTeardown.ts ├── vitest.config.ts ├── package.json ├── .gitignore ├── src │ ├── diff.bench.ts │ ├── log.bench.ts │ ├── status.bench.ts │ ├── add.bench.ts │ ├── commit.bench.ts │ ├── add.test.ts │ ├── status.test.ts │ ├── shared.ts │ ├── log.test.ts │ ├── commit.test.ts │ ├── diff.test.ts │ └── fork.test.ts ├── tsconfig.json └── fixtures │ └── setup.ts ├── .github └── workflows │ ├── ci.yml │ └── release.yml ├── docs ├── style.md ├── architecture.md ├── motivation.md └── features.md ├── LICENCE ├── src ├── passthrough.zig ├── cmds │ ├── git.zig │ ├── add.zig │ ├── status.zig │ └── alias.zig ├── main.zig └── guardrails.zig ├── README.md ├── install.sh └── AGENTS.md /.gitignore: -------------------------------------------------------------------------------- 1 | .zig-cache 2 | zig-out 3 | test/fixtures/repos/ 4 | todo.md 5 | .forks/ 6 | -------------------------------------------------------------------------------- /server/.gitignore: -------------------------------------------------------------------------------- 1 | .data/ 2 | node_modules/ 3 | *.sqlite 4 | test/.test-data/ 5 | test/.test-clones/ 6 | -------------------------------------------------------------------------------- /test/globalSetup.ts: -------------------------------------------------------------------------------- 1 | import { cleanupFixtures } from "./fixtures/setup"; 2 | 3 | export default function () { 4 | // Clean up any leftover fixtures from previous runs 5 | cleanupFixtures(); 6 | } 7 | -------------------------------------------------------------------------------- /test/globalTeardown.ts: -------------------------------------------------------------------------------- 1 | import { cleanupFixtures } from "./fixtures/setup"; 2 | 3 | export default function () { 4 | // Clean up all fixtures after tests complete 5 | cleanupFixtures(); 6 | } 7 | -------------------------------------------------------------------------------- /test/vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "vitest/config"; 2 | 3 | export default defineConfig({ 4 | test: { 5 | include: ["src/**/*.test.ts"], 6 | globalSetup: "./globalSetup.ts", 7 | globalTeardown: "./globalTeardown.ts", 8 | benchmark: { 9 | include: ["src/**/*.bench.ts"], 10 | }, 11 | }, 12 | }); 13 | -------------------------------------------------------------------------------- /test/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "zagi-bench", 3 | "type": "module", 4 | "private": true, 5 | "scripts": { 6 | "bench": "vitest bench", 7 | "test": "vitest run" 8 | }, 9 | "devDependencies": { 10 | "@types/bun": "latest", 11 | "vitest": "^4.0.16" 12 | }, 13 | "peerDependencies": { 14 | "typescript": "^5" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /server/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "zagi-server", 3 | "type": "module", 4 | "private": true, 5 | "scripts": { 6 | "dev": "bun run --watch src/index.ts", 7 | "start": "bun run src/index.ts", 8 | "test": "bun test" 9 | }, 10 | "devDependencies": { 11 | "@types/bun": "latest" 12 | }, 13 | "peerDependencies": 
{ 14 | "typescript": "^5" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /test/.gitignore: -------------------------------------------------------------------------------- 1 | # dependencies (bun install) 2 | node_modules 3 | 4 | # output 5 | out 6 | dist 7 | *.tgz 8 | 9 | # code coverage 10 | coverage 11 | *.lcov 12 | 13 | # logs 14 | logs 15 | _.log 16 | report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json 17 | 18 | # dotenv environment variable files 19 | .env 20 | .env.development.local 21 | .env.test.local 22 | .env.production.local 23 | .env.local 24 | 25 | # caches 26 | .eslintcache 27 | .cache 28 | *.tsbuildinfo 29 | 30 | # IntelliJ based IDEs 31 | .idea 32 | 33 | # Finder (MacOS) folder config 34 | .DS_Store 35 | -------------------------------------------------------------------------------- /server/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "lib": ["ESNext"], 4 | "target": "ESNext", 5 | "module": "Preserve", 6 | "moduleDetection": "force", 7 | "allowJs": true, 8 | "moduleResolution": "bundler", 9 | "allowImportingTsExtensions": true, 10 | "verbatimModuleSyntax": true, 11 | "noEmit": true, 12 | "strict": true, 13 | "skipLibCheck": true, 14 | "noFallthroughCasesInSwitch": true, 15 | "noUncheckedIndexedAccess": true, 16 | "noImplicitOverride": true, 17 | "noUnusedLocals": false, 18 | "noUnusedParameters": false 19 | }, 20 | "exclude": [".repos"] 21 | } 22 | -------------------------------------------------------------------------------- /test/src/diff.bench.ts: -------------------------------------------------------------------------------- 1 | import { describe, bench, beforeAll, afterAll } from "vitest"; 2 | import { rmSync } from "fs"; 3 | import { createFixtureRepo } from "../fixtures/setup"; 4 | import { zagi, git } from "./shared"; 5 | 6 | let REPO_DIR: string; 7 | 8 | beforeAll(() => { 9 | REPO_DIR = createFixtureRepo(); 10 | }); 11 | 12 | afterAll(() => { 13 | if (REPO_DIR) { 14 | rmSync(REPO_DIR, { recursive: true, force: true }); 15 | } 16 | }); 17 | 18 | describe("git diff benchmarks", () => { 19 | bench("zagi diff", () => { 20 | zagi(["diff"], { cwd: REPO_DIR }); 21 | }); 22 | 23 | bench("git diff", () => { 24 | git(["diff"], { cwd: REPO_DIR }); 25 | }); 26 | 27 | bench("git diff --stat", () => { 28 | git(["diff", "--stat"], { cwd: REPO_DIR }); 29 | }); 30 | }); 31 | -------------------------------------------------------------------------------- /test/src/log.bench.ts: -------------------------------------------------------------------------------- 1 | import { describe, bench, beforeAll, afterAll } from "vitest"; 2 | import { rmSync } from "fs"; 3 | import { createFixtureRepo } from "../fixtures/setup"; 4 | import { zagi, git } from "./shared"; 5 | 6 | let REPO_DIR: string; 7 | 8 | beforeAll(() => { 9 | REPO_DIR = createFixtureRepo(); 10 | }); 11 | 12 | afterAll(() => { 13 | if (REPO_DIR) { 14 | rmSync(REPO_DIR, { recursive: true, force: true }); 15 | } 16 | }); 17 | 18 | describe("git log benchmarks", () => { 19 | bench("zagi log (default)", () => { 20 | zagi(["log"], { cwd: REPO_DIR }); 21 | }); 22 | 23 | bench("git log -n 10", () => { 24 | git(["log", "-n", "10"], { cwd: REPO_DIR }); 25 | }); 26 | 27 | bench("git log --oneline -n 10", () => { 28 | git(["log", "--oneline", "-n", "10"], { cwd: REPO_DIR }); 29 | }); 30 | }); 31 | -------------------------------------------------------------------------------- /test/tsconfig.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | // Environment setup & latest features 4 | "lib": ["ESNext"], 5 | "target": "ESNext", 6 | "module": "Preserve", 7 | "moduleDetection": "force", 8 | "jsx": "react-jsx", 9 | "allowJs": true, 10 | 11 | // Bundler mode 12 | "moduleResolution": "bundler", 13 | "allowImportingTsExtensions": true, 14 | "verbatimModuleSyntax": true, 15 | "noEmit": true, 16 | 17 | // Best practices 18 | "strict": true, 19 | "skipLibCheck": true, 20 | "noFallthroughCasesInSwitch": true, 21 | "noUncheckedIndexedAccess": true, 22 | "noImplicitOverride": true, 23 | 24 | // Some stricter flags (disabled by default) 25 | "noUnusedLocals": false, 26 | "noUnusedParameters": false, 27 | "noPropertyAccessFromIndexSignature": false 28 | }, 29 | "exclude": ["fixtures/repos"] 30 | } 31 | -------------------------------------------------------------------------------- /test/src/status.bench.ts: -------------------------------------------------------------------------------- 1 | import { describe, bench, beforeAll, afterAll } from "vitest"; 2 | import { rmSync } from "fs"; 3 | import { createFixtureRepo } from "../fixtures/setup"; 4 | import { zagi, git } from "./shared"; 5 | 6 | let REPO_DIR: string; 7 | 8 | beforeAll(() => { 9 | REPO_DIR = createFixtureRepo(); 10 | }); 11 | 12 | afterAll(() => { 13 | if (REPO_DIR) { 14 | rmSync(REPO_DIR, { recursive: true, force: true }); 15 | } 16 | }); 17 | 18 | describe("git status benchmarks", () => { 19 | bench("zagi status", () => { 20 | zagi(["status"], { cwd: REPO_DIR }); 21 | }); 22 | 23 | bench("git status", () => { 24 | git(["status"], { cwd: REPO_DIR }); 25 | }); 26 | 27 | bench("git status --porcelain", () => { 28 | git(["status", "--porcelain"], { cwd: REPO_DIR }); 29 | }); 30 | 31 | bench("git status -s", () => { 32 | git(["status", "-s"], { cwd: REPO_DIR }); 33 | }); 34 | }); 35 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | jobs: 10 | build-and-test: 11 | runs-on: ubuntu-latest 12 | 13 | steps: 14 | - uses: actions/checkout@v4 15 | 16 | - name: Install Zig 17 | run: | 18 | curl -L https://ziglang.org/download/0.15.2/zig-x86_64-linux-0.15.2.tar.xz | tar -xJ 19 | echo "$PWD/zig-x86_64-linux-0.15.2" >> $GITHUB_PATH 20 | 21 | - name: Setup Bun 22 | uses: oven-sh/setup-bun@v2 23 | 24 | - name: Build zagi 25 | run: zig build -Doptimize=ReleaseFast 26 | 27 | - name: Run zig tests 28 | run: zig build test 29 | 30 | - name: Install test dependencies 31 | working-directory: test 32 | run: bun install 33 | 34 | - name: Run integration tests 35 | working-directory: test 36 | run: bun run test 37 | -------------------------------------------------------------------------------- /test/src/add.bench.ts: -------------------------------------------------------------------------------- 1 | import { describe, bench, beforeAll, afterAll, beforeEach } from "vitest"; 2 | import { rmSync } from "fs"; 3 | import { createFixtureRepo } from "../fixtures/setup"; 4 | import { zagi, git } from "./shared"; 5 | 6 | let REPO_DIR: string; 7 | 8 | beforeAll(() => { 9 | REPO_DIR = createFixtureRepo(); 10 | }); 11 | 12 | afterAll(() => { 13 | if (REPO_DIR) { 14 | rmSync(REPO_DIR, { recursive: true, force: true }); 15 | } 16 | }); 17 | 18 | describe("git add 
benchmarks", () => { 19 | beforeEach(() => { 20 | try { 21 | git(["reset", "HEAD", "."], { cwd: REPO_DIR }); 22 | } catch {} 23 | }); 24 | 25 | bench("zagi add (single file)", () => { 26 | zagi(["add", "src/new-file.ts"], { cwd: REPO_DIR }); 27 | }); 28 | 29 | bench("git add (single file)", () => { 30 | git(["add", "src/new-file.ts"], { cwd: REPO_DIR }); 31 | }); 32 | 33 | bench("zagi add . (all)", () => { 34 | zagi(["add", "."], { cwd: REPO_DIR }); 35 | }); 36 | 37 | bench("git add . (all)", () => { 38 | git(["add", "."], { cwd: REPO_DIR }); 39 | }); 40 | }); 41 | -------------------------------------------------------------------------------- /docs/style.md: -------------------------------------------------------------------------------- 1 | # Style Guide 2 | 3 | ## Naming 4 | 5 | - zagi is always lowercase, even at the start of a sentence 6 | - git is always lowercase 7 | 8 | ## Documentation 9 | 10 | - No emojis 11 | - Minimal formatting - use plain text where possible 12 | - Code blocks for commands and output examples 13 | - Tables only when comparing things side by side 14 | 15 | ## Code 16 | 17 | - Zig standard library naming conventions 18 | - snake_case for functions and variables 19 | - PascalCase for types 20 | - Return errors rather than calling exit() in command modules 21 | 22 | ## Output messages 23 | 24 | - Lowercase, no trailing punctuation 25 | - Concise - every word must earn its place 26 | - Show what happened, not instructions on what to do next 27 | 28 | Good: 29 | ``` 30 | staged: 3 files 31 | committed: abc123f "message" 32 | error: file not found 33 | ``` 34 | 35 | Bad: 36 | ``` 37 | Successfully staged 3 files! 38 | Use git commit to commit your changes. 39 | ERROR: The file was not found. 40 | ``` 41 | 42 | ## Help text 43 | 44 | - Use `git` not `zagi` in usage examples (users alias git to zagi) 45 | - Exception: `zagi alias` since it must be run as zagi 46 | -------------------------------------------------------------------------------- /test/src/commit.bench.ts: -------------------------------------------------------------------------------- 1 | import { bench, describe, beforeAll, afterAll } from "vitest"; 2 | import { resolve } from "path"; 3 | import { writeFileSync, rmSync } from "fs"; 4 | import { createFixtureRepo } from "../fixtures/setup"; 5 | import { zagi, git } from "./shared"; 6 | 7 | let REPO_DIR: string; 8 | 9 | beforeAll(() => { 10 | REPO_DIR = createFixtureRepo(); 11 | }); 12 | 13 | afterAll(() => { 14 | if (REPO_DIR) { 15 | rmSync(REPO_DIR, { recursive: true, force: true }); 16 | } 17 | }); 18 | 19 | describe("git add + commit benchmarks", () => { 20 | const uid = () => `${Date.now()}-${Math.random().toString(36).slice(2, 8)}`; 21 | 22 | bench("zagi add + commit", () => { 23 | const id = uid(); 24 | const testFile = resolve(REPO_DIR, `zagi-${id}.txt`); 25 | writeFileSync(testFile, `zagi bench ${id}\n`); 26 | zagi(["add", testFile], { cwd: REPO_DIR }); 27 | zagi(["commit", "-m", `zagi ${id}`], { cwd: REPO_DIR }); 28 | }); 29 | 30 | bench("git add + commit", () => { 31 | const id = uid(); 32 | const testFile = resolve(REPO_DIR, `git-${id}.txt`); 33 | writeFileSync(testFile, `git bench ${id}\n`); 34 | git(["add", testFile], { cwd: REPO_DIR }); 35 | git(["commit", "-m", `git ${id}`], { cwd: REPO_DIR }); 36 | }); 37 | }); 38 | -------------------------------------------------------------------------------- /server/bun.lock: -------------------------------------------------------------------------------- 1 | { 2 | "lockfileVersion": 1, 3 | 
"workspaces": { 4 | "": { 5 | "name": "zagi-server", 6 | "devDependencies": { 7 | "@types/bun": "latest", 8 | }, 9 | "peerDependencies": { 10 | "typescript": "^5", 11 | }, 12 | }, 13 | }, 14 | "packages": { 15 | "@types/bun": ["@types/bun@1.3.5", "", { "dependencies": { "bun-types": "1.3.5" } }, "sha512-RnygCqNrd3srIPEWBd5LFeUYG7plCoH2Yw9WaZGyNmdTEei+gWaHqydbaIRkIkcbXwhBT94q78QljxN0Sk838w=="], 16 | 17 | "@types/node": ["@types/node@25.0.3", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-W609buLVRVmeW693xKfzHeIV6nJGGz98uCPfeXI1ELMLXVeKYZ9m15fAMSaUPBHYLGFsVRcMmSCksQOrZV9BYA=="], 18 | 19 | "bun-types": ["bun-types@1.3.5", "", { "dependencies": { "@types/node": "*" } }, "sha512-inmAYe2PFLs0SUbFOWSVD24sg1jFlMPxOjOSSCYqUgn4Hsc3rDc7dFvfVYjFPNHtov6kgUeulV4SxbuIV/stPw=="], 20 | 21 | "typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="], 22 | 23 | "undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="], 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /test/src/add.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, test, expect, beforeEach, afterEach } from "vitest"; 2 | import { rmSync } from "fs"; 3 | import { createFixtureRepo } from "../fixtures/setup"; 4 | import { zagi, git } from "./shared"; 5 | 6 | let REPO_DIR: string; 7 | 8 | beforeEach(() => { 9 | REPO_DIR = createFixtureRepo(); 10 | }); 11 | 12 | afterEach(() => { 13 | if (REPO_DIR) { 14 | rmSync(REPO_DIR, { recursive: true, force: true }); 15 | } 16 | }); 17 | 18 | describe("zagi add", () => { 19 | test("shows confirmation after adding file", () => { 20 | const result = zagi(["add", "src/new-file.ts"], { cwd: REPO_DIR }); 21 | 22 | expect(result).toContain("staged:"); 23 | expect(result).toContain("A "); 24 | expect(result).toContain("new-file.ts"); 25 | }); 26 | 27 | test("shows count of staged files", () => { 28 | const result = zagi(["add", "src/new-file.ts"], { cwd: REPO_DIR }); 29 | 30 | expect(result).toMatch(/staged: \d+ file/); 31 | }); 32 | 33 | test("error message is concise for missing file", () => { 34 | const result = zagi(["add", "nonexistent.txt"], { cwd: REPO_DIR }); 35 | 36 | expect(result).toBe("error: file not found\n"); 37 | }); 38 | 39 | test("git add is silent on success", () => { 40 | const result = git(["add", "src/new-file.ts"], { cwd: REPO_DIR }); 41 | 42 | expect(result).toBe(""); 43 | }); 44 | 45 | test("zagi add provides feedback", () => { 46 | const result = zagi(["add", "src/new-file.ts"], { cwd: REPO_DIR }); 47 | 48 | expect(result.length).toBeGreaterThan(0); 49 | }); 50 | }); 51 | -------------------------------------------------------------------------------- /docs/architecture.md: -------------------------------------------------------------------------------- 1 | # Architecture 2 | 3 | ## Overview 4 | 5 | zagi is a Zig binary that wraps git commands. It uses libgit2 for git operations rather than shelling out to git, which provides better control over output formatting. 
6 | 7 | ## Components 8 | 9 | ``` 10 | src/ 11 | main.zig # entry point, command routing 12 | passthrough.zig # delegates to git CLI 13 | cmds/ 14 | git.zig # shared utilities (status markers, etc) 15 | status.zig # git status 16 | log.zig # git log 17 | diff.zig # git diff 18 | add.zig # git add 19 | commit.zig # git commit 20 | alias.zig # zagi alias (shell setup) 21 | ``` 22 | 23 | ## Command flow 24 | 25 | 1. Parse command line args 26 | 2. Check for `-g` flag - if present, pass through to git 27 | 3. Route to command handler based on first arg 28 | 4. If no handler exists, pass through to git 29 | 5. Command handler uses libgit2 to perform operation 30 | 6. Format output in concise style 31 | 7. Return appropriate exit code 32 | 33 | ## libgit2 integration 34 | 35 | zagi uses libgit2 via Zig's C interop: 36 | 37 | ```zig 38 | const c = @cImport(@cInclude("git2.h")); 39 | ``` 40 | 41 | This provides: 42 | - Direct repository access without subprocess overhead 43 | - Fine-grained control over output formatting 44 | - Consistent behavior across platforms 45 | 46 | ## Error handling 47 | 48 | Commands return errors to main.zig rather than calling exit directly: 49 | 50 | ```zig 51 | pub fn run(allocator: std.mem.Allocator, args: [][:0]u8) git.Error!void { 52 | // return errors, don't exit 53 | } 54 | ``` 55 | 56 | This enables unit testing and centralizes exit code handling. 57 | 58 | ## Testing 59 | 60 | Two levels of tests: 61 | 62 | 1. Zig unit tests - test pure functions (formatters, parsers) 63 | 2. TypeScript integration tests - test end-to-end behavior via `test/` 64 | 65 | The integration tests create temporary git repos and verify output format and correctness. 66 | -------------------------------------------------------------------------------- /LICENCE: -------------------------------------------------------------------------------- 1 | zagi is MIT licensed, with some parts under different licenses. 2 | 3 | ## zagi 4 | 5 | MIT License 6 | 7 | Copyright (c) 2025 Matt Carey 8 | 9 | Permission is hereby granted, free of charge, to any person obtaining a copy 10 | of this software and associated documentation files (the "Software"), to deal 11 | in the Software without restriction, including without limitation the rights 12 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 13 | copies of the Software, and to permit persons to whom the Software is 14 | furnished to do so, subject to the following conditions: 15 | 16 | The above copyright notice and this permission notice shall be included in all 17 | copies or substantial portions of the Software. 18 | 19 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 20 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 21 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 22 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 23 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 24 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 25 | SOFTWARE. 26 | 27 | --- 28 | 29 | ## Linked Libraries 30 | 31 | zagi statically links against the following libraries: 32 | 33 | ### libgit2 34 | 35 | libgit2 is Copyright (C) the libgit2 contributors.
36 | 37 | libgit2 is licensed under GPLv2 with a linking exception: 38 | 39 | > In addition to the permissions in the GNU General Public License, 40 | > the authors give you unlimited permission to link the compiled 41 | > version of this library into combinations with other programs, 42 | > and to distribute those combinations without any restriction 43 | > coming from the use of this file. (The General Public License 44 | > restrictions do apply in other respects; for example, they cover 45 | > modification of the file, and distribution when not linked into 46 | > a combined executable.) 47 | 48 | The full libgit2 license is available at: 49 | https://github.com/libgit2/libgit2/blob/main/COPYING 50 | 51 | If you modify libgit2 itself, you must distribute the source code of your 52 | modifications under the terms of the GPLv2. 53 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' 7 | 8 | permissions: 9 | contents: write 10 | 11 | jobs: 12 | build: 13 | strategy: 14 | matrix: 15 | include: 16 | - os: ubuntu-latest 17 | target: native 18 | artifact: zagi-linux-x86_64 19 | zig_url: https://ziglang.org/download/0.15.2/zig-x86_64-linux-0.15.2.tar.xz 20 | zig_dir: zig-x86_64-linux-0.15.2 21 | - os: macos-15-intel 22 | target: native 23 | artifact: zagi-macos-x86_64 24 | zig_url: https://ziglang.org/download/0.15.2/zig-x86_64-macos-0.15.2.tar.xz 25 | zig_dir: zig-x86_64-macos-0.15.2 26 | - os: macos-latest 27 | target: native 28 | artifact: zagi-macos-aarch64 29 | zig_url: https://ziglang.org/download/0.15.2/zig-aarch64-macos-0.15.2.tar.xz 30 | zig_dir: zig-aarch64-macos-0.15.2 31 | - os: ubuntu-latest 32 | target: aarch64-linux-gnu 33 | artifact: zagi-linux-aarch64 34 | zig_url: https://ziglang.org/download/0.15.2/zig-x86_64-linux-0.15.2.tar.xz 35 | zig_dir: zig-x86_64-linux-0.15.2 36 | 37 | runs-on: ${{ matrix.os }} 38 | 39 | steps: 40 | - uses: actions/checkout@v4 41 | 42 | - name: Install Zig 43 | run: | 44 | curl -L ${{ matrix.zig_url }} | tar -xJ 45 | echo "$PWD/${{ matrix.zig_dir }}" >> $GITHUB_PATH 46 | 47 | - name: Build (native) 48 | if: matrix.target == 'native' 49 | run: zig build -Doptimize=ReleaseFast 50 | 51 | - name: Build (cross-compile) 52 | if: matrix.target != 'native' 53 | run: zig build -Doptimize=ReleaseFast -Dtarget=${{ matrix.target }} 54 | 55 | - name: Package 56 | run: | 57 | cd zig-out/bin 58 | tar -czvf ../../${{ matrix.artifact }}.tar.gz zagi 59 | 60 | - name: Upload artifact 61 | uses: actions/upload-artifact@v4 62 | with: 63 | name: ${{ matrix.artifact }} 64 | path: ${{ matrix.artifact }}.tar.gz 65 | 66 | release: 67 | needs: build 68 | runs-on: ubuntu-latest 69 | 70 | steps: 71 | - uses: actions/checkout@v4 72 | 73 | - name: Download all artifacts 74 | uses: actions/download-artifact@v4 75 | with: 76 | path: artifacts 77 | 78 | - name: Create Release 79 | uses: softprops/action-gh-release@v1 80 | with: 81 | files: artifacts/**/*.tar.gz 82 | generate_release_notes: true 83 | -------------------------------------------------------------------------------- /src/passthrough.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const guardrails = @import("guardrails.zig"); 3 | 4 | /// Pass through a command to git CLI 5 | pub fn run(allocator: std.mem.Allocator, args: [][:0]u8) !void { 6 | const 
stderr = std.fs.File.stderr().deprecatedWriter(); 7 | 8 | // Check guardrails in agent mode 9 | if (guardrails.isAgentMode()) { 10 | // Cast to const for checkBlocked 11 | const const_args: []const [:0]const u8 = @ptrCast(args); 12 | if (guardrails.checkBlocked(const_args)) |reason| { 13 | // Build the command string for display 14 | var cmd_display: [256]u8 = undefined; 15 | var cmd_len: usize = 0; 16 | for (args) |arg| { 17 | const arg_slice = std.mem.sliceTo(arg, 0); 18 | if (cmd_len > 0 and cmd_len < cmd_display.len) { 19 | cmd_display[cmd_len] = ' '; 20 | cmd_len += 1; 21 | } 22 | const to_copy = @min(arg_slice.len, cmd_display.len - cmd_len); 23 | @memcpy(cmd_display[cmd_len..][0..to_copy], arg_slice[0..to_copy]); 24 | cmd_len += to_copy; 25 | } 26 | 27 | stderr.print("error: destructive command blocked (ZAGI_AGENT is set)\n", .{}) catch {}; 28 | stderr.print("blocked: {s}\n", .{cmd_display[0..cmd_len]}) catch {}; 29 | stderr.print("reason: {s}\n", .{reason}) catch {}; 30 | std.process.exit(1); 31 | } 32 | } 33 | 34 | var git_args = std.array_list.Managed([]const u8).init(allocator); 35 | defer git_args.deinit(); 36 | 37 | try git_args.append("git"); 38 | for (args[1..]) |arg| { 39 | try git_args.append(arg); 40 | } 41 | 42 | var child = std.process.Child.init(git_args.items, allocator); 43 | child.stdin_behavior = .Inherit; 44 | child.stdout_behavior = .Inherit; 45 | child.stderr_behavior = .Inherit; 46 | 47 | const term = child.spawnAndWait() catch |err| { 48 | stderr.print("Error executing git: {s}\n", .{@errorName(err)}) catch {}; 49 | std.process.exit(1); 50 | }; 51 | 52 | switch (term) { 53 | .Exited => |code| std.process.exit(code), 54 | .Signal => |sig| { 55 | stderr.print("Git terminated by signal {d}\n", .{sig}) catch {}; 56 | std.process.exit(1); 57 | }, 58 | .Stopped => |sig| { 59 | stderr.print("Git stopped by signal {d}\n", .{sig}) catch {}; 60 | std.process.exit(1); 61 | }, 62 | .Unknown => |code| { 63 | stderr.print("Git exited with unknown status {d}\n", .{code}) catch {}; 64 | std.process.exit(1); 65 | }, 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /docs/motivation.md: -------------------------------------------------------------------------------- 1 | # Motivation 2 | 3 | ## The problem 4 | 5 | AI agents interact with git constantly. Every git command consumes tokens from the context window. Standard git output is designed for humans reading terminals, not agents parsing text. 6 | 7 | ### Context window pressure 8 | 9 | A single `git log` can output thousands of tokens. In a typical coding session, an agent might check status, view logs, diff changes, and commit dozens of times. The cumulative token cost is significant. 10 | 11 | ### Verbose output 12 | 13 | git's output includes decorative elements useful for humans but wasteful for agents: 14 | 15 | - Full commit hashes (40 chars) when short hashes (7 chars) suffice 16 | - Author email addresses 17 | - Timezone-specific date formats 18 | - Instructional hints ("use git add to stage") 19 | 20 | ### Parsing complexity 21 | 22 | Standard git output varies by command and flag combinations. Agents must handle many formats, increasing error potential. 
23 | 24 | ## The solution 25 | 26 | zagi wraps git with agent-optimized output: 27 | 28 | | git output | zagi output | 29 | | -------------------------------------- | ---------------------- | 30 | | `commit abc123...` (40 chars) | `abc123f` (7 chars) | 31 | | `Author: Alice <alice@example.com>` | `Alice:` | 32 | | `Date: Mon Jan 15 14:32:21 2025 -0800` | `(2025-01-15)` | 33 | | Multi-line with blank separators | Single line per commit | 34 | 35 | ### Design principles 36 | 37 | 1. Concise by default - every byte counts 38 | 2. Git-compatible - same commands, different output 39 | 3. Passthrough fallback - `-g` flag for full git output 40 | 4. No config files - works out of the box 41 | 5. No state - stateless operations that can be retried 42 | 6. Every command is the beginning and the end - there is no interactivity 43 | 44 | ## Agent mode 45 | 46 | Beyond output efficiency, agents need safety and traceability. 47 | 48 | ### Guardrails 49 | 50 | Agents make mistakes. A `git reset --hard` or `git clean -fd` can destroy hours of work. When `ZAGI_AGENT` is set, zagi blocks commands that cause unrecoverable data loss. 51 | 52 | ### Prompt tracking 53 | 54 | Every commit can record the user prompt that created it (`--prompt`). This creates an audit trail - when reviewing agent work, you can see exactly what was asked. 55 | 56 | ```bash 57 | git log --prompts 58 | ``` 59 | 60 | ### Longer term mission 61 | 62 | As apps become infinitely customizable per user, each agent needs version control. Not a full repo - that probably wouldn't scale - but branches they can switch between, commits they can step through, history they can revert. 63 | 64 | The agent does the work. Humans review. Git is the interface between them. 65 | -------------------------------------------------------------------------------- /test/src/status.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, test, expect, beforeEach, afterEach } from "vitest"; 2 | import { rmSync } from "fs"; 3 | import { createFixtureRepo } from "../fixtures/setup"; 4 | import { zagi, git } from "./shared"; 5 | 6 | let REPO_DIR: string; 7 | 8 | beforeEach(() => { 9 | REPO_DIR = createFixtureRepo(); 10 | }); 11 | 12 | afterEach(() => { 13 | if (REPO_DIR) { 14 | rmSync(REPO_DIR, { recursive: true, force: true }); 15 | } 16 | }); 17 | 18 | describe("zagi status", () => { 19 | test("produces smaller output than git status", () => { 20 | const zagiOut = zagi(["status"], { cwd: REPO_DIR }); 21 | const gitOut = git(["status"], { cwd: REPO_DIR }); 22 | 23 | expect(zagiOut.length).toBeLessThan(gitOut.length); 24 | }); 25 | 26 | test("shows branch name", () => { 27 | const result = zagi(["status"], { cwd: REPO_DIR }); 28 | expect(result).toMatch(/^branch: \w+/); 29 | }); 30 | 31 | test("detects modified files", () => { 32 | const zagiOut = zagi(["status"], { cwd: REPO_DIR }); 33 | const gitOut = git(["status", "--porcelain"], { cwd: REPO_DIR }); 34 | 35 | const gitHasModified = gitOut.includes(" M "); 36 | const zagiHasModified = zagiOut.includes("modified:"); 37 | 38 | expect(zagiHasModified).toBe(gitHasModified); 39 | }); 40 | 41 | test("detects untracked files", () => { 42 | const zagiOut = zagi(["status"], { cwd: REPO_DIR }); 43 | const gitOut = git(["status", "--porcelain"], { cwd: REPO_DIR }); 44 | 45 | const gitHasUntracked = gitOut.includes("??"); 46 | const zagiHasUntracked = zagiOut.includes("untracked:"); 47 | 48 | expect(zagiHasUntracked).toBe(gitHasUntracked); 49 | }); 50 | }); 51 | 52 | describe("zagi
status path filtering", () => { 53 | test("filters by specific file path", () => { 54 | const all = zagi(["status"], { cwd: REPO_DIR }); 55 | const filtered = zagi(["status", "src/main.ts"], { cwd: REPO_DIR }); 56 | 57 | // Both should show the modified file 58 | expect(all).toContain("src/main.ts"); 59 | expect(filtered).toContain("src/main.ts"); 60 | }); 61 | 62 | test("filters by directory path", () => { 63 | const result = zagi(["status", "src/"], { cwd: REPO_DIR }); 64 | expect(result).toContain("src/main.ts"); 65 | }); 66 | 67 | test("shows nothing when path has no changes", () => { 68 | // Create and commit a file, then check status for it 69 | git(["checkout", "--", "src/main.ts"], { cwd: REPO_DIR }); 70 | 71 | const result = zagi(["status", "src/main.ts"], { cwd: REPO_DIR }); 72 | expect(result).toContain("nothing to commit"); 73 | }); 74 | 75 | test("filters out files not matching path", () => { 76 | // Check status for a path that doesn't have changes 77 | const result = zagi(["status", "nonexistent/"], { cwd: REPO_DIR }); 78 | expect(result).toContain("nothing to commit"); 79 | }); 80 | 81 | test("multiple paths work", () => { 82 | const result = zagi(["status", "src/", "README.md"], { cwd: REPO_DIR }); 83 | // Should show src/main.ts (modified in fixture) 84 | expect(result).toContain("src/main.ts"); 85 | }); 86 | }); 87 | -------------------------------------------------------------------------------- /test/src/shared.ts: -------------------------------------------------------------------------------- 1 | import { execFileSync } from "child_process"; 2 | import { resolve } from "path"; 3 | import { mkdirSync, writeFileSync, rmSync } from "fs"; 4 | 5 | export const ZAGI_BIN = resolve(__dirname, "../../zig-out/bin/zagi"); 6 | 7 | export interface ZagiOptions { 8 | /** Override or remove env vars. Use undefined to remove a var. */ 9 | env?: Record; 10 | /** Working directory */ 11 | cwd?: string; 12 | } 13 | 14 | /** 15 | * Run zagi command with isolated environment. 16 | * By default removes ZAGI_AGENT and ZAGI_STRIP_COAUTHORS to avoid test pollution. 17 | */ 18 | export function zagi(args: string[], options: ZagiOptions = {}): string { 19 | const { cwd = process.cwd(), env: envOverrides = {} } = options; 20 | 21 | // Create isolated env - start with current env 22 | const env = { ...process.env }; 23 | 24 | // By default, remove zagi env vars unless explicitly set 25 | if (!("ZAGI_AGENT" in envOverrides)) { 26 | delete env.ZAGI_AGENT; 27 | } 28 | if (!("ZAGI_STRIP_COAUTHORS" in envOverrides)) { 29 | delete env.ZAGI_STRIP_COAUTHORS; 30 | } 31 | 32 | // Apply overrides (undefined removes the key) 33 | for (const [key, value] of Object.entries(envOverrides)) { 34 | if (value === undefined) { 35 | delete env[key]; 36 | } else { 37 | env[key] = value; 38 | } 39 | } 40 | 41 | try { 42 | return execFileSync(ZAGI_BIN, args, { 43 | cwd, 44 | encoding: "utf-8", 45 | env, 46 | }) as string; 47 | } catch (err: any) { 48 | // Return combined stdout + stderr for error cases 49 | return (err.stdout || "") + (err.stderr || ""); 50 | } 51 | } 52 | 53 | /** 54 | * Run git command with isolated environment. 
55 | */ 56 | export function git(args: string[], options: ZagiOptions = {}): string { 57 | const { cwd = process.cwd(), env: envOverrides = {} } = options; 58 | 59 | const env = { ...process.env }; 60 | 61 | // Apply overrides 62 | for (const [key, value] of Object.entries(envOverrides)) { 63 | if (value === undefined) { 64 | delete env[key]; 65 | } else { 66 | env[key] = value; 67 | } 68 | } 69 | 70 | try { 71 | return execFileSync("git", args, { 72 | cwd, 73 | encoding: "utf-8", 74 | env, 75 | stdio: "pipe", 76 | }) as string; 77 | } catch (err: any) { 78 | return (err.stdout || "") + (err.stderr || ""); 79 | } 80 | } 81 | 82 | /** 83 | * Creates a minimal test repository and returns its path. 84 | */ 85 | export function createTestRepo(): string { 86 | const repoId = "test-" + Date.now() + "-" + Math.random().toString(36).slice(2, 8); 87 | const repoDir = resolve(__dirname, "../fixtures/repos", repoId); 88 | 89 | mkdirSync(repoDir, { recursive: true }); 90 | 91 | git(["init", "-b", "main"], { cwd: repoDir }); 92 | git(["config", "user.email", "test@example.com"], { cwd: repoDir }); 93 | git(["config", "user.name", "Test User"], { cwd: repoDir }); 94 | 95 | writeFileSync(resolve(repoDir, "README.md"), "# Test\n"); 96 | git(["add", "."], { cwd: repoDir }); 97 | git(["commit", "-m", "Initial commit"], { cwd: repoDir }); 98 | 99 | return repoDir; 100 | } 101 | 102 | /** 103 | * Cleans up a test repository. 104 | */ 105 | export function cleanupTestRepo(repoDir: string): void { 106 | if (repoDir) { 107 | rmSync(repoDir, { recursive: true, force: true }); 108 | } 109 | } 110 | -------------------------------------------------------------------------------- /server/src/index.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * zagi-server: Git proxy for multi-user trajectories 3 | * 4 | * A git-compatible server where each user gets their own "repo" stored in SQLite. 5 | * No git binary required - can run in Durable Objects. 6 | * 7 | * URL scheme: http://server/<repo>/<user> 8 | * Example: git clone http://localhost:3000/my-app/alice 9 | */ 10 | 11 | import { Database } from "bun:sqlite"; 12 | import { existsSync } from "fs"; 13 | import { mkdir } from "fs/promises"; 14 | import { join, resolve } from "path"; 15 | 16 | import { handleGitHttpSqlite } from "./git-http-sqlite.ts"; 17 | 18 | // Configuration 19 | const PORT = parseInt(process.env.PORT ?? "3000", 10); 20 | const DATA_DIR = resolve(process.env.DATA_DIR ??
"./.data"); 21 | const DBS_DIR = join(DATA_DIR, "dbs"); // Per-user SQLite databases 22 | 23 | // Cache of open databases 24 | const dbCache = new Map(); 25 | 26 | /** 27 | * Get or create a database for a repo/user combination 28 | */ 29 | function getDb(repo: string, userId: string): Database { 30 | const key = `${repo}/${userId}`; 31 | 32 | // Check cache 33 | let db = dbCache.get(key); 34 | if (db) return db; 35 | 36 | // Create/open database 37 | const dbPath = join(DBS_DIR, repo, `${userId}.sqlite`); 38 | db = new Database(dbPath); 39 | 40 | // Enable WAL mode for better concurrent performance 41 | db.run("PRAGMA journal_mode = WAL"); 42 | 43 | dbCache.set(key, db); 44 | return db; 45 | } 46 | 47 | /** 48 | * Main server 49 | */ 50 | async function main() { 51 | // Ensure data directories exist 52 | await mkdir(DATA_DIR, { recursive: true }); 53 | await mkdir(DBS_DIR, { recursive: true }); 54 | 55 | console.log(`data: ${DATA_DIR}`); 56 | 57 | const server = Bun.serve({ 58 | port: PORT, 59 | async fetch(req) { 60 | const url = new URL(req.url); 61 | const path = url.pathname; 62 | 63 | // Health check 64 | if (path === "/health") { 65 | return Response.json({ status: "ok" }); 66 | } 67 | 68 | // Git HTTP protocol: ///info/refs, /git-upload-pack, /git-receive-pack 69 | const gitResponse = await handleGitHttpSqlite(req, url, { 70 | getDb: (repo, userId) => { 71 | // Ensure repo directory exists 72 | const repoDir = join(DBS_DIR, repo); 73 | if (!existsSync(repoDir)) { 74 | Bun.spawnSync(["mkdir", "-p", repoDir]); 75 | } 76 | return getDb(repo, userId); 77 | }, 78 | }); 79 | if (gitResponse) return gitResponse; 80 | 81 | // API info 82 | if (path === "/" || path === "/api") { 83 | return Response.json({ 84 | name: "zagi-server", 85 | version: "0.3.0", 86 | description: "Git proxy with SQLite storage - no git binary needed", 87 | usage: { 88 | clone: "git clone http://localhost:3000//", 89 | push: "git push origin main", 90 | pull: "git pull origin main", 91 | }, 92 | storage: "SQLite (Durable Objects compatible)", 93 | }); 94 | } 95 | 96 | return Response.json({ error: "Not found" }, { status: 404 }); 97 | }, 98 | }); 99 | 100 | console.log(`server: http://localhost:${server.port}`); 101 | console.log(`\nUsage:`); 102 | console.log(` git clone http://localhost:${server.port}/my-app/alice`); 103 | console.log(` cd my-app`); 104 | console.log(` # make changes...`); 105 | console.log(` git push origin main`); 106 | console.log(`\nStorage: SQLite databases in ${DBS_DIR}//.sqlite`); 107 | } 108 | 109 | // Run 110 | main().catch((e) => { 111 | console.error("Fatal error:", e); 112 | process.exit(1); 113 | }); 114 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # zagi 2 | 3 | > a better git interface for agents 4 | 5 | ## Why use zagi? 6 | 7 | - 121 git compatible commands 8 | - ~50% smaller output that doesn't overflow context windows 9 | - 1.5-2x faster than git in all implemented commands 10 | - Agent friendly features like `fork`, `prompt` and `guardrails` 11 | - Git passthrough for non implemented commands 12 | 13 | ## Installation 14 | 15 | ```bash 16 | curl -fsSL zagi.sh/install | sh 17 | ``` 18 | 19 | This downloads the binary and sets up `git` as an alias to `zagi`. Restart your shell after installation. 
20 | 21 | ### From source 22 | 23 | ```bash 24 | git clone https://github.com/mattzcarey/zagi.git 25 | cd zagi 26 | zig build -Doptimize=ReleaseFast 27 | ./zig-out/bin/zagi alias # set up the alias 28 | ``` 29 | 30 | ## Usage 31 | 32 | Use git as normal: 33 | 34 | ```bash 35 | git status # compact status 36 | git log # concise commit history 37 | git diff # minimal diff format 38 | git add . # confirms what was staged 39 | git commit -m "x" # shows commit stats 40 | ``` 41 | 42 | Any commands or flags not yet implemented in zagi pass through to git. zagi also comes with its own set of features for managing code written by agents. 43 | 44 | ### Easy worktrees 45 | 46 | zagi ships with a wrapper around worktrees called `fork`: 47 | 48 | ```bash 49 | # Create named forks for different approaches your agent could take 50 | git fork nodejs-based 51 | git fork bun-based 52 | 53 | # Work in each fork 54 | cd .forks/nodejs-based 55 | # ... make changes, commit ... 56 | 57 | cd .forks/bun-based 58 | # ... make changes, commit ... 59 | 60 | # Compare results, then pick the winner 61 | cd ../.. 62 | git fork # list forks with commit counts 63 | git fork --pick bun-based # merge fork into base (keeps both histories) 64 | git fork --promote bun-based # replace base with fork (discards base commits) 65 | 66 | # Clean up 67 | git fork --delete-all 68 | ``` 69 | 70 | ### Agent mode 71 | 72 | Set `ZAGI_AGENT` to enable agent-specific features. 73 | 74 | ```bash 75 | export ZAGI_AGENT=claude-code 76 | ``` 77 | 78 | The value can be any string describing your agent (e.g. `claude-code`, `cursor`, `opencode`) - this will be used in future features for agent-specific behavior. 79 | 80 | This enables: 81 | - **Prompt tracking**: `git commit` requires `--prompt` to record the user request that created the commit 82 | - **Guardrails**: Blocks destructive commands (`reset --hard`, `checkout .`, `clean -f`, `push --force`) to prevent data loss 83 | 84 | ```bash 85 | git commit -m "Add feature" --prompt "Add a logout button to the header.." 86 | git log --prompts # view prompts 87 | ``` 88 | 89 | To prevent child processes from overriding `ZAGI_AGENT`, make it readonly: 90 | 91 | ```bash 92 | # bash/zsh 93 | export ZAGI_AGENT=claude-code 94 | readonly ZAGI_AGENT 95 | ``` 96 | 97 | ### Strip co-authors 98 | 99 | Remove `Co-Authored-By:` lines that AI tools like Claude Code add to commit messages: 100 | 101 | ```bash 102 | export ZAGI_STRIP_COAUTHORS=1 103 | git commit -m "Add feature 104 | 105 | Co-Authored-By: Claude <noreply@anthropic.com>" # stripped automatically 106 | ``` 107 | 108 | ### Git passthrough 109 | 110 | Commands zagi doesn't implement pass through to git automatically; use `-g` to force standard git output: 111 | 112 | ```bash 113 | git -g log # native git log output 114 | git --git diff # native git diff output 115 | ``` 116 | 117 | ## Output comparison 118 | 119 | Standard git log: 120 | 121 | ``` 122 | commit abc123f4567890def1234567890abcdef12345 123 | Author: Alice Smith <alice@example.com> 124 | Date: Mon Jan 15 14:32:21 2025 -0800 125 | 126 | Add user authentication system 127 | ``` 128 | 129 | zagi log: 130 | 131 | ``` 132 | abc123f (2025-01-15) Alice: Add user authentication system 133 | ``` 134 | 135 | ## Development 136 | 137 | Requirements: Zig 0.15, Bun 138 | 139 | ```bash 140 | zig build # build 141 | zig build test # run zig tests 142 | cd test && bun i && bun run test # run integration tests 143 | ``` 144 | 145 | See [AGENTS.md](AGENTS.md) for contribution guidelines.
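The test package also contains vitest benchmarks that compare zagi against git on the same fixture repos (the `bench` script in `test/package.json` runs `src/**/*.bench.ts`):

```bash
# from the repo root, after building zagi
cd test && bun i
bun run bench
```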
146 | 147 | ## License 148 | 149 | MIT 150 | -------------------------------------------------------------------------------- /test/fixtures/setup.ts: -------------------------------------------------------------------------------- 1 | import { execFileSync } from "child_process"; 2 | import { existsSync, mkdirSync, writeFileSync, rmSync, readFileSync } from "fs"; 3 | import { resolve } from "path"; 4 | 5 | const FIXTURES_BASE = resolve(__dirname, "repos"); 6 | const COMMIT_COUNT = 100; 7 | 8 | // Generate unique IDs for parallel safety 9 | function uid() { 10 | return `${Date.now()}-${Math.random().toString(36).slice(2, 11)}`; 11 | } 12 | 13 | function gitIn(repoDir: string, ...args: string[]) { 14 | execFileSync("git", args, { cwd: repoDir, stdio: "pipe" }); 15 | } 16 | 17 | /** 18 | * Creates a new isolated fixture repo and returns its path. 19 | * Each call creates a fresh repo with unique ID. 20 | */ 21 | export function createFixtureRepo(): string { 22 | const repoId = `repo-${uid()}`; 23 | const repoDir = resolve(FIXTURES_BASE, repoId); 24 | 25 | // Ensure base directory exists 26 | mkdirSync(FIXTURES_BASE, { recursive: true }); 27 | 28 | // Clean up if exists (shouldn't happen with unique IDs but just in case) 29 | if (existsSync(repoDir)) { 30 | rmSync(repoDir, { recursive: true }); 31 | } 32 | 33 | // Create directory 34 | mkdirSync(repoDir, { recursive: true }); 35 | 36 | // Initialize git repo with explicit branch name 37 | gitIn(repoDir, "init", "-b", "main"); 38 | gitIn(repoDir, "config", "user.email", "test@example.com"); 39 | gitIn(repoDir, "config", "user.name", "Test User"); 40 | 41 | // Create initial structure 42 | mkdirSync(resolve(repoDir, "src")); 43 | mkdirSync(resolve(repoDir, "tests")); 44 | mkdirSync(resolve(repoDir, "docs")); 45 | 46 | writeFileSync( 47 | resolve(repoDir, "README.md"), 48 | "# Test Repository\n\nThis is a fixture for benchmarking.\n" 49 | ); 50 | 51 | writeFileSync( 52 | resolve(repoDir, "src/main.ts"), 53 | 'export function main() {\n console.log("hello");\n}\n' 54 | ); 55 | 56 | writeFileSync( 57 | resolve(repoDir, "src/utils.ts"), 58 | "export function add(a: number, b: number) {\n return a + b;\n}\n" 59 | ); 60 | 61 | writeFileSync( 62 | resolve(repoDir, "tests/main.test.ts"), 63 | 'import { main } from "../src/main";\n\ntest("main runs", () => {\n main();\n});\n' 64 | ); 65 | 66 | // Initial commit 67 | gitIn(repoDir, "add", "."); 68 | gitIn(repoDir, "commit", "-m", "Initial commit"); 69 | 70 | // Generate commits with varied content 71 | const actions = [ 72 | "Add", 73 | "Update", 74 | "Fix", 75 | "Refactor", 76 | "Improve", 77 | "Implement", 78 | "Remove", 79 | "Clean up", 80 | ]; 81 | const subjects = [ 82 | "user authentication", 83 | "database connection", 84 | "API endpoints", 85 | "error handling", 86 | "logging system", 87 | "caching layer", 88 | "input validation", 89 | "unit tests", 90 | "documentation", 91 | "configuration", 92 | ]; 93 | 94 | for (let i = 1; i < COMMIT_COUNT; i++) { 95 | const action = actions[i % actions.length]; 96 | const subject = subjects[i % subjects.length]; 97 | const message = `${action} ${subject}`; 98 | 99 | // Modify a file 100 | const files = ["src/main.ts", "src/utils.ts", "README.md"]; 101 | const fileNum = i % files.length; 102 | const filePath = resolve(repoDir, files[fileNum]!); 103 | 104 | const content = 105 | existsSync(filePath) && fileNum !== 2 106 | ? 
`// Change ${i}\n` + 107 | readFileSync(filePath, "utf-8") + 108 | `\n// End change ${i}\n` 109 | : `# Test Repository\n\nChange ${i}\n`; 110 | 111 | writeFileSync(filePath, content); 112 | gitIn(repoDir, "add", "."); 113 | gitIn(repoDir, "commit", "-m", message); 114 | } 115 | 116 | // Create some uncommitted changes for status tests 117 | writeFileSync(resolve(repoDir, "src/new-file.ts"), "// New file\n"); 118 | writeFileSync( 119 | resolve(repoDir, "src/main.ts"), 120 | readFileSync(resolve(repoDir, "src/main.ts"), "utf-8") + "\n// Modified\n" 121 | ); 122 | 123 | return repoDir; 124 | } 125 | 126 | /** 127 | * Cleans up all fixture repos 128 | */ 129 | export function cleanupFixtures() { 130 | if (existsSync(FIXTURES_BASE)) { 131 | rmSync(FIXTURES_BASE, { recursive: true, force: true, maxRetries: 3 }); 132 | } 133 | } -------------------------------------------------------------------------------- /server/README.md: -------------------------------------------------------------------------------- 1 | # zagi-server 2 | 3 | Git proxy server that makes a single branch feel like a whole repo. 4 | 5 | ## Why? 6 | 7 | As software gets more customisable per user, each user needs version control akin to a whole repo - branches they can switch between, merging to main, commits they can step through. 8 | 9 | In most apps this functionality will be hidden from the user directly, but it will still be needed under the hood. 10 | 11 | GitHub doesn't handle this well. Every git instance is just one repo. Making infinite git repos is what Lovable has had to do - one for each project. This won't scale to the web. 12 | 13 | Instead: what if each user just gets a branch in GitHub (`user-alice`, `user-bob`), but our proxy makes that branch feel like a complete repo? Users get local branches stored in SQLite. When they push to `main`, it syncs to their GitHub branch. 14 | 15 | The benefit: when you want to upgrade everyone, you rebase all user branches onto the new `main` in GitHub. 16 | 17 | ## How it works 18 | 19 | ``` 20 | User's View What Actually Happens 21 | ──────────────────────── ───────────────────────────────────────────── 22 | git clone .../app/alice → Pulls user-alice branch from GitHub 23 | Creates alice.sqlite for local branches 24 | 25 | git checkout -b feature → Branch stored in SQLite only 26 | git commit → Commit stored in SQLite only 27 | git push origin feature → Still just SQLite (not GitHub) 28 | 29 | git checkout main → 30 | git merge feature → 31 | git push origin main → Syncs to user-alice branch on GitHub 32 | ``` 33 | 34 | Users get infinite branches locally. GitHub only sees their `main` as `user-alice`. 35 | 36 | ## Usage 37 | 38 | ```bash 39 | bun run dev 40 | 41 | # Clone as a user 42 | git clone http://localhost:3000/my-app/alice 43 | cd my-app 44 | 45 | # Work with branches (all local to proxy) 46 | git checkout -b feature 47 | echo "new code" > file.txt 48 | git add . 
&& git commit -m "Add feature" 49 | git push origin feature 50 | 51 | # Merge to main (syncs to GitHub) 52 | git checkout main 53 | git merge feature 54 | git push origin main 55 | ``` 56 | 57 | ## Storage 58 | 59 | ``` 60 | GitHub (origin) Proxy (SQLite) 61 | ─────────────── ──────────────────────────── 62 | main ───────────────── .data/dbs/my-app/ 63 | user-alice ─────────── ←→ alice.sqlite (main + feature branches) 64 | user-bob ───────────── ←→ bob.sqlite (main + other branches) 65 | user-charlie ───────── ←→ charlie.sqlite 66 | ``` 67 | 68 | SQLite stores git objects (blobs, trees, commits) and refs (branches). No git binary needed. 69 | 70 | ## Architecture 71 | 72 | ``` 73 | ┌─────────────┐ 74 | │ git CLI │ 75 | └──────┬──────┘ 76 | │ HTTP (smart protocol) 77 | ▼ 78 | ┌─────────────────────────────────────┐ 79 | │ zagi-server │ 80 | │ │ 81 | │ git-http-sqlite.ts ←── protocol │ 82 | │ git-storage.ts ←── objects │ 83 | │ git-pack.ts ←── packfiles │ 84 | │ │ │ 85 | │ ▼ │ 86 | │ ┌─────────────┐ │ 87 | │ │ SQLite │ per-user DBs │ 88 | │ └─────────────┘ │ 89 | │ │ │ 90 | │ ▼ (on push to main) │ 91 | │ ┌─────────────┐ │ 92 | │ │ GitHub │ user-xxx branches │ 93 | │ └─────────────┘ │ 94 | └─────────────────────────────────────┘ 95 | ``` 96 | 97 | ## API 98 | 99 | Git protocol (used by git CLI): 100 | - `GET /<repo>/<user>/info/refs` 101 | - `POST /<repo>/<user>/git-upload-pack` 102 | - `POST /<repo>/<user>/git-receive-pack` 103 | 104 | HTTP: 105 | - `GET /health` 106 | - `GET /` 107 | 108 | ## Development 109 | 110 | ```bash 111 | bun i 112 | bun run dev 113 | bun test 114 | ``` 115 | 116 | ## Future: Durable Objects 117 | 118 | Each user = one Durable Object with SQLite storage: 119 | 120 | ```typescript 121 | export class UserRepo extends DurableObject { 122 | sql: SqlStorage; 123 | 124 | async fetch(request: Request) { 125 | return handleGitHttpSqlite(request, new URL(request.url), { 126 | getDb: () => this.sql, 127 | }); 128 | } 129 | } 130 | ``` 131 | 132 | ## Current status 133 | 134 | - [x] Git clone/push/pull working 135 | - [x] Unlimited branches per user 136 | - [x] Pure SQLite storage (no git binary) 137 | - [ ] GitHub upstream sync (coming next) 138 | - [ ] Admin rebase all users onto new main 139 | -------------------------------------------------------------------------------- /docs/features.md: -------------------------------------------------------------------------------- 1 | # Features 2 | 3 | ## Implemented commands 4 | 5 | ### git status 6 | 7 | Shows repository state in compact form. 8 | 9 | ``` 10 | branch: main (+2/-1 origin/main) 11 | 12 | staged: 2 files 13 | A new-file.txt 14 | M modified.txt 15 | 16 | modified: 1 file 17 | M unstaged.txt 18 | 19 | untracked: 3 files 20 | ``` 21 | 22 | Options: 23 | - `<paths>...` - filter to specific paths (e.g. `git status src/`) 24 | 25 | ### git log 26 | 27 | Shows commit history, default 10 commits. 28 | 29 | ``` 30 | abc123f (2025-01-15) Alice: Add authentication 31 | def456a (2025-01-14) Bob: Fix connection pool 32 | 33 | [8 more commits, use -n to see more] 34 | ``` 35 | 36 | Options: 37 | - `-n <count>` - number of commits to show 38 | - `--author=<pattern>` - filter by author name or email 39 | - `--grep=<pattern>` - filter by commit message 40 | - `--since=<date>` - commits after date (e.g. "2025-01-01", "1 week ago") 41 | - `--until=<date>` - commits before date 42 | - `-- <paths>...` - filter to commits affecting paths 43 | 44 | ### git diff 45 | 46 | Shows changes in minimal format.
47 | 48 | ``` 49 | src/main.zig:42 50 | + const new_line = true; 51 | - const old_line = false; 52 | 53 | src/other.zig:10-15 54 | + added block 55 | + of code 56 | ``` 57 | 58 | Options: 59 | - `--staged` - show staged changes 60 | - `--stat` - show diffstat (files changed, insertions, deletions) 61 | - `--name-only` - show only names of changed files 62 | - `<commit>` - diff against commit 63 | - `<commit>..<commit>` - diff between commits 64 | - `-- <paths>` - filter to specific paths 65 | 66 | ### git add 67 | 68 | Stages files and confirms what was staged. 69 | 70 | ``` 71 | staged: 3 files 72 | A new-file.txt 73 | M changed-file.txt 74 | M another.txt 75 | ``` 76 | 77 | ### git commit 78 | 79 | Creates commit and shows stats. 80 | 81 | ``` 82 | committed: abc123f "Add new feature" 83 | 3 files, +45 -12 84 | ``` 85 | 86 | Options: 87 | - `-m <message>` - commit message (required) 88 | - `-a` - stage modified tracked files 89 | - `--amend` - amend previous commit 90 | - `--prompt <text>` - store AI prompt (see Agent features) 91 | 92 | ## Agent features 93 | 94 | ### git fork 95 | 96 | Manage parallel working copies for experimentation. 97 | 98 | ```bash 99 | git fork feature-a # create fork in .forks/feature-a/ 100 | git fork # list forks with commit counts 101 | git fork --pick feature-a # merge fork into base (safe) 102 | git fork --promote feature-a # hard checkout fork to base (destructive) 103 | git fork --delete feature-a 104 | git fork --delete-all 105 | ``` 106 | 107 | Forks are git worktrees. The `.forks/` directory is auto-added to `.gitignore`. 108 | 109 | **Picking vs Promoting:** 110 | - `--pick` performs a proper git merge, preserving both base and fork history 111 | - `--promote` moves HEAD to the fork's commit, discarding any base-only commits (stash uncommitted changes first) 112 | 113 | ### --prompt 114 | 115 | Store the user prompt that created a commit: 116 | 117 | ```bash 118 | git commit -m "Add feature" --prompt "Add a logout button to the header" 119 | git log --prompts # view prompts in log output 120 | ``` 121 | 122 | ### ZAGI_AGENT 123 | 124 | Set `ZAGI_AGENT` to enable agent-specific features. The value can be any string describing your agent (e.g. `claude-code`, `cursor`, `aider`) - this will be used in future features for agent-specific behavior.
125 | 126 | ```bash 127 | export ZAGI_AGENT=claude-code 128 | git commit -m "x" # error: --prompt required 129 | ``` 130 | 131 | When `ZAGI_AGENT` is set: 132 | - `git commit` requires `--prompt` to record the user request 133 | - Destructive commands are blocked (guardrails) 134 | 135 | To prevent child processes from overriding `ZAGI_AGENT`, make it readonly: 136 | 137 | ```bash 138 | # bash/zsh 139 | export ZAGI_AGENT=claude-code 140 | readonly ZAGI_AGENT 141 | ``` 142 | 143 | ### ZAGI_STRIP_COAUTHORS 144 | 145 | Remove `Co-Authored-By:` lines from commit messages: 146 | 147 | ```bash 148 | export ZAGI_STRIP_COAUTHORS=1 149 | git commit -m "Add feature 150 | 151 | Co-Authored-By: Claude <noreply@anthropic.com>" 152 | # commits as: "Add feature" 153 | ``` 154 | 155 | ## Passthrough 156 | 157 | Any command not implemented passes through to git: 158 | 159 | ```bash 160 | git push # runs git push 161 | git pull # runs git pull 162 | git rebase -i # runs git rebase -i 163 | ``` 164 | 165 | Use `-g` to force passthrough for implemented commands: 166 | 167 | ```bash 168 | git -g log # runs git log (full output) 169 | git -g status # runs git status (full output) 170 | ``` 171 | 172 | ## Shell alias 173 | 174 | The `zagi alias` command sets up git as an alias: 175 | 176 | ```bash 177 | zagi alias # adds alias to shell config 178 | zagi alias --print # prints alias without adding 179 | ``` 180 | 181 | Supported shells: bash, zsh, fish, powershell 182 | -------------------------------------------------------------------------------- /install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -e 3 | 4 | # zagi installer 5 | # Usage: curl -fsSL zagi.sh/install | sh 6 | 7 | REPO="mattzcarey/zagi" 8 | INSTALL_DIR="${ZAGI_INSTALL_DIR:-$HOME/.local/bin}" 9 | 10 | # Colors (if terminal supports them) 11 | RED='\033[0;31m' 12 | GREEN='\033[0;32m' 13 | YELLOW='\033[0;33m' 14 | NC='\033[0m' # No Color 15 | 16 | info() { printf "${GREEN}==>${NC} %s\n" "$1"; } 17 | warn() { printf "${YELLOW}warning:${NC} %s\n" "$1"; } 18 | error() { printf "${RED}error:${NC} %s\n" "$1" >&2; exit 1; } 19 | 20 | # Detect OS and architecture 21 | detect_platform() { 22 | OS="$(uname -s)" 23 | ARCH="$(uname -m)" 24 | 25 | case "$OS" in 26 | Linux) OS="linux" ;; 27 | Darwin) OS="macos" ;; 28 | *) error "Unsupported OS: $OS" ;; 29 | esac 30 | 31 | case "$ARCH" in 32 | x86_64|amd64) ARCH="x86_64" ;; 33 | arm64|aarch64) ARCH="aarch64" ;; 34 | *) error "Unsupported architecture: $ARCH" ;; 35 | esac 36 | 37 | PLATFORM="${OS}-${ARCH}" 38 | } 39 | 40 | # Get latest release version from GitHub 41 | get_latest_version() { 42 | if command -v curl >/dev/null 2>&1; then 43 | VERSION=$(curl -fsSL "https://api.github.com/repos/${REPO}/releases/latest" | grep '"tag_name"' | sed -E 's/.*"([^"]+)".*/\1/') 44 | elif command -v wget >/dev/null 2>&1; then 45 | VERSION=$(wget -qO- "https://api.github.com/repos/${REPO}/releases/latest" | grep '"tag_name"' | sed -E 's/.*"([^"]+)".*/\1/') 46 | else 47 | error "curl or wget is required" 48 | fi 49 | 50 | if [ -z "$VERSION" ]; then 51 | error "Could not determine latest version" 52 | fi 53 | } 54 | 55 | # Download and install binary 56 | install_binary() { 57 | DOWNLOAD_URL="https://github.com/${REPO}/releases/download/${VERSION}/zagi-${PLATFORM}.tar.gz" 58 | 59 | info "Downloading zagi ${VERSION} for ${PLATFORM}..."
60 | 61 | # Create install directory 62 | mkdir -p "$INSTALL_DIR" 63 | 64 | # Download and extract 65 | TMP_DIR=$(mktemp -d) 66 | trap "rm -rf $TMP_DIR" EXIT 67 | 68 | if command -v curl >/dev/null 2>&1; then 69 | curl -fsSL "$DOWNLOAD_URL" | tar -xz -C "$TMP_DIR" 70 | else 71 | wget -qO- "$DOWNLOAD_URL" | tar -xz -C "$TMP_DIR" 72 | fi 73 | 74 | # Install binary 75 | mv "$TMP_DIR/zagi" "$INSTALL_DIR/zagi" 76 | chmod +x "$INSTALL_DIR/zagi" 77 | 78 | info "Installed zagi to $INSTALL_DIR/zagi" 79 | } 80 | 81 | # Detect shell and config file 82 | detect_shell_config() { 83 | SHELL_NAME=$(basename "$SHELL") 84 | 85 | case "$SHELL_NAME" in 86 | bash) 87 | if [ -f "$HOME/.bashrc" ]; then 88 | SHELL_CONFIG="$HOME/.bashrc" 89 | elif [ -f "$HOME/.bash_profile" ]; then 90 | SHELL_CONFIG="$HOME/.bash_profile" 91 | else 92 | SHELL_CONFIG="$HOME/.bashrc" 93 | fi 94 | ALIAS_CMD="alias git='zagi'" 95 | ;; 96 | zsh) 97 | SHELL_CONFIG="$HOME/.zshrc" 98 | ALIAS_CMD="alias git='zagi'" 99 | ;; 100 | fish) 101 | SHELL_CONFIG="$HOME/.config/fish/config.fish" 102 | ALIAS_CMD="alias git 'zagi'" 103 | ;; 104 | *) 105 | SHELL_CONFIG="" 106 | ALIAS_CMD="" 107 | ;; 108 | esac 109 | } 110 | 111 | # Setup alias in shell config 112 | setup_alias() { 113 | detect_shell_config 114 | 115 | if [ -z "$SHELL_CONFIG" ]; then 116 | warn "Could not detect shell config. Add this alias manually:" 117 | echo " alias git='zagi'" 118 | return 119 | fi 120 | 121 | # Check if alias already exists 122 | if [ -f "$SHELL_CONFIG" ] && grep -q "alias git=" "$SHELL_CONFIG" 2>/dev/null; then 123 | warn "Git alias already exists in $SHELL_CONFIG" 124 | return 125 | fi 126 | 127 | # Add alias 128 | info "Adding git alias to $SHELL_CONFIG..." 129 | 130 | # Create config file if it doesn't exist 131 | mkdir -p "$(dirname "$SHELL_CONFIG")" 132 | touch "$SHELL_CONFIG" 133 | 134 | # Append alias 135 | echo "" >> "$SHELL_CONFIG" 136 | echo "# zagi - a better git for agents" >> "$SHELL_CONFIG" 137 | echo "$ALIAS_CMD" >> "$SHELL_CONFIG" 138 | 139 | info "Added: $ALIAS_CMD" 140 | } 141 | 142 | # Ensure install dir is in PATH 143 | check_path() { 144 | case ":$PATH:" in 145 | *":$INSTALL_DIR:"*) ;; 146 | *) 147 | warn "$INSTALL_DIR is not in your PATH" 148 | echo "" 149 | echo "Add this to your shell config:" 150 | echo " export PATH=\"\$PATH:$INSTALL_DIR\"" 151 | echo "" 152 | 153 | # Try to add it automatically 154 | detect_shell_config 155 | if [ -n "$SHELL_CONFIG" ] && [ -f "$SHELL_CONFIG" ]; then 156 | if ! grep -q "$INSTALL_DIR" "$SHELL_CONFIG" 2>/dev/null; then 157 | echo "export PATH=\"\$PATH:$INSTALL_DIR\"" >> "$SHELL_CONFIG" 158 | info "Added $INSTALL_DIR to PATH in $SHELL_CONFIG" 159 | fi 160 | fi 161 | ;; 162 | esac 163 | } 164 | 165 | # Main 166 | main() { 167 | info "Installing zagi..." 168 | 169 | detect_platform 170 | get_latest_version 171 | install_binary 172 | check_path 173 | setup_alias 174 | 175 | echo "" 176 | info "Installation complete!" 177 | echo "" 178 | echo "Restart your shell or run:" 179 | echo " source $SHELL_CONFIG" 180 | echo "" 181 | echo "Then use 'git' as normal - zagi will handle it!" 
182 | } 183 | 184 | main 185 | -------------------------------------------------------------------------------- /test/src/log.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, test, expect, beforeEach, afterEach } from "vitest"; 2 | import { rmSync } from "fs"; 3 | import { createFixtureRepo } from "../fixtures/setup"; 4 | import { zagi, git } from "./shared"; 5 | 6 | let REPO_DIR: string; 7 | 8 | beforeEach(() => { 9 | REPO_DIR = createFixtureRepo(); 10 | }); 11 | 12 | afterEach(() => { 13 | if (REPO_DIR) { 14 | rmSync(REPO_DIR, { recursive: true, force: true }); 15 | } 16 | }); 17 | 18 | describe("zagi log", () => { 19 | test("produces smaller output than git log", () => { 20 | const zagiOut = zagi(["log"], { cwd: REPO_DIR }); 21 | const gitOut = git(["log", "-n", "10"], { cwd: REPO_DIR }); 22 | 23 | expect(zagiOut.length).toBeLessThan(gitOut.length); 24 | }); 25 | 26 | test("defaults to 10 commits", () => { 27 | const result = zagi(["log"], { cwd: REPO_DIR }); 28 | const commitLines = result.split("\n").filter((l) => /^[a-f0-9]{7} /.test(l)); 29 | expect(commitLines.length).toBeLessThanOrEqual(10); 30 | }); 31 | 32 | test("respects -n flag", () => { 33 | const result = zagi(["log", "-n", "3"], { cwd: REPO_DIR }); 34 | const commitLines = result.split("\n").filter((l) => /^[a-f0-9]{7} /.test(l)); 35 | expect(commitLines.length).toBeLessThanOrEqual(3); 36 | }); 37 | 38 | test("passthrough -g gives full git output", () => { 39 | const concise = zagi(["log", "-n", "1"], { cwd: REPO_DIR }); 40 | const full = zagi(["-g", "log", "-n", "1"], { cwd: REPO_DIR }); 41 | 42 | expect(full.length).toBeGreaterThan(concise.length); 43 | expect(full).toContain("Author:"); 44 | expect(full).toContain("Date:"); 45 | }); 46 | 47 | test("output format matches spec", () => { 48 | const result = zagi(["log", "-n", "1"], { cwd: REPO_DIR }); 49 | // Format: abc123f (2025-01-15) Alice: Subject line 50 | const line = result.split("\n")[0]; 51 | expect(line).toMatch(/^[a-f0-9]{7} \(\d{4}-\d{2}-\d{2}\) \w+: .+$/); 52 | }); 53 | 54 | test("--author filters by author name", () => { 55 | const result = zagi(["log", "--author=Test", "-n", "5"], { cwd: REPO_DIR }); 56 | const commitLines = result.split("\n").filter((l) => /^[a-f0-9]{7} /.test(l)); 57 | // All commits should be from Test User 58 | expect(commitLines.length).toBeGreaterThan(0); 59 | commitLines.forEach((line) => { 60 | expect(line).toContain("Test:"); 61 | }); 62 | }); 63 | 64 | test("--author with no matches returns empty", () => { 65 | const result = zagi(["log", "--author=NonexistentAuthor", "-n", "5"], { cwd: REPO_DIR }); 66 | const commitLines = result.split("\n").filter((l) => /^[a-f0-9]{7} /.test(l)); 67 | expect(commitLines.length).toBe(0); 68 | }); 69 | 70 | test("--grep filters by commit message", () => { 71 | const result = zagi(["log", "--grep=Fix", "-n", "20"], { cwd: REPO_DIR }); 72 | const commitLines = result.split("\n").filter((l) => /^[a-f0-9]{7} /.test(l)); 73 | expect(commitLines.length).toBeGreaterThan(0); 74 | commitLines.forEach((line) => { 75 | expect(line.toLowerCase()).toContain("fix"); 76 | }); 77 | }); 78 | 79 | test("--grep is case insensitive", () => { 80 | const lower = zagi(["log", "--grep=fix", "-n", "20"], { cwd: REPO_DIR }); 81 | const upper = zagi(["log", "--grep=FIX", "-n", "20"], { cwd: REPO_DIR }); 82 | const lowerLines = lower.split("\n").filter((l) => /^[a-f0-9]{7} /.test(l)); 83 | const upperLines = upper.split("\n").filter((l) => /^[a-f0-9]{7} 
/.test(l)); 84 | expect(lowerLines.length).toBe(upperLines.length); 85 | }); 86 | 87 | test("--since filters by date", () => { 88 | // All commits in fixture are recent, so --since yesterday should include them 89 | const result = zagi(["log", "--since=2020-01-01", "-n", "5"], { cwd: REPO_DIR }); 90 | const commitLines = result.split("\n").filter((l) => /^[a-f0-9]{7} /.test(l)); 91 | expect(commitLines.length).toBeGreaterThan(0); 92 | }); 93 | 94 | test("--until filters by date", () => { 95 | // All commits are recent, so --until 2020 should be empty 96 | const result = zagi(["log", "--until=2020-01-01", "-n", "5"], { cwd: REPO_DIR }); 97 | const commitLines = result.split("\n").filter((l) => /^[a-f0-9]{7} /.test(l)); 98 | expect(commitLines.length).toBe(0); 99 | }); 100 | 101 | test("-- path filters by file path", () => { 102 | const result = zagi(["log", "--", "src/main.ts", "-n", "20"], { cwd: REPO_DIR }); 103 | const commitLines = result.split("\n").filter((l) => /^[a-f0-9]{7} /.test(l)); 104 | // Should have commits that touched main.ts 105 | expect(commitLines.length).toBeGreaterThan(0); 106 | }); 107 | 108 | test("path filter excludes commits not touching path", () => { 109 | // Create a file, commit, then check log for another path 110 | const noMatch = zagi(["log", "--", "nonexistent.txt", "-n", "10"], { cwd: REPO_DIR }); 111 | const commitLines = noMatch.split("\n").filter((l) => /^[a-f0-9]{7} /.test(l)); 112 | expect(commitLines.length).toBe(0); 113 | }); 114 | 115 | test("multiple filters combine (AND logic)", () => { 116 | // --grep=Fix AND --author=Test should work 117 | const result = zagi(["log", "--grep=Fix", "--author=Test", "-n", "20"], { cwd: REPO_DIR }); 118 | const commitLines = result.split("\n").filter((l) => /^[a-f0-9]{7} /.test(l)); 119 | expect(commitLines.length).toBeGreaterThan(0); 120 | commitLines.forEach((line) => { 121 | expect(line.toLowerCase()).toContain("fix"); 122 | expect(line).toContain("Test:"); 123 | }); 124 | }); 125 | 126 | test("--oneline is accepted (already default format)", () => { 127 | const result = zagi(["log", "--oneline", "-n", "3"], { cwd: REPO_DIR }); 128 | const commitLines = result.split("\n").filter((l) => /^[a-f0-9]{7} /.test(l)); 129 | expect(commitLines.length).toBeLessThanOrEqual(3); 130 | }); 131 | }); 132 | -------------------------------------------------------------------------------- /src/cmds/git.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | pub const c = @cImport(@cInclude("git2.h")); 3 | 4 | pub const Error = error{ 5 | InitFailed, 6 | NotARepository, 7 | IndexOpenFailed, 8 | IndexWriteFailed, 9 | StatusFailed, 10 | FileNotFound, 11 | RevwalkFailed, 12 | UsageError, 13 | WriteFailed, 14 | AddFailed, 15 | NothingToCommit, 16 | CommitFailed, 17 | UnsupportedFlag, 18 | }; 19 | 20 | pub fn indexMarker(status: c_uint) []const u8 { 21 | if (status & c.GIT_STATUS_INDEX_NEW != 0) return "A "; 22 | if (status & c.GIT_STATUS_INDEX_MODIFIED != 0) return "M "; 23 | if (status & c.GIT_STATUS_INDEX_DELETED != 0) return "D "; 24 | if (status & c.GIT_STATUS_INDEX_RENAMED != 0) return "R "; 25 | if (status & c.GIT_STATUS_INDEX_TYPECHANGE != 0) return "T "; 26 | return " "; 27 | } 28 | 29 | pub fn workdirMarker(status: c_uint) []const u8 { 30 | if (status & c.GIT_STATUS_WT_MODIFIED != 0) return " M"; 31 | if (status & c.GIT_STATUS_WT_DELETED != 0) return " D"; 32 | if (status & c.GIT_STATUS_WT_RENAMED != 0) return " R"; 33 | if (status & 
c.GIT_STATUS_WT_TYPECHANGE != 0) return " T"; 34 | return " "; 35 | } 36 | 37 | /// Counts uncommitted changes in a repository 38 | pub const UncommittedCounts = struct { 39 | staged: usize, 40 | unstaged: usize, 41 | untracked: usize, 42 | 43 | pub fn total(self: UncommittedCounts) usize { 44 | return self.staged + self.unstaged + self.untracked; 45 | } 46 | 47 | pub fn workdirTotal(self: UncommittedCounts) usize { 48 | return self.unstaged + self.untracked; 49 | } 50 | }; 51 | 52 | pub fn countUncommitted(repo: ?*c.git_repository) ?UncommittedCounts { 53 | var status_list: ?*c.git_status_list = null; 54 | var opts = std.mem.zeroes(c.git_status_options); 55 | opts.version = c.GIT_STATUS_OPTIONS_VERSION; 56 | opts.show = c.GIT_STATUS_SHOW_INDEX_AND_WORKDIR; 57 | opts.flags = c.GIT_STATUS_OPT_INCLUDE_UNTRACKED; 58 | 59 | if (c.git_status_list_new(&status_list, repo, &opts) < 0) { 60 | return null; 61 | } 62 | defer c.git_status_list_free(status_list); 63 | 64 | var counts = UncommittedCounts{ .staged = 0, .unstaged = 0, .untracked = 0 }; 65 | const count = c.git_status_list_entrycount(status_list); 66 | 67 | for (0..count) |idx| { 68 | const entry = c.git_status_byindex(status_list, idx); 69 | if (entry == null) continue; 70 | 71 | const s = entry.*.status; 72 | 73 | // Staged (index) changes 74 | if (s & (c.GIT_STATUS_INDEX_NEW | c.GIT_STATUS_INDEX_MODIFIED | c.GIT_STATUS_INDEX_DELETED | c.GIT_STATUS_INDEX_RENAMED) != 0) { 75 | counts.staged += 1; 76 | } 77 | // Unstaged workdir changes (modified/deleted tracked files) 78 | if (s & (c.GIT_STATUS_WT_MODIFIED | c.GIT_STATUS_WT_DELETED | c.GIT_STATUS_WT_RENAMED) != 0) { 79 | counts.unstaged += 1; 80 | } 81 | // Untracked files 82 | if (s & c.GIT_STATUS_WT_NEW != 0) { 83 | counts.untracked += 1; 84 | } 85 | } 86 | 87 | return counts; 88 | } 89 | 90 | const testing = std.testing; 91 | 92 | test "indexMarker - new file" { 93 | try testing.expectEqualStrings("A ", indexMarker(c.GIT_STATUS_INDEX_NEW)); 94 | } 95 | 96 | test "indexMarker - modified file" { 97 | try testing.expectEqualStrings("M ", indexMarker(c.GIT_STATUS_INDEX_MODIFIED)); 98 | } 99 | 100 | test "indexMarker - deleted file" { 101 | try testing.expectEqualStrings("D ", indexMarker(c.GIT_STATUS_INDEX_DELETED)); 102 | } 103 | 104 | test "indexMarker - renamed file" { 105 | try testing.expectEqualStrings("R ", indexMarker(c.GIT_STATUS_INDEX_RENAMED)); 106 | } 107 | 108 | test "indexMarker - typechange" { 109 | try testing.expectEqualStrings("T ", indexMarker(c.GIT_STATUS_INDEX_TYPECHANGE)); 110 | } 111 | 112 | test "indexMarker - unknown status returns spaces" { 113 | try testing.expectEqualStrings(" ", indexMarker(0)); 114 | } 115 | 116 | test "workdirMarker - modified file" { 117 | try testing.expectEqualStrings(" M", workdirMarker(c.GIT_STATUS_WT_MODIFIED)); 118 | } 119 | 120 | test "workdirMarker - deleted file" { 121 | try testing.expectEqualStrings(" D", workdirMarker(c.GIT_STATUS_WT_DELETED)); 122 | } 123 | 124 | test "workdirMarker - renamed file" { 125 | try testing.expectEqualStrings(" R", workdirMarker(c.GIT_STATUS_WT_RENAMED)); 126 | } 127 | 128 | test "workdirMarker - typechange" { 129 | try testing.expectEqualStrings(" T", workdirMarker(c.GIT_STATUS_WT_TYPECHANGE)); 130 | } 131 | 132 | test "workdirMarker - unknown status returns spaces" { 133 | try testing.expectEqualStrings(" ", workdirMarker(0)); 134 | } 135 | 136 | test "indexMarker - combined status picks first match" { 137 | // When multiple flags are set, should return first match (NEW) 138 | const combined = 
c.GIT_STATUS_INDEX_NEW | c.GIT_STATUS_INDEX_MODIFIED; 139 | try testing.expectEqualStrings("A ", indexMarker(combined)); 140 | } 141 | 142 | test "UncommittedCounts.total sums all categories" { 143 | const counts = UncommittedCounts{ .staged = 2, .unstaged = 3, .untracked = 5 }; 144 | try testing.expectEqual(@as(usize, 10), counts.total()); 145 | } 146 | 147 | test "UncommittedCounts.total returns zero when empty" { 148 | const counts = UncommittedCounts{ .staged = 0, .unstaged = 0, .untracked = 0 }; 149 | try testing.expectEqual(@as(usize, 0), counts.total()); 150 | } 151 | 152 | test "UncommittedCounts.workdirTotal excludes staged" { 153 | const counts = UncommittedCounts{ .staged = 10, .unstaged = 3, .untracked = 5 }; 154 | try testing.expectEqual(@as(usize, 8), counts.workdirTotal()); 155 | } 156 | 157 | test "UncommittedCounts.workdirTotal with only staged returns zero" { 158 | const counts = UncommittedCounts{ .staged = 5, .unstaged = 0, .untracked = 0 }; 159 | try testing.expectEqual(@as(usize, 0), counts.workdirTotal()); 160 | } 161 | -------------------------------------------------------------------------------- /src/cmds/add.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const git = @import("git.zig"); 3 | const c = git.c; 4 | 5 | pub const help = 6 | \\usage: git add ... 7 | \\ 8 | \\Stage files for commit. 9 | \\ 10 | \\Arguments: 11 | \\ File or directory to stage (use . for all) 12 | \\ 13 | ; 14 | 15 | pub fn run(allocator: std.mem.Allocator, args: [][:0]u8) (git.Error || error{WriteError})!void { 16 | _ = allocator; 17 | const stdout = std.fs.File.stdout().deprecatedWriter(); 18 | 19 | // Check for unsupported flags first 20 | for (args[2..]) |arg| { 21 | const a = std.mem.sliceTo(arg, 0); 22 | if (std.mem.eql(u8, a, "-h") or std.mem.eql(u8, a, "--help")) { 23 | stdout.print("{s}", .{help}) catch {}; 24 | return; 25 | } else if (std.mem.startsWith(u8, a, "-")) { 26 | // Interactive flags (-p, -i, etc.) 
not supported 27 | return git.Error.UnsupportedFlag; 28 | } 29 | } 30 | 31 | // Initialize libgit2 32 | if (c.git_libgit2_init() < 0) { 33 | return git.Error.InitFailed; 34 | } 35 | defer _ = c.git_libgit2_shutdown(); 36 | 37 | // Open repository 38 | var repo: ?*c.git_repository = null; 39 | if (c.git_repository_open_ext(&repo, ".", 0, null) < 0) { 40 | return git.Error.NotARepository; 41 | } 42 | defer c.git_repository_free(repo); 43 | 44 | // Get index 45 | var index: ?*c.git_index = null; 46 | if (c.git_repository_index(&index, repo) < 0) { 47 | return git.Error.IndexOpenFailed; 48 | } 49 | defer c.git_index_free(index); 50 | 51 | // Collect paths to add (skip "zagi" and "add") 52 | if (args.len < 3) { 53 | return git.Error.UsageError; 54 | } 55 | 56 | for (args[2..]) |path| { 57 | const path_slice = std.mem.sliceTo(path, 0); 58 | 59 | if (std.mem.eql(u8, path_slice, ".")) { 60 | // Add all files 61 | if (c.git_index_add_all(index, null, c.GIT_INDEX_ADD_DEFAULT, null, null) < 0) { 62 | return git.Error.AddFailed; 63 | } 64 | } else { 65 | // Add specific file 66 | const result = c.git_index_add_bypath(index, path); 67 | if (result < 0) { 68 | return git.Error.FileNotFound; 69 | } 70 | } 71 | } 72 | 73 | // Write index to disk 74 | if (c.git_index_write(index) < 0) { 75 | return git.Error.IndexWriteFailed; 76 | } 77 | 78 | // Show what was staged by getting status 79 | var status_list: ?*c.git_status_list = null; 80 | var opts: c.git_status_options = undefined; 81 | _ = c.git_status_options_init(&opts, c.GIT_STATUS_OPTIONS_VERSION); 82 | opts.show = c.GIT_STATUS_SHOW_INDEX_ONLY; 83 | opts.flags = c.GIT_STATUS_OPT_RENAMES_HEAD_TO_INDEX; 84 | 85 | if (c.git_status_list_new(&status_list, repo, &opts) < 0) { 86 | return git.Error.StatusFailed; 87 | } 88 | defer c.git_status_list_free(status_list); 89 | 90 | const count = c.git_status_list_entrycount(status_list); 91 | if (count == 0) { 92 | stdout.print("nothing to add\n", .{}) catch return error.WriteError; 93 | return; 94 | } 95 | 96 | stdout.print("staged: {d} file{s}\n", .{ count, if (count == 1) "" else "s" }) catch return error.WriteError; 97 | 98 | var i: usize = 0; 99 | while (i < count) : (i += 1) { 100 | const entry = c.git_status_byindex(status_list, i); 101 | if (entry == null) continue; 102 | 103 | const status = entry.*.status; 104 | const delta = entry.*.head_to_index; 105 | 106 | if (delta) |d| { 107 | const path = if (d.*.new_file.path) |p| std.mem.sliceTo(p, 0) else ""; 108 | const marker = git.indexMarker(status); 109 | stdout.print(" {s} {s}\n", .{ marker, path }) catch return error.WriteError; 110 | } 111 | } 112 | } 113 | 114 | // Output formatting functions (testable without libgit2) 115 | 116 | pub fn formatStagedHeader(writer: anytype, count: usize) !void { 117 | if (count == 0) { 118 | try writer.print("nothing to add\n", .{}); 119 | } else { 120 | try writer.print("staged: {d} file{s}\n", .{ count, if (count == 1) "" else "s" }); 121 | } 122 | } 123 | 124 | pub fn formatStagedFile(writer: anytype, marker: []const u8, path: []const u8) !void { 125 | try writer.print(" {s} {s}\n", .{ marker, path }); 126 | } 127 | 128 | // Tests 129 | const testing = std.testing; 130 | 131 | test "formatStagedHeader with zero files" { 132 | var output = std.array_list.Managed(u8).init(testing.allocator); 133 | defer output.deinit(); 134 | 135 | try formatStagedHeader(output.writer(), 0); 136 | 137 | try testing.expectEqualStrings("nothing to add\n", output.items); 138 | } 139 | 140 | test "formatStagedHeader with one file" { 141 | 
var output = std.array_list.Managed(u8).init(testing.allocator); 142 | defer output.deinit(); 143 | 144 | try formatStagedHeader(output.writer(), 1); 145 | 146 | try testing.expectEqualStrings("staged: 1 file\n", output.items); 147 | } 148 | 149 | test "formatStagedHeader with multiple files" { 150 | var output = std.array_list.Managed(u8).init(testing.allocator); 151 | defer output.deinit(); 152 | 153 | try formatStagedHeader(output.writer(), 5); 154 | 155 | try testing.expectEqualStrings("staged: 5 files\n", output.items); 156 | } 157 | 158 | test "formatStagedFile formats correctly" { 159 | var output = std.array_list.Managed(u8).init(testing.allocator); 160 | defer output.deinit(); 161 | 162 | try formatStagedFile(output.writer(), "A ", "src/main.zig"); 163 | 164 | try testing.expectEqualStrings(" A src/main.zig\n", output.items); 165 | } 166 | 167 | test "formatStagedFile with modified marker" { 168 | var output = std.array_list.Managed(u8).init(testing.allocator); 169 | defer output.deinit(); 170 | 171 | try formatStagedFile(output.writer(), "M ", "README.md"); 172 | 173 | try testing.expectEqualStrings(" M README.md\n", output.items); 174 | } 175 | -------------------------------------------------------------------------------- /src/main.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const passthrough = @import("passthrough.zig"); 3 | const log = @import("cmds/log.zig"); 4 | const status = @import("cmds/status.zig"); 5 | const add = @import("cmds/add.zig"); 6 | const alias = @import("cmds/alias.zig"); 7 | const commit = @import("cmds/commit.zig"); 8 | const diff = @import("cmds/diff.zig"); 9 | const fork = @import("cmds/fork.zig"); 10 | const git = @import("cmds/git.zig"); 11 | 12 | const version = "0.1.0"; 13 | 14 | const Command = enum { 15 | log_cmd, 16 | status_cmd, 17 | add_cmd, 18 | alias_cmd, 19 | commit_cmd, 20 | diff_cmd, 21 | fork_cmd, 22 | other, 23 | }; 24 | 25 | var current_command: Command = .other; 26 | 27 | pub fn main() void { 28 | var gpa = std.heap.GeneralPurposeAllocator(.{}){}; 29 | defer _ = gpa.deinit(); 30 | const allocator = gpa.allocator(); 31 | 32 | const args = std.process.argsAlloc(allocator) catch { 33 | std.process.exit(1); 34 | }; 35 | defer std.process.argsFree(allocator, args); 36 | 37 | run(allocator, args) catch |err| { 38 | // UnsupportedFlag: pass through to git 39 | if (err == git.Error.UnsupportedFlag) { 40 | passthrough.run(allocator, args) catch {}; 41 | return; 42 | } 43 | handleError(err, current_command); 44 | }; 45 | } 46 | 47 | fn run(allocator: std.mem.Allocator, args: [][:0]u8) !void { 48 | 49 | const stdout = std.fs.File.stdout().deprecatedWriter(); 50 | 51 | if (args.len < 2) { 52 | printHelp(stdout) catch {}; 53 | return; 54 | } 55 | 56 | const cmd = args[1]; 57 | 58 | // Handle global flags 59 | if (std.mem.eql(u8, cmd, "--help") or std.mem.eql(u8, cmd, "-h") or std.mem.eql(u8, cmd, "help")) { 60 | printHelp(stdout) catch {}; 61 | return; 62 | } 63 | 64 | if (std.mem.eql(u8, cmd, "--version") or std.mem.eql(u8, cmd, "-v")) { 65 | stdout.print("zagi {s}\n", .{version}) catch {}; 66 | return; 67 | } 68 | 69 | // Passthrough mode: -g/--git passes remaining args directly to git 70 | if (std.mem.eql(u8, cmd, "-g") or std.mem.eql(u8, cmd, "--git")) { 71 | try passthrough.run(allocator, args[1..]); 72 | return; 73 | } 74 | 75 | // Zagi commands 76 | if (std.mem.eql(u8, cmd, "log")) { 77 | current_command = .log_cmd; 78 | try log.run(allocator, args); 79 | } else 
if (std.mem.eql(u8, cmd, "status")) { 80 | current_command = .status_cmd; 81 | try status.run(allocator, args); 82 | } else if (std.mem.eql(u8, cmd, "add")) { 83 | current_command = .add_cmd; 84 | try add.run(allocator, args); 85 | } else if (std.mem.eql(u8, cmd, "alias")) { 86 | current_command = .alias_cmd; 87 | try alias.run(allocator, args); 88 | } else if (std.mem.eql(u8, cmd, "commit")) { 89 | current_command = .commit_cmd; 90 | try commit.run(allocator, args); 91 | } else if (std.mem.eql(u8, cmd, "diff")) { 92 | current_command = .diff_cmd; 93 | try diff.run(allocator, args); 94 | } else if (std.mem.eql(u8, cmd, "fork")) { 95 | current_command = .fork_cmd; 96 | try fork.run(allocator, args); 97 | } else { 98 | // Unknown command: pass through to git 99 | current_command = .other; 100 | try passthrough.run(allocator, args); 101 | } 102 | } 103 | 104 | fn printHelp(stdout: anytype) !void { 105 | try stdout.print( 106 | \\zagi - git for agents 107 | \\ 108 | \\usage: zagi [args...] 109 | \\usage: git [args...] (when aliased) 110 | \\ 111 | \\commands: 112 | \\ status Show working tree status 113 | \\ log Show commit history 114 | \\ diff Show changes 115 | \\ add Stage files for commit 116 | \\ commit Create a commit 117 | \\ fork Manage parallel worktrees 118 | \\ alias Create an alias to git 119 | \\ 120 | \\options: 121 | \\ -h, --help Show this help 122 | \\ -v, --version Show version 123 | \\ -g, --git Git passthrough mode (e.g. git -g log) 124 | \\ 125 | \\Unrecognized commands are passed through to git. 126 | \\ 127 | \\ 128 | , .{}); 129 | } 130 | 131 | fn handleError(err: anyerror, cmd: Command) void { 132 | const stderr = std.fs.File.stderr().deprecatedWriter(); 133 | 134 | const exit_code: u8 = switch (err) { 135 | git.Error.NotARepository => blk: { 136 | stderr.print("fatal: not a git repository\n", .{}) catch {}; 137 | break :blk 128; 138 | }, 139 | git.Error.InitFailed => blk: { 140 | stderr.print("fatal: failed to initialize libgit2\n", .{}) catch {}; 141 | break :blk 1; 142 | }, 143 | git.Error.IndexOpenFailed => blk: { 144 | stderr.print("fatal: failed to open index\n", .{}) catch {}; 145 | break :blk 1; 146 | }, 147 | git.Error.IndexWriteFailed => blk: { 148 | stderr.print("fatal: failed to write index\n", .{}) catch {}; 149 | break :blk 1; 150 | }, 151 | git.Error.StatusFailed => blk: { 152 | stderr.print("fatal: failed to get status\n", .{}) catch {}; 153 | break :blk 1; 154 | }, 155 | git.Error.FileNotFound => blk: { 156 | stderr.print("error: file not found\n", .{}) catch {}; 157 | break :blk 128; 158 | }, 159 | git.Error.AddFailed => blk: { 160 | stderr.print("error: failed to add files\n", .{}) catch {}; 161 | break :blk 1; 162 | }, 163 | git.Error.RevwalkFailed => blk: { 164 | stderr.print("fatal: failed to walk commits\n", .{}) catch {}; 165 | break :blk 1; 166 | }, 167 | git.Error.UsageError => blk: { 168 | printUsageHelp(stderr, cmd); 169 | break :blk 1; 170 | }, 171 | git.Error.WriteFailed => blk: { 172 | stderr.print("fatal: write failed\n", .{}) catch {}; 173 | break :blk 1; 174 | }, 175 | git.Error.NothingToCommit => blk: { 176 | stderr.print("error: nothing to commit\n", .{}) catch {}; 177 | break :blk 1; 178 | }, 179 | git.Error.CommitFailed => blk: { 180 | stderr.print("error: commit failed\n", .{}) catch {}; 181 | break :blk 1; 182 | }, 183 | error.OutOfMemory => blk: { 184 | stderr.print("fatal: out of memory\n", .{}) catch {}; 185 | break :blk 1; 186 | }, 187 | else => blk: { 188 | stderr.print("error: {}\n", .{err}) catch {}; 189 | break :blk 1; 
190 | }, 191 | }; 192 | 193 | std.process.exit(exit_code); 194 | } 195 | 196 | fn printUsageHelp(stderr: anytype, cmd: Command) void { 197 | const help_text = switch (cmd) { 198 | .add_cmd => add.help, 199 | .commit_cmd => commit.help, 200 | .status_cmd => status.help, 201 | .log_cmd => log.help, 202 | .alias_cmd => alias.help, 203 | .diff_cmd => diff.help, 204 | .fork_cmd => fork.help, 205 | .other => "usage: git [args...]\n", 206 | }; 207 | 208 | stderr.print("{s}", .{help_text}) catch {}; 209 | } 210 | -------------------------------------------------------------------------------- /src/cmds/status.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const git = @import("git.zig"); 3 | const c = git.c; 4 | 5 | pub const help = 6 | \\usage: git status [...] 7 | \\ 8 | \\Show working tree status. 9 | \\ 10 | \\Examples: 11 | \\ git status Show all changes 12 | \\ git status src/ Show changes in src/ directory 13 | \\ git status *.ts Show changes to TypeScript files 14 | \\ 15 | ; 16 | 17 | const MAX_PATHSPECS = 16; 18 | 19 | pub fn run(allocator: std.mem.Allocator, args: [][:0]u8) (git.Error || error{OutOfMemory})!void { 20 | const stdout = std.fs.File.stdout().deprecatedWriter(); 21 | 22 | // Parse arguments 23 | var pathspecs: [MAX_PATHSPECS][*c]u8 = undefined; 24 | var pathspec_count: usize = 0; 25 | 26 | for (args[2..]) |arg| { 27 | const a = std.mem.sliceTo(arg, 0); 28 | if (std.mem.eql(u8, a, "-h") or std.mem.eql(u8, a, "--help")) { 29 | stdout.print("{s}", .{help}) catch {}; 30 | return; 31 | } else if (std.mem.eql(u8, a, "-s") or std.mem.eql(u8, a, "--short")) { 32 | // Already short format by default, ignore 33 | } else if (std.mem.eql(u8, a, "-b") or std.mem.eql(u8, a, "--branch")) { 34 | // Already show branch by default, ignore 35 | } else if (std.mem.startsWith(u8, a, "-")) { 36 | // Other flags unsupported (--porcelain, etc.) 
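// main.zig catches UnsupportedFlag and re-runs the whole command through
// real git (passthrough), so flags zagi does not implement still work.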
37 | return git.Error.UnsupportedFlag; 38 | } else { 39 | // Path argument 40 | if (pathspec_count < MAX_PATHSPECS) { 41 | pathspecs[pathspec_count] = @constCast(arg.ptr); 42 | pathspec_count += 1; 43 | } 44 | } 45 | } 46 | 47 | // Initialize libgit2 48 | if (c.git_libgit2_init() < 0) { 49 | return git.Error.InitFailed; 50 | } 51 | defer _ = c.git_libgit2_shutdown(); 52 | 53 | // Open repository 54 | var repo: ?*c.git_repository = null; 55 | if (c.git_repository_open_ext(&repo, ".", 0, null) < 0) { 56 | return git.Error.NotARepository; 57 | } 58 | defer c.git_repository_free(repo); 59 | 60 | // Get current branch 61 | var head: ?*c.git_reference = null; 62 | const head_err = c.git_repository_head(&head, repo); 63 | defer if (head != null) c.git_reference_free(head); 64 | 65 | if (head_err == 0 and head != null) { 66 | const branch_name = c.git_reference_shorthand(head); 67 | if (branch_name) |name| { 68 | const branch = std.mem.sliceTo(name, 0); 69 | stdout.print("branch: {s}", .{branch}) catch return git.Error.WriteFailed; 70 | 71 | // Check upstream status 72 | var upstream: ?*c.git_reference = null; 73 | if (c.git_branch_upstream(&upstream, head) == 0 and upstream != null) { 74 | defer c.git_reference_free(upstream); 75 | 76 | var ahead: usize = 0; 77 | var behind: usize = 0; 78 | const local_oid = c.git_reference_target(head); 79 | const upstream_oid = c.git_reference_target(upstream); 80 | 81 | if (local_oid != null and upstream_oid != null) { 82 | _ = c.git_graph_ahead_behind(&ahead, &behind, repo, local_oid, upstream_oid); 83 | 84 | if (ahead == 0 and behind == 0) { 85 | stdout.print(" (up to date)", .{}) catch return git.Error.WriteFailed; 86 | } else if (ahead > 0 and behind == 0) { 87 | stdout.print(" (ahead {d})", .{ahead}) catch return git.Error.WriteFailed; 88 | } else if (behind > 0 and ahead == 0) { 89 | stdout.print(" (behind {d})", .{behind}) catch return git.Error.WriteFailed; 90 | } else { 91 | stdout.print(" (ahead {d}, behind {d})", .{ ahead, behind }) catch return git.Error.WriteFailed; 92 | } 93 | } 94 | } 95 | stdout.print("\n", .{}) catch return git.Error.WriteFailed; 96 | } 97 | } else if (head_err == c.GIT_EUNBORNBRANCH) { 98 | stdout.print("branch: (no commits yet)\n", .{}) catch return git.Error.WriteFailed; 99 | } else { 100 | stdout.print("branch: HEAD detached\n", .{}) catch return git.Error.WriteFailed; 101 | } 102 | 103 | // Get status 104 | var status_list: ?*c.git_status_list = null; 105 | var opts: c.git_status_options = undefined; 106 | _ = c.git_status_options_init(&opts, c.GIT_STATUS_OPTIONS_VERSION); 107 | opts.show = c.GIT_STATUS_SHOW_INDEX_AND_WORKDIR; 108 | opts.flags = c.GIT_STATUS_OPT_INCLUDE_UNTRACKED | 109 | c.GIT_STATUS_OPT_RENAMES_HEAD_TO_INDEX | 110 | c.GIT_STATUS_OPT_SORT_CASE_SENSITIVELY; 111 | 112 | // Set up pathspec filtering if paths were provided 113 | if (pathspec_count > 0) { 114 | opts.pathspec.strings = &pathspecs; 115 | opts.pathspec.count = pathspec_count; 116 | } 117 | 118 | if (c.git_status_list_new(&status_list, repo, &opts) < 0) { 119 | return git.Error.StatusFailed; 120 | } 121 | defer c.git_status_list_free(status_list); 122 | 123 | const count = c.git_status_list_entrycount(status_list); 124 | 125 | if (count == 0) { 126 | stdout.print("\nnothing to commit, working tree clean\n", .{}) catch return git.Error.WriteFailed; 127 | return; 128 | } 129 | 130 | // Collect files by category 131 | var staged = std.array_list.Managed(FileStatus).init(allocator); 132 | defer staged.deinit(); 133 | var modified = 
std.array_list.Managed(FileStatus).init(allocator); 134 | defer modified.deinit(); 135 | var untracked = std.array_list.Managed(FileStatus).init(allocator); 136 | defer untracked.deinit(); 137 | 138 | var i: usize = 0; 139 | while (i < count) : (i += 1) { 140 | const entry = c.git_status_byindex(status_list, i); 141 | if (entry == null) continue; 142 | 143 | const status = entry.*.status; 144 | const diff_delta = entry.*.head_to_index; 145 | const wt_delta = entry.*.index_to_workdir; 146 | 147 | // Staged changes (index) 148 | if (status & (c.GIT_STATUS_INDEX_NEW | c.GIT_STATUS_INDEX_MODIFIED | c.GIT_STATUS_INDEX_DELETED | c.GIT_STATUS_INDEX_RENAMED | c.GIT_STATUS_INDEX_TYPECHANGE) != 0) { 149 | if (diff_delta) |delta| { 150 | const path = if (delta.*.new_file.path) |p| std.mem.sliceTo(p, 0) else ""; 151 | const marker = git.indexMarker(status); 152 | try staged.append(.{ .marker = marker, .path = path }); 153 | } 154 | } 155 | 156 | // Workdir changes (modified but not staged) 157 | if (status & (c.GIT_STATUS_WT_MODIFIED | c.GIT_STATUS_WT_DELETED | c.GIT_STATUS_WT_TYPECHANGE | c.GIT_STATUS_WT_RENAMED) != 0) { 158 | if (wt_delta) |delta| { 159 | const path = if (delta.*.new_file.path) |p| std.mem.sliceTo(p, 0) else ""; 160 | const marker = git.workdirMarker(status); 161 | try modified.append(.{ .marker = marker, .path = path }); 162 | } 163 | } 164 | 165 | // Untracked 166 | if (status & c.GIT_STATUS_WT_NEW != 0) { 167 | if (wt_delta) |delta| { 168 | const path = if (delta.*.new_file.path) |p| std.mem.sliceTo(p, 0) else ""; 169 | try untracked.append(.{ .marker = "??", .path = path }); 170 | } 171 | } 172 | } 173 | 174 | // Print staged 175 | if (staged.items.len > 0) { 176 | stdout.print("\nstaged: {d} files\n", .{staged.items.len}) catch return git.Error.WriteFailed; 177 | for (staged.items) |file| { 178 | stdout.print(" {s} {s}\n", .{ file.marker, file.path }) catch return git.Error.WriteFailed; 179 | } 180 | } 181 | 182 | // Print modified 183 | if (modified.items.len > 0) { 184 | stdout.print("\nmodified: {d} files\n", .{modified.items.len}) catch return git.Error.WriteFailed; 185 | for (modified.items) |file| { 186 | stdout.print(" {s} {s}\n", .{ file.marker, file.path }) catch return git.Error.WriteFailed; 187 | } 188 | } 189 | 190 | // Print untracked 191 | if (untracked.items.len > 0) { 192 | stdout.print("\nuntracked: {d} files\n", .{untracked.items.len}) catch return git.Error.WriteFailed; 193 | const max_show: usize = 5; 194 | for (untracked.items, 0..) |file, idx| { 195 | if (idx >= max_show) { 196 | stdout.print(" + {d} more\n", .{untracked.items.len - max_show}) catch return git.Error.WriteFailed; 197 | break; 198 | } 199 | stdout.print(" {s} {s}\n", .{ file.marker, file.path }) catch return git.Error.WriteFailed; 200 | } 201 | } 202 | } 203 | 204 | const FileStatus = struct { 205 | marker: []const u8, 206 | path: []const u8, 207 | }; 208 | -------------------------------------------------------------------------------- /server/src/git-http-sqlite.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Git Smart HTTP Handler - SQLite Backend 3 | * 4 | * This implements the git smart HTTP protocol using pure SQLite storage. 5 | * No git binary required - works in Durable Objects. 
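 * The three endpoints below map onto git's smart HTTP protocol: GET info/refs
 * advertises refs, POST git-upload-pack serves fetch/clone, and POST
 * git-receive-pack accepts pushes.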
6 | * 7 | * URL scheme: ///[info/refs|git-upload-pack|git-receive-pack] 8 | */ 9 | 10 | import { Database } from "bun:sqlite"; 11 | import { 12 | initGitStorage, 13 | initRepository, 14 | getRef, 15 | setRef, 16 | listRefs, 17 | getObject, 18 | hasObject, 19 | } from "./git-storage.ts"; 20 | import { 21 | pktLine, 22 | pktFlush, 23 | parsePackfile, 24 | generatePackfile, 25 | generateRefAdvertisement, 26 | parseUploadPackRequest, 27 | parseReceivePackRequest, 28 | collectReachableObjects, 29 | } from "./git-pack.ts"; 30 | 31 | export interface GitHttpSqliteContext { 32 | getDb: (repo: string, userId: string) => Database; 33 | } 34 | 35 | /** 36 | * Parse the URL to extract repo name and user ID 37 | */ 38 | function parseGitUrl(pathname: string): { 39 | repo: string; 40 | userId: string; 41 | service: string | null; 42 | } | null { 43 | const match = pathname.match( 44 | /^\/([^/]+)\/([^/]+)(\/info\/refs|\/git-upload-pack|\/git-receive-pack)?$/ 45 | ); 46 | if (!match) return null; 47 | 48 | return { 49 | repo: match[1]!, 50 | userId: match[2]!, 51 | service: match[3] ?? null, 52 | }; 53 | } 54 | 55 | /** 56 | * Get or create a user's git database 57 | */ 58 | function ensureUserDb( 59 | ctx: GitHttpSqliteContext, 60 | repo: string, 61 | userId: string 62 | ): Database { 63 | const db = ctx.getDb(repo, userId); 64 | 65 | // Check if initialized 66 | const hasRefs = db 67 | .query("SELECT name FROM sqlite_master WHERE type='table' AND name='git_refs'") 68 | .get(); 69 | 70 | if (!hasRefs) { 71 | // Initialize new repo 72 | initRepository(db); 73 | } 74 | 75 | return db; 76 | } 77 | 78 | /** 79 | * Handle GET /info/refs?service=git-upload-pack or git-receive-pack 80 | */ 81 | async function handleInfoRefs( 82 | db: Database, 83 | service: string 84 | ): Promise { 85 | // Get all refs 86 | const allRefs = listRefs(db); 87 | 88 | // Format refs for advertisement 89 | // Convert internal names and add HEAD 90 | const head = getRef(db, "HEAD"); 91 | const refs: { name: string; hash: string }[] = []; 92 | 93 | if (head) { 94 | refs.push({ name: "HEAD", hash: head }); 95 | } 96 | 97 | for (const ref of allRefs) { 98 | refs.push(ref); 99 | } 100 | 101 | const body = generateRefAdvertisement(db, service, refs); 102 | 103 | return new Response(body, { 104 | headers: { 105 | "Content-Type": `application/x-${service}-advertisement`, 106 | "Cache-Control": "no-cache", 107 | }, 108 | }); 109 | } 110 | 111 | /** 112 | * Handle POST /git-upload-pack (fetch/clone) 113 | */ 114 | async function handleUploadPack( 115 | req: Request, 116 | db: Database 117 | ): Promise { 118 | const body = Buffer.from(await req.arrayBuffer()); 119 | const { wants, haves, done } = parseUploadPackRequest(body); 120 | 121 | if (wants.length === 0) { 122 | // No objects wanted - return empty 123 | return new Response(pktFlush(), { 124 | headers: { 125 | "Content-Type": "application/x-git-upload-pack-result", 126 | "Cache-Control": "no-cache", 127 | }, 128 | }); 129 | } 130 | 131 | // Collect objects to send 132 | // Start from wanted commits, exclude objects the client already has 133 | const objectsToSend = collectReachableObjects(db, wants, haves); 134 | 135 | // Generate response 136 | const parts: Buffer[] = []; 137 | 138 | // NAK if no common objects, or ACK for each have we recognize 139 | if (haves.length === 0) { 140 | parts.push(pktLine("NAK\n")); 141 | } else { 142 | for (const have of haves) { 143 | if (hasObject(db, have)) { 144 | parts.push(pktLine(`ACK ${have}\n`)); 145 | } 146 | } 147 | if (parts.length === 0) { 148 
| parts.push(pktLine("NAK\n")); 149 | } 150 | } 151 | 152 | // Generate and send packfile (without side-band for simplicity) 153 | if (objectsToSend.length > 0) { 154 | const packfile = generatePackfile(db, objectsToSend); 155 | parts.push(packfile); 156 | } 157 | 158 | return new Response(Buffer.concat(parts), { 159 | headers: { 160 | "Content-Type": "application/x-git-upload-pack-result", 161 | "Cache-Control": "no-cache", 162 | }, 163 | }); 164 | } 165 | 166 | /** 167 | * Handle POST /git-receive-pack (push) 168 | */ 169 | async function handleReceivePack( 170 | req: Request, 171 | db: Database 172 | ): Promise { 173 | const body = Buffer.from(await req.arrayBuffer()); 174 | const { updates, packData } = parseReceivePackRequest(body); 175 | 176 | const results: { refName: string; ok: boolean; error?: string }[] = []; 177 | 178 | // Process packfile first (if any) 179 | if (packData) { 180 | try { 181 | parsePackfile(db, packData); 182 | } catch (e) { 183 | const error = e instanceof Error ? e.message : "Pack error"; 184 | // All updates fail if pack fails 185 | for (const update of updates) { 186 | results.push({ refName: update.refName, ok: false, error }); 187 | } 188 | 189 | return generateReceivePackResponse(results); 190 | } 191 | } 192 | 193 | // Process ref updates 194 | for (const update of updates) { 195 | try { 196 | const { oldHash, newHash, refName } = update; 197 | 198 | // Verify old hash matches current ref (for non-create operations) 199 | const currentHash = getRef(db, refName); 200 | const isCreate = oldHash === "0000000000000000000000000000000000000000"; 201 | const isDelete = newHash === "0000000000000000000000000000000000000000"; 202 | 203 | if (!isCreate && currentHash !== oldHash) { 204 | results.push({ 205 | refName, 206 | ok: false, 207 | error: "non-fast-forward", 208 | }); 209 | continue; 210 | } 211 | 212 | if (isDelete) { 213 | // Delete ref 214 | db.run("DELETE FROM git_refs WHERE name = ?", [refName]); 215 | } else { 216 | // Verify new object exists 217 | if (!hasObject(db, newHash)) { 218 | results.push({ 219 | refName, 220 | ok: false, 221 | error: "missing object", 222 | }); 223 | continue; 224 | } 225 | 226 | // Update ref 227 | setRef(db, refName, newHash); 228 | } 229 | 230 | results.push({ refName, ok: true }); 231 | } catch (e) { 232 | const error = e instanceof Error ? 
e.message : "Update failed"; 233 | results.push({ refName: update.refName, ok: false, error }); 234 | } 235 | } 236 | 237 | return generateReceivePackResponse(results); 238 | } 239 | 240 | /** 241 | * Generate receive-pack response with status 242 | */ 243 | function generateReceivePackResponse( 244 | results: { refName: string; ok: boolean; error?: string }[] 245 | ): Response { 246 | const parts: Buffer[] = []; 247 | 248 | // Unpack status 249 | parts.push(pktLine("unpack ok\n")); 250 | 251 | // Ref statuses 252 | for (const result of results) { 253 | if (result.ok) { 254 | parts.push(pktLine(`ok ${result.refName}\n`)); 255 | } else { 256 | parts.push(pktLine(`ng ${result.refName} ${result.error}\n`)); 257 | } 258 | } 259 | 260 | parts.push(pktFlush()); 261 | 262 | return new Response(Buffer.concat(parts), { 263 | headers: { 264 | "Content-Type": "application/x-git-receive-pack-result", 265 | "Cache-Control": "no-cache", 266 | }, 267 | }); 268 | } 269 | 270 | /** 271 | * Main handler for git HTTP requests using SQLite storage 272 | */ 273 | export async function handleGitHttpSqlite( 274 | req: Request, 275 | url: URL, 276 | ctx: GitHttpSqliteContext 277 | ): Promise { 278 | const parsed = parseGitUrl(url.pathname); 279 | if (!parsed) return null; 280 | 281 | const { repo, userId, service } = parsed; 282 | 283 | // Get or create user's database 284 | const db = ensureUserDb(ctx, repo, userId); 285 | 286 | // GET /info/refs?service=git-upload-pack or git-receive-pack 287 | if (req.method === "GET" && service === "/info/refs") { 288 | const requestedService = url.searchParams.get("service"); 289 | if (!requestedService) { 290 | return new Response("service parameter required", { status: 400 }); 291 | } 292 | if ( 293 | requestedService !== "git-upload-pack" && 294 | requestedService !== "git-receive-pack" 295 | ) { 296 | return new Response("Invalid service", { status: 400 }); 297 | } 298 | return handleInfoRefs(db, requestedService); 299 | } 300 | 301 | // POST /git-upload-pack 302 | if (req.method === "POST" && service === "/git-upload-pack") { 303 | return handleUploadPack(req, db); 304 | } 305 | 306 | // POST /git-receive-pack 307 | if (req.method === "POST" && service === "/git-receive-pack") { 308 | return handleReceivePack(req, db); 309 | } 310 | 311 | return null; 312 | } 313 | -------------------------------------------------------------------------------- /AGENTS.md: -------------------------------------------------------------------------------- 1 | # zagi 2 | 3 | This document describes the process for adding new git commands to zagi. 4 | 5 | ## Requirements 6 | 7 | - Zig 0.15+ 8 | - Bun (for integration tests) 9 | 10 | ## Style 11 | 12 | - zagi is always lowercase 13 | - No emojis in code or output 14 | - Concise output optimized for agents 15 | 16 | ## Committing with Prompts 17 | 18 | When committing code, **always include the complete user prompt** that created the change: 19 | 20 | ```bash 21 | git commit -m "" --prompt "" 22 | ``` 23 | 24 | The `--prompt` should contain the **full user request** that led to this commit, not a summary. This preserves context for code review. 25 | 26 | Example: 27 | ```bash 28 | git commit -m "Add logout button" --prompt "Add a logout button to the header. 
When clicked it should clear the session and redirect to /login" 29 | ``` 30 | 31 | View prompts with: 32 | ```bash 33 | git log --prompts 34 | ``` 35 | 36 | ### Environment Setup 37 | 38 | Set `ZAGI_AGENT` to enable prompt enforcement: 39 | 40 | ```bash 41 | export ZAGI_AGENT=claude-code 42 | ``` 43 | 44 | When this is set: 45 | 1. `git commit` will fail without `--prompt`, ensuring all AI-generated commits have their prompts recorded 46 | 2. Destructive commands are blocked to prevent data loss 47 | 48 | ### Blocked Commands (in agent mode) 49 | 50 | These commands cause unrecoverable data loss and are blocked when `ZAGI_AGENT` is set: 51 | 52 | | Command | Reason | 53 | |---------|--------| 54 | | `reset --hard` | Discards all uncommitted changes | 55 | | `checkout .` | Discards all working tree changes | 56 | | `clean -f/-fd/-fx` | Permanently deletes untracked files | 57 | | `restore .` | Discards all working tree changes | 58 | | `restore --worktree` | Discards working tree changes | 59 | | `push --force/-f` | Overwrites remote history | 60 | | `stash drop` | Permanently deletes stashed changes | 61 | | `stash clear` | Permanently deletes all stashes | 62 | | `branch -D` | Force deletes branch (even if not merged) | 63 | 64 | Safe alternatives: 65 | - `reset --soft` - keeps changes staged 66 | - `reset` (no flags) - keeps changes in working tree 67 | - `clean -n` - dry run, shows what would be deleted and asks the user before deleting 68 | - `branch -d` - only deletes if merged 69 | 70 | ## Flow 71 | 72 | ### 1. Investigate the git command 73 | 74 | Before implementing, understand the existing git command: 75 | 76 | ```bash 77 | # Run the command and observe output 78 | git <command> 79 | 80 | # Test different scenarios 81 | git <command> <args> 82 | 83 | # Check exit codes 84 | git <command>; echo "exit: $?" 85 | 86 | # Test error cases 87 | git <command> nonexistent 88 | ``` 89 | 90 | Document: 91 | - What does it output on success? 92 | - What does it output on failure? 93 | - What are common flags/options? 94 | - What exit codes does it use? 95 | 96 | ### 2. Design agent-friendly output 97 | 98 | Identify what would be better for agents: 99 | 100 | | Problem | Solution | 101 | |---------|----------| 102 | | Silent success (no confirmation) | Show what was done | 103 | | Verbose errors | Concise error messages | 104 | | Multi-line output with decoration | Compact, parseable format | 105 | | Unclear state | Show current state after action | 106 | 107 | Key questions: 108 | - What information does an agent need to continue? 109 | - What feedback confirms the action worked? 110 | - Can we reduce output while preserving meaning? 111 | 112 | ### 3. Confirm the API 113 | 114 | Before implementing, confirm with the user: 115 | - Proposed output format 116 | - Default flags/behavior differences from git 117 | - Error message format 118 | 119 | Example: 120 | ``` 121 | Proposed `zagi add` output: 122 | 123 | Success: 124 | staged: 2 files 125 | A new-file.txt 126 | M changed-file.txt 127 | 128 | Error: 129 | error: file.txt not found 130 | 131 | Confirm? (y/n) 132 | ``` 133 | 134 | ### 4.
Implement in Zig 135 | 136 | Create `src/cmds/<command>.zig`: 137 | 138 | ```zig 139 | const std = @import("std"); 140 | const c = @cImport(@cInclude("git2.h")); 141 | const git = @import("git.zig"); 142 | 143 | pub const Error = error{ 144 | NotARepository, 145 | // command-specific errors 146 | }; 147 | 148 | pub fn run(allocator: std.mem.Allocator, args: [][:0]u8) Error!void { 149 | // Implementation using libgit2 150 | // Return errors instead of calling std.process.exit() 151 | } 152 | ``` 153 | 154 | **Important:** Never call `std.process.exit()` in command modules. Always return errors to main.zig for centralized handling. This enables unit testing and keeps exit codes consistent. 155 | 156 | Add routing in `src/main.zig`: 157 | ```zig 158 | const cmd_name = @import("cmds/<command>.zig"); 159 | // ... 160 | } else if (std.mem.eql(u8, cmd, "<command>")) { 161 | cmd_name.run(allocator, args) catch |err| { 162 | try handleError(err); 163 | }; 164 | } 165 | ``` 166 | 167 | ### 5. Consider abstractions (after 2+ implementations) 168 | 169 | After implementing similar code twice, ask before abstracting: 170 | 171 | > "I've now implemented marker functions in both status.zig and add.zig. Should I extract these to a shared git.zig module?" 172 | 173 | Only abstract when: 174 | - Same code appears in 2+ places 175 | - The abstraction is obvious and stable 176 | - User confirms it's worth the indirection 177 | 178 | ### 6. Add Zig tests 179 | 180 | Add tests for pure functions in the module: 181 | 182 | ```zig 183 | const testing = std.testing; 184 | 185 | test "functionName - description" { 186 | try testing.expectEqualStrings("expected", functionName(input)); 187 | } 188 | ``` 189 | 190 | For functions that use `std.process.exit()`, test via integration tests instead. 191 | 192 | Update `build.zig` to include new test files: 193 | ```zig 194 | const cmd_tests = b.addTest(.{ 195 | .root_module = b.createModule(.{ 196 | .root_source_file = b.path("src/cmds/<command>.zig"), 197 | .target = target, 198 | .optimize = optimize, 199 | }), 200 | }); 201 | cmd_tests.root_module.linkLibrary(libgit2_dep.artifact("git2")); 202 | ``` 203 | 204 | ### 7. Build and run 205 | 206 | ```bash 207 | zig build # build 208 | zig build test # run zig unit tests 209 | ./zig-out/bin/zagi <command> 210 | ``` 211 | 212 | Fix any compiler errors. Test manually with various inputs. 213 | 214 | ### 8. Add integration tests 215 | 216 | Create `test/src/<command>.test.ts`: 217 | 218 | ```typescript 219 | import { describe, test, expect } from "vitest"; 220 | import { execFileSync } from "child_process"; 221 | 222 | describe("zagi <command>", () => { 223 | test("produces smaller output than git", () => { 224 | // Compare output size 225 | }); 226 | 227 | test("functional correctness", () => { 228 | // Verify behavior matches git 229 | }); 230 | }); 231 | 232 | describe("performance", () => { 233 | test("zagi is reasonably fast", () => { 234 | // Benchmark timing 235 | }); 236 | }); 237 | ``` 238 | 239 | Run with: 240 | ```bash 241 | cd test && bun i && bun run test 242 | ``` 243 | 244 | To run all tests (Zig + TypeScript): 245 | ```bash 246 | zig build test && cd test && bun run test 247 | ``` 248 | 249 | ### 9. Optimize (if needed) 250 | 251 | If benchmarks show issues: 252 | 1. Profile to find bottlenecks 253 | 2. Consider caching libgit2 state 254 | 3. Reduce allocations 255 | 4.
Batch operations where possible 256 | 257 | ## File Structure 258 | 259 | ``` 260 | src/ 261 | main.zig # Entry point, command routing 262 | passthrough.zig # Pass-through to git CLI 263 | cmds/ 264 | git.zig # Shared utilities (markers, errors) 265 | log.zig # zagi log 266 | status.zig # zagi status 267 | diff.zig # zagi diff 268 | add.zig # zagi add 269 | commit.zig # zagi commit 270 | .zig # New commands 271 | 272 | test/ 273 | src/ 274 | log.test.ts # log integration tests 275 | status.test.ts # status integration tests 276 | diff.test.ts # diff integration tests 277 | add.test.ts # add integration tests 278 | commit.test.ts # commit integration tests 279 | fixtures/ 280 | setup.ts # Test fixture repo creation 281 | ``` 282 | 283 | ## Design Decisions 284 | 285 | ### No `--full` or `--verbose` flags 286 | 287 | Zagi commands only output concise, agent-optimized formats. We don't provide flags like `--full` or `--verbose` to get git's standard output. 288 | 289 | **Reasoning:** If a user wants the full git output, they can use the passthrough flag: 290 | ```bash 291 | zagi -g log # runs: git log 292 | zagi -g diff # runs: git diff 293 | ``` 294 | 295 | This avoids duplicating git's output formatting in zagi. Every zagi command should do one thing well: provide a concise format optimized for agents. 296 | 297 | ## Checklist for new commands 298 | 299 | - [ ] Investigate git command behavior 300 | - [ ] Design agent-friendly output format 301 | - [ ] Confirm API with user 302 | - [ ] Implement in `src/cmds/.zig` 303 | - [ ] Add routing in `main.zig` 304 | - [ ] Extract shared code (if 2+ usages, ask first) 305 | - [ ] Add Zig unit tests 306 | - [ ] Build and test manually 307 | - [ ] Add TypeScript integration tests 308 | - [ ] Run benchmarks 309 | - [ ] Optimize if needed 310 | -------------------------------------------------------------------------------- /test/src/commit.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, test, expect, beforeEach, afterEach } from "vitest"; 2 | import { resolve } from "path"; 3 | import { writeFileSync, appendFileSync, rmSync } from "fs"; 4 | import { createFixtureRepo } from "../fixtures/setup"; 5 | import { zagi, git, createTestRepo, cleanupTestRepo } from "./shared"; 6 | 7 | let REPO_DIR: string; 8 | 9 | function stageTestFile() { 10 | const testFile = resolve(REPO_DIR, "commit-test.txt"); 11 | writeFileSync(testFile, `test content ${Date.now()}\n`); 12 | git(["add", "commit-test.txt"], { cwd: REPO_DIR }); 13 | } 14 | 15 | beforeEach(() => { 16 | REPO_DIR = createFixtureRepo(); 17 | }); 18 | 19 | afterEach(() => { 20 | if (REPO_DIR) { 21 | rmSync(REPO_DIR, { recursive: true, force: true }); 22 | } 23 | }); 24 | 25 | describe("zagi commit", () => { 26 | test("commits staged changes with message", () => { 27 | stageTestFile(); 28 | const result = zagi(["commit", "-m", "Test commit"], { cwd: REPO_DIR }); 29 | 30 | expect(result).toContain("committed:"); 31 | expect(result).toContain("Test commit"); 32 | expect(result).toMatch(/[0-9a-f]{7}/); 33 | }); 34 | 35 | test("shows file count and stats", () => { 36 | stageTestFile(); 37 | const result = zagi(["commit", "-m", "Test with stats"], { cwd: REPO_DIR }); 38 | 39 | expect(result).toMatch(/\d+ file/); 40 | expect(result).toMatch(/\+\d+/); 41 | expect(result).toMatch(/-\d+/); 42 | }); 43 | 44 | test("error when nothing staged", () => { 45 | const result = zagi(["commit", "-m", "Empty commit"], { cwd: REPO_DIR }); 46 | 47 | // Fixture repo has 
unstaged changes, so hint is shown 48 | expect(result).toContain("error: nothing to commit"); 49 | }); 50 | 51 | test("shows usage when no message provided", () => { 52 | stageTestFile(); 53 | const result = zagi(["commit"], { cwd: REPO_DIR }); 54 | 55 | expect(result).toContain("usage:"); 56 | expect(result).toContain("-m"); 57 | }); 58 | 59 | test("supports -m flag with equals sign", () => { 60 | stageTestFile(); 61 | const result = zagi(["commit", "--message=Equals format"], { cwd: REPO_DIR }); 62 | 63 | expect(result).toContain("Equals format"); 64 | }); 65 | }); 66 | 67 | describe("zagi commit --prompt", () => { 68 | test("stores prompt and shows confirmation", () => { 69 | stageTestFile(); 70 | const result = zagi([ 71 | "commit", 72 | "-m", 73 | "Add test file", 74 | "--prompt", 75 | "Create a test file for testing", 76 | ], { cwd: REPO_DIR }); 77 | 78 | expect(result).toContain("committed:"); 79 | expect(result).toContain("prompt saved"); 80 | }); 81 | 82 | test("--prompt= syntax works", () => { 83 | stageTestFile(); 84 | const result = zagi([ 85 | "commit", 86 | "-m", 87 | "Test equals syntax", 88 | "--prompt=This is the prompt", 89 | ], { cwd: REPO_DIR }); 90 | 91 | expect(result).toContain("prompt saved"); 92 | }); 93 | 94 | test("prompt can be viewed with git notes", () => { 95 | stageTestFile(); 96 | zagi([ 97 | "commit", 98 | "-m", 99 | "Commit with prompt", 100 | "--prompt", 101 | "My test prompt text", 102 | ], { cwd: REPO_DIR }); 103 | 104 | // Read the note using git notes command 105 | const noteResult = git(["notes", "--ref=prompts", "show", "HEAD"], { cwd: REPO_DIR }); 106 | 107 | expect(noteResult).toContain("My test prompt text"); 108 | }); 109 | 110 | test("prompt shown with --prompts in log", () => { 111 | stageTestFile(); 112 | zagi([ 113 | "commit", 114 | "-m", 115 | "Commit for log test", 116 | "--prompt", 117 | "Prompt visible in log", 118 | ], { cwd: REPO_DIR }); 119 | 120 | const logResult = zagi(["log", "-n", "1", "--prompts"], { cwd: REPO_DIR }); 121 | 122 | expect(logResult).toContain("Commit for log test"); 123 | expect(logResult).toContain("prompt: Prompt visible in log"); 124 | }); 125 | 126 | test("log without --prompts hides prompt", () => { 127 | stageTestFile(); 128 | zagi([ 129 | "commit", 130 | "-m", 131 | "Hidden prompt commit", 132 | "--prompt", 133 | "This should be hidden", 134 | ], { cwd: REPO_DIR }); 135 | 136 | const logResult = zagi(["log", "-n", "1"], { cwd: REPO_DIR }); 137 | 138 | expect(logResult).toContain("Hidden prompt commit"); 139 | expect(logResult).not.toContain("prompt:"); 140 | expect(logResult).not.toContain("This should be hidden"); 141 | }); 142 | }); 143 | 144 | describe("ZAGI_AGENT", () => { 145 | test("ZAGI_AGENT requires --prompt", () => { 146 | stageTestFile(); 147 | const result = zagi( 148 | ["commit", "-m", "Agent commit"], 149 | { cwd: REPO_DIR, env: { ZAGI_AGENT: "claude-code" } } 150 | ); 151 | 152 | expect(result).toContain("--prompt required"); 153 | expect(result).toContain("ZAGI_AGENT"); 154 | }); 155 | 156 | test("ZAGI_AGENT succeeds with --prompt", () => { 157 | stageTestFile(); 158 | const result = zagi( 159 | ["commit", "-m", "Agent commit", "--prompt", "Agent prompt"], 160 | { cwd: REPO_DIR, env: { ZAGI_AGENT: "claude-code" } } 161 | ); 162 | 163 | expect(result).toContain("committed:"); 164 | }); 165 | }); 166 | 167 | describe("ZAGI_STRIP_COAUTHORS", () => { 168 | test("strips Co-Authored-By lines when enabled", () => { 169 | stageTestFile(); 170 | const message = `Add feature 171 | 172 | 
Co-Authored-By: Claude `; 173 | 174 | const result = zagi( 175 | ["commit", "-m", message], 176 | { cwd: REPO_DIR, env: { ZAGI_STRIP_COAUTHORS: "1" } } 177 | ); 178 | 179 | expect(result).toContain("committed:"); 180 | 181 | // Check the actual commit message 182 | const logResult = git(["log", "-1", "--format=%B"], { cwd: REPO_DIR }); 183 | 184 | expect(logResult.trim()).toBe("Add feature"); 185 | expect(logResult).not.toContain("Co-Authored-By"); 186 | }); 187 | 188 | test("preserves Co-Authored-By when not enabled", () => { 189 | stageTestFile(); 190 | const message = `Add feature 191 | 192 | Co-Authored-By: Claude `; 193 | 194 | const result = zagi(["commit", "-m", message], { cwd: REPO_DIR }); 195 | 196 | expect(result).toContain("committed:"); 197 | 198 | // Check the actual commit message 199 | const logResult = git(["log", "-1", "--format=%B"], { cwd: REPO_DIR }); 200 | 201 | expect(logResult).toContain("Co-Authored-By: Claude"); 202 | }); 203 | 204 | test("strips multiple Co-Authored-By lines", () => { 205 | stageTestFile(); 206 | const message = `Fix bug 207 | 208 | Co-Authored-By: Alice 209 | Co-Authored-By: Bob `; 210 | 211 | const result = zagi( 212 | ["commit", "-m", message], 213 | { cwd: REPO_DIR, env: { ZAGI_STRIP_COAUTHORS: "1" } } 214 | ); 215 | 216 | expect(result).toContain("committed:"); 217 | 218 | const logResult = git(["log", "-1", "--format=%B"], { cwd: REPO_DIR }); 219 | 220 | expect(logResult.trim()).toBe("Fix bug"); 221 | expect(logResult).not.toContain("Co-Authored-By"); 222 | }); 223 | 224 | test("preserves other message content", () => { 225 | stageTestFile(); 226 | const message = `Implement feature 227 | 228 | This adds a great new feature. 229 | 230 | Co-Authored-By: Claude 231 | 232 | Signed-off-by: Matt`; 233 | 234 | const result = zagi( 235 | ["commit", "-m", message], 236 | { cwd: REPO_DIR, env: { ZAGI_STRIP_COAUTHORS: "1" } } 237 | ); 238 | 239 | expect(result).toContain("committed:"); 240 | 241 | const logResult = git(["log", "-1", "--format=%B"], { cwd: REPO_DIR }); 242 | 243 | expect(logResult).toContain("Implement feature"); 244 | expect(logResult).toContain("This adds a great new feature"); 245 | expect(logResult).toContain("Signed-off-by: Matt"); 246 | expect(logResult).not.toContain("Co-Authored-By"); 247 | }); 248 | }); 249 | 250 | describe("commit with unstaged changes", () => { 251 | let testRepoDir: string; 252 | 253 | beforeEach(() => { 254 | testRepoDir = createTestRepo(); 255 | }); 256 | 257 | afterEach(() => { 258 | cleanupTestRepo(testRepoDir); 259 | }); 260 | 261 | test("shows hint when nothing staged but files modified", () => { 262 | // Modify a tracked file without staging 263 | appendFileSync(resolve(testRepoDir, "README.md"), "\nModified line\n"); 264 | 265 | const output = zagi(["commit", "-m", "test"], { cwd: testRepoDir }); 266 | 267 | expect(output).toContain("hint: did you mean to add?"); 268 | expect(output).toContain("unstaged:"); 269 | expect(output).toContain("README.md"); 270 | expect(output).toContain("error: nothing to commit"); 271 | }); 272 | 273 | test("shows hint with untracked files", () => { 274 | // Create untracked file 275 | writeFileSync(resolve(testRepoDir, "new-file.txt"), "new content\n"); 276 | 277 | const output = zagi(["commit", "-m", "test"], { cwd: testRepoDir }); 278 | 279 | expect(output).toContain("hint: did you mean to add?"); 280 | expect(output).toContain("??"); 281 | expect(output).toContain("new-file.txt"); 282 | }); 283 | 284 | test("no hint when working tree is clean", () => { 285 | const 
output = zagi(["commit", "-m", "test"], { cwd: testRepoDir }); 286 | 287 | expect(output).not.toContain("hint:"); 288 | expect(output).toContain("error: nothing to commit"); 289 | }); 290 | }); 291 | -------------------------------------------------------------------------------- /server/test/server.test.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Integration tests for zagi-server (SQLite backend) 3 | * 4 | * Tests git protocol compatibility - clone, push, pull using real git CLI. 5 | * No git binary on server - pure SQLite storage. 6 | */ 7 | 8 | import { describe, test, expect, beforeAll, afterAll } from "bun:test"; 9 | import { rm, mkdir, readFile, writeFile } from "fs/promises"; 10 | import { resolve, join } from "path"; 11 | import type { Subprocess } from "bun"; 12 | 13 | const TEST_PORT = 3456; 14 | const TEST_DATA_DIR = resolve(import.meta.dir, ".test-data"); 15 | const TEST_CLONE_DIR = resolve(import.meta.dir, ".test-clones"); 16 | const BASE_URL = `http://localhost:${TEST_PORT}`; 17 | 18 | let serverProcess: Subprocess | null = null; 19 | 20 | async function runGit( 21 | args: string[], 22 | cwd: string 23 | ): Promise<{ stdout: string; stderr: string; exitCode: number }> { 24 | const proc = Bun.spawn(["git", ...args], { 25 | cwd, 26 | stdout: "pipe", 27 | stderr: "pipe", 28 | }); 29 | 30 | const [stdout, stderr, exitCode] = await Promise.all([ 31 | new Response(proc.stdout).text(), 32 | new Response(proc.stderr).text(), 33 | proc.exited, 34 | ]); 35 | 36 | return { stdout: stdout.trim(), stderr: stderr.trim(), exitCode }; 37 | } 38 | 39 | async function startServer(): Promise<void> { 40 | await rm(TEST_DATA_DIR, { recursive: true, force: true }); 41 | await rm(TEST_CLONE_DIR, { recursive: true, force: true }); 42 | await mkdir(TEST_DATA_DIR, { recursive: true }); 43 | await mkdir(TEST_CLONE_DIR, { recursive: true }); 44 | 45 | serverProcess = Bun.spawn(["bun", "run", "src/index.ts"], { 46 | cwd: resolve(import.meta.dir, ".."), 47 | env: { 48 | ...process.env, 49 | PORT: String(TEST_PORT), 50 | DATA_DIR: TEST_DATA_DIR, 51 | }, 52 | stdout: "pipe", 53 | stderr: "pipe", 54 | }); 55 | 56 | const maxAttempts = 30; 57 | for (let i = 0; i < maxAttempts; i++) { 58 | try { 59 | const res = await fetch(`${BASE_URL}/health`); 60 | if (res.ok) return; 61 | } catch { 62 | // Server not ready yet 63 | } 64 | await Bun.sleep(100); 65 | } 66 | throw new Error("Server failed to start"); 67 | } 68 | 69 | async function stopServer(): Promise<void> { 70 | if (serverProcess) { 71 | serverProcess.kill(); 72 | serverProcess = null; 73 | } 74 | await rm(TEST_DATA_DIR, { recursive: true, force: true }); 75 | await rm(TEST_CLONE_DIR, { recursive: true, force: true }); 76 | } 77 | 78 | describe("zagi-server (SQLite)", () => { 79 | beforeAll(async () => { 80 | await startServer(); 81 | }); 82 | 83 | afterAll(async () => { 84 | await stopServer(); 85 | }); 86 | 87 | describe("health and info", () => { 88 | test("GET /health returns ok", async () => { 89 | const res = await fetch(`${BASE_URL}/health`); 90 | expect(res.status).toBe(200); 91 | const data = (await res.json()) as { status: string }; 92 | expect(data.status).toBe("ok"); 93 | }); 94 | 95 | test("GET / returns API info", async () => { 96 | const res = await fetch(BASE_URL); 97 | expect(res.status).toBe(200); 98 | const data = (await res.json()) as { name: string; storage: string }; 99 | expect(data.name).toBe("zagi-server"); 100 | expect(data.storage).toBe("SQLite (Durable Objects compatible)"); 101 | });
102 | }); 103 | 104 | describe("git clone", () => { 105 | test("can clone a user repo", async () => { 106 | const clonePath = join(TEST_CLONE_DIR, "clone-test"); 107 | const result = await runGit( 108 | ["clone", `${BASE_URL}/myapp/alice`, clonePath], 109 | TEST_CLONE_DIR 110 | ); 111 | expect(result.exitCode).toBe(0); 112 | 113 | // Check git log 114 | const log = await runGit(["log", "--oneline"], clonePath); 115 | expect(log.stdout).toContain("Initial commit"); 116 | }); 117 | 118 | test("different users get separate repos", async () => { 119 | const alicePath = join(TEST_CLONE_DIR, "alice-sep"); 120 | const bobPath = join(TEST_CLONE_DIR, "bob-sep"); 121 | 122 | await runGit( 123 | ["clone", `${BASE_URL}/app/alice`, alicePath], 124 | TEST_CLONE_DIR 125 | ); 126 | await runGit(["clone", `${BASE_URL}/app/bob`, bobPath], TEST_CLONE_DIR); 127 | 128 | await runGit(["config", "user.email", "alice@test.com"], alicePath); 129 | await runGit(["config", "user.name", "Alice"], alicePath); 130 | await runGit(["config", "user.email", "bob@test.com"], bobPath); 131 | await runGit(["config", "user.name", "Bob"], bobPath); 132 | 133 | // Alice commits 134 | await writeFile(join(alicePath, "alice.txt"), "Alice's file"); 135 | await runGit(["add", "alice.txt"], alicePath); 136 | await runGit(["commit", "-m", "Alice commit"], alicePath); 137 | await runGit(["push", "origin", "main"], alicePath); 138 | 139 | // Bob commits (independently) 140 | await writeFile(join(bobPath, "bob.txt"), "Bob's file"); 141 | await runGit(["add", "bob.txt"], bobPath); 142 | await runGit(["commit", "-m", "Bob commit"], bobPath); 143 | await runGit(["push", "origin", "main"], bobPath); 144 | 145 | // Verify they have different content 146 | const aliceLog = await runGit(["log", "--oneline"], alicePath); 147 | const bobLog = await runGit(["log", "--oneline"], bobPath); 148 | 149 | expect(aliceLog.stdout).toContain("Alice commit"); 150 | expect(aliceLog.stdout).not.toContain("Bob commit"); 151 | expect(bobLog.stdout).toContain("Bob commit"); 152 | expect(bobLog.stdout).not.toContain("Alice commit"); 153 | }); 154 | }); 155 | 156 | describe("git push", () => { 157 | test("can push changes to server", async () => { 158 | const clonePath = join(TEST_CLONE_DIR, "push-test"); 159 | await runGit( 160 | ["clone", `${BASE_URL}/pushapp/pusher`, clonePath], 161 | TEST_CLONE_DIR 162 | ); 163 | 164 | await runGit(["config", "user.email", "pusher@test.com"], clonePath); 165 | await runGit(["config", "user.name", "Pusher"], clonePath); 166 | 167 | await writeFile(join(clonePath, "pushed.txt"), "Pushed content"); 168 | await runGit(["add", "pushed.txt"], clonePath); 169 | await runGit(["commit", "-m", "Push test"], clonePath); 170 | 171 | const pushResult = await runGit(["push", "origin", "main"], clonePath); 172 | expect(pushResult.exitCode).toBe(0); 173 | }); 174 | }); 175 | 176 | describe("git pull", () => { 177 | test("can pull changes after push", async () => { 178 | const clone1 = join(TEST_CLONE_DIR, "pull-1"); 179 | const clone2 = join(TEST_CLONE_DIR, "pull-2"); 180 | 181 | await runGit( 182 | ["clone", `${BASE_URL}/pullapp/puller`, clone1], 183 | TEST_CLONE_DIR 184 | ); 185 | await runGit( 186 | ["clone", `${BASE_URL}/pullapp/puller`, clone2], 187 | TEST_CLONE_DIR 188 | ); 189 | 190 | await runGit(["config", "user.email", "puller@test.com"], clone1); 191 | await runGit(["config", "user.name", "Puller"], clone1); 192 | 193 | // Push from clone1 194 | await writeFile(join(clone1, "shared.txt"), "Shared"); 195 | await runGit(["add", 
"shared.txt"], clone1); 196 | await runGit(["commit", "-m", "Shared commit"], clone1); 197 | await runGit(["push", "origin", "main"], clone1); 198 | 199 | // Pull in clone2 200 | const pullResult = await runGit(["pull", "origin", "main"], clone2); 201 | expect(pullResult.exitCode).toBe(0); 202 | 203 | const content = await readFile(join(clone2, "shared.txt"), "utf-8"); 204 | expect(content).toBe("Shared"); 205 | }); 206 | }); 207 | 208 | describe("branches", () => { 209 | test("can create and push feature branches", async () => { 210 | const clonePath = join(TEST_CLONE_DIR, "branch-test"); 211 | await runGit( 212 | ["clone", `${BASE_URL}/branchapp/user`, clonePath], 213 | TEST_CLONE_DIR 214 | ); 215 | 216 | await runGit(["config", "user.email", "user@test.com"], clonePath); 217 | await runGit(["config", "user.name", "User"], clonePath); 218 | 219 | // Create feature branch 220 | await runGit(["checkout", "-b", "feature"], clonePath); 221 | await writeFile(join(clonePath, "feature.txt"), "Feature"); 222 | await runGit(["add", "feature.txt"], clonePath); 223 | await runGit(["commit", "-m", "Feature commit"], clonePath); 224 | 225 | // Push feature branch 226 | const pushResult = await runGit(["push", "origin", "feature"], clonePath); 227 | expect(pushResult.exitCode).toBe(0); 228 | expect(pushResult.stderr).toContain("new branch"); 229 | 230 | // Fetch and verify 231 | await runGit(["fetch", "origin"], clonePath); 232 | const branches = await runGit(["branch", "-a"], clonePath); 233 | expect(branches.stdout).toContain("origin/feature"); 234 | }); 235 | 236 | test("branches are isolated per user", async () => { 237 | const alice = join(TEST_CLONE_DIR, "branch-alice"); 238 | const bob = join(TEST_CLONE_DIR, "branch-bob"); 239 | 240 | await runGit(["clone", `${BASE_URL}/iso/alice`, alice], TEST_CLONE_DIR); 241 | await runGit(["clone", `${BASE_URL}/iso/bob`, bob], TEST_CLONE_DIR); 242 | 243 | await runGit(["config", "user.email", "a@t.com"], alice); 244 | await runGit(["config", "user.name", "A"], alice); 245 | 246 | // Alice creates a branch 247 | await runGit(["checkout", "-b", "alice-feature"], alice); 248 | await writeFile(join(alice, "a.txt"), "A"); 249 | await runGit(["add", "a.txt"], alice); 250 | await runGit(["commit", "-m", "A"], alice); 251 | await runGit(["push", "origin", "alice-feature"], alice); 252 | 253 | // Bob shouldn't see Alice's branch 254 | await runGit(["fetch", "origin"], bob); 255 | const bobBranches = await runGit(["branch", "-a"], bob); 256 | expect(bobBranches.stdout).not.toContain("alice-feature"); 257 | }); 258 | }); 259 | }); 260 | -------------------------------------------------------------------------------- /src/cmds/alias.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | const git = @import("git.zig"); 3 | 4 | pub const help = 5 | \\usage: zagi alias [--print] 6 | \\ 7 | \\Set up git alias to zagi in your shell config. 
8 | \\ 9 | \\Options: 10 | \\ --print, -p Print alias command instead of adding it 11 | \\ 12 | ; 13 | 14 | const Shell = enum { 15 | bash, 16 | zsh, 17 | fish, 18 | unknown, 19 | }; 20 | 21 | pub fn run(allocator: std.mem.Allocator, args: [][:0]u8) git.Error!void { 22 | const stdout = std.fs.File.stdout().deprecatedWriter(); 23 | 24 | var print_only = false; 25 | 26 | // Check for flags 27 | for (args[2..]) |arg| { 28 | if (std.mem.eql(u8, arg, "--help") or std.mem.eql(u8, arg, "-h")) { 29 | printHelp(stdout) catch return git.Error.WriteFailed; 30 | return; 31 | } 32 | if (std.mem.eql(u8, arg, "--print") or std.mem.eql(u8, arg, "-p")) { 33 | print_only = true; 34 | } 35 | } 36 | 37 | const shell = detectShell(); 38 | 39 | if (print_only) { 40 | printInitInstructions(stdout, shell) catch return git.Error.WriteFailed; 41 | return; 42 | } 43 | 44 | // Try to automatically add to shell config 45 | addToShellConfig(allocator, shell, stdout) catch return git.Error.WriteFailed; 46 | } 47 | 48 | fn detectShell() Shell { 49 | const shell_path = std.posix.getenv("SHELL") orelse return .unknown; 50 | 51 | if (std.mem.endsWith(u8, shell_path, "/bash") or std.mem.eql(u8, shell_path, "bash")) { 52 | return .bash; 53 | } else if (std.mem.endsWith(u8, shell_path, "/zsh") or std.mem.eql(u8, shell_path, "zsh")) { 54 | return .zsh; 55 | } else if (std.mem.endsWith(u8, shell_path, "/fish") or std.mem.eql(u8, shell_path, "fish")) { 56 | return .fish; 57 | } 58 | 59 | return .unknown; 60 | } 61 | 62 | fn printHelp(writer: anytype) !void { 63 | try writer.print( 64 | \\zagi alias - Set up zagi as your git command 65 | \\ 66 | \\Usage: zagi alias [options] 67 | \\ 68 | \\Automatically adds 'alias git=zagi' to your shell config file. 69 | \\ 70 | \\Options: 71 | \\ --print, -p Print the alias command instead of adding it 72 | \\ --help, -h Show this help 73 | \\ 74 | \\Supported shells: 75 | \\ - bash (~/.bashrc) 76 | \\ - zsh (~/.zshrc) 77 | \\ - fish (~/.config/fish/config.fish) 78 | \\ 79 | , .{}); 80 | } 81 | 82 | fn printInitInstructions(writer: anytype, shell: Shell) !void { 83 | switch (shell) { 84 | .bash => { 85 | try writer.print( 86 | \\# Add to ~/.bashrc: 87 | \\alias git='{s}' 88 | \\ 89 | , .{getZagiPath()}); 90 | }, 91 | .zsh => { 92 | try writer.print( 93 | \\# Add to ~/.zshrc: 94 | \\alias git='{s}' 95 | \\ 96 | , .{getZagiPath()}); 97 | }, 98 | .fish => { 99 | try writer.print( 100 | \\# Add to ~/.config/fish/config.fish: 101 | \\alias git '{s}' 102 | \\ 103 | , .{getZagiPath()}); 104 | }, 105 | .unknown => { 106 | try writer.print( 107 | \\# Could not detect shell. Common configurations: 108 | \\ 109 | \\# bash/zsh: 110 | \\alias git='{s}' 111 | \\ 112 | \\# fish: 113 | \\alias git '{s}' 114 | \\ 115 | , .{ getZagiPath(), getZagiPath() }); 116 | }, 117 | } 118 | } 119 | 120 | fn getZagiPath() []const u8 { 121 | // For now, assume zagi is in PATH 122 | // Could be enhanced to detect actual binary location 123 | return "zagi"; 124 | } 125 | 126 | fn addToShellConfig(allocator: std.mem.Allocator, shell: Shell, writer: anytype) !void { 127 | const home = std.posix.getenv("HOME") orelse { 128 | try writer.print("Could not determine HOME directory. 
Use --print to see the alias command.\n", .{}); 129 | return; 130 | }; 131 | 132 | const config_path: ?[]const u8 = switch (shell) { 133 | .bash => blk: { 134 | const bashrc = std.fmt.allocPrint(allocator, "{s}/.bashrc", .{home}) catch return error.OutOfMemory; 135 | break :blk bashrc; 136 | }, 137 | .zsh => std.fmt.allocPrint(allocator, "{s}/.zshrc", .{home}) catch return error.OutOfMemory, 138 | .fish => std.fmt.allocPrint(allocator, "{s}/.config/fish/config.fish", .{home}) catch return error.OutOfMemory, 139 | .unknown => null, 140 | }; 141 | 142 | if (config_path == null) { 143 | try writer.print("Automatic setup not supported for this shell. Use --print to see the alias command.\n", .{}); 144 | return; 145 | } 146 | 147 | const path = config_path.?; 148 | defer allocator.free(path); 149 | 150 | const alias_line = switch (shell) { 151 | .bash, .zsh => "alias git='zagi'", 152 | .fish => "alias git 'zagi'", 153 | else => unreachable, 154 | }; 155 | 156 | // Check if alias already exists 157 | if (std.fs.cwd().openFile(path, .{})) |file| { 158 | defer file.close(); 159 | const content = file.readToEndAlloc(allocator, 1024 * 1024) catch { 160 | try writer.print("Could not read {s}\n", .{path}); 161 | return; 162 | }; 163 | defer allocator.free(content); 164 | 165 | if (std.mem.indexOf(u8, content, "alias git=") != null or 166 | std.mem.indexOf(u8, content, "alias git ") != null) 167 | { 168 | try writer.print("Git alias already exists in {s}\n", .{path}); 169 | return; 170 | } 171 | } else |_| { 172 | // File doesn't exist, we'll create it 173 | } 174 | 175 | // Append alias to config file 176 | const file = std.fs.cwd().openFile(path, .{ .mode = .write_only }) catch |err| { 177 | if (err == error.FileNotFound) { 178 | // Create the file if it doesn't exist 179 | const new_file = std.fs.cwd().createFile(path, .{}) catch { 180 | try writer.print("Could not create {s}\n", .{path}); 181 | return; 182 | }; 183 | new_file.close(); 184 | // Re-open for appending 185 | const f = std.fs.cwd().openFile(path, .{ .mode = .write_only }) catch { 186 | try writer.print("Could not open {s} for writing\n", .{path}); 187 | return; 188 | }; 189 | return writeAlias(f, path, alias_line, writer); 190 | } 191 | try writer.print("Could not open {s} for writing\n", .{path}); 192 | return; 193 | }; 194 | 195 | return writeAlias(file, path, alias_line, writer); 196 | } 197 | 198 | fn writeAlias(file: std.fs.File, path: []const u8, alias_line: []const u8, writer: anytype) !void { 199 | defer file.close(); 200 | 201 | // Seek to end 202 | file.seekFromEnd(0) catch {}; 203 | 204 | // Write alias using writeAll instead of writer 205 | file.writeAll("\n# zagi - a better git for agents\n") catch { 206 | try writer.print("Could not write to {s}\n", .{path}); 207 | return; 208 | }; 209 | file.writeAll(alias_line) catch { 210 | try writer.print("Could not write to {s}\n", .{path}); 211 | return; 212 | }; 213 | file.writeAll("\n") catch { 214 | try writer.print("Could not write to {s}\n", .{path}); 215 | return; 216 | }; 217 | 218 | try writer.print("Added to {s}:\n {s}\n\nRestart your shell or run: source {s}\n", .{ path, alias_line, path }); 219 | } 220 | 221 | // Tests 222 | const testing = std.testing; 223 | 224 | test "printHelp outputs usage information" { 225 | var output = std.array_list.Managed(u8).init(testing.allocator); 226 | defer output.deinit(); 227 | 228 | try printHelp(output.writer()); 229 | 230 | const result = output.items; 231 | try testing.expect(std.mem.indexOf(u8, result, "zagi alias") != null); 232 | 
try testing.expect(std.mem.indexOf(u8, result, "--print") != null); 233 | try testing.expect(std.mem.indexOf(u8, result, "--help") != null); 234 | } 235 | 236 | test "printInitInstructions for bash" { 237 | var output = std.array_list.Managed(u8).init(testing.allocator); 238 | defer output.deinit(); 239 | 240 | try printInitInstructions(output.writer(), .bash); 241 | 242 | const result = output.items; 243 | try testing.expect(std.mem.indexOf(u8, result, ".bashrc") != null); 244 | try testing.expect(std.mem.indexOf(u8, result, "alias git='zagi'") != null); 245 | } 246 | 247 | test "printInitInstructions for zsh" { 248 | var output = std.array_list.Managed(u8).init(testing.allocator); 249 | defer output.deinit(); 250 | 251 | try printInitInstructions(output.writer(), .zsh); 252 | 253 | const result = output.items; 254 | try testing.expect(std.mem.indexOf(u8, result, ".zshrc") != null); 255 | try testing.expect(std.mem.indexOf(u8, result, "alias git='zagi'") != null); 256 | } 257 | 258 | test "printInitInstructions for fish" { 259 | var output = std.array_list.Managed(u8).init(testing.allocator); 260 | defer output.deinit(); 261 | 262 | try printInitInstructions(output.writer(), .fish); 263 | 264 | const result = output.items; 265 | try testing.expect(std.mem.indexOf(u8, result, "config.fish") != null); 266 | try testing.expect(std.mem.indexOf(u8, result, "alias git 'zagi'") != null); 267 | } 268 | 269 | test "printInitInstructions for unknown shell shows all options" { 270 | var output = std.array_list.Managed(u8).init(testing.allocator); 271 | defer output.deinit(); 272 | 273 | try printInitInstructions(output.writer(), .unknown); 274 | 275 | const result = output.items; 276 | try testing.expect(std.mem.indexOf(u8, result, "bash/zsh") != null); 277 | try testing.expect(std.mem.indexOf(u8, result, "fish") != null); 278 | } 279 | 280 | test "getZagiPath returns zagi" { 281 | try testing.expectEqualStrings("zagi", getZagiPath()); 282 | } 283 | -------------------------------------------------------------------------------- /src/guardrails.zig: -------------------------------------------------------------------------------- 1 | const std = @import("std"); 2 | 3 | /// Guardrails for agent mode (ZAGI_AGENT). 4 | /// Blocks commands that can cause actual data loss. 5 | /// 6 | /// Philosophy: Only block commands where data is UNRECOVERABLE. 
7 | /// - Discarding uncommitted work = data loss (no way to get it back) 8 | /// - Deleting untracked files = data loss 9 | /// - Force pushing = remote data loss 10 | /// - Rewriting shared history = data loss for collaborators 11 | /// 12 | /// NOT blocked (recoverable via reflog/remote): 13 | /// - git reset (soft) - commits still in reflog 14 | /// - git branch -d - only deletes if merged 15 | /// - git checkout - just switches, doesn't discard 16 | 17 | pub const BlockedCommand = struct { 18 | pattern: Pattern, 19 | reason: []const u8, 20 | }; 21 | 22 | pub const Pattern = union(enum) { 23 | /// Command + specific flag combination (e.g., "reset" + "--hard") 24 | cmd_with_flag: struct { 25 | cmd: []const u8, 26 | flag: []const u8, 27 | }, 28 | /// Command + any of these flags 29 | cmd_with_any_flag: struct { 30 | cmd: []const u8, 31 | flags: []const []const u8, 32 | }, 33 | /// Command + argument pattern (e.g., "checkout" + ".") 34 | cmd_with_arg: struct { 35 | cmd: []const u8, 36 | arg: []const u8, 37 | }, 38 | /// Command + flag + argument pattern (e.g., "checkout" + "--" + any path) 39 | cmd_flag_then_arg: struct { 40 | cmd: []const u8, 41 | flag: []const u8, 42 | }, 43 | /// Subcommand (e.g., "stash drop") 44 | subcommand: struct { 45 | cmd: []const u8, 46 | sub: []const u8, 47 | }, 48 | }; 49 | 50 | /// Commands that cause unrecoverable data loss. 51 | pub const blocked_commands = [_]BlockedCommand{ 52 | // Working tree destroyers 53 | .{ 54 | .pattern = .{ .cmd_with_flag = .{ .cmd = "reset", .flag = "--hard" } }, 55 | .reason = "discards all uncommitted changes (unrecoverable)", 56 | }, 57 | .{ 58 | .pattern = .{ .cmd_with_arg = .{ .cmd = "checkout", .arg = "." } }, 59 | .reason = "discards all working tree changes (unrecoverable)", 60 | }, 61 | .{ 62 | .pattern = .{ .cmd_with_any_flag = .{ .cmd = "clean", .flags = &.{ "-f", "--force", "-fd", "-fx", "-fxd", "-d", "-x" } } }, 63 | .reason = "permanently deletes untracked files", 64 | }, 65 | .{ 66 | .pattern = .{ .cmd_with_arg = .{ .cmd = "restore", .arg = "." } }, 67 | .reason = "discards all working tree changes (unrecoverable)", 68 | }, 69 | .{ 70 | .pattern = .{ .cmd_with_flag = .{ .cmd = "restore", .flag = "--worktree" } }, 71 | .reason = "discards working tree changes (unrecoverable)", 72 | }, 73 | 74 | // Remote history destroyers 75 | .{ 76 | .pattern = .{ .cmd_with_any_flag = .{ .cmd = "push", .flags = &.{ "-f", "--force", "--force-with-lease", "--force-if-includes" } } }, 77 | .reason = "overwrites remote history (may cause data loss for collaborators)", 78 | }, 79 | 80 | // Stash destroyers 81 | .{ 82 | .pattern = .{ .subcommand = .{ .cmd = "stash", .sub = "drop" } }, 83 | .reason = "permanently deletes stashed changes", 84 | }, 85 | .{ 86 | .pattern = .{ .subcommand = .{ .cmd = "stash", .sub = "clear" } }, 87 | .reason = "permanently deletes all stashed changes", 88 | }, 89 | 90 | // Branch force delete 91 | .{ 92 | .pattern = .{ .cmd_with_flag = .{ .cmd = "branch", .flag = "-D" } }, 93 | .reason = "force deletes branch even if not merged (potential data loss)", 94 | }, 95 | }; 96 | 97 | /// Check if a command should be blocked in agent mode. 98 | /// Returns the reason if blocked, null if allowed. 
99 | pub fn checkBlocked(args: []const [:0]const u8) ?[]const u8 { 100 | // Need at least: zagi 101 | if (args.len < 2) return null; 102 | 103 | // Skip the executable name (could be "zagi", "git", or full path like "/usr/bin/zagi") 104 | // Check if args[0] ends with "zagi" or "git" or is exactly one of them 105 | const arg0 = std.mem.sliceTo(args[0], 0); 106 | const is_wrapper = std.mem.eql(u8, arg0, "zagi") or 107 | std.mem.eql(u8, arg0, "git") or 108 | std.mem.endsWith(u8, arg0, "/zagi") or 109 | std.mem.endsWith(u8, arg0, "/git"); 110 | const cmd_start: usize = if (is_wrapper) 1 else 0; 111 | 112 | if (args.len <= cmd_start) return null; 113 | 114 | const cmd = std.mem.sliceTo(args[cmd_start], 0); 115 | const rest = args[cmd_start + 1 ..]; 116 | 117 | for (blocked_commands) |blocked| { 118 | if (matchesPattern(cmd, rest, blocked.pattern)) { 119 | return blocked.reason; 120 | } 121 | } 122 | 123 | return null; 124 | } 125 | 126 | fn matchesPattern(cmd: []const u8, rest: []const [:0]const u8, pattern: Pattern) bool { 127 | switch (pattern) { 128 | .cmd_with_flag => |p| { 129 | if (!std.mem.eql(u8, cmd, p.cmd)) return false; 130 | return hasFlag(rest, p.flag); 131 | }, 132 | .cmd_with_any_flag => |p| { 133 | if (!std.mem.eql(u8, cmd, p.cmd)) return false; 134 | for (p.flags) |flag| { 135 | if (hasFlag(rest, flag)) return true; 136 | } 137 | return false; 138 | }, 139 | .cmd_with_arg => |p| { 140 | if (!std.mem.eql(u8, cmd, p.cmd)) return false; 141 | return hasArg(rest, p.arg); 142 | }, 143 | .cmd_flag_then_arg => |p| { 144 | if (!std.mem.eql(u8, cmd, p.cmd)) return false; 145 | // Check for "--" followed by any argument 146 | for (rest, 0..) |arg_ptr, i| { 147 | const arg = std.mem.sliceTo(arg_ptr, 0); 148 | if (std.mem.eql(u8, arg, p.flag) and i + 1 < rest.len) { 149 | return true; 150 | } 151 | } 152 | return false; 153 | }, 154 | .subcommand => |p| { 155 | if (!std.mem.eql(u8, cmd, p.cmd)) return false; 156 | if (rest.len == 0) return false; 157 | return std.mem.eql(u8, std.mem.sliceTo(rest[0], 0), p.sub); 158 | }, 159 | } 160 | } 161 | 162 | fn hasFlag(args: []const [:0]const u8, flag: []const u8) bool { 163 | for (args) |arg_ptr| { 164 | const arg = std.mem.sliceTo(arg_ptr, 0); 165 | if (std.mem.eql(u8, arg, flag)) return true; 166 | // Handle combined short flags like -fd 167 | if (flag.len == 2 and flag[0] == '-' and flag[1] != '-') { 168 | if (arg.len > 1 and arg[0] == '-' and arg[1] != '-') { 169 | // Check if the flag char is in the combined flags 170 | if (std.mem.indexOfScalar(u8, arg[1..], flag[1]) != null) { 171 | return true; 172 | } 173 | } 174 | } 175 | } 176 | return false; 177 | } 178 | 179 | fn hasArg(args: []const [:0]const u8, target: []const u8) bool { 180 | for (args) |arg_ptr| { 181 | const arg = std.mem.sliceTo(arg_ptr, 0); 182 | if (std.mem.eql(u8, arg, target)) return true; 183 | } 184 | return false; 185 | } 186 | 187 | /// Check if guardrails should be enforced. 188 | pub fn isAgentMode() bool { 189 | return std.posix.getenv("ZAGI_AGENT") != null; 190 | } 191 | 192 | // Tests 193 | const testing = std.testing; 194 | 195 | fn toArgs(comptime strings: []const []const u8) [strings.len][:0]const u8 { 196 | var result: [strings.len][:0]const u8 = undefined; 197 | inline for (strings, 0..) 
|s, i| { 198 | result[i] = s ++ ""; 199 | } 200 | return result; 201 | } 202 | 203 | test "blocks reset --hard" { 204 | const args = toArgs(&.{ "git", "reset", "--hard" }); 205 | try testing.expect(checkBlocked(&args) != null); 206 | } 207 | 208 | test "blocks reset --hard HEAD~1" { 209 | const args = toArgs(&.{ "git", "reset", "--hard", "HEAD~1" }); 210 | try testing.expect(checkBlocked(&args) != null); 211 | } 212 | 213 | test "allows reset --soft" { 214 | const args = toArgs(&.{ "git", "reset", "--soft", "HEAD~1" }); 215 | try testing.expect(checkBlocked(&args) == null); 216 | } 217 | 218 | test "allows reset without flags" { 219 | const args = toArgs(&.{ "git", "reset", "HEAD~1" }); 220 | try testing.expect(checkBlocked(&args) == null); 221 | } 222 | 223 | test "blocks checkout ." { 224 | const args = toArgs(&.{ "git", "checkout", "." }); 225 | try testing.expect(checkBlocked(&args) != null); 226 | } 227 | 228 | test "allows checkout -- file (targeted revert is ok)" { 229 | const args = toArgs(&.{ "git", "checkout", "--", "file.txt" }); 230 | try testing.expect(checkBlocked(&args) == null); 231 | } 232 | 233 | test "allows checkout branch" { 234 | const args = toArgs(&.{ "git", "checkout", "main" }); 235 | try testing.expect(checkBlocked(&args) == null); 236 | } 237 | 238 | test "allows checkout -b newbranch" { 239 | const args = toArgs(&.{ "git", "checkout", "-b", "feature" }); 240 | try testing.expect(checkBlocked(&args) == null); 241 | } 242 | 243 | test "blocks clean -f" { 244 | const args = toArgs(&.{ "git", "clean", "-f" }); 245 | try testing.expect(checkBlocked(&args) != null); 246 | } 247 | 248 | test "blocks clean -fd" { 249 | const args = toArgs(&.{ "git", "clean", "-fd" }); 250 | try testing.expect(checkBlocked(&args) != null); 251 | } 252 | 253 | test "blocks clean -d (combined flags)" { 254 | const args = toArgs(&.{ "git", "clean", "-d" }); 255 | try testing.expect(checkBlocked(&args) != null); 256 | } 257 | 258 | test "allows clean -n (dry run)" { 259 | const args = toArgs(&.{ "git", "clean", "-n" }); 260 | try testing.expect(checkBlocked(&args) == null); 261 | } 262 | 263 | test "blocks push --force" { 264 | const args = toArgs(&.{ "git", "push", "--force" }); 265 | try testing.expect(checkBlocked(&args) != null); 266 | } 267 | 268 | test "blocks push -f" { 269 | const args = toArgs(&.{ "git", "push", "-f" }); 270 | try testing.expect(checkBlocked(&args) != null); 271 | } 272 | 273 | test "allows push" { 274 | const args = toArgs(&.{ "git", "push", "origin", "main" }); 275 | try testing.expect(checkBlocked(&args) == null); 276 | } 277 | 278 | test "blocks stash drop" { 279 | const args = toArgs(&.{ "git", "stash", "drop" }); 280 | try testing.expect(checkBlocked(&args) != null); 281 | } 282 | 283 | test "blocks stash clear" { 284 | const args = toArgs(&.{ "git", "stash", "clear" }); 285 | try testing.expect(checkBlocked(&args) != null); 286 | } 287 | 288 | test "allows stash" { 289 | const args = toArgs(&.{ "git", "stash" }); 290 | try testing.expect(checkBlocked(&args) == null); 291 | } 292 | 293 | test "allows stash pop" { 294 | const args = toArgs(&.{ "git", "stash", "pop" }); 295 | try testing.expect(checkBlocked(&args) == null); 296 | } 297 | 298 | test "blocks branch -D" { 299 | const args = toArgs(&.{ "git", "branch", "-D", "feature" }); 300 | try testing.expect(checkBlocked(&args) != null); 301 | } 302 | 303 | test "allows branch -d" { 304 | const args = toArgs(&.{ "git", "branch", "-d", "feature" }); 305 | try testing.expect(checkBlocked(&args) == null); 306 | } 
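// Illustrative extra case (a sketch, not part of the original test suite):
// "--force-with-lease" is listed in blocked_commands above, so it should be
// refused in agent mode just like "--force".
test "blocks push --force-with-lease (illustrative)" {
    const args = toArgs(&.{ "git", "push", "--force-with-lease", "origin", "main" });
    try testing.expect(checkBlocked(&args) != null);
}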
307 | 308 | test "blocks restore ." { 309 | const args = toArgs(&.{ "git", "restore", "." }); 310 | try testing.expect(checkBlocked(&args) != null); 311 | } 312 | 313 | test "blocks restore --worktree" { 314 | const args = toArgs(&.{ "git", "restore", "--worktree", "file.txt" }); 315 | try testing.expect(checkBlocked(&args) != null); 316 | } 317 | 318 | test "allows restore --staged" { 319 | const args = toArgs(&.{ "git", "restore", "--staged", "file.txt" }); 320 | try testing.expect(checkBlocked(&args) == null); 321 | } 322 | -------------------------------------------------------------------------------- /test/src/diff.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, test, expect, beforeEach, afterEach } from "vitest"; 2 | import { resolve } from "path"; 3 | import { rmSync, writeFileSync, readFileSync } from "fs"; 4 | import { createFixtureRepo } from "../fixtures/setup"; 5 | import { zagi, git } from "./shared"; 6 | 7 | let REPO_DIR: string; 8 | 9 | beforeEach(() => { 10 | REPO_DIR = createFixtureRepo(); 11 | }); 12 | 13 | afterEach(() => { 14 | if (REPO_DIR) { 15 | rmSync(REPO_DIR, { recursive: true, force: true }); 16 | } 17 | }); 18 | 19 | describe("zagi diff", () => { 20 | test("produces smaller output than git diff", () => { 21 | const zagiOut = zagi(["diff"], { cwd: REPO_DIR }); 22 | const gitOut = git(["diff"], { cwd: REPO_DIR }); 23 | 24 | expect(zagiOut.length).toBeLessThan(gitOut.length); 25 | }); 26 | 27 | test("shows file path with line number", () => { 28 | const result = zagi(["diff"], { cwd: REPO_DIR }); 29 | // Format: path/to/file.ts:123 30 | expect(result).toMatch(/^[\w/.-]+:\d+/m); 31 | }); 32 | 33 | test("shows additions with + prefix", () => { 34 | const result = zagi(["diff"], { cwd: REPO_DIR }); 35 | expect(result).toMatch(/^\+ /m); 36 | }); 37 | 38 | test("shows deletions with - prefix", () => { 39 | // Remove a line to create a deletion 40 | const filePath = resolve(REPO_DIR, "src/main.ts"); 41 | const content = readFileSync(filePath, "utf-8"); 42 | const lines = content.split("\n"); 43 | lines.splice(5, 1); // Remove line 6 44 | writeFileSync(filePath, lines.join("\n")); 45 | 46 | const result = zagi(["diff"], { cwd: REPO_DIR }); 47 | expect(result).toMatch(/^- /m); 48 | }); 49 | 50 | test("--staged shows staged changes", () => { 51 | // Stage the existing modified file 52 | git(["add", "src/main.ts"], { cwd: REPO_DIR }); 53 | 54 | const result = zagi(["diff", "--staged"], { cwd: REPO_DIR }); 55 | expect(result).toContain("src/main.ts"); 56 | }); 57 | 58 | test("no changes shows 'no changes'", () => { 59 | // Reset all changes 60 | git(["checkout", "--", "."], { cwd: REPO_DIR }); 61 | git(["clean", "-fd"], { cwd: REPO_DIR }); 62 | 63 | const result = zagi(["diff"], { cwd: REPO_DIR }); 64 | expect(result).toBe("no changes\n"); 65 | }); 66 | 67 | test("path filter shows only specified file", () => { 68 | // Create another modified file 69 | writeFileSync(resolve(REPO_DIR, "README.md"), "# Modified\n"); 70 | 71 | const result = zagi(["diff", "--", "src/main.ts"], { cwd: REPO_DIR }); 72 | expect(result).toContain("src/main.ts"); 73 | expect(result).not.toContain("README.md"); 74 | }); 75 | 76 | test("path filter with directory", () => { 77 | // Modify a file outside src/ 78 | writeFileSync(resolve(REPO_DIR, "README.md"), "# Modified\n"); 79 | 80 | const result = zagi(["diff", "--", "src/"], { cwd: REPO_DIR }); 81 | expect(result).toContain("src/main.ts"); 82 | 
expect(result).not.toContain("README.md"); 83 | }); 84 | 85 | test("revision range shows changes between commits", () => { 86 | // Commit the current changes first 87 | git(["add", "."], { cwd: REPO_DIR }); 88 | git(["commit", "-m", "test commit"], { cwd: REPO_DIR }); 89 | 90 | // Now diff between previous and current 91 | const result = zagi(["diff", "HEAD~1..HEAD"], { cwd: REPO_DIR }); 92 | expect(result).toContain("src/main.ts"); 93 | }); 94 | 95 | test("single revision shows changes since that commit", () => { 96 | // Commit the current changes first 97 | git(["add", "."], { cwd: REPO_DIR }); 98 | git(["commit", "-m", "test commit"], { cwd: REPO_DIR }); 99 | 100 | // Diff from previous commit to HEAD 101 | const result = zagi(["diff", "HEAD~1"], { cwd: REPO_DIR }); 102 | expect(result).toContain("src/main.ts"); 103 | }); 104 | 105 | test("revision with path filter", () => { 106 | // Modify README too 107 | writeFileSync(resolve(REPO_DIR, "README.md"), "# Modified\n"); 108 | 109 | // Commit all changes 110 | git(["add", "."], { cwd: REPO_DIR }); 111 | git(["commit", "-m", "test commit"], { cwd: REPO_DIR }); 112 | 113 | // Diff with path filter - should only show src/main.ts 114 | const result = zagi(["diff", "HEAD~1..HEAD", "--", "src/main.ts"], { cwd: REPO_DIR }); 115 | expect(result).toContain("src/main.ts"); 116 | expect(result).not.toContain("README.md"); 117 | }); 118 | 119 | test("triple dot shows changes since branches diverged", () => { 120 | // Create a branch from current state 121 | git(["checkout", "-b", "feature"], { cwd: REPO_DIR }); 122 | 123 | // Make a commit on feature branch 124 | git(["add", "."], { cwd: REPO_DIR }); 125 | git(["commit", "-m", "feature commit"], { cwd: REPO_DIR }); 126 | 127 | // Go back to main and make different changes 128 | git(["checkout", "main"], { cwd: REPO_DIR }); 129 | writeFileSync(resolve(REPO_DIR, "README.md"), "# Main branch change\n"); 130 | git(["add", "."], { cwd: REPO_DIR }); 131 | git(["commit", "-m", "main commit"], { cwd: REPO_DIR }); 132 | 133 | // Triple dot should show feature branch changes (not main changes) 134 | const result = zagi(["diff", "main...feature"], { cwd: REPO_DIR }); 135 | expect(result).toContain("src/main.ts"); // Feature branch change 136 | expect(result).not.toContain("Main branch change"); // Not main branch change 137 | }); 138 | }); 139 | 140 | describe("zagi diff output format", () => { 141 | test("header format is file:line for single line change", () => { 142 | // Create a file with a single line change 143 | const filePath = resolve(REPO_DIR, "single.txt"); 144 | writeFileSync(filePath, "line1\nline2\nline3\n"); 145 | git(["add", "single.txt"], { cwd: REPO_DIR }); 146 | git(["commit", "-m", "add single.txt"], { cwd: REPO_DIR }); 147 | 148 | // Change only line 2 149 | writeFileSync(filePath, "line1\nmodified\nline3\n"); 150 | 151 | const result = zagi(["diff"], { cwd: REPO_DIR }); 152 | // Should have format: single.txt:2 153 | expect(result).toMatch(/single\.txt:\d+\n/); 154 | }); 155 | 156 | test("header format is file:start-end for multi-line change", () => { 157 | // Create a file 158 | const filePath = resolve(REPO_DIR, "multi.txt"); 159 | writeFileSync(filePath, "line1\nline2\nline3\nline4\nline5\n"); 160 | git(["add", "multi.txt"], { cwd: REPO_DIR }); 161 | git(["commit", "-m", "add multi.txt"], { cwd: REPO_DIR }); 162 | 163 | // Change multiple consecutive lines 164 | writeFileSync(filePath, "line1\nchanged2\nchanged3\nchanged4\nline5\n"); 165 | 166 | const result = zagi(["diff"], { cwd: 
REPO_DIR }); 167 | // Should have format: multi.txt:2-4 168 | expect(result).toMatch(/multi\.txt:\d+-\d+\n/); 169 | }); 170 | 171 | test("additions are prefixed with + and space", () => { 172 | const result = zagi(["diff"], { cwd: REPO_DIR }); 173 | const lines = result.split("\n"); 174 | const additionLines = lines.filter((l) => l.startsWith("+")); 175 | 176 | expect(additionLines.length).toBeGreaterThan(0); 177 | // Each addition should be "+ content" (plus, space, content) 178 | for (const line of additionLines) { 179 | expect(line).toMatch(/^\+ .*/); 180 | } 181 | }); 182 | 183 | test("deletions are prefixed with - and space", () => { 184 | // Remove a line to create a deletion 185 | const filePath = resolve(REPO_DIR, "src/main.ts"); 186 | const content = readFileSync(filePath, "utf-8"); 187 | const lines = content.split("\n"); 188 | lines.splice(5, 1); // Remove line 6 189 | writeFileSync(filePath, lines.join("\n")); 190 | 191 | const result = zagi(["diff"], { cwd: REPO_DIR }); 192 | const deletionLines = result.split("\n").filter((l) => l.startsWith("-")); 193 | 194 | expect(deletionLines.length).toBeGreaterThan(0); 195 | // Each deletion should be "- content" (minus, space, content) 196 | for (const line of deletionLines) { 197 | expect(line).toMatch(/^- .*/); 198 | } 199 | }); 200 | 201 | test("multiple hunks show separate file:line headers", () => { 202 | // Create a file with content 203 | const filePath = resolve(REPO_DIR, "hunks.txt"); 204 | const lines = Array.from({ length: 20 }, (_, i) => `line${i + 1}`); 205 | writeFileSync(filePath, lines.join("\n") + "\n"); 206 | git(["add", "hunks.txt"], { cwd: REPO_DIR }); 207 | git(["commit", "-m", "add hunks.txt"], { cwd: REPO_DIR }); 208 | 209 | // Change lines at beginning and end (creating separate hunks) 210 | lines[1] = "modified2"; 211 | lines[18] = "modified19"; 212 | writeFileSync(filePath, lines.join("\n") + "\n"); 213 | 214 | const result = zagi(["diff", "--", "hunks.txt"], { cwd: REPO_DIR }); 215 | // Should have multiple file:line headers (one per hunk) 216 | const headers = result.match(/hunks\.txt:\d+/g); 217 | expect(headers).not.toBeNull(); 218 | expect(headers!.length).toBeGreaterThanOrEqual(2); 219 | }); 220 | 221 | test("output has no git diff headers (---, +++, @@)", () => { 222 | const result = zagi(["diff"], { cwd: REPO_DIR }); 223 | expect(result).not.toContain("---"); 224 | expect(result).not.toContain("+++"); 225 | expect(result).not.toContain("@@"); 226 | expect(result).not.toContain("diff --git"); 227 | }); 228 | }); 229 | 230 | describe("zagi diff --stat", () => { 231 | test("shows file names with change counts", () => { 232 | const result = zagi(["diff", "--stat"], { cwd: REPO_DIR }); 233 | // Format: " filename | N ++--" 234 | expect(result).toMatch(/^\s+\S+\s+\|\s+\d+/m); 235 | }); 236 | 237 | test("shows +/- visualization bar", () => { 238 | const result = zagi(["diff", "--stat"], { cwd: REPO_DIR }); 239 | // Should contain + or - in the output 240 | expect(result).toMatch(/[+-]/); 241 | }); 242 | 243 | test("shows summary line with file count", () => { 244 | const result = zagi(["diff", "--stat"], { cwd: REPO_DIR }); 245 | // Format: " N files changed, X insertions(+), Y deletions(-)" 246 | expect(result).toMatch(/\d+ files changed/); 247 | }); 248 | 249 | test("shows insertions count when present", () => { 250 | const result = zagi(["diff", "--stat"], { cwd: REPO_DIR }); 251 | expect(result).toMatch(/\d+ insertions?\(\+\)/); 252 | }); 253 | 254 | test("--stat with no changes shows 'no changes'", () => 
{ 255 | // Reset all changes 256 | git(["checkout", "--", "."], { cwd: REPO_DIR }); 257 | git(["clean", "-fd"], { cwd: REPO_DIR }); 258 | 259 | const result = zagi(["diff", "--stat"], { cwd: REPO_DIR }); 260 | expect(result).toBe("no changes\n"); 261 | }); 262 | 263 | test("--stat with --staged works", () => { 264 | git(["add", "src/main.ts"], { cwd: REPO_DIR }); 265 | 266 | const result = zagi(["diff", "--staged", "--stat"], { cwd: REPO_DIR }); 267 | expect(result).toContain("src/main.ts"); 268 | expect(result).toMatch(/files changed/); 269 | }); 270 | 271 | test("--stat shows summary info not full diff content", () => { 272 | const stat = zagi(["diff", "--stat"], { cwd: REPO_DIR }); 273 | // Stat mode should not contain actual diff lines 274 | expect(stat).not.toMatch(/^\+ /m); 275 | expect(stat).not.toMatch(/^- /m); 276 | // But should contain the summary 277 | expect(stat).toMatch(/files changed/); 278 | }); 279 | }); 280 | 281 | describe("zagi diff --name-only", () => { 282 | test("shows only file names", () => { 283 | const result = zagi(["diff", "--name-only"], { cwd: REPO_DIR }); 284 | // Should just be filenames, one per line 285 | expect(result).toContain("src/main.ts"); 286 | // Should not have any diff content 287 | expect(result).not.toMatch(/^\+/m); 288 | expect(result).not.toMatch(/^-/m); 289 | }); 290 | 291 | test("--name-only lists each file once", () => { 292 | const result = zagi(["diff", "--name-only"], { cwd: REPO_DIR }); 293 | const lines = result.trim().split("\n").filter(Boolean); 294 | const unique = new Set(lines); 295 | expect(lines.length).toBe(unique.size); 296 | }); 297 | 298 | test("--name-only with no changes shows 'no changes'", () => { 299 | // Reset all changes 300 | git(["checkout", "--", "."], { cwd: REPO_DIR }); 301 | git(["clean", "-fd"], { cwd: REPO_DIR }); 302 | 303 | const result = zagi(["diff", "--name-only"], { cwd: REPO_DIR }); 304 | expect(result).toBe("no changes\n"); 305 | }); 306 | 307 | test("--name-only with --staged works", () => { 308 | git(["add", "src/main.ts"], { cwd: REPO_DIR }); 309 | 310 | const result = zagi(["diff", "--staged", "--name-only"], { cwd: REPO_DIR }); 311 | expect(result.trim()).toBe("src/main.ts"); 312 | }); 313 | 314 | test("--name-only produces smallest output", () => { 315 | const nameOnly = zagi(["diff", "--name-only"], { cwd: REPO_DIR }); 316 | const stat = zagi(["diff", "--stat"], { cwd: REPO_DIR }); 317 | const patch = zagi(["diff"], { cwd: REPO_DIR }); 318 | 319 | expect(nameOnly.length).toBeLessThanOrEqual(stat.length); 320 | expect(nameOnly.length).toBeLessThan(patch.length); 321 | }); 322 | 323 | test("--name-only with revision range", () => { 324 | git(["add", "."], { cwd: REPO_DIR }); 325 | git(["commit", "-m", "test commit"], { cwd: REPO_DIR }); 326 | 327 | const result = zagi(["diff", "--name-only", "HEAD~1..HEAD"], { cwd: REPO_DIR }); 328 | expect(result).toContain("src/main.ts"); 329 | }); 330 | }); 331 | -------------------------------------------------------------------------------- /server/src/git-storage.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Pure SQLite Git Storage 3 | * 4 | * Implements git object storage without requiring the git binary. 5 | * This can run in Durable Objects with just SQLite. 
6 | * 7 | * Git objects are content-addressed by SHA-1: 8 | * - blob: file content 9 | * - tree: directory listing (mode, name, hash) 10 | * - commit: tree hash, parent(s), author, message 11 | * 12 | * Refs are named pointers to commits (branches, tags, HEAD). 13 | */ 14 | 15 | import { Database } from "bun:sqlite"; 16 | import { createHash } from "crypto"; 17 | import { deflate, inflate } from "zlib"; 18 | import { promisify } from "util"; 19 | 20 | const deflateAsync = promisify(deflate); 21 | const inflateAsync = promisify(inflate); 22 | 23 | // Git object types 24 | export type GitObjectType = "blob" | "tree" | "commit" | "tag"; 25 | 26 | export interface GitObject { 27 | type: GitObjectType; 28 | size: number; 29 | data: Buffer; 30 | } 31 | 32 | export interface TreeEntry { 33 | mode: string; // "100644" for file, "040000" for dir, "100755" for executable 34 | name: string; 35 | hash: string; 36 | } 37 | 38 | export interface CommitData { 39 | tree: string; 40 | parents: string[]; 41 | author: { name: string; email: string; timestamp: number; tz: string }; 42 | committer: { name: string; email: string; timestamp: number; tz: string }; 43 | message: string; 44 | } 45 | 46 | /** 47 | * Initialize the SQLite schema for git storage 48 | */ 49 | export function initGitStorage(db: Database): void { 50 | // Git objects table - stores blobs, trees, commits, tags 51 | db.run(` 52 | CREATE TABLE IF NOT EXISTS git_objects ( 53 | hash TEXT PRIMARY KEY, 54 | type TEXT NOT NULL, 55 | size INTEGER NOT NULL, 56 | data BLOB NOT NULL 57 | ) 58 | `); 59 | 60 | // Refs table - branches, tags, HEAD 61 | db.run(` 62 | CREATE TABLE IF NOT EXISTS git_refs ( 63 | name TEXT PRIMARY KEY, 64 | hash TEXT NOT NULL, 65 | symbolic INTEGER DEFAULT 0 66 | ) 67 | `); 68 | 69 | // Create index for faster type lookups 70 | db.run(` 71 | CREATE INDEX IF NOT EXISTS idx_git_objects_type ON git_objects(type) 72 | `); 73 | } 74 | 75 | /** 76 | * Compute SHA-1 hash of a git object 77 | * Git hashes: SHA1(type + ' ' + size + '\0' + content) 78 | */ 79 | export function hashObject(type: GitObjectType, data: Buffer): string { 80 | const header = Buffer.from(`${type} ${data.length}\0`); 81 | const full = Buffer.concat([header, data]); 82 | return createHash("sha1").update(full).digest("hex"); 83 | } 84 | 85 | /** 86 | * Store a git object 87 | */ 88 | export function storeObject( 89 | db: Database, 90 | type: GitObjectType, 91 | data: Buffer 92 | ): string { 93 | const hash = hashObject(type, data); 94 | 95 | // Check if already exists 96 | const existing = db 97 | .query("SELECT 1 FROM git_objects WHERE hash = ?") 98 | .get(hash); 99 | if (existing) return hash; 100 | 101 | // Store compressed 102 | db.run("INSERT INTO git_objects (hash, type, size, data) VALUES (?, ?, ?, ?)", [ 103 | hash, 104 | type, 105 | data.length, 106 | data, // Store raw for now, could compress with zlib 107 | ]); 108 | 109 | return hash; 110 | } 111 | 112 | /** 113 | * Retrieve a git object 114 | */ 115 | export function getObject(db: Database, hash: string): GitObject | null { 116 | const row = db 117 | .query("SELECT type, size, data FROM git_objects WHERE hash = ?") 118 | .get(hash) as { type: string; size: number; data: Buffer } | null; 119 | 120 | if (!row) return null; 121 | 122 | return { 123 | type: row.type as GitObjectType, 124 | size: row.size, 125 | data: Buffer.from(row.data), 126 | }; 127 | } 128 | 129 | /** 130 | * Check if an object exists 131 | */ 132 | export function hasObject(db: Database, hash: string): boolean { 133 | const row = 
db 134 | .query("SELECT 1 FROM git_objects WHERE hash = ?") 135 | .get(hash); 136 | return row !== null; 137 | } 138 | 139 | /** 140 | * Store a blob (file content) 141 | */ 142 | export function storeBlob(db: Database, content: Buffer | string): string { 143 | const data = typeof content === "string" ? Buffer.from(content) : content; 144 | return storeObject(db, "blob", data); 145 | } 146 | 147 | /** 148 | * Get blob content 149 | */ 150 | export function getBlob(db: Database, hash: string): Buffer | null { 151 | const obj = getObject(db, hash); 152 | if (!obj || obj.type !== "blob") return null; 153 | return obj.data; 154 | } 155 | 156 | /** 157 | * Create a tree object from entries 158 | */ 159 | export function createTree(db: Database, entries: TreeEntry[]): string { 160 | // Sort entries by name (git requirement) 161 | const sorted = [...entries].sort((a, b) => a.name.localeCompare(b.name)); 162 | 163 | // Build tree data: mode + ' ' + name + '\0' + hash_bytes 164 | const parts: Buffer[] = []; 165 | for (const entry of sorted) { 166 | const modeName = Buffer.from(`${entry.mode} ${entry.name}\0`); 167 | const hashBytes = Buffer.from(entry.hash, "hex"); 168 | parts.push(modeName, hashBytes); 169 | } 170 | 171 | const data = Buffer.concat(parts); 172 | return storeObject(db, "tree", data); 173 | } 174 | 175 | /** 176 | * Parse a tree object into entries 177 | */ 178 | export function parseTree(db: Database, hash: string): TreeEntry[] | null { 179 | const obj = getObject(db, hash); 180 | if (!obj || obj.type !== "tree") return null; 181 | 182 | const entries: TreeEntry[] = []; 183 | let offset = 0; 184 | const data = obj.data; 185 | 186 | while (offset < data.length) { 187 | // Find space after mode 188 | const spaceIdx = data.indexOf(0x20, offset); 189 | if (spaceIdx === -1) break; 190 | 191 | const mode = data.slice(offset, spaceIdx).toString(); 192 | 193 | // Find null after name 194 | const nullIdx = data.indexOf(0x00, spaceIdx + 1); 195 | if (nullIdx === -1) break; 196 | 197 | const name = data.slice(spaceIdx + 1, nullIdx).toString(); 198 | 199 | // Next 20 bytes are the hash 200 | const hashBytes = data.slice(nullIdx + 1, nullIdx + 21); 201 | const entryHash = hashBytes.toString("hex"); 202 | 203 | entries.push({ mode, name, hash: entryHash }); 204 | offset = nullIdx + 21; 205 | } 206 | 207 | return entries; 208 | } 209 | 210 | /** 211 | * Create a commit object 212 | */ 213 | export function createCommit(db: Database, commit: CommitData): string { 214 | const lines: string[] = []; 215 | 216 | lines.push(`tree ${commit.tree}`); 217 | for (const parent of commit.parents) { 218 | lines.push(`parent ${parent}`); 219 | } 220 | 221 | const { author, committer } = commit; 222 | lines.push( 223 | `author ${author.name} <${author.email}> ${author.timestamp} ${author.tz}` 224 | ); 225 | lines.push( 226 | `committer ${committer.name} <${committer.email}> ${committer.timestamp} ${committer.tz}` 227 | ); 228 | lines.push(""); 229 | lines.push(commit.message); 230 | 231 | const data = Buffer.from(lines.join("\n")); 232 | return storeObject(db, "commit", data); 233 | } 234 | 235 | /** 236 | * Parse a commit object 237 | */ 238 | export function parseCommit(db: Database, hash: string): CommitData | null { 239 | const obj = getObject(db, hash); 240 | if (!obj || obj.type !== "commit") return null; 241 | 242 | const content = obj.data.toString(); 243 | const lines = content.split("\n"); 244 | 245 | let tree = ""; 246 | const parents: string[] = []; 247 | let author = { name: "", email: "", 
timestamp: 0, tz: "+0000" }; 248 | let committer = { name: "", email: "", timestamp: 0, tz: "+0000" }; 249 | let messageStart = 0; 250 | 251 | for (let i = 0; i < lines.length; i++) { 252 | const line = lines[i]!; 253 | if (line === "") { 254 | messageStart = i + 1; 255 | break; 256 | } 257 | 258 | if (line.startsWith("tree ")) { 259 | tree = line.slice(5); 260 | } else if (line.startsWith("parent ")) { 261 | parents.push(line.slice(7)); 262 | } else if (line.startsWith("author ")) { 263 | author = parseAuthorLine(line.slice(7)); 264 | } else if (line.startsWith("committer ")) { 265 | committer = parseAuthorLine(line.slice(10)); 266 | } 267 | } 268 | 269 | const message = lines.slice(messageStart).join("\n"); 270 | 271 | return { tree, parents, author, committer, message }; 272 | } 273 | 274 | /** 275 | * Parse author/committer line: "Name timestamp tz" 276 | */ 277 | function parseAuthorLine(line: string): { 278 | name: string; 279 | email: string; 280 | timestamp: number; 281 | tz: string; 282 | } { 283 | const match = line.match(/^(.+) <(.+)> (\d+) ([+-]\d{4})$/); 284 | if (!match) { 285 | return { name: "Unknown", email: "unknown@unknown", timestamp: 0, tz: "+0000" }; 286 | } 287 | return { 288 | name: match[1]!, 289 | email: match[2]!, 290 | timestamp: parseInt(match[3]!, 10), 291 | tz: match[4]!, 292 | }; 293 | } 294 | 295 | /** 296 | * Get a ref (branch, tag, HEAD) 297 | */ 298 | export function getRef(db: Database, name: string): string | null { 299 | const row = db 300 | .query("SELECT hash, symbolic FROM git_refs WHERE name = ?") 301 | .get(name) as { hash: string; symbolic: number } | null; 302 | 303 | if (!row) return null; 304 | 305 | // If symbolic ref, follow it 306 | if (row.symbolic) { 307 | return getRef(db, row.hash); 308 | } 309 | 310 | return row.hash; 311 | } 312 | 313 | /** 314 | * Set a ref 315 | */ 316 | export function setRef( 317 | db: Database, 318 | name: string, 319 | hash: string, 320 | symbolic = false 321 | ): void { 322 | db.run( 323 | `INSERT OR REPLACE INTO git_refs (name, hash, symbolic) VALUES (?, ?, ?)`, 324 | [name, hash, symbolic ? 1 : 0] 325 | ); 326 | } 327 | 328 | /** 329 | * Delete a ref 330 | */ 331 | export function deleteRef(db: Database, name: string): boolean { 332 | const result = db.run("DELETE FROM git_refs WHERE name = ?", [name]); 333 | return result.changes > 0; 334 | } 335 | 336 | /** 337 | * List all refs matching a pattern 338 | */ 339 | export function listRefs( 340 | db: Database, 341 | prefix = "" 342 | ): { name: string; hash: string }[] { 343 | const rows = db 344 | .query("SELECT name, hash, symbolic FROM git_refs WHERE name LIKE ?") 345 | .all(`${prefix}%`) as { name: string; hash: string; symbolic: number }[]; 346 | 347 | return rows.map((row) => ({ 348 | name: row.name, 349 | hash: row.symbolic ? getRef(db, row.hash) ?? 
row.hash : row.hash, 350 | })); 351 | } 352 | 353 | /** 354 | * Get all branches (refs/heads/*) 355 | */ 356 | export function listBranches(db: Database): { name: string; hash: string }[] { 357 | return listRefs(db, "refs/heads/").map((ref) => ({ 358 | name: ref.name.replace("refs/heads/", ""), 359 | hash: ref.hash, 360 | })); 361 | } 362 | 363 | /** 364 | * Initialize a new repository with an empty commit 365 | */ 366 | export function initRepository(db: Database): string { 367 | initGitStorage(db); 368 | 369 | // Create empty tree 370 | const emptyTreeHash = createTree(db, []); 371 | 372 | // Create initial commit 373 | const now = Math.floor(Date.now() / 1000); 374 | const initialCommit = createCommit(db, { 375 | tree: emptyTreeHash, 376 | parents: [], 377 | author: { name: "System", email: "system@zagi.local", timestamp: now, tz: "+0000" }, 378 | committer: { name: "System", email: "system@zagi.local", timestamp: now, tz: "+0000" }, 379 | message: "Initial commit", 380 | }); 381 | 382 | // Set refs 383 | setRef(db, "refs/heads/main", initialCommit); 384 | setRef(db, "HEAD", "refs/heads/main", true); // symbolic ref 385 | 386 | return initialCommit; 387 | } 388 | 389 | /** 390 | * Create a simple commit with file changes 391 | * This is a helper for basic operations - real git uses more complex staging 392 | */ 393 | export function simpleCommit( 394 | db: Database, 395 | branch: string, 396 | files: { path: string; content: string }[], 397 | message: string, 398 | author: { name: string; email: string } 399 | ): string { 400 | // Get current branch head 401 | const parentHash = getRef(db, `refs/heads/${branch}`); 402 | const parents = parentHash ? [parentHash] : []; 403 | 404 | // Get parent tree entries (if any) 405 | let existingEntries: TreeEntry[] = []; 406 | if (parentHash) { 407 | const parentCommit = parseCommit(db, parentHash); 408 | if (parentCommit) { 409 | existingEntries = parseTree(db, parentCommit.tree) ?? 
[]; 410 | } 411 | } 412 | 413 | // Build new tree with file changes 414 | // Note: This is simplified - doesn't handle nested directories properly 415 | const entriesMap = new Map(); 416 | for (const entry of existingEntries) { 417 | entriesMap.set(entry.name, entry); 418 | } 419 | 420 | for (const file of files) { 421 | const blobHash = storeBlob(db, file.content); 422 | entriesMap.set(file.path, { 423 | mode: "100644", 424 | name: file.path, 425 | hash: blobHash, 426 | }); 427 | } 428 | 429 | const treeHash = createTree(db, Array.from(entriesMap.values())); 430 | 431 | // Create commit 432 | const now = Math.floor(Date.now() / 1000); 433 | const commitHash = createCommit(db, { 434 | tree: treeHash, 435 | parents, 436 | author: { ...author, timestamp: now, tz: "+0000" }, 437 | committer: { ...author, timestamp: now, tz: "+0000" }, 438 | message, 439 | }); 440 | 441 | // Update branch ref 442 | setRef(db, `refs/heads/${branch}`, commitHash); 443 | 444 | return commitHash; 445 | } 446 | 447 | /** 448 | * Get commit history for a branch 449 | */ 450 | export function getHistory( 451 | db: Database, 452 | startHash: string, 453 | limit = 10 454 | ): CommitData[] { 455 | const history: CommitData[] = []; 456 | const seen = new Set(); 457 | const queue = [startHash]; 458 | 459 | while (queue.length > 0 && history.length < limit) { 460 | const hash = queue.shift()!; 461 | if (seen.has(hash)) continue; 462 | seen.add(hash); 463 | 464 | const commit = parseCommit(db, hash); 465 | if (!commit) continue; 466 | 467 | history.push(commit); 468 | queue.push(...commit.parents); 469 | } 470 | 471 | return history; 472 | } 473 | -------------------------------------------------------------------------------- /server/src/git-pack.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Git Pack Protocol Implementation 3 | * 4 | * Handles parsing and generating git packfiles for the smart HTTP protocol. 5 | * This allows git clients to push/pull without needing the git binary. 6 | * 7 | * Pack format: 8 | * - 4 bytes: "PACK" 9 | * - 4 bytes: version (network byte order, usually 2) 10 | * - 4 bytes: number of objects 11 | * - N objects (each with type, size, compressed data) 12 | * - 20 bytes: SHA-1 checksum of entire pack 13 | */ 14 | 15 | import { createHash } from "crypto"; 16 | import { inflateSync, deflateSync } from "zlib"; 17 | import type { Database } from "bun:sqlite"; 18 | import { 19 | storeObject, 20 | getObject, 21 | hasObject, 22 | type GitObjectType, 23 | } from "./git-storage.ts"; 24 | 25 | // Object type numbers in pack format 26 | const OBJ_COMMIT = 1; 27 | const OBJ_TREE = 2; 28 | const OBJ_BLOB = 3; 29 | const OBJ_TAG = 4; 30 | const OBJ_OFS_DELTA = 6; 31 | const OBJ_REF_DELTA = 7; 32 | 33 | const typeToNum: Record<GitObjectType, number> = { 34 | commit: OBJ_COMMIT, 35 | tree: OBJ_TREE, 36 | blob: OBJ_BLOB, 37 | tag: OBJ_TAG, 38 | }; 39 | 40 | const numToType: Record<number, GitObjectType> = { 41 | [OBJ_COMMIT]: "commit", 42 | [OBJ_TREE]: "tree", 43 | [OBJ_BLOB]: "blob", 44 | [OBJ_TAG]: "tag", 45 | }; 46 | 47 | /** 48 | * Pkt-line helpers 49 | */ 50 | export function pktLine(data: string | Buffer): Buffer { 51 | const content = typeof data === "string" ?
Buffer.from(data) : data; 52 | const len = content.length + 4; 53 | const lenHex = len.toString(16).padStart(4, "0"); 54 | return Buffer.concat([Buffer.from(lenHex), content]); 55 | } 56 | 57 | export function pktFlush(): Buffer { 58 | return Buffer.from("0000"); 59 | } 60 | 61 | /** 62 | * Parse pkt-lines from a buffer 63 | */ 64 | export function* parsePktLines(data: Buffer): Generator<Buffer | null> { 65 | let offset = 0; 66 | 67 | while (offset < data.length) { 68 | if (offset + 4 > data.length) break; 69 | 70 | const lenHex = data.slice(offset, offset + 4).toString(); 71 | const len = parseInt(lenHex, 16); 72 | 73 | if (len === 0) { 74 | // Flush packet 75 | yield null; 76 | offset += 4; 77 | continue; 78 | } 79 | 80 | if (len < 4 || offset + len > data.length) break; 81 | 82 | const content = data.slice(offset + 4, offset + len); 83 | yield content; 84 | offset += len; 85 | } 86 | } 87 | 88 | /** 89 | * Parse a packfile and store objects in SQLite 90 | */ 91 | export function parsePackfile( 92 | db: Database, 93 | packData: Buffer 94 | ): { count: number; hashes: string[] } { 95 | let offset = 0; 96 | 97 | // Check header 98 | const header = packData.slice(0, 4).toString(); 99 | if (header !== "PACK") { 100 | throw new Error("Invalid pack header"); 101 | } 102 | offset += 4; 103 | 104 | // Version 105 | const version = packData.readUInt32BE(offset); 106 | if (version !== 2 && version !== 3) { 107 | throw new Error(`Unsupported pack version: ${version}`); 108 | } 109 | offset += 4; 110 | 111 | // Object count 112 | const count = packData.readUInt32BE(offset); 113 | offset += 4; 114 | 115 | const hashes: string[] = []; 116 | const objectOffsets = new Map(); 117 | 118 | for (let i = 0; i < count; i++) { 119 | const objOffset = offset; 120 | const result = parsePackObject(packData, offset, objectOffsets); 121 | offset = result.newOffset; 122 | 123 | // Store the object 124 | const hash = storeObject(db, result.type, result.data); 125 | hashes.push(hash); 126 | 127 | // Remember for delta resolution 128 | objectOffsets.set(objOffset, { type: result.type, data: result.data, hash }); 129 | } 130 | 131 | return { count, hashes }; 132 | } 133 | 134 | /** 135 | * Parse a single object from a packfile 136 | */ 137 | function parsePackObject( 138 | data: Buffer, 139 | offset: number, 140 | objectOffsets: Map<number, { type: GitObjectType; data: Buffer; hash: string }> 141 | ): { type: GitObjectType; data: Buffer; newOffset: number } { 142 | const startOffset = offset; 143 | 144 | // First byte: type (bits 4-6) and size (bits 0-3) 145 | let byte = data[offset++]!; 146 | const type = (byte >> 4) & 0x07; 147 | let size = byte & 0x0f; 148 | let shift = 4; 149 | 150 | // Variable-length size encoding 151 | while (byte & 0x80) { 152 | byte = data[offset++]!; 153 | size |= (byte & 0x7f) << shift; 154 | shift += 7; 155 | } 156 | 157 | // Handle delta objects 158 | if (type === OBJ_REF_DELTA) { 159 | // 20 bytes: base object SHA-1 160 | const baseHash = data.slice(offset, offset + 20).toString("hex"); 161 | offset += 20; 162 | 163 | // Decompress delta data 164 | const { result: deltaData, bytesRead } = decompressObject(data, offset, size); 165 | offset += bytesRead; 166 | 167 | // Find base object (must already be stored) 168 | // For now, skip ref deltas - they require the base to be in the same pack or already stored 169 | throw new Error("REF_DELTA not yet supported - base object needed"); 170 | } 171 | 172 | if (type === OBJ_OFS_DELTA) { 173 | // Variable-length negative offset to base object 174 | let baseOffset = 0; 175 | byte = data[offset++]!; 176 | baseOffset = byte &
0x7f; 177 | while (byte & 0x80) { 178 | byte = data[offset++]!; 179 | baseOffset = ((baseOffset + 1) << 7) | (byte & 0x7f); 180 | } 181 | 182 | const baseObjOffset = startOffset - baseOffset; 183 | const baseObj = objectOffsets.get(baseObjOffset); 184 | if (!baseObj) { 185 | throw new Error(`OFS_DELTA base not found at offset ${baseObjOffset}`); 186 | } 187 | 188 | // Decompress delta data 189 | const { result: deltaData, bytesRead } = decompressObject(data, offset, size); 190 | offset += bytesRead; 191 | 192 | // Apply delta 193 | const resultData = applyDelta(baseObj.data, deltaData); 194 | 195 | return { type: baseObj.type, data: resultData, newOffset: offset }; 196 | } 197 | 198 | // Regular object - decompress 199 | const objType = numToType[type]; 200 | if (!objType) { 201 | throw new Error(`Unknown object type: ${type}`); 202 | } 203 | 204 | const { result: objData, bytesRead } = decompressObject(data, offset, size); 205 | offset += bytesRead; 206 | 207 | return { type: objType, data: objData, newOffset: offset }; 208 | } 209 | 210 | /** 211 | * Decompress a zlib-compressed object from the pack 212 | * Uses streaming inflate to find exact compressed size 213 | */ 214 | function decompressObject( 215 | data: Buffer, 216 | offset: number, 217 | expectedSize: number 218 | ): { result: Buffer; bytesRead: number } { 219 | // Use zlib's inflateRaw with a custom approach 220 | // Try with increasing buffer sizes and track consumed bytes 221 | const remaining = data.slice(offset); 222 | 223 | // inflateSync will throw if there's not enough data 224 | // We need to find how many bytes were consumed 225 | try { 226 | const result = inflateSync(remaining, { finishFlush: 2 }); // Z_SYNC_FLUSH 227 | 228 | // Unfortunately inflateSync doesn't tell us consumed bytes 229 | // We need to use a workaround: try decompress with increasing input sizes 230 | // until we get the expected output size, then that's our compressed size 231 | 232 | // Start with a reasonable estimate based on compression ratio 233 | let compressedSize = Math.ceil(expectedSize * 0.5) + 20; 234 | 235 | for (let size = 20; size <= remaining.length; size += 10) { 236 | try { 237 | const chunk = remaining.slice(0, size); 238 | const testResult = inflateSync(chunk); 239 | if (testResult.length >= expectedSize) { 240 | // Found it! 
Now binary search for exact size 241 | let lo = Math.max(1, size - 20); 242 | let hi = size; 243 | while (lo < hi) { 244 | const mid = Math.floor((lo + hi) / 2); 245 | try { 246 | const testChunk = remaining.slice(0, mid); 247 | inflateSync(testChunk); 248 | hi = mid; 249 | } catch { 250 | lo = mid + 1; 251 | } 252 | } 253 | return { result: testResult.slice(0, expectedSize), bytesRead: lo }; 254 | } 255 | } catch { 256 | // Need more data 257 | } 258 | } 259 | 260 | // If all else fails, use the full decompression 261 | return { result: result.slice(0, expectedSize), bytesRead: remaining.length }; 262 | } catch (e) { 263 | throw new Error(`Failed to decompress: ${e}`); 264 | } 265 | } 266 | 267 | /** 268 | * Apply a git delta to a base object 269 | * Delta format: source size (varint), target size (varint), instructions 270 | */ 271 | function applyDelta(base: Buffer, delta: Buffer): Buffer { 272 | let offset = 0; 273 | 274 | // Read source size (varint) 275 | let sourceSize = 0; 276 | let shift = 0; 277 | let byte: number; 278 | do { 279 | byte = delta[offset++]!; 280 | sourceSize |= (byte & 0x7f) << shift; 281 | shift += 7; 282 | } while (byte & 0x80); 283 | 284 | // Read target size (varint) 285 | let targetSize = 0; 286 | shift = 0; 287 | do { 288 | byte = delta[offset++]!; 289 | targetSize |= (byte & 0x7f) << shift; 290 | shift += 7; 291 | } while (byte & 0x80); 292 | 293 | const result = Buffer.alloc(targetSize); 294 | let resultOffset = 0; 295 | 296 | // Apply instructions 297 | while (offset < delta.length) { 298 | const cmd = delta[offset++]!; 299 | 300 | if (cmd & 0x80) { 301 | // Copy from base 302 | let copyOffset = 0; 303 | let copySize = 0; 304 | 305 | if (cmd & 0x01) copyOffset = delta[offset++]!; 306 | if (cmd & 0x02) copyOffset |= delta[offset++]! << 8; 307 | if (cmd & 0x04) copyOffset |= delta[offset++]! << 16; 308 | if (cmd & 0x08) copyOffset |= delta[offset++]! << 24; 309 | 310 | if (cmd & 0x10) copySize = delta[offset++]!; 311 | if (cmd & 0x20) copySize |= delta[offset++]! << 8; 312 | if (cmd & 0x40) copySize |= delta[offset++]! 
<< 16; 313 | 314 | if (copySize === 0) copySize = 0x10000; 315 | 316 | base.copy(result, resultOffset, copyOffset, copyOffset + copySize); 317 | resultOffset += copySize; 318 | } else if (cmd > 0) { 319 | // Insert new data 320 | delta.copy(result, resultOffset, offset, offset + cmd); 321 | resultOffset += cmd; 322 | offset += cmd; 323 | } else { 324 | throw new Error("Invalid delta instruction"); 325 | } 326 | } 327 | 328 | return result; 329 | } 330 | 331 | /** 332 | * Generate a packfile from a list of object hashes 333 | */ 334 | export function generatePackfile(db: Database, hashes: string[]): Buffer { 335 | const parts: Buffer[] = []; 336 | 337 | // Header 338 | const header = Buffer.alloc(12); 339 | header.write("PACK", 0); 340 | header.writeUInt32BE(2, 4); // version 2 341 | header.writeUInt32BE(hashes.length, 8); 342 | parts.push(header); 343 | 344 | // Objects 345 | for (const hash of hashes) { 346 | const obj = getObject(db, hash); 347 | if (!obj) continue; 348 | 349 | const typeNum = typeToNum[obj.type]; 350 | const compressed = deflateSync(obj.data); 351 | 352 | // Encode type and size 353 | const sizeBytes = encodeTypeAndSize(typeNum, obj.size); 354 | parts.push(sizeBytes, compressed); 355 | } 356 | 357 | // Compute checksum 358 | const pack = Buffer.concat(parts); 359 | const checksum = createHash("sha1").update(pack).digest(); 360 | parts.push(checksum); 361 | 362 | return Buffer.concat(parts); 363 | } 364 | 365 | /** 366 | * Encode object type and size for pack format 367 | */ 368 | function encodeTypeAndSize(type: number, size: number): Buffer { 369 | const bytes: number[] = []; 370 | 371 | // First byte: type in bits 4-6, size bits 0-3 372 | let firstByte = (type << 4) | (size & 0x0f); 373 | size >>= 4; 374 | 375 | if (size > 0) { 376 | firstByte |= 0x80; 377 | } 378 | bytes.push(firstByte); 379 | 380 | // Remaining size bytes 381 | while (size > 0) { 382 | let byte = size & 0x7f; 383 | size >>= 7; 384 | if (size > 0) byte |= 0x80; 385 | bytes.push(byte); 386 | } 387 | 388 | return Buffer.from(bytes); 389 | } 390 | 391 | /** 392 | * Generate ref advertisement for info/refs endpoint 393 | */ 394 | export function generateRefAdvertisement( 395 | db: Database, 396 | service: string, 397 | refs: { name: string; hash: string }[] 398 | ): Buffer { 399 | const parts: Buffer[] = []; 400 | 401 | // Service announcement 402 | parts.push(pktLine(`# service=${service}\n`)); 403 | parts.push(pktFlush()); 404 | 405 | if (refs.length === 0) { 406 | // Empty repo - advertise capabilities on a zero ref 407 | const caps = "report-status delete-refs ofs-delta"; 408 | parts.push(pktLine(`0000000000000000000000000000000000000000 capabilities^{}\0${caps}\n`)); 409 | } else { 410 | // First ref includes capabilities 411 | const caps = "report-status delete-refs ofs-delta"; 412 | const first = refs[0]!; 413 | parts.push(pktLine(`${first.hash} ${first.name}\0${caps}\n`)); 414 | 415 | // Remaining refs 416 | for (let i = 1; i < refs.length; i++) { 417 | const ref = refs[i]!; 418 | parts.push(pktLine(`${ref.hash} ${ref.name}\n`)); 419 | } 420 | } 421 | 422 | parts.push(pktFlush()); 423 | return Buffer.concat(parts); 424 | } 425 | 426 | /** 427 | * Parse upload-pack request (want/have lines) 428 | */ 429 | export function parseUploadPackRequest(data: Buffer): { 430 | wants: string[]; 431 | haves: string[]; 432 | done: boolean; 433 | } { 434 | const wants: string[] = []; 435 | const haves: string[] = []; 436 | let done = false; 437 | 438 | for (const line of parsePktLines(data)) { 439 | if (line === 
null) continue; 440 | 441 | const str = line.toString().trim(); 442 | if (str.startsWith("want ")) { 443 | wants.push(str.slice(5, 45)); // 40 char hash 444 | } else if (str.startsWith("have ")) { 445 | haves.push(str.slice(5, 45)); 446 | } else if (str === "done") { 447 | done = true; 448 | } 449 | } 450 | 451 | return { wants, haves, done }; 452 | } 453 | 454 | /** 455 | * Parse receive-pack request (ref updates + packfile) 456 | */ 457 | export function parseReceivePackRequest(data: Buffer): { 458 | updates: { oldHash: string; newHash: string; refName: string }[]; 459 | packData: Buffer | null; 460 | } { 461 | const updates: { oldHash: string; newHash: string; refName: string }[] = []; 462 | let packStart = -1; 463 | let offset = 0; 464 | 465 | // Parse ref updates 466 | for (const line of parsePktLines(data)) { 467 | if (line === null) { 468 | // Flush packet - pack data follows 469 | offset = data.indexOf(Buffer.from("0000"), offset) + 4; 470 | packStart = offset; 471 | break; 472 | } 473 | 474 | const str = line.toString(); 475 | // Format: old-hash new-hash refname\0capabilities 476 | const match = str.match(/^([0-9a-f]{40}) ([0-9a-f]{40}) ([^\0\n]+)/); 477 | if (match) { 478 | updates.push({ 479 | oldHash: match[1]!, 480 | newHash: match[2]!, 481 | refName: match[3]!, 482 | }); 483 | } 484 | 485 | offset += 4 + line.length; 486 | } 487 | 488 | // Check for pack data 489 | let packData: Buffer | null = null; 490 | if (packStart >= 0 && packStart < data.length) { 491 | const remaining = data.slice(packStart); 492 | if (remaining.slice(0, 4).toString() === "PACK") { 493 | packData = remaining; 494 | } 495 | } 496 | 497 | return { updates, packData }; 498 | } 499 | 500 | /** 501 | * Collect all objects reachable from a commit (for generating a pack) 502 | */ 503 | export function collectReachableObjects( 504 | db: Database, 505 | startHashes: string[], 506 | excludeHashes: string[] = [] 507 | ): string[] { 508 | const collected = new Set(); 509 | const excluded = new Set(excludeHashes); 510 | const queue = [...startHashes]; 511 | 512 | while (queue.length > 0) { 513 | const hash = queue.shift()!; 514 | if (collected.has(hash) || excluded.has(hash)) continue; 515 | 516 | const obj = getObject(db, hash); 517 | if (!obj) continue; 518 | 519 | collected.add(hash); 520 | 521 | if (obj.type === "commit") { 522 | // Parse commit to get tree and parents 523 | const content = obj.data.toString(); 524 | const lines = content.split("\n"); 525 | for (const line of lines) { 526 | if (line.startsWith("tree ")) { 527 | queue.push(line.slice(5)); 528 | } else if (line.startsWith("parent ")) { 529 | queue.push(line.slice(7)); 530 | } else if (line === "") { 531 | break; 532 | } 533 | } 534 | } else if (obj.type === "tree") { 535 | // Parse tree to get child entries 536 | let off = 0; 537 | const data = obj.data; 538 | while (off < data.length) { 539 | const spaceIdx = data.indexOf(0x20, off); 540 | if (spaceIdx === -1) break; 541 | const nullIdx = data.indexOf(0x00, spaceIdx + 1); 542 | if (nullIdx === -1) break; 543 | const entryHash = data.slice(nullIdx + 1, nullIdx + 21).toString("hex"); 544 | queue.push(entryHash); 545 | off = nullIdx + 21; 546 | } 547 | } 548 | } 549 | 550 | return Array.from(collected); 551 | } 552 | -------------------------------------------------------------------------------- /test/src/fork.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, test, expect, beforeEach, afterEach } from "vitest"; 2 | import { resolve } 
from "path"; 3 | import { writeFileSync, readFileSync, existsSync } from "fs"; 4 | import { zagi, git, createTestRepo, cleanupTestRepo } from "./shared"; 5 | 6 | let REPO_DIR: string; 7 | 8 | beforeEach(() => { 9 | REPO_DIR = createTestRepo(); 10 | }); 11 | 12 | afterEach(() => { 13 | cleanupTestRepo(REPO_DIR); 14 | }); 15 | 16 | describe("git fork", () => { 17 | test("creates a fork", () => { 18 | const result = zagi(["fork", "test-fork"], { cwd: REPO_DIR }); 19 | 20 | expect(result).toContain("forked: test-fork"); 21 | expect(result).toContain(".forks/test-fork/"); 22 | 23 | // Verify directory exists 24 | expect(existsSync(resolve(REPO_DIR, ".forks/test-fork"))).toBe(true); 25 | }); 26 | 27 | test("lists forks when no args", () => { 28 | zagi(["fork", "alpha"], { cwd: REPO_DIR }); 29 | zagi(["fork", "beta"], { cwd: REPO_DIR }); 30 | 31 | const result = zagi(["fork"], { cwd: REPO_DIR }); 32 | 33 | expect(result).toContain("forks:"); 34 | expect(result).toContain("alpha"); 35 | expect(result).toContain("beta"); 36 | }); 37 | 38 | test("shows no forks message", () => { 39 | const result = zagi(["fork"], { cwd: REPO_DIR }); 40 | expect(result).toBe("no forks\n"); 41 | }); 42 | 43 | test("shows commits ahead count", () => { 44 | zagi(["fork", "feature"], { cwd: REPO_DIR }); 45 | 46 | // Make a commit in the fork 47 | const forkDir = resolve(REPO_DIR, ".forks/feature"); 48 | writeFileSync(resolve(forkDir, "new.txt"), "new file\n"); 49 | git(["add", "."], { cwd: forkDir }); 50 | git(["commit", "-m", "Add new file"], { cwd: forkDir }); 51 | 52 | const result = zagi(["fork"], { cwd: REPO_DIR }); 53 | 54 | expect(result).toContain("feature"); 55 | expect(result).toContain("1 commit ahead"); 56 | }); 57 | 58 | test("errors when inside a fork", () => { 59 | zagi(["fork", "test"], { cwd: REPO_DIR }); 60 | 61 | const forkDir = resolve(REPO_DIR, ".forks/test"); 62 | const result = zagi(["fork", "nested"], { cwd: forkDir }); 63 | 64 | expect(result).toContain("already in a fork"); 65 | expect(result).toContain("run from base"); 66 | }); 67 | 68 | test("auto-adds .forks/ to .gitignore on first fork", () => { 69 | // No .gitignore exists initially 70 | expect(existsSync(resolve(REPO_DIR, ".gitignore"))).toBe(false); 71 | 72 | zagi(["fork", "test"], { cwd: REPO_DIR }); 73 | 74 | // .gitignore should now exist with .forks/ 75 | expect(existsSync(resolve(REPO_DIR, ".gitignore"))).toBe(true); 76 | const content = readFileSync(resolve(REPO_DIR, ".gitignore"), "utf-8"); 77 | expect(content).toContain(".forks/"); 78 | }); 79 | }); 80 | 81 | describe("git fork --promote", () => { 82 | test("promotes fork commits to base", () => { 83 | zagi(["fork", "feature"], { cwd: REPO_DIR }); 84 | 85 | // Make changes in fork 86 | const forkDir = resolve(REPO_DIR, ".forks/feature"); 87 | writeFileSync(resolve(forkDir, "README.md"), "updated content\n"); 88 | git(["add", "."], { cwd: forkDir }); 89 | git(["commit", "-m", "Update file"], { cwd: forkDir }); 90 | 91 | // Promote the fork 92 | const result = zagi(["fork", "--promote", "feature"], { cwd: REPO_DIR }); 93 | 94 | expect(result).toContain("promoted: feature"); 95 | expect(result).toContain("1 commit"); 96 | expect(result).toContain("applied to base"); 97 | 98 | // Verify base has the changes 99 | const content = git(["show", "HEAD:README.md"], { cwd: REPO_DIR }); 100 | expect(content).toBe("updated content\n"); 101 | }); 102 | 103 | test("errors for non-existent fork", () => { 104 | const result = zagi(["fork", "--promote", "nonexistent"], { cwd: REPO_DIR }); 105 | 
106 | expect(result).toContain("not found"); 107 | }); 108 | 109 | test("preserves local uncommitted changes when fork has no new commits", () => { 110 | // Create a fork (no changes made in fork) 111 | zagi(["fork", "empty-fork"], { cwd: REPO_DIR }); 112 | 113 | // Make local uncommitted changes in base 114 | writeFileSync(resolve(REPO_DIR, "local-changes.txt"), "my local work\n"); 115 | writeFileSync(resolve(REPO_DIR, "README.md"), "modified locally\n"); 116 | 117 | // Promote the fork (which has no commits ahead) 118 | const result = zagi(["fork", "--promote", "empty-fork"], { cwd: REPO_DIR }); 119 | 120 | expect(result).toContain("promoted: empty-fork"); 121 | 122 | // Verify local changes are preserved 123 | const localFile = git(["diff", "--name-only"], { cwd: REPO_DIR }); 124 | expect(localFile).toContain("README.md"); 125 | }); 126 | 127 | test("preserves local uncommitted changes when fork has non-conflicting commits", () => { 128 | zagi(["fork", "feature"], { cwd: REPO_DIR }); 129 | 130 | // Make changes in fork to a DIFFERENT file 131 | const forkDir = resolve(REPO_DIR, ".forks/feature"); 132 | writeFileSync(resolve(forkDir, "new-feature.txt"), "feature content\n"); 133 | git(["add", "."], { cwd: forkDir }); 134 | git(["commit", "-m", "Add feature"], { cwd: forkDir }); 135 | 136 | // Make local uncommitted changes in base to a DIFFERENT file 137 | writeFileSync(resolve(REPO_DIR, "local-work.txt"), "my local work\n"); 138 | 139 | // Promote the fork 140 | const result = zagi(["fork", "--promote", "feature"], { cwd: REPO_DIR }); 141 | 142 | expect(result).toContain("promoted: feature"); 143 | 144 | // Verify fork changes are applied 145 | expect(existsSync(resolve(REPO_DIR, "new-feature.txt"))).toBe(true); 146 | 147 | // Verify local uncommitted changes are preserved 148 | expect(existsSync(resolve(REPO_DIR, "local-work.txt"))).toBe(true); 149 | }); 150 | 151 | test("fails safely when fork has conflicting changes", () => { 152 | zagi(["fork", "conflict"], { cwd: REPO_DIR }); 153 | 154 | // Make changes in fork to README.md 155 | const forkDir = resolve(REPO_DIR, ".forks/conflict"); 156 | writeFileSync(resolve(forkDir, "README.md"), "fork version\n"); 157 | git(["add", "."], { cwd: forkDir }); 158 | git(["commit", "-m", "Change file"], { cwd: forkDir }); 159 | 160 | // Make local uncommitted changes to the SAME file in base 161 | writeFileSync(resolve(REPO_DIR, "README.md"), "local version\n"); 162 | 163 | // Promote should fail due to conflict 164 | const result = zagi(["fork", "--promote", "conflict"], { cwd: REPO_DIR }); 165 | 166 | // Should have error or conflict message 167 | expect(result.length).toBeGreaterThan(0); 168 | }); 169 | }); 170 | 171 | describe("git fork --delete", () => { 172 | test("deletes a specific fork", () => { 173 | zagi(["fork", "to-delete"], { cwd: REPO_DIR }); 174 | expect(existsSync(resolve(REPO_DIR, ".forks/to-delete"))).toBe(true); 175 | 176 | const result = zagi(["fork", "--delete", "to-delete"], { cwd: REPO_DIR }); 177 | 178 | expect(result).toContain("deleted: to-delete"); 179 | expect(existsSync(resolve(REPO_DIR, ".forks/to-delete"))).toBe(false); 180 | }); 181 | 182 | test("errors for non-existent fork", () => { 183 | const result = zagi(["fork", "--delete", "nonexistent"], { cwd: REPO_DIR }); 184 | 185 | expect(result).toContain("not found"); 186 | }); 187 | }); 188 | 189 | describe("git fork --delete-all", () => { 190 | test("deletes all forks", () => { 191 | zagi(["fork", "a"], { cwd: REPO_DIR }); 192 | zagi(["fork", "b"], { cwd: REPO_DIR 
}); 193 | zagi(["fork", "c"], { cwd: REPO_DIR }); 194 | 195 | const result = zagi(["fork", "--delete-all"], { cwd: REPO_DIR }); 196 | 197 | expect(result).toContain("deleted:"); 198 | expect(result).toContain("a"); 199 | expect(result).toContain("b"); 200 | expect(result).toContain("c"); 201 | 202 | // Verify forks are gone 203 | const listResult = zagi(["fork"], { cwd: REPO_DIR }); 204 | expect(listResult).toBe("no forks\n"); 205 | }); 206 | 207 | test("shows message when no forks exist", () => { 208 | const result = zagi(["fork", "--delete-all"], { cwd: REPO_DIR }); 209 | expect(result).toContain("no forks to delete"); 210 | }); 211 | }); 212 | 213 | describe("git fork --pick", () => { 214 | test("merges fork commits to base (fast-forward)", () => { 215 | zagi(["fork", "feature"], { cwd: REPO_DIR }); 216 | 217 | // Make changes in fork 218 | const forkDir = resolve(REPO_DIR, ".forks/feature"); 219 | writeFileSync(resolve(forkDir, "new.txt"), "new content\n"); 220 | git(["add", "."], { cwd: forkDir }); 221 | git(["commit", "-m", "Add new file"], { cwd: forkDir }); 222 | 223 | // Pick the fork (should fast-forward since base hasn't changed) 224 | const result = zagi(["fork", "--pick", "feature"], { cwd: REPO_DIR }); 225 | 226 | expect(result).toContain("picked: feature"); 227 | expect(result).toContain("fast-forward"); 228 | 229 | // Verify base has the changes 230 | expect(existsSync(resolve(REPO_DIR, "new.txt"))).toBe(true); 231 | }); 232 | 233 | test("creates merge commit when base has diverged", () => { 234 | zagi(["fork", "feature"], { cwd: REPO_DIR }); 235 | 236 | // Make changes in fork 237 | const forkDir = resolve(REPO_DIR, ".forks/feature"); 238 | writeFileSync(resolve(forkDir, "fork-file.txt"), "fork content\n"); 239 | git(["add", "."], { cwd: forkDir }); 240 | git(["commit", "-m", "Fork commit"], { cwd: forkDir }); 241 | 242 | // Make different changes in base (diverge) 243 | writeFileSync(resolve(REPO_DIR, "base-file.txt"), "base content\n"); 244 | git(["add", "."], { cwd: REPO_DIR }); 245 | git(["commit", "-m", "Base commit"], { cwd: REPO_DIR }); 246 | 247 | // Pick the fork (should create merge commit) 248 | const result = zagi(["fork", "--pick", "feature"], { cwd: REPO_DIR }); 249 | 250 | expect(result).toContain("picked: feature"); 251 | expect(result).toContain("merged"); 252 | 253 | // Verify both files exist (merge succeeded) 254 | expect(existsSync(resolve(REPO_DIR, "fork-file.txt"))).toBe(true); 255 | expect(existsSync(resolve(REPO_DIR, "base-file.txt"))).toBe(true); 256 | 257 | // Verify merge commit was created (should have 2 parents) 258 | const logOutput = git(["log", "--oneline", "--merges", "-1"], { cwd: REPO_DIR }); 259 | expect(logOutput).toContain("Merge fork"); 260 | }); 261 | 262 | test("reports already up to date", () => { 263 | zagi(["fork", "empty-fork"], { cwd: REPO_DIR }); 264 | 265 | // Pick without making any changes in fork 266 | const result = zagi(["fork", "--pick", "empty-fork"], { cwd: REPO_DIR }); 267 | 268 | expect(result).toContain("already up to date"); 269 | }); 270 | 271 | test("handles merge conflicts gracefully", () => { 272 | zagi(["fork", "conflict"], { cwd: REPO_DIR }); 273 | 274 | // Make changes in fork to README.md 275 | const forkDir = resolve(REPO_DIR, ".forks/conflict"); 276 | writeFileSync(resolve(forkDir, "README.md"), "fork version\n"); 277 | git(["add", "."], { cwd: forkDir }); 278 | git(["commit", "-m", "Fork change"], { cwd: forkDir }); 279 | 280 | // Make conflicting changes to same file in base 281 | 
writeFileSync(resolve(REPO_DIR, "README.md"), "base version\n"); 282 | git(["add", "."], { cwd: REPO_DIR }); 283 | git(["commit", "-m", "Base change"], { cwd: REPO_DIR }); 284 | 285 | // Pick should succeed but report conflicts 286 | const result = zagi(["fork", "--pick", "conflict"], { cwd: REPO_DIR }); 287 | 288 | expect(result).toContain("picked: conflict"); 289 | expect(result).toContain("conflicts"); 290 | expect(result).toContain("resolve conflicts"); 291 | }); 292 | 293 | test("errors for non-existent fork", () => { 294 | const result = zagi(["fork", "--pick", "nonexistent"], { cwd: REPO_DIR }); 295 | 296 | expect(result).toContain("not found"); 297 | }); 298 | 299 | test("preserves local uncommitted changes", () => { 300 | zagi(["fork", "feature"], { cwd: REPO_DIR }); 301 | 302 | // Make changes in fork to a different file 303 | const forkDir = resolve(REPO_DIR, ".forks/feature"); 304 | writeFileSync(resolve(forkDir, "new-feature.txt"), "feature content\n"); 305 | git(["add", "."], { cwd: forkDir }); 306 | git(["commit", "-m", "Add feature"], { cwd: forkDir }); 307 | 308 | // Make local uncommitted changes in base 309 | writeFileSync(resolve(REPO_DIR, "local-work.txt"), "my local work\n"); 310 | 311 | // Pick the fork 312 | const result = zagi(["fork", "--pick", "feature"], { cwd: REPO_DIR }); 313 | 314 | expect(result).toContain("picked: feature"); 315 | 316 | // Verify fork changes are applied 317 | expect(existsSync(resolve(REPO_DIR, "new-feature.txt"))).toBe(true); 318 | 319 | // Verify local uncommitted changes are preserved 320 | expect(existsSync(resolve(REPO_DIR, "local-work.txt"))).toBe(true); 321 | }); 322 | 323 | test("warns when fork has uncommitted changes", () => { 324 | // Create a fork 325 | const createOutput = zagi(["fork", "test-fork"], { cwd: REPO_DIR }); 326 | expect(createOutput).toContain("forked: test-fork"); 327 | 328 | // Add uncommitted changes to the fork 329 | const forkDir = resolve(REPO_DIR, ".forks/test-fork"); 330 | writeFileSync(resolve(forkDir, "uncommitted.txt"), "uncommitted content\n"); 331 | 332 | // Try to pick - should warn about uncommitted changes 333 | const pickOutput = zagi(["fork", "--pick", "test-fork"], { cwd: REPO_DIR }); 334 | 335 | expect(pickOutput).toContain("warning: fork 'test-fork' has uncommitted changes"); 336 | expect(pickOutput).toContain("1 file not committed"); 337 | expect(pickOutput).toContain("hint:"); 338 | 339 | // Clean up 340 | zagi(["fork", "--delete", "test-fork"], { cwd: REPO_DIR }); 341 | }); 342 | 343 | test("no warning when fork is clean", () => { 344 | // Create a fork 345 | zagi(["fork", "clean-fork"], { cwd: REPO_DIR }); 346 | 347 | // Pick without changes - should not warn 348 | const pickOutput = zagi(["fork", "--pick", "clean-fork"], { cwd: REPO_DIR }); 349 | 350 | expect(pickOutput).not.toContain("warning:"); 351 | expect(pickOutput).toContain("already up to date"); 352 | 353 | // Clean up 354 | zagi(["fork", "--delete", "clean-fork"], { cwd: REPO_DIR }); 355 | }); 356 | 357 | test("warns with multiple uncommitted files", () => { 358 | // Create a fork 359 | zagi(["fork", "multi-fork"], { cwd: REPO_DIR }); 360 | 361 | // Add multiple uncommitted files 362 | const forkDir = resolve(REPO_DIR, ".forks/multi-fork"); 363 | writeFileSync(resolve(forkDir, "file1.txt"), "content 1\n"); 364 | writeFileSync(resolve(forkDir, "file2.txt"), "content 2\n"); 365 | writeFileSync(resolve(forkDir, "file3.txt"), "content 3\n"); 366 | 367 | const pickOutput = zagi(["fork", "--pick", "multi-fork"], { cwd: REPO_DIR }); 
368 | 369 | expect(pickOutput).toContain("warning: fork 'multi-fork' has uncommitted changes"); 370 | expect(pickOutput).toContain("3 files not committed"); 371 | 372 | // Clean up 373 | zagi(["fork", "--delete", "multi-fork"], { cwd: REPO_DIR }); 374 | }); 375 | }); 376 | 377 | describe("git fork --help", () => { 378 | test("shows help", () => { 379 | const result = zagi(["fork", "--help"], { cwd: REPO_DIR }); 380 | 381 | expect(result).toContain("usage:"); 382 | expect(result).toContain("--pick"); 383 | expect(result).toContain("--promote"); 384 | expect(result).toContain("--delete"); 385 | expect(result).toContain("--delete-all"); 386 | }); 387 | }); 388 | 389 | describe("git fork validation", () => { 390 | test("rejects empty fork name", () => { 391 | const result = zagi(["fork", ""], { cwd: REPO_DIR }); 392 | 393 | expect(result).toContain("fork name cannot be empty"); 394 | }); 395 | 396 | test("rejects fork name with slash", () => { 397 | const result = zagi(["fork", "my/nested/fork"], { cwd: REPO_DIR }); 398 | 399 | expect(result).toContain("cannot contain"); 400 | }); 401 | 402 | test("rejects fork name starting with dot", () => { 403 | const result = zagi(["fork", ".hidden"], { cwd: REPO_DIR }); 404 | 405 | expect(result).toContain("cannot contain"); 406 | }); 407 | 408 | test("rejects fork name with path traversal", () => { 409 | const result = zagi(["fork", "../../escape"], { cwd: REPO_DIR }); 410 | 411 | expect(result).toContain("cannot contain"); 412 | }); 413 | 414 | test("rejects fork name matching existing branch", () => { 415 | const result = zagi(["fork", "main"], { cwd: REPO_DIR }); 416 | 417 | expect(result).toContain("branch 'main' already exists"); 418 | }); 419 | 420 | test("rejects creating duplicate fork", () => { 421 | zagi(["fork", "existing"], { cwd: REPO_DIR }); 422 | 423 | const result = zagi(["fork", "existing"], { cwd: REPO_DIR }); 424 | 425 | expect(result).toContain("already exists"); 426 | }); 427 | 428 | test("--pick errors in detached HEAD state", () => { 429 | zagi(["fork", "test-fork"], { cwd: REPO_DIR }); 430 | 431 | // Detach HEAD 432 | git(["checkout", "HEAD~0"], { cwd: REPO_DIR }); 433 | 434 | const result = zagi(["fork", "--pick", "test-fork"], { cwd: REPO_DIR }); 435 | 436 | expect(result).toContain("detached HEAD"); 437 | expect(result).toContain("checkout a branch"); 438 | }); 439 | 440 | test("--promote errors in detached HEAD state", () => { 441 | zagi(["fork", "test-fork"], { cwd: REPO_DIR }); 442 | 443 | // Detach HEAD 444 | git(["checkout", "HEAD~0"], { cwd: REPO_DIR }); 445 | 446 | const result = zagi(["fork", "--promote", "test-fork"], { cwd: REPO_DIR }); 447 | 448 | expect(result).toContain("detached HEAD"); 449 | expect(result).toContain("checkout a branch"); 450 | }); 451 | 452 | test("--pick shows conflict resolution hints", () => { 453 | zagi(["fork", "conflict"], { cwd: REPO_DIR }); 454 | 455 | // Make conflicting changes 456 | const forkDir = resolve(REPO_DIR, ".forks/conflict"); 457 | writeFileSync(resolve(forkDir, "README.md"), "fork version\n"); 458 | git(["add", "."], { cwd: forkDir }); 459 | git(["commit", "-m", "Fork change"], { cwd: forkDir }); 460 | 461 | writeFileSync(resolve(REPO_DIR, "README.md"), "base version\n"); 462 | git(["add", "."], { cwd: REPO_DIR }); 463 | git(["commit", "-m", "Base change"], { cwd: REPO_DIR }); 464 | 465 | const result = zagi(["fork", "--pick", "conflict"], { cwd: REPO_DIR }); 466 | 467 | expect(result).toContain("conflicts"); 468 | expect(result).toContain("git add"); 469 | 
expect(result).toContain("git commit"); 470 | expect(result).toContain("git merge --abort"); 471 | }); 472 | 473 | test("--pick errors when merge already in progress", () => { 474 | zagi(["fork", "conflict"], { cwd: REPO_DIR }); 475 | 476 | // Create a conflict situation 477 | const forkDir = resolve(REPO_DIR, ".forks/conflict"); 478 | writeFileSync(resolve(forkDir, "README.md"), "fork version\n"); 479 | git(["add", "."], { cwd: forkDir }); 480 | git(["commit", "-m", "Fork change"], { cwd: forkDir }); 481 | 482 | writeFileSync(resolve(REPO_DIR, "README.md"), "base version\n"); 483 | git(["add", "."], { cwd: REPO_DIR }); 484 | git(["commit", "-m", "Base change"], { cwd: REPO_DIR }); 485 | 486 | // First pick creates merge state 487 | zagi(["fork", "--pick", "conflict"], { cwd: REPO_DIR }); 488 | 489 | // Second pick should error 490 | const result = zagi(["fork", "--pick", "conflict"], { cwd: REPO_DIR }); 491 | 492 | expect(result).toContain("merge is already in progress"); 493 | expect(result).toContain("git merge --abort"); 494 | }); 495 | 496 | test("rejects fork name that would exceed path limit", () => { 497 | // Create a name that's very long (should exceed path limit with workdir) 498 | const longName = "a".repeat(4000); 499 | 500 | const result = zagi(["fork", longName], { cwd: REPO_DIR }); 501 | 502 | expect(result).toContain("too long"); 503 | }); 504 | }); 505 | --------------------------------------------------------------------------------
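Taken together, the server modules listed above are enough to serve a read-only `git clone` over the smart HTTP protocol: advertise refs, parse the client's want/have negotiation, walk the reachable object graph, and stream back a packfile. The sketch below shows one plausible way to wire those exported helpers together with `Bun.serve`. It is illustrative only; the project's actual request handler is not part of this excerpt, and the database path, port, and route matching here are assumptions.

```ts
// Hypothetical wiring sketch (assumed port and database path, not the
// project's actual handler): serve `git clone` from a SQLite-backed repo.
import { Database } from "bun:sqlite";
import { getRef, listRefs } from "./git-storage.ts";
import {
  generateRefAdvertisement,
  parseUploadPackRequest,
  collectReachableObjects,
  generatePackfile,
  pktLine,
} from "./git-pack.ts";

const db = new Database("repo.sqlite"); // assumed path

Bun.serve({
  port: 8418, // assumed port
  async fetch(req) {
    const url = new URL(req.url);

    // Step 1: GET .../info/refs?service=git-upload-pack -> ref advertisement.
    if (
      url.pathname.endsWith("/info/refs") &&
      url.searchParams.get("service") === "git-upload-pack"
    ) {
      const refs = listRefs(db, "refs/");
      const head = getRef(db, "HEAD"); // resolves the symbolic HEAD ref
      const advertised = head ? [{ name: "HEAD", hash: head }, ...refs] : refs;
      return new Response(generateRefAdvertisement(db, "git-upload-pack", advertised), {
        headers: { "Content-Type": "application/x-git-upload-pack-advertisement" },
      });
    }

    // Step 2: POST .../git-upload-pack -> read wants/haves, reply NAK + packfile.
    if (req.method === "POST" && url.pathname.endsWith("/git-upload-pack")) {
      const body = Buffer.from(await req.arrayBuffer());
      const { wants, haves } = parseUploadPackRequest(body);
      // Excluding only the "have" hashes themselves is a simplification;
      // for a fresh clone, haves is empty anyway.
      const objects = collectReachableObjects(db, wants, haves);
      const pack = generatePackfile(db, objects);
      // No side-band capability is advertised, so a bare NAK followed by the pack works.
      return new Response(Buffer.concat([pktLine("NAK\n"), pack]), {
        headers: { "Content-Type": "application/x-git-upload-pack-result" },
      });
    }

    return new Response("not found", { status: 404 });
  },
});
```

With something like this running, `git clone http://localhost:8418/anything.git` should fetch objects straight out of SQLite; handling `git push` would additionally route `POST .../git-receive-pack` through `parseReceivePackRequest`, `parsePackfile`, and `setRef`.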