├── .cargo └── config.toml ├── .gitignore ├── .vscode └── launch.json ├── Cargo.lock ├── Cargo.toml ├── bindings ├── ACType.ts ├── ActionError.ts ├── ActionState.ts ├── BlankLine.ts ├── BranchState.ts ├── CFLine.ts ├── CFSection.ts ├── CFSectionLine.ts ├── CFSide.ts ├── CloneOptions.ts ├── CodeSearchOpts.ts ├── CommandOptions.ts ├── Commit.ts ├── CommitAncestorOpts.ts ├── CommitDiffOpts.ts ├── CommitFilter.ts ├── CommitInfo.ts ├── CommitOnBranchOpts.ts ├── CommitsOnBranchOpts.ts ├── ConflictLine.ts ├── ConflictedFile.ts ├── CoreSearchResult.ts ├── Credentials.ts ├── DataStoreValues.ts ├── DateResult.ts ├── ES.ts ├── FileMatch.ts ├── GitAddOptions.ts ├── GitConfig.ts ├── GitVersion.ts ├── Hunk.ts ├── HunkLine.ts ├── HunkLineStatus.ts ├── HunkRange.ts ├── LoadConflictOptions.ts ├── LocalRefCommitDiff.ts ├── MessageAC.ts ├── OkLine.ts ├── Patch.ts ├── PatchType.ts ├── PollOptions.ts ├── PollSearchOpts.ts ├── PollSearchResult.ts ├── RefCommitDiff.ts ├── RefDiffOptions.ts ├── RefInfo.ts ├── RefLocation.ts ├── RefType.ts ├── RepoStatus.ts ├── ReqCommitsOptions2.ts ├── ReqHunkOptions.ts ├── ReqImageOptions.ts ├── ReqOptions.ts ├── ReqPatchCodeOptions.ts ├── ReqPatchesForCommitOpts.ts ├── ReqWipHunksOptions.ts ├── ResultStatus.ts ├── RunOptions.ts ├── ScanOptions.ts ├── SearchMatchType.ts ├── SearchOptions.ts ├── SlotLine.ts ├── StashStagedOptions.ts ├── ThemeColour.ts ├── TopCommitOptions.ts ├── UnPushedCommits.ts ├── UserConfigResult.ts ├── WipHunksSplit.ts ├── WipPatch.ts ├── WipPatchType.ts ├── WipPatches.ts └── WriteFileOpts.ts ├── loggers ├── Cargo.lock ├── Cargo.toml └── src │ └── lib.rs ├── readme.md ├── rustfmt.toml └── src ├── config.rs ├── git ├── action_state.rs ├── actions │ ├── add.rs │ ├── clone.rs │ ├── command.rs │ ├── create_repo.rs │ ├── credentials.rs │ ├── fake_action.rs │ ├── fetch.rs │ ├── mod.rs │ └── stash.rs ├── conflicts │ ├── api.rs │ ├── conflicted_file.rs │ └── mod.rs ├── git_settings.rs ├── git_types.rs ├── git_types_extra_impl.rs ├── 
git_version.rs ├── mod.rs ├── queries │ ├── commit_calcs.rs │ ├── commit_filters.rs │ ├── commits.rs │ ├── commits_parsers.rs │ ├── commits_test.rs │ ├── config │ │ ├── config_file_parser.rs │ │ ├── config_output_parser.rs │ │ ├── mod.rs │ │ └── test │ │ │ └── submodule1.zip │ ├── hunks │ │ ├── html_code.rs │ │ ├── html_code_split.rs │ │ ├── hunk_line_parsers.rs │ │ ├── hunk_parsers.rs │ │ ├── images.rs │ │ ├── load_hunks.rs │ │ └── mod.rs │ ├── mod.rs │ ├── patches │ │ ├── cache.rs │ │ ├── cache_test.rs │ │ ├── mod.rs │ │ ├── patch_parsers.rs │ │ ├── patches.rs │ │ └── patches_for_commit.rs │ ├── refs.rs │ ├── refs │ │ ├── head_info.rs │ │ └── ref_diffs.rs │ ├── run.rs │ ├── scan_workspace.rs │ ├── search │ │ ├── matching_hunk_lines.rs │ │ ├── mod.rs │ │ ├── search_code.rs │ │ ├── search_commits.rs │ │ └── search_request.rs │ ├── stashes.rs │ ├── stashes_test.rs │ ├── syntax_colouring.rs │ ├── unpushed_commits.rs │ ├── wip.rs │ ├── wip │ │ ├── create_hunks.rs │ │ ├── wip_diff.rs │ │ ├── wip_patch_parsers.rs │ │ └── wip_patches.rs │ └── workspace │ │ ├── load_current_branch.rs │ │ ├── load_packed_refs.rs │ │ ├── mod.rs │ │ └── repo_status.rs ├── run_git.rs ├── run_git_action.rs └── store.rs ├── index ├── ac_index.rs ├── ac_node.rs ├── auto_complete.rs ├── commit_message_ac.rs ├── create_branch_ac.rs └── mod.rs ├── main.rs ├── parser ├── input.rs ├── mod.rs ├── parser_types.rs └── standard_parsers.rs ├── server ├── custom_server │ ├── http.rs │ └── server.rs ├── git_request.rs ├── mod.rs ├── request_util.rs ├── requests.rs └── static_files.rs └── util ├── data_store.rs ├── debug_print.rs ├── global.rs ├── mod.rs └── short_cache.rs /.cargo/config.toml: -------------------------------------------------------------------------------- 1 | [target.x86_64-pc-windows-msvc] 2 | rustflags = ["-C", "target-feature=+crt-static"] 3 | 4 | [target.aarch64-pc-windows-msvc] 5 | rustflags = ["-C", "target-feature=+crt-static"] 6 | 
-------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | .DS_Store 3 | /loggers/target/ 4 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "lldb", 9 | "request": "launch", 10 | "name": "Debug executable 'gitfiend-core'", 11 | "cargo": { 12 | "args": [ 13 | "build", 14 | "--bin=gitfiend-core", 15 | "--package=gitfiend-core" 16 | ], 17 | "filter": { 18 | "name": "gitfiend-core", 19 | "kind": "bin" 20 | } 21 | }, 22 | "args": [], 23 | "cwd": "${workspaceFolder}" 24 | }, 25 | { 26 | "type": "lldb", 27 | "request": "launch", 28 | "name": "Debug unit tests in executable 'gitfiend-core'", 29 | "cargo": { 30 | "args": [ 31 | "test", 32 | "--no-run", 33 | "--bin=gitfiend-core", 34 | "--package=gitfiend-core" 35 | ], 36 | "filter": { 37 | "name": "gitfiend-core", 38 | "kind": "bin" 39 | } 40 | }, 41 | "args": [], 42 | "cwd": "${workspaceFolder}" 43 | } 44 | ] 45 | } -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "gitfiend-core" 3 | version = "1.0.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | tiny_http = "0.12.0" 10 | serde = { version = "1.0.214", features = ["derive"] } 11 | serde_json = "1.0.132" 12 | ts-rs = "10.0.0" 13 | directories = "5.0.1" 14 | similar = "2.6.0" 15 | rayon = 
"1.10.0" 16 | ahash = "0.8.11" 17 | once_cell = "1.20.2" 18 | mime_guess = "2.0.5" 19 | loggers = {path = "./loggers"} 20 | syntect = "5.2.0" 21 | chardetng = "0.1.17" 22 | encoding_rs = "0.8.35" 23 | fix-path-env = {git = "https://github.com/tauri-apps/fix-path-env-rs"} 24 | 25 | [profile.dev] 26 | #opt-level = 1 27 | #incremental = false 28 | 29 | [profile.release] 30 | #debug = true 31 | strip = true 32 | lto = true 33 | codegen-units = 1 34 | 35 | # Set the default for dependencies. 36 | [profile.dev.package."*"] 37 | #opt-level = 3 38 | 39 | #[lints] 40 | #redundant_closure_call = true -------------------------------------------------------------------------------- /bindings/ACType.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type ACType = "CommitMessage" | "CreateBranch"; 4 | -------------------------------------------------------------------------------- /bindings/ActionError.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type ActionError = "credential" | "git" | { "iO": string }; 4 | -------------------------------------------------------------------------------- /bindings/ActionState.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | import type { ActionError } from "./ActionError"; 3 | 4 | export type ActionState = { stdout: Array, stderr: Array, done: boolean, error: ActionError | null, }; 5 | -------------------------------------------------------------------------------- /bindings/BlankLine.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type BlankLine = { section: number, }; 4 | -------------------------------------------------------------------------------- /bindings/BranchState.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type BranchState = "Local" | "Remote" | "Both"; 4 | -------------------------------------------------------------------------------- /bindings/CFLine.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { OkLine } from "./OkLine"; 3 | import type { SlotLine } from "./SlotLine"; 4 | 5 | export type CFLine = { "ok": OkLine } | { "slot": SlotLine }; 6 | -------------------------------------------------------------------------------- /bindings/CFSection.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | import type { CFSectionLine } from "./CFSectionLine"; 3 | 4 | export type CFSection = { left: Array, right: Array, }; 5 | -------------------------------------------------------------------------------- /bindings/CFSectionLine.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { BlankLine } from "./BlankLine"; 3 | import type { ConflictLine } from "./ConflictLine"; 4 | 5 | export type CFSectionLine = { "blank": BlankLine } | { "conflict": ConflictLine }; 6 | -------------------------------------------------------------------------------- /bindings/CFSide.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type CFSide = "left" | "right"; 4 | -------------------------------------------------------------------------------- /bindings/CloneOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type CloneOptions = { repoPath: string, url: string, }; 4 | -------------------------------------------------------------------------------- /bindings/CodeSearchOpts.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | 3 | export type CodeSearchOpts = { repoPath: string, searchText: string, numResults: number, startCommitIndex: number, }; 4 | -------------------------------------------------------------------------------- /bindings/CommandOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type CommandOptions = { repoPath: string, args: Array, }; 4 | -------------------------------------------------------------------------------- /bindings/Commit.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { DateResult } from "./DateResult"; 3 | 4 | export type Commit = { author: string, email: string, date: DateResult, id: string, index: number, parentIds: Array, isMerge: boolean, message: string, stashId: string, refs: Array, filtered: boolean, numSkipped: number, }; 5 | -------------------------------------------------------------------------------- /bindings/CommitAncestorOpts.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type CommitAncestorOpts = { repoPath: string, commitId: string, ancestorCandidateId: string, }; 4 | -------------------------------------------------------------------------------- /bindings/CommitDiffOpts.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | 3 | export type CommitDiffOpts = { repoPath: string, commitId1: string, commitId2: string, }; 4 | -------------------------------------------------------------------------------- /bindings/CommitFilter.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type CommitFilter = { "branch": { id: string, short_name: string, } } | { "user": { author: string, email: string, } } | { "commit": { commit_id: string, } } | { "file": { file_name: string, } }; 4 | -------------------------------------------------------------------------------- /bindings/CommitInfo.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { DateResult } from "./DateResult"; 3 | import type { RefInfo } from "./RefInfo"; 4 | 5 | export type CommitInfo = { author: string, email: string, date: DateResult, id: string, index: number, parentIds: Array, isMerge: boolean, message: string, stashId: string, refs: Array, filtered: boolean, numSkipped: number, }; 6 | -------------------------------------------------------------------------------- /bindings/CommitOnBranchOpts.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type CommitOnBranchOpts = { repoPath: string, commitId: string, includeDescendants: boolean, }; 4 | -------------------------------------------------------------------------------- /bindings/CommitsOnBranchOpts.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | 3 | export type CommitsOnBranchOpts = { repoPath: string, includeDescendants: boolean, }; 4 | -------------------------------------------------------------------------------- /bindings/ConflictLine.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { CFSide } from "./CFSide"; 3 | 4 | export type ConflictLine = { text: string, side: CFSide, section: number, key: string, }; 5 | -------------------------------------------------------------------------------- /bindings/ConflictedFile.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { CFLine } from "./CFLine"; 3 | import type { CFSection } from "./CFSection"; 4 | 5 | export type ConflictedFile = { lines: Array, sections: Array, refNameTop: string, refNameBottom: string, lineEnding: string, maxLineLength: number, }; 6 | -------------------------------------------------------------------------------- /bindings/CoreSearchResult.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { FileMatch } from "./FileMatch"; 3 | import type { Patch } from "./Patch"; 4 | import type { SearchMatchType } from "./SearchMatchType"; 5 | 6 | export type CoreSearchResult = { commitId: string, matches: Array, patches: Array, diffs: Array, refIds: Array, }; 7 | -------------------------------------------------------------------------------- /bindings/Credentials.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | 3 | export type Credentials = { username: string, password: string, }; 4 | -------------------------------------------------------------------------------- /bindings/DataStoreValues.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type DataStoreValues = { data: { [key in string]?: string }, }; 4 | -------------------------------------------------------------------------------- /bindings/DateResult.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type DateResult = { ms: number, adjustment: number, }; 4 | -------------------------------------------------------------------------------- /bindings/ES.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type ES = { "Text": string }; 4 | -------------------------------------------------------------------------------- /bindings/FileMatch.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { HunkLine } from "./HunkLine"; 3 | import type { Patch } from "./Patch"; 4 | 5 | export type FileMatch = { patch: Patch, lines: Array, }; 6 | -------------------------------------------------------------------------------- /bindings/GitAddOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | 3 | export type GitAddOptions = { repoPath: string, files: Array, }; 4 | -------------------------------------------------------------------------------- /bindings/GitConfig.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type GitConfig = { entries: { [key in string]?: string }, remotes: { [key in string]?: string }, submodules: { [key in string]?: string }, }; 4 | -------------------------------------------------------------------------------- /bindings/GitVersion.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type GitVersion = { major: number, minor: number, patch: number, }; 4 | -------------------------------------------------------------------------------- /bindings/Hunk.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { HunkLine } from "./HunkLine"; 3 | import type { HunkRange } from "./HunkRange"; 4 | 5 | export type Hunk = { oldLineRange: HunkRange, newLineRange: HunkRange, contextLine: string, lines: Array, index: number, }; 6 | -------------------------------------------------------------------------------- /bindings/HunkLine.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | import type { HunkLineStatus } from "./HunkLineStatus"; 3 | 4 | export type HunkLine = { status: HunkLineStatus, oldNum: number | null, newNum: number | null, hunkIndex: number, text: string, index: number, lineEnding: string, }; 5 | -------------------------------------------------------------------------------- /bindings/HunkLineStatus.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type HunkLineStatus = "+" | "-" | " " | "HeaderStart" | "HeaderEnd" | "Skip"; 4 | -------------------------------------------------------------------------------- /bindings/HunkRange.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type HunkRange = { start: number, length: number, }; 4 | -------------------------------------------------------------------------------- /bindings/LoadConflictOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { WipPatch } from "./WipPatch"; 3 | 4 | export type LoadConflictOptions = { repoPath: string, patch: WipPatch, }; 5 | -------------------------------------------------------------------------------- /bindings/LocalRefCommitDiff.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | 3 | export type LocalRefCommitDiff = { aheadOfRemote: number, behindRemote: number, aheadOfHead: number, behindHead: number, }; 4 | -------------------------------------------------------------------------------- /bindings/MessageAC.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { ACType } from "./ACType"; 3 | 4 | export type MessageAC = { currentWord: string, repoPath: string, maxNum: number, kind: ACType, }; 5 | -------------------------------------------------------------------------------- /bindings/OkLine.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type OkLine = { text: string, }; 4 | -------------------------------------------------------------------------------- /bindings/Patch.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { PatchType } from "./PatchType"; 3 | 4 | export type Patch = { commitId: string, oldFile: string, newFile: string, patchType: PatchType, id: string, isImage: boolean, }; 5 | -------------------------------------------------------------------------------- /bindings/PatchType.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | 3 | export type PatchType = "A" | "C" | "B" | "D" | "M" | "R" | "T" | "U" | "X"; 4 | -------------------------------------------------------------------------------- /bindings/PollOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type PollOptions = { actionId: number, }; 4 | -------------------------------------------------------------------------------- /bindings/PollSearchOpts.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type PollSearchOpts = { searchId: number, }; 4 | -------------------------------------------------------------------------------- /bindings/PollSearchResult.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { FileMatch } from "./FileMatch"; 3 | 4 | export type PollSearchResult = { searchId: number, complete: boolean, results: Array<[string, Array]> | null, }; 5 | -------------------------------------------------------------------------------- /bindings/RefCommitDiff.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type RefCommitDiff = { aheadOfHead: number, behindHead: number, }; 4 | -------------------------------------------------------------------------------- /bindings/RefDiffOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | 3 | export type RefDiffOptions = { repoPath: string, headCommitId: string, }; 4 | -------------------------------------------------------------------------------- /bindings/RefInfo.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { RefLocation } from "./RefLocation"; 3 | import type { RefType } from "./RefType"; 4 | 5 | export type RefInfo = { id: string, location: RefLocation, fullName: string, shortName: string, remoteName: string | null, siblingId: string, refType: RefType, head: boolean, commitId: string, time: number, }; 6 | -------------------------------------------------------------------------------- /bindings/RefLocation.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type RefLocation = "Local" | "Remote"; 4 | -------------------------------------------------------------------------------- /bindings/RefType.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type RefType = "Branch" | "Tag" | "Stash"; 4 | -------------------------------------------------------------------------------- /bindings/RepoStatus.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | import type { BranchState } from "./BranchState"; 3 | import type { GitConfig } from "./GitConfig"; 4 | import type { WipPatches } from "./WipPatches"; 5 | 6 | export type RepoStatus = { patches: WipPatches, config: GitConfig, branches: Array, branchName: string, headRefId: string, localCommitId: string | null, remoteCommitId: string | null, remoteAhead: number, remoteBehind: number, state: BranchState, }; 7 | -------------------------------------------------------------------------------- /bindings/ReqCommitsOptions2.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { CommitFilter } from "./CommitFilter"; 3 | 4 | export type ReqCommitsOptions2 = { repoPath: string, numCommits: number, filters: Array, fast: boolean, skipStashes: boolean, }; 5 | -------------------------------------------------------------------------------- /bindings/ReqHunkOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { Commit } from "./Commit"; 3 | import type { Patch } from "./Patch"; 4 | 5 | export type ReqHunkOptions = { repoPath: string, commit: Commit, patch: Patch, }; 6 | -------------------------------------------------------------------------------- /bindings/ReqImageOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | 3 | export type ReqImageOptions = { repoPath: string, commitId: string, originalImagePath: string, tempImagePath: string, }; 4 | -------------------------------------------------------------------------------- /bindings/ReqOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type ReqOptions = { repoPath: string, }; 4 | -------------------------------------------------------------------------------- /bindings/ReqPatchCodeOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { Commit } from "./Commit"; 3 | import type { Patch } from "./Patch"; 4 | import type { ThemeColour } from "./ThemeColour"; 5 | 6 | export type ReqPatchCodeOptions = { repoPath: string, commit: Commit, patch: Patch, theme: ThemeColour, split: boolean, }; 7 | -------------------------------------------------------------------------------- /bindings/ReqPatchesForCommitOpts.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type ReqPatchesForCommitOpts = { repoPath: string, commitId: string, }; 4 | -------------------------------------------------------------------------------- /bindings/ReqWipHunksOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | import type { WipPatch } from "./WipPatch"; 3 | 4 | export type ReqWipHunksOptions = { repoPath: string, patch: WipPatch, headCommit: string | null, }; 5 | -------------------------------------------------------------------------------- /bindings/ResultStatus.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type ResultStatus = { success: boolean, message: string, }; 4 | -------------------------------------------------------------------------------- /bindings/RunOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type RunOptions = { repoPath: string, args: Array, }; 4 | -------------------------------------------------------------------------------- /bindings/ScanOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type ScanOptions = { repoPath: string, workspacesEnabled: boolean, }; 4 | -------------------------------------------------------------------------------- /bindings/SearchMatchType.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type SearchMatchType = "CommitId" | "CommitMessage" | "Email" | "Author"; 4 | -------------------------------------------------------------------------------- /bindings/SearchOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). 
Do not edit this file manually. 2 | 3 | export type SearchOptions = { repoPath: string, searchText: string, numResults: number, }; 4 | -------------------------------------------------------------------------------- /bindings/SlotLine.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type SlotLine = { section: number, index: number, }; 4 | -------------------------------------------------------------------------------- /bindings/StashStagedOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type StashStagedOptions = { repoPath: string, headCommitId: string, }; 4 | -------------------------------------------------------------------------------- /bindings/ThemeColour.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type ThemeColour = "light" | "dark"; 4 | -------------------------------------------------------------------------------- /bindings/TopCommitOptions.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type TopCommitOptions = { repoPath: string, branchName: string, }; 4 | -------------------------------------------------------------------------------- /bindings/UnPushedCommits.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 
2 | 3 | export type UnPushedCommits = { thisBranch: Array, allBranches: Array, }; 4 | -------------------------------------------------------------------------------- /bindings/UserConfigResult.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type UserConfigResult = { "error": string } | { "config": { [key in string]?: string } }; 4 | -------------------------------------------------------------------------------- /bindings/WipHunksSplit.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { Hunk } from "./Hunk"; 3 | import type { HunkLine } from "./HunkLine"; 4 | 5 | export type WipHunksSplit = { left: Array, right: Array, hunks: Array, patch_size: number, valid_utf8: boolean, }; 6 | -------------------------------------------------------------------------------- /bindings/WipPatch.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { WipPatchType } from "./WipPatchType"; 3 | 4 | export type WipPatch = { oldFile: string, newFile: string, patchType: WipPatchType, stagedType: WipPatchType, unStagedType: WipPatchType, conflicted: boolean, id: string, isImage: boolean, }; 5 | -------------------------------------------------------------------------------- /bindings/WipPatchType.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type WipPatchType = "?" | " " | "!" 
| "A" | "C" | "D" | "M" | "R" | "U" | "T"; 4 | -------------------------------------------------------------------------------- /bindings/WipPatches.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | import type { WipPatch } from "./WipPatch"; 3 | 4 | export type WipPatches = { patches: Array, conflict_commit_id: string | null, }; 5 | -------------------------------------------------------------------------------- /bindings/WriteFileOpts.ts: -------------------------------------------------------------------------------- 1 | // This file was generated by [ts-rs](https://github.com/Aleph-Alpha/ts-rs). Do not edit this file manually. 2 | 3 | export type WriteFileOpts = { filePath: string, content: string, }; 4 | -------------------------------------------------------------------------------- /loggers/Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 
3 | version = 3 4 | 5 | [[package]] 6 | name = "loggers" 7 | version = "0.1.0" 8 | dependencies = [ 9 | "quote", 10 | "syn", 11 | ] 12 | 13 | [[package]] 14 | name = "proc-macro2" 15 | version = "1.0.47" 16 | source = "registry+https://github.com/rust-lang/crates.io-index" 17 | checksum = "5ea3d908b0e36316caf9e9e2c4625cdde190a7e6f440d794667ed17a1855e725" 18 | dependencies = [ 19 | "unicode-ident", 20 | ] 21 | 22 | [[package]] 23 | name = "quote" 24 | version = "1.0.21" 25 | source = "registry+https://github.com/rust-lang/crates.io-index" 26 | checksum = "bbe448f377a7d6961e30f5955f9b8d106c3f5e449d493ee1b125c1d43c2b5179" 27 | dependencies = [ 28 | "proc-macro2", 29 | ] 30 | 31 | [[package]] 32 | name = "syn" 33 | version = "1.0.103" 34 | source = "registry+https://github.com/rust-lang/crates.io-index" 35 | checksum = "a864042229133ada95abf3b54fdc62ef5ccabe9515b64717bcb9a1919e59445d" 36 | dependencies = [ 37 | "proc-macro2", 38 | "quote", 39 | "unicode-ident", 40 | ] 41 | 42 | [[package]] 43 | name = "unicode-ident" 44 | version = "1.0.5" 45 | source = "registry+https://github.com/rust-lang/crates.io-index" 46 | checksum = "6ceab39d59e4c9499d4e5a8ee0e2735b891bb7308ac83dfb4e80cad195c9f6f3" 47 | -------------------------------------------------------------------------------- /loggers/Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "loggers" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 7 | 8 | [dependencies] 9 | quote = "1.0.21" 10 | syn = {version = "1.0.103", features = ["full"]} 11 | 12 | [lib] 13 | proc-macro = true -------------------------------------------------------------------------------- /loggers/src/lib.rs: -------------------------------------------------------------------------------- 1 | #[allow(unused_imports)] 2 | use proc_macro::TokenStream; 3 | #[cfg(debug_assertions)] 4 | use 
quote::quote; 5 | #[cfg(debug_assertions)] 6 | use syn::{Item, ItemFn}; 7 | 8 | /* 9 | This is taken and modified from https://github.com/nmeylan/elapsed-time 10 | */ 11 | 12 | // We don't bother printing time if it took less than this. 13 | #[cfg(debug_assertions)] 14 | const MIN_ELAPSED_MS: u128 = 1; 15 | 16 | #[cfg(not(debug_assertions))] 17 | #[proc_macro_attribute] 18 | pub fn elapsed(_args: TokenStream, function_def: TokenStream) -> TokenStream { 19 | function_def 20 | } 21 | 22 | #[cfg(debug_assertions)] 23 | #[proc_macro_attribute] 24 | pub fn elapsed(_args: TokenStream, function_def: TokenStream) -> TokenStream { 25 | let mut item = syn::parse(function_def).unwrap(); 26 | 27 | let fn_item = match &mut item { 28 | Item::Fn(fn_item) => fn_item, 29 | _ => panic!("elapsed proc macro expected a function"), 30 | }; 31 | 32 | let ItemFn { 33 | attrs, 34 | vis, 35 | sig, 36 | block, 37 | } = fn_item; 38 | 39 | let function_body = block.clone(); 40 | let fn_name = sig.ident.clone(); 41 | 42 | let log_ms = format!("{{}}ms for \"{}\".", fn_name); 43 | 44 | let new_function_def = quote! { 45 | #(#attrs)* #vis #sig { 46 | let now = std::time::Instant::now(); 47 | 48 | let mut wrapped_func = || #function_body; 49 | let res = wrapped_func(); 50 | let name = #fn_name; 51 | 52 | let ms = now.elapsed().as_millis(); 53 | 54 | if ms > #MIN_ELAPSED_MS { 55 | println!(#log_ms, now.elapsed().as_millis()); 56 | } 57 | 58 | res 59 | } 60 | }; 61 | 62 | TokenStream::from(new_function_def) 63 | } 64 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | # GitFiend Core 2 | 3 | This is the internal server process GitFiend uses to query repo data and run git commands. 
4 | 5 | #### Run server 6 | `cargo run` 7 | 8 | #### Generate typescript types 9 | `cargo test` 10 | 11 | #### Release build 12 | `cargo build --release` 13 | 14 | ## Building static for linux 15 | 16 | Default release build on linux doesn't work on CentOS 7.9 due to missing glibc (https://github.com/GitFiend/Support/issues/132). 17 | 18 | Steps taken from https://blog.davidvassallo.me/2021/06/10/lessons-learned-building-statically-linked-rust-binaries-openssl/ 19 | 20 | ```shell 21 | ### Compile static binary using rust 22 | 23 | # 1. Update rustup 24 | rustup update 25 | 26 | # 2. Add some MUSL dependencies 27 | sudo apt-get install pkg-config musl-tools 28 | # Or on Fedora 29 | sudo dnf install musl-gcc 30 | 31 | # 3. Add the Linux MUSL toolchain 32 | rustup target add x86_64-unknown-linux-musl 33 | ``` 34 | 35 | `cargo build --target x86_64-unknown-linux-musl --release` 36 | -------------------------------------------------------------------------------- /rustfmt.toml: -------------------------------------------------------------------------------- 1 | tab_spaces = 2 2 | #struct_field_align_threshold = 20 3 | wrap_comments = true 4 | max_width = 90 5 | control_brace_style = "ClosingNextLine" -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | // These need to match the values in gitfiend-shell-mac 2 | pub const QUALIFIER: &str = "com"; 3 | pub const ORGANISATION: &str = "tobysuggate"; 4 | pub const APPLICATION: &str = "GitFiend"; 5 | -------------------------------------------------------------------------------- /src/git/action_state.rs: -------------------------------------------------------------------------------- 1 | use ahash::AHashMap; 2 | use serde::Serialize; 3 | use ts_rs::TS; 4 | 5 | use crate::git::run_git_action::ActionError; 6 | use crate::global; 7 | use crate::util::global::Global; 8 | 9 | #[derive(Debug, Clone, 
Serialize, TS)] 10 | #[serde(rename_all = "camelCase")] 11 | #[ts(export)] 12 | pub struct ActionState { 13 | pub stdout: Vec, 14 | pub stderr: Vec, 15 | pub done: bool, 16 | pub error: Option, 17 | } 18 | 19 | impl ActionState { 20 | pub fn new() -> Self { 21 | Self { 22 | stdout: Vec::new(), 23 | stderr: Vec::new(), 24 | done: false, 25 | error: None, 26 | } 27 | } 28 | } 29 | 30 | // 0 will be treated as an error. 31 | static ACTION_IDS: Global = global!(1); 32 | 33 | fn get_next_action_id() -> u32 { 34 | if let Some(id) = ACTION_IDS.get() { 35 | let new_id = id + 1; 36 | ACTION_IDS.set(new_id); 37 | new_id 38 | } else { 39 | 0 40 | } 41 | } 42 | 43 | pub static ACTIONS: Global> = global!(AHashMap::new()); 44 | 45 | // TODO: this should probably get the id and return it instead of having separate calls. 46 | pub fn start_action() -> u32 { 47 | let id = get_next_action_id(); 48 | 49 | ACTIONS.insert(id, ActionState::new()); 50 | 51 | id 52 | } 53 | 54 | pub fn add_stderr_log(id: u32, text: &str) { 55 | if let Some(mut action) = ACTIONS.get_by_key(&id) { 56 | action.stderr.push(text.to_string()); 57 | 58 | // TODO: Do we actually need to insert it again? 59 | ACTIONS.insert(id, action); 60 | } else { 61 | eprintln!("add_stderr_log: Didn't find action id {}", id); 62 | } 63 | } 64 | 65 | pub fn add_stdout_log(id: u32, text: &str) { 66 | if let Some(mut action) = ACTIONS.get_by_key(&id) { 67 | action.stdout.push(text.to_string()); 68 | 69 | // TODO: Do we actually need to insert it again? 70 | ACTIONS.insert(id, action); 71 | } else { 72 | eprintln!("add_stdout_log: Didn't find action id {}", id); 73 | } 74 | } 75 | 76 | pub fn set_action_error(id: u32, error: ActionError) { 77 | if let Some(mut action) = ACTIONS.get_by_key(&id) { 78 | action.error = Some(error); 79 | action.done = true; 80 | 81 | // TODO: Do we actually need to insert it again? 
82 | ACTIONS.insert(id, action); 83 | } else { 84 | eprintln!("set_action_error: Didn't find action id {}", id); 85 | } 86 | } 87 | 88 | pub fn set_action_done(id: u32) { 89 | if let Some(mut action) = ACTIONS.get_by_key(&id) { 90 | action.done = true; 91 | 92 | // TODO: Do we actually need to insert it again? 93 | ACTIONS.insert(id, action); 94 | } else { 95 | eprintln!("set_action_done: Didn't find action id {}", id); 96 | } 97 | } 98 | 99 | #[cfg(test)] 100 | mod tests { 101 | use crate::git::action_state::{add_stdout_log, start_action, ACTIONS}; 102 | 103 | #[test] 104 | fn test_start_action() { 105 | let id = start_action(); 106 | 107 | assert!(ACTIONS.get_by_key(&id).is_some()); 108 | } 109 | 110 | #[test] 111 | fn test_add_log() { 112 | let id = start_action(); 113 | add_stdout_log(id, "stdout text"); 114 | 115 | assert!(!ACTIONS.get_by_key(&id).unwrap().stdout.is_empty()); 116 | assert_eq!(ACTIONS.get_by_key(&id).unwrap().stdout[0], "stdout text"); 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /src/git/actions/add.rs: -------------------------------------------------------------------------------- 1 | use crate::git::run_git_action::run_git_action_with_vec; 2 | use serde::Deserialize; 3 | use ts_rs::TS; 4 | 5 | #[derive(Debug, Deserialize, TS)] 6 | #[serde(rename_all = "camelCase")] 7 | #[ts(export)] 8 | pub struct GitAddOptions { 9 | pub repo_path: String, 10 | pub files: Vec, 11 | } 12 | 13 | pub fn git_add_files(options: &GitAddOptions) -> u32 { 14 | let GitAddOptions { repo_path, files } = options; 15 | 16 | let commands: Vec> = files 17 | .chunks(10_1000) 18 | .map(|chunk| { 19 | let mut command = vec![String::from("add")]; 20 | command.extend(chunk.iter().map(|f| f.to_string())); 21 | command 22 | }) 23 | .collect(); 24 | 25 | run_git_action_with_vec(repo_path, commands) 26 | } 27 | -------------------------------------------------------------------------------- /src/git/actions/clone.rs: 
-------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | use std::fs::create_dir_all; 3 | use ts_rs::TS; 4 | 5 | use crate::dprintln; 6 | use crate::git::run_git_action::{run_git_action, RunGitActionOptions}; 7 | use crate::git::store::STORE; 8 | 9 | #[derive(Debug, Deserialize, TS)] 10 | #[serde(rename_all = "camelCase")] 11 | #[ts(export)] 12 | pub struct CloneOptions { 13 | // Dir to clone into. 14 | pub repo_path: String, 15 | pub url: String, 16 | } 17 | 18 | pub fn clone_repo(options: &CloneOptions) -> u32 { 19 | if create_dir_all(&options.repo_path).is_err() { 20 | return 0; 21 | } 22 | 23 | let version = STORE.get_git_version(); 24 | 25 | let command = if version.major > 1 && version.minor > 12 { 26 | vec!["clone", "--recurse-submodules", "--progress", &options.url] 27 | } else { 28 | vec!["clone", "--recursive", "--progress", &options.url] 29 | }; 30 | 31 | let out = run_git_action(RunGitActionOptions { 32 | repo_path: &options.repo_path, 33 | commands: [command], 34 | }); 35 | 36 | dprintln!("{:?}", out); 37 | 38 | out 39 | } 40 | -------------------------------------------------------------------------------- /src/git/actions/command.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | use ts_rs::TS; 3 | 4 | use crate::git::run_git_action::{run_git_action, RunGitActionOptions}; 5 | 6 | #[derive(Debug, Deserialize, TS)] 7 | #[serde(rename_all = "camelCase")] 8 | #[ts(export)] 9 | pub struct CommandOptions { 10 | pub repo_path: String, 11 | pub args: Vec, 12 | } 13 | 14 | pub fn command(options: &CommandOptions) -> u32 { 15 | run_git_action(RunGitActionOptions { 16 | repo_path: &options.repo_path, 17 | commands: [options 18 | .args 19 | .iter() 20 | .map(|a| a.as_str()) 21 | .collect::>()], 22 | }) 23 | } 24 | 25 | // #[derive(Debug, Deserialize, TS)] 26 | // #[serde(rename_all = "camelCase")] 27 | // #[ts(export)] 28 | // pub struct 
CommandsOptions { 29 | // pub repo_path: String, 30 | // pub commands: Vec>, 31 | // } 32 | -------------------------------------------------------------------------------- /src/git/actions/create_repo.rs: -------------------------------------------------------------------------------- 1 | use std::fs; 2 | 3 | use crate::dprintln; 4 | use crate::git::run_git_action::{run_git_action, RunGitActionOptions}; 5 | use crate::server::git_request::ReqOptions; 6 | 7 | pub fn create_repo(options: &ReqOptions) -> u32 { 8 | let ReqOptions { repo_path } = options; 9 | 10 | if let Err(_e) = fs::create_dir_all(repo_path) { 11 | dprintln!("{:?}", _e); 12 | return 0; 13 | } 14 | 15 | run_git_action(RunGitActionOptions { 16 | repo_path, 17 | commands: [vec!["init"]], 18 | }) 19 | } 20 | -------------------------------------------------------------------------------- /src/git/actions/credentials.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use std::path::PathBuf; 3 | 4 | use crate::dprintln; 5 | use crate::server::request_util::{ES, R}; 6 | use serde::Deserialize; 7 | use ts_rs::TS; 8 | 9 | #[derive(Clone, Debug, PartialEq, Eq, Deserialize, TS)] 10 | #[ts(export)] 11 | pub struct Credentials { 12 | pub username: String, 13 | pub password: String, 14 | } 15 | 16 | pub fn set_credentials(credentials: &Credentials) -> R<()> { 17 | env::set_var("GITFIEND_USERNAME", &credentials.username); 18 | env::set_var("GITFIEND_PASSWORD", &credentials.password); 19 | 20 | let path = get_ask_pass_path()?; 21 | dprintln!("Setting GIT_ASKPASS to {:?}", path.to_str()); 22 | 23 | env::set_var( 24 | "GIT_ASKPASS", 25 | path 26 | .to_str() 27 | .ok_or(ES::from("set_credentials: Failed to convert path to str"))?, 28 | ); 29 | 30 | Ok(()) 31 | 32 | // if let Some(path) = get_ask_pass_path() { 33 | // dprintln!("Setting GIT_ASKPASS to {:?}", path.to_str()); 34 | // 35 | // env::set_var("GIT_ASKPASS", path.to_str()?); 36 | // } 37 | // 38 | // 
Some(()) 39 | } 40 | 41 | pub fn get_ask_pass_path() -> R { 42 | let name = if env::consts::OS == "windows" { 43 | "ask-pass.exe" 44 | } else { 45 | "ask-pass" 46 | }; 47 | 48 | #[cfg(debug_assertions)] 49 | let dir = env::current_dir()?; 50 | 51 | let missing_parent = ES::from("get_ask_pass_path: Couldn't get parent dir."); 52 | 53 | #[cfg(debug_assertions)] 54 | return Ok( 55 | dir 56 | .parent() 57 | .ok_or(missing_parent)? 58 | .join("git-fiend") 59 | .join("src") 60 | .join("ask-pass") 61 | .join("target") 62 | .join("release") // Use release version as we typically have no reason to build debug. 63 | .join(name), 64 | ); 65 | 66 | #[cfg(not(debug_assertions))] 67 | Ok( 68 | env::current_exe()? 69 | .parent() 70 | .ok_or(missing_parent.clone())? 71 | .parent() 72 | .ok_or(missing_parent)? 73 | .join("ask-pass") 74 | .join(name), 75 | ) 76 | } 77 | -------------------------------------------------------------------------------- /src/git/actions/fake_action.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use std::path::PathBuf; 3 | 4 | pub fn _fake_action_script_path() -> Option { 5 | Some( 6 | env::current_dir() 7 | .ok()? 8 | .parent()? 
9 | .join("git-fiend") 10 | .join("scripts") 11 | .join("fake-action.sh"), 12 | ) 13 | } 14 | -------------------------------------------------------------------------------- /src/git/actions/fetch.rs: -------------------------------------------------------------------------------- 1 | use crate::git::run_git_action::{run_git_action, RunGitActionOptions}; 2 | use crate::server::git_request::ReqOptions; 3 | 4 | pub fn fetch_all(options: &ReqOptions) -> u32 { 5 | run_git_action(RunGitActionOptions { 6 | repo_path: &options.repo_path, 7 | commands: [vec!["fetch", "--all", "--prune"]], 8 | }) 9 | } 10 | -------------------------------------------------------------------------------- /src/git/actions/mod.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod add; 2 | pub(crate) mod clone; 3 | pub(crate) mod command; 4 | pub(crate) mod create_repo; 5 | pub(crate) mod credentials; 6 | pub(crate) mod fake_action; 7 | pub(crate) mod fetch; 8 | pub(crate) mod stash; 9 | -------------------------------------------------------------------------------- /src/git/actions/stash.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | use ts_rs::TS; 3 | 4 | use crate::git::run_git_action::{run_git_action, RunGitActionOptions}; 5 | use crate::server::git_request::ReqOptions; 6 | 7 | pub fn stash_changes(options: &ReqOptions) -> u32 { 8 | run_git_action(RunGitActionOptions { 9 | repo_path: &options.repo_path, 10 | commands: [vec!["add", "--all"], vec!["stash", "push"]], 11 | }) 12 | } 13 | 14 | #[derive(Debug, Deserialize, TS)] 15 | #[serde(rename_all = "camelCase")] 16 | #[ts(export)] 17 | pub struct StashStagedOptions { 18 | pub repo_path: String, 19 | pub head_commit_id: String, 20 | } 21 | 22 | pub fn stash_staged(options: &StashStagedOptions) -> u32 { 23 | let StashStagedOptions { 24 | repo_path, 25 | head_commit_id, 26 | } = options; 27 | 28 | 
run_git_action(RunGitActionOptions { 29 | repo_path, 30 | commands: [ 31 | vec![ 32 | "commit", 33 | "-m", 34 | "TEMP_COMMIT: If you are seeing this commit there has been an error while stashing.", 35 | ], 36 | vec!["add", "--all"], 37 | vec!["stash", "push"], 38 | vec!["reset", "--soft", head_commit_id], 39 | vec!["add", "--all"], 40 | vec!["stash", "push"], 41 | vec!["stash", "pop", "stash@{1}"], 42 | vec!["reset"], 43 | ], 44 | }) 45 | } 46 | -------------------------------------------------------------------------------- /src/git/conflicts/conflicted_file.rs: -------------------------------------------------------------------------------- 1 | use core::fmt; 2 | use serde::Serialize; 3 | use std::fmt::Formatter; 4 | use ts_rs::TS; 5 | 6 | #[derive(Debug, PartialEq, Eq, Clone, Serialize, TS)] 7 | #[serde(rename_all = "camelCase")] 8 | #[ts(export)] 9 | pub struct ConflictedFile { 10 | pub lines: Vec, 11 | pub sections: Vec, 12 | pub ref_name_top: String, 13 | pub ref_name_bottom: String, 14 | pub line_ending: String, 15 | pub max_line_length: usize, 16 | } 17 | 18 | #[derive(Debug, PartialEq, Eq, Clone, Serialize, TS)] 19 | #[serde(rename_all = "camelCase")] 20 | #[ts(export)] 21 | pub enum CFLine { 22 | Ok(OkLine), 23 | Slot(SlotLine), 24 | } 25 | 26 | #[derive(Debug, PartialEq, Eq, Clone, Serialize, TS)] 27 | #[serde(rename_all = "camelCase")] 28 | #[ts(export)] 29 | pub enum CFSectionLine { 30 | Blank(BlankLine), 31 | Conflict(ConflictLine), 32 | } 33 | 34 | #[derive(Debug, PartialEq, Eq, Clone, Serialize, TS)] 35 | #[ts(export)] 36 | pub struct OkLine { 37 | pub text: String, 38 | } 39 | 40 | #[derive(Debug, PartialEq, Eq, Clone, Serialize, TS)] 41 | #[ts(export)] 42 | pub struct SlotLine { 43 | pub section: usize, 44 | pub index: usize, 45 | } 46 | 47 | #[derive(Debug, PartialEq, Eq, Clone, Serialize, TS)] 48 | #[ts(export)] 49 | pub struct BlankLine { 50 | pub section: usize, 51 | } 52 | 53 | #[derive(Debug, PartialEq, Eq, Clone, Serialize, TS)] 54 | 
#[ts(export)] 55 | pub struct ConflictLine { 56 | pub text: String, 57 | pub side: CFSide, 58 | pub section: usize, 59 | pub key: String, 60 | } 61 | 62 | impl ConflictedFile { 63 | pub fn new() -> Self { 64 | Self { 65 | lines: vec![], 66 | sections: vec![], 67 | ref_name_top: String::new(), 68 | ref_name_bottom: String::new(), 69 | line_ending: String::from("\n"), 70 | max_line_length: 0, 71 | } 72 | } 73 | } 74 | 75 | #[derive(Debug, PartialEq, Eq, Clone, Serialize, TS)] 76 | #[ts(export)] 77 | pub struct CFSection { 78 | pub left: Vec, 79 | pub right: Vec, 80 | } 81 | 82 | impl CFSection { 83 | pub fn get(&self, side: &CFSide) -> &Vec { 84 | match side { 85 | CFSide::Left => &self.left, 86 | CFSide::Right => &self.right, 87 | } 88 | } 89 | 90 | pub fn get_mut(&mut self, side: &CFSide) -> &mut Vec { 91 | match side { 92 | CFSide::Left => &mut self.left, 93 | CFSide::Right => &mut self.right, 94 | } 95 | } 96 | } 97 | 98 | #[derive(Debug, PartialEq, Eq, Clone, Serialize, TS)] 99 | #[serde(rename_all = "camelCase")] 100 | #[ts(export)] 101 | pub enum CFSide { 102 | Left, 103 | Right, 104 | } 105 | 106 | impl fmt::Display for CFSide { 107 | fn fmt(&self, f: &mut Formatter) -> fmt::Result { 108 | match *self { 109 | CFSide::Left => write!(f, "Left"), 110 | CFSide::Right => write!(f, "Right"), 111 | } 112 | } 113 | } 114 | -------------------------------------------------------------------------------- /src/git/conflicts/mod.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod api; 2 | pub(crate) mod conflicted_file; 3 | -------------------------------------------------------------------------------- /src/git/git_settings.rs: -------------------------------------------------------------------------------- 1 | use once_cell::sync::Lazy; 2 | use std::env; 3 | use std::path::PathBuf; 4 | 5 | pub static GIT_PATH: Lazy = Lazy::new(|| PathBuf::from("git")); 6 | 7 | pub fn set_git_env() { 8 | // We don't want any prompts in 
the terminal (e.g for password). 9 | env::set_var("GIT_TERMINAL_PROMPT", "0"); 10 | 11 | if env::consts::OS == "macos" { 12 | if let Ok(path) = env::var("PATH") { 13 | if !path.contains("usr/local/bin") { 14 | env::set_var("PATH", format!("{}:/usr/local/bin", path)); 15 | } 16 | } 17 | } 18 | 19 | if let Err(err) = fix_path_env::fix() { 20 | eprintln!("{err}"); 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/git/git_types.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use ts_rs::TS; 3 | 4 | #[derive(Debug, PartialEq, Clone, Serialize, Deserialize, TS)] 5 | #[ts(export)] 6 | pub struct DateResult { 7 | pub ms: usize, 8 | pub adjustment: i32, 9 | } 10 | 11 | #[derive(Debug, PartialEq, Eq, Clone, Serialize, Deserialize, TS)] 12 | #[ts(export)] 13 | pub enum RefType { 14 | Branch, 15 | Tag, 16 | Stash, 17 | } 18 | 19 | #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize, TS)] 20 | #[ts(export)] 21 | pub enum RefLocation { 22 | Local, 23 | Remote, 24 | } 25 | 26 | #[derive(Debug, Clone, Serialize, Deserialize, TS)] 27 | #[serde(rename_all = "camelCase")] 28 | #[ts(export)] 29 | pub struct CommitInfo { 30 | pub author: String, 31 | pub email: String, 32 | pub date: DateResult, 33 | pub id: String, 34 | pub index: usize, 35 | pub parent_ids: Vec, 36 | pub is_merge: bool, 37 | pub message: String, 38 | pub stash_id: String, 39 | pub refs: Vec, 40 | 41 | pub filtered: bool, 42 | pub num_skipped: u32, 43 | } 44 | 45 | #[derive(Debug, Clone, Serialize, Deserialize, TS)] 46 | #[serde(rename_all = "camelCase")] 47 | #[ts(export)] 48 | pub struct Commit { 49 | pub author: String, 50 | pub email: String, 51 | pub date: DateResult, 52 | pub id: String, 53 | pub index: usize, 54 | pub parent_ids: Vec, 55 | pub is_merge: bool, 56 | pub message: String, 57 | pub stash_id: String, 58 | pub refs: Vec, 59 | 60 | pub filtered: bool, 61 | pub 
num_skipped: u32, 62 | } 63 | 64 | #[derive(Debug, Clone, Serialize, Deserialize, TS)] 65 | #[serde(rename_all = "camelCase")] 66 | #[ts(export)] 67 | pub struct RefInfo { 68 | pub id: String, 69 | pub location: RefLocation, 70 | pub full_name: String, 71 | pub short_name: String, 72 | pub remote_name: Option, 73 | pub sibling_id: String, 74 | pub ref_type: RefType, 75 | pub head: bool, 76 | pub commit_id: String, 77 | pub time: usize, 78 | } 79 | 80 | #[derive(Debug, Clone, Serialize, Deserialize, TS)] 81 | #[serde(rename_all = "camelCase")] 82 | #[ts(export)] 83 | pub struct LocalRefCommitDiff { 84 | pub ahead_of_remote: u32, 85 | pub behind_remote: u32, 86 | pub ahead_of_head: u32, 87 | pub behind_head: u32, 88 | } 89 | 90 | #[derive(Debug, Clone, Serialize, Deserialize, TS)] 91 | #[serde(rename_all = "camelCase")] 92 | #[ts(export)] 93 | pub struct RefCommitDiff { 94 | pub ahead_of_head: u32, 95 | pub behind_head: u32, 96 | } 97 | 98 | // See https://git-scm.com/docs/git-status for meaning. 
99 | #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize, TS)] 100 | #[ts(export)] 101 | pub enum PatchType { 102 | A, 103 | C, 104 | B, 105 | D, 106 | M, 107 | R, 108 | T, 109 | U, 110 | X, 111 | } 112 | 113 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, TS)] 114 | #[serde(rename_all = "camelCase")] 115 | #[ts(export)] 116 | pub struct Patch { 117 | pub commit_id: String, 118 | pub old_file: String, 119 | pub new_file: String, 120 | pub patch_type: PatchType, 121 | pub id: String, 122 | pub is_image: bool, 123 | } 124 | 125 | #[derive(Debug, Clone, Serialize, Deserialize, TS)] 126 | #[serde(rename_all = "camelCase")] 127 | #[ts(export)] 128 | pub struct Hunk { 129 | pub old_line_range: HunkRange, 130 | pub new_line_range: HunkRange, 131 | pub context_line: String, 132 | pub lines: Vec, 133 | pub index: i32, 134 | } 135 | 136 | #[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize, TS)] 137 | #[serde(rename_all = "camelCase")] 138 | #[ts(export)] 139 | pub struct HunkLine { 140 | pub status: HunkLineStatus, 141 | pub old_num: Option, 142 | pub new_num: Option, 143 | pub hunk_index: i32, 144 | pub text: String, 145 | pub index: u32, 146 | pub line_ending: String, 147 | } 148 | 149 | #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize, TS)] 150 | #[ts(export)] 151 | pub enum HunkLineStatus { 152 | #[serde(rename = "+")] 153 | Added, 154 | #[serde(rename = "-")] 155 | Removed, 156 | #[serde(rename = " ")] 157 | Unchanged, 158 | HeaderStart, 159 | HeaderEnd, 160 | Skip, 161 | } 162 | 163 | #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, TS)] 164 | #[serde(rename_all = "camelCase")] 165 | #[ts(export)] 166 | pub struct HunkRange { 167 | pub start: i32, 168 | pub length: i32, 169 | } 170 | 171 | #[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize, TS)] 172 | #[ts(export)] 173 | pub enum WipPatchType { 174 | #[serde(rename = "?")] 175 | Question, 176 | #[serde(rename = " ")] 177 | Empty, 178 | #[serde(rename 
= "!")] 179 | Ignored, 180 | A, 181 | C, 182 | D, 183 | M, 184 | R, 185 | U, 186 | T, 187 | } 188 | 189 | #[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, TS)] 190 | #[serde(rename_all = "camelCase")] 191 | #[ts(export)] 192 | pub struct WipPatch { 193 | pub old_file: String, 194 | pub new_file: String, 195 | pub patch_type: WipPatchType, 196 | pub staged_type: WipPatchType, 197 | pub un_staged_type: WipPatchType, 198 | pub conflicted: bool, 199 | pub id: String, 200 | pub is_image: bool, 201 | } 202 | -------------------------------------------------------------------------------- /src/git/git_types_extra_impl.rs: -------------------------------------------------------------------------------- 1 | use crate::git::git_types::{ 2 | Hunk, HunkLine, HunkRange, Patch, PatchType, RefLocation, WipPatchType, 3 | }; 4 | use std::ffi::OsStr; 5 | use std::fmt; 6 | use std::fmt::Formatter; 7 | use std::path::Path; 8 | 9 | impl fmt::Display for PatchType { 10 | fn fmt(&self, f: &mut Formatter) -> fmt::Result { 11 | match *self { 12 | PatchType::A => write!(f, "A"), 13 | PatchType::B => write!(f, "B"), 14 | PatchType::C => write!(f, "C"), 15 | PatchType::D => write!(f, "D"), 16 | PatchType::M => write!(f, "M"), 17 | PatchType::R => write!(f, "R"), 18 | PatchType::T => write!(f, "T"), 19 | PatchType::U => write!(f, "U"), 20 | PatchType::X => write!(f, "X"), 21 | } 22 | } 23 | } 24 | 25 | impl fmt::Display for WipPatchType { 26 | fn fmt(&self, f: &mut Formatter) -> fmt::Result { 27 | match *self { 28 | WipPatchType::Question => write!(f, "?"), 29 | WipPatchType::Empty => write!(f, " "), 30 | WipPatchType::Ignored => write!(f, "!"), 31 | WipPatchType::A => write!(f, "A"), 32 | WipPatchType::C => write!(f, "C"), 33 | WipPatchType::D => write!(f, "D"), 34 | WipPatchType::M => write!(f, "M"), 35 | WipPatchType::R => write!(f, "R"), 36 | WipPatchType::U => write!(f, "U"), 37 | WipPatchType::T => write!(f, "T"), 38 | } 39 | } 40 | } 41 | 42 | impl fmt::Display for 
RefLocation { 43 | fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { 44 | match *self { 45 | RefLocation::Local => write!(f, "Local"), 46 | RefLocation::Remote => write!(f, "Remote"), 47 | } 48 | } 49 | } 50 | 51 | impl Hunk { 52 | pub fn new() -> Hunk { 53 | Hunk { 54 | lines: Vec::new(), 55 | old_line_range: HunkRange::new(), 56 | new_line_range: HunkRange::new(), 57 | context_line: String::new(), 58 | index: -1, 59 | } 60 | } 61 | } 62 | 63 | impl HunkLine { 64 | pub fn get_hunk<'a>(&self, hunks: &'a [Hunk]) -> Option<&'a Hunk> { 65 | if self.hunk_index >= 0 { 66 | return hunks.get(self.hunk_index as usize); 67 | } 68 | None 69 | } 70 | } 71 | 72 | impl HunkRange { 73 | pub fn new() -> HunkRange { 74 | HunkRange { 75 | start: 0, 76 | length: 0, 77 | } 78 | } 79 | } 80 | 81 | impl Patch { 82 | pub fn get_file_extension(&self) -> String { 83 | let file_name = if self.new_file.is_empty() { 84 | &self.old_file 85 | } else { 86 | &self.new_file 87 | }; 88 | 89 | Path::new(file_name) 90 | .extension() 91 | .and_then(OsStr::to_str) 92 | .unwrap_or("") 93 | .to_string() 94 | } 95 | } 96 | -------------------------------------------------------------------------------- /src/git/git_version.rs: -------------------------------------------------------------------------------- 1 | use crate::git::run_git::{run_git_err, RunGitOptions}; 2 | use crate::git::store::{GIT_VERSION, STORE}; 3 | use crate::parser::standard_parsers::UNSIGNED_INT; 4 | use crate::parser::{parse_part, Parser}; 5 | use crate::server::git_request::ReqOptions; 6 | use crate::{and, character, map, rep_parser_sep, take_char_while}; 7 | use serde::Serialize; 8 | use ts_rs::TS; 9 | 10 | pub fn load_git_version() { 11 | if let Ok(version_str) = run_git_err(RunGitOptions { 12 | repo_path: ".", 13 | args: ["--version"], 14 | }) { 15 | if let Some(version) = parse_version(&version_str.stdout) { 16 | if let Ok(mut gv) = GIT_VERSION.write() { 17 | *gv = version; 18 | } 19 | } 20 | } 21 | } 22 | 23 | // Expect 
this to return none if Git is not installed. 24 | pub fn git_version(_: &ReqOptions) -> Option { 25 | let version = STORE.get_git_version(); 26 | 27 | if !version.valid() { 28 | load_git_version(); 29 | 30 | let version = STORE.get_git_version(); 31 | 32 | if version.valid() { 33 | return Some(version); 34 | } 35 | 36 | return None; 37 | } 38 | 39 | Some(version) 40 | } 41 | 42 | #[derive(PartialEq, Eq, Debug, Clone, Serialize, TS)] 43 | #[ts(export)] 44 | pub struct GitVersion { 45 | pub major: u32, 46 | pub minor: u32, 47 | pub patch: u32, 48 | } 49 | 50 | impl GitVersion { 51 | pub fn new() -> Self { 52 | Self { 53 | major: 0, 54 | minor: 0, 55 | patch: 0, 56 | } 57 | } 58 | 59 | pub fn valid(&self) -> bool { 60 | self.major > 0 61 | } 62 | } 63 | 64 | fn parse_version(text: &str) -> Option { 65 | parse_part(P_VERSION, text)? 66 | } 67 | 68 | // Takes something like "git version 2.32.0" 69 | const P_VERSION_STRING: Parser<(String, Vec)> = and!( 70 | take_char_while!(|c: char| !c.is_numeric()), 71 | rep_parser_sep!(UNSIGNED_INT, character!('.')) 72 | ); 73 | 74 | const P_VERSION: Parser> = 75 | map!(P_VERSION_STRING, |res: (String, Vec)| { 76 | let (_, nums) = res; 77 | 78 | Some(GitVersion { 79 | major: nums 80 | .first() 81 | .unwrap_or(&String::from("")) 82 | .parse() 83 | .unwrap_or(0), 84 | minor: nums 85 | .get(1) 86 | .unwrap_or(&String::from("")) 87 | .parse() 88 | .unwrap_or(0), 89 | patch: nums 90 | .get(2) 91 | .unwrap_or(&String::from("")) 92 | .parse() 93 | .unwrap_or(0), 94 | }) 95 | }); 96 | 97 | #[cfg(test)] 98 | mod tests { 99 | use crate::git::git_version::{parse_version, GitVersion, P_VERSION_STRING}; 100 | use crate::parser::parse_all; 101 | 102 | #[test] 103 | fn test_p_version_string() { 104 | let result = parse_all(P_VERSION_STRING, "git version 2.32.0"); 105 | 106 | assert!(result.is_some()); 107 | assert_eq!( 108 | result.unwrap(), 109 | ( 110 | "git version ".to_string(), 111 | vec!["2".to_string(), "32".to_string(), "0".to_string()] 112 
| ) 113 | ); 114 | } 115 | 116 | #[test] 117 | fn test_p_version() { 118 | let result = parse_version("git version 2.32.0"); 119 | 120 | assert!(result.is_some()); 121 | 122 | assert_eq!( 123 | result.unwrap(), 124 | GitVersion { 125 | major: 2, 126 | minor: 32, 127 | patch: 0 128 | } 129 | ); 130 | } 131 | 132 | #[test] 133 | fn test_p_short_version() { 134 | let result = parse_version("git version 2.32"); 135 | 136 | assert!(result.is_some()); 137 | 138 | assert_eq!( 139 | result.unwrap(), 140 | GitVersion { 141 | major: 2, 142 | minor: 32, 143 | patch: 0 144 | } 145 | ); 146 | } 147 | 148 | #[test] 149 | fn test_p_mac_version() { 150 | let result = parse_version("git version 2.32.1 (Apple Git-133)"); 151 | 152 | assert!(result.is_some()); 153 | 154 | assert_eq!( 155 | result.unwrap(), 156 | GitVersion { 157 | major: 2, 158 | minor: 32, 159 | patch: 1 160 | } 161 | ); 162 | } 163 | 164 | #[test] 165 | fn test_p_windows_version() { 166 | let result = parse_version("git version 2.37.3.windows.1"); 167 | 168 | assert!(result.is_some()); 169 | 170 | assert_eq!( 171 | result.unwrap(), 172 | GitVersion { 173 | major: 2, 174 | minor: 37, 175 | patch: 3 176 | } 177 | ); 178 | } 179 | } 180 | -------------------------------------------------------------------------------- /src/git/mod.rs: -------------------------------------------------------------------------------- 1 | pub(crate) mod action_state; 2 | pub(crate) mod actions; 3 | pub(crate) mod conflicts; 4 | pub(crate) mod git_settings; 5 | pub(crate) mod git_types; 6 | pub(crate) mod git_types_extra_impl; 7 | pub(crate) mod git_version; 8 | pub(crate) mod queries; 9 | pub(crate) mod run_git; 10 | pub(crate) mod run_git_action; 11 | pub(crate) mod store; 12 | -------------------------------------------------------------------------------- /src/git/queries/commit_calcs.rs: -------------------------------------------------------------------------------- 1 | use ahash::{AHashMap, AHashSet}; 2 | use 
crate::git::git_types::Commit;
use crate::git::run_git::{run_git_err, RunGitOptions};
use crate::git::store::REF_DIFFS;

// Borrowed id -> commit lookup. Currently unused (see get_commit_map_cloned).
fn _get_commit_map(commits: &[Commit]) -> AHashMap<&String, &Commit> {
  commits.iter().map(|c| (&c.id, c)).collect()
}

/// Owned id -> commit map, cloning every commit.
pub fn get_commit_map_cloned(commits: &[Commit]) -> AHashMap<String, Commit> {
  commits.iter().map(|c| (c.id.clone(), c.clone())).collect()
}

/// All ancestor commit ids of `commit`, found by walking parent links
/// through `commits`. Parents missing from the map are still included
/// as ids, they just aren't traversed further.
pub fn find_commit_ancestors<'a>(
  commit: &'a Commit,
  commits: &'a AHashMap<String, Commit>,
) -> AHashSet<&'a str> {
  let mut ancestors = AHashSet::<&'a str>::new();
  let mut to_visit: Vec<&Commit> = vec![commit];

  while let Some(c) = to_visit.pop() {
    for id in c.parent_ids.iter() {
      // insert returns false when already seen, so each commit is visited once.
      if ancestors.insert(id) {
        if let Some(parent) = commits.get(id) {
          to_visit.push(parent);
        }
      }
    }
  }

  ancestors
}

/// Ids of the chain of descendants of `commit` (first non-stash child,
/// then that child's first non-stash child, and so on).
pub fn find_commit_descendants(commit: &Commit, commits: &[Commit]) -> Vec<String> {
  let mut descendants: Vec<String> = Vec::new();

  find_commit_descendants_inner(commit, commits, &mut descendants);

  descendants
}

fn find_commit_descendants_inner(
  commit: &Commit,
  commits: &[Commit],
  descendants: &mut Vec<String>,
) {
  if commit.index == 0 {
    return;
  }

  // Scan from just above this commit towards index 0 for the first
  // non-stash commit that lists it as a parent.
  for i in (0..commit.index).rev() {
    let c = &commits[i];

    if c.stash_id.is_empty() && c.parent_ids.contains(&commit.id) {
      descendants.push(c.id.clone());
      find_commit_descendants_inner(c, commits, descendants);
      break;
    }
  }
}

// How many commits ahead is a. The order matters.
pub fn count_commits_between_commit_ids(
  a_id: &String,
  b_id: &String,
  commits: &AHashMap<String, Commit>,
) -> u32 {
  let key = format!("{}{}", a_id, b_id);

  // Serve from the cross-request cache when possible.
  if let Ok(diffs) = REF_DIFFS.read() {
    if let Some(count) = diffs.get(&key) {
      return *count;
    }
  }

  let a = match commits.get(a_id) {
    Some(c) => c,
    None => return 0,
  };
  let b = match commits.get(b_id) {
    Some(c) => c,
    None => return 0,
  };

  if a.id == b.id {
    return 0;
  }

  let mut a_ancestors = find_commit_ancestors(a, commits);
  a_ancestors.insert(&a.id);

  let mut b_ancestors = find_commit_ancestors(b, commits);
  b_ancestors.insert(&b.id);

  // Commits reachable from a but not from b.
  let num = a_ancestors
    .into_iter()
    .filter(|id| !b_ancestors.contains(*id))
    .count() as u32;

  if let Ok(mut diffs) = REF_DIFFS.write() {
    diffs.insert(key, num);
  }

  num
}

// How many commits ahead is a. The order matters.
pub fn get_commit_ids_between_commit_ids(
  a_id: &String,
  b_id: &String,
  commits: &AHashMap<String, Commit>,
) -> Option<Vec<String>> {
  Some(get_commit_ids_between_commits(
    commits.get(a_id)?,
    commits.get(b_id)?,
    commits,
  ))
}

// How many commits ahead is a. The order matters.
128 | fn get_commit_ids_between_commits( 129 | a: &Commit, 130 | b: &Commit, 131 | commits: &AHashMap, 132 | ) -> Vec { 133 | let mut ids: Vec = Vec::new(); 134 | 135 | if a.id == b.id { 136 | return ids; 137 | } 138 | 139 | let mut a_ancestors = find_commit_ancestors(a, commits); 140 | a_ancestors.insert(&a.id); 141 | 142 | let mut b_ancestors = find_commit_ancestors(b, commits); 143 | b_ancestors.insert(&b.id); 144 | 145 | for id in a_ancestors.into_iter() { 146 | if !b_ancestors.contains(&id) { 147 | ids.push(id.to_string()); 148 | } 149 | } 150 | 151 | ids 152 | } 153 | 154 | pub fn count_commits_between_fallback( 155 | repo_path: &str, 156 | commit_id1: &str, 157 | commit_id2: &str, 158 | ) -> u32 { 159 | if commit_id1 == commit_id2 { 160 | return 0; 161 | } 162 | 163 | let out = run_git_err(RunGitOptions { 164 | args: [ 165 | "rev-list", 166 | &format!("{}..{}", commit_id1, commit_id2), 167 | "--count", 168 | ], 169 | repo_path, 170 | }); 171 | 172 | if let Ok(out) = out { 173 | return out.stdout.trim().parse::().ok().unwrap_or(0); 174 | } 175 | 176 | 0 177 | } 178 | 179 | #[cfg(test)] 180 | mod tests { 181 | use crate::git::store::REF_DIFFS; 182 | 183 | #[test] 184 | fn test_ref_diffs() { 185 | if let Ok(mut diffs) = REF_DIFFS.write() { 186 | diffs.insert("OMG".to_string(), 1); 187 | } 188 | // REF_DIFFS.insert("OMG".to_string(), 1); 189 | 190 | assert!(REF_DIFFS.read().unwrap().get("OMG").is_some()); 191 | // assert!(REF_DIFFS.get_diff("OMG").is_some()); 192 | } 193 | } 194 | -------------------------------------------------------------------------------- /src/git/queries/commit_filters.rs: -------------------------------------------------------------------------------- 1 | use ahash::{AHashMap, AHashSet}; 2 | use serde::Deserialize; 3 | use ts_rs::TS; 4 | 5 | use crate::git::git_types::{Commit, RefInfo}; 6 | use crate::git::queries::commit_calcs::{find_commit_ancestors, get_commit_map_cloned}; 7 | use crate::git::queries::patches::patches::load_patches; 8 
| 9 | // TODO: Investigate unused properties. 10 | #[allow(dead_code)] 11 | #[derive(Debug, Clone, Deserialize, TS)] 12 | #[serde(rename_all = "camelCase")] 13 | #[ts(export)] 14 | pub enum CommitFilter { 15 | Branch { id: String, short_name: String }, 16 | User { author: String, email: String }, 17 | Commit { commit_id: String }, 18 | File { file_name: String }, 19 | } 20 | 21 | pub fn apply_commit_filters( 22 | repo_path: &str, 23 | commits: Vec, 24 | refs: &[RefInfo], 25 | filters: &[CommitFilter], 26 | ) -> Vec { 27 | let commit_map = get_commit_map_cloned(&commits); 28 | 29 | let results: Vec> = filters 30 | .iter() 31 | .map(|filter| match filter { 32 | CommitFilter::Branch { short_name, .. } => { 33 | get_all_commits_with_branch_name(short_name, &commit_map, refs) 34 | } 35 | CommitFilter::User { author, .. } => get_commits_for_user(author, &commits), 36 | CommitFilter::Commit { commit_id } => [commit_id.as_str()].into_iter().collect(), 37 | CommitFilter::File { file_name } => { 38 | if let Ok(patches) = load_patches(repo_path, &commits) { 39 | return commits 40 | .iter() 41 | .filter(|c| { 42 | if let Some(files) = patches.get(&c.id) { 43 | return files 44 | .iter() 45 | .any(|p| p.new_file == *file_name || p.old_file == *file_name); 46 | } 47 | 48 | false 49 | }) 50 | .map(|c| c.id.as_str()) 51 | .collect(); 52 | } 53 | AHashSet::new() 54 | } 55 | }) 56 | .collect(); 57 | 58 | let ids: AHashSet = commits 59 | .iter() 60 | .filter(|c| results.iter().all(|r| r.contains(c.id.as_str()))) 61 | .map(|c| c.id.clone()) 62 | .collect(); 63 | 64 | let mut remaining_commits: Vec = Vec::new(); 65 | let mut skipped = 0; 66 | let mut index = 0; 67 | 68 | for c in commits.iter() { 69 | if ids.contains(c.id.as_str()) { 70 | let mut c = c.clone(); 71 | 72 | c.index = index; 73 | index += 1; 74 | 75 | c.num_skipped = skipped; 76 | skipped = 0; 77 | remaining_commits.push(c); 78 | } else { 79 | skipped += 1; 80 | } 81 | } 82 | 83 | remaining_commits 84 | } 85 | 86 | fn 
get_all_commits_with_branch_name<'a>( 87 | short_name: &str, 88 | commits: &'a AHashMap, 89 | refs: &[RefInfo], 90 | ) -> AHashSet<&'a str> { 91 | let mut ids_to_keep = AHashSet::<&'a str>::new(); 92 | 93 | refs 94 | .iter() 95 | .filter(|r| r.short_name == short_name) 96 | .flat_map(|r| commits.get(&r.commit_id)) 97 | .for_each(|c| { 98 | if !ids_to_keep.contains(c.id.as_str()) { 99 | let ancestors = find_commit_ancestors(c, commits); 100 | 101 | ids_to_keep.insert(c.id.as_str()); 102 | ids_to_keep.extend(ancestors); 103 | } 104 | }); 105 | 106 | // We include any stashes that have one of our commits as a parent. 107 | for (id, c) in commits { 108 | if !c.stash_id.is_empty() 109 | && c 110 | .parent_ids 111 | .iter() 112 | .any(|id| ids_to_keep.contains(id.as_str())) 113 | { 114 | ids_to_keep.insert(id); 115 | } 116 | } 117 | 118 | ids_to_keep 119 | } 120 | 121 | fn get_commits_for_user<'a>(author: &str, commits: &'a [Commit]) -> AHashSet<&'a str> { 122 | commits 123 | .iter() 124 | .filter(|c| c.author == author) 125 | .map(|c| c.id.as_str()) 126 | .collect() 127 | } 128 | -------------------------------------------------------------------------------- /src/git/queries/commits_parsers.rs: -------------------------------------------------------------------------------- 1 | use crate::git::git_types::{CommitInfo, DateResult}; 2 | use crate::git::queries::refs::{make_ref_info, RefInfoPart}; 3 | use crate::git::queries::{RefInfo, P_OPTIONAL_REFS}; 4 | use crate::parser::standard_parsers::{ 5 | ANY_WORD, SIGNED_INT, UNSIGNED_INT, UNTIL_LINE_END, WS, 6 | }; 7 | use crate::parser::Parser; 8 | use crate::{and, character, many, map, or, rep_parser_sep, take_char_while, until_str}; 9 | 10 | const END: &str = "4a41380f-a4e8-4251-9ca2-bf55186ed32a"; 11 | pub const PRETTY_FORMATTED: &str = 12 | "--pretty=format:%an; %ae; %ad; %H; %P; %B4a41380f-a4e8-4251-9ca2-bf55186ed32a; %d"; 13 | 14 | pub const SEP_CHAR: char = ';'; 15 | 16 | pub const P_GROUP: Parser = 
take_char_while!(|c: char| { c != SEP_CHAR }); 17 | 18 | const P_SEP: Parser = map!(and!(WS, character!(SEP_CHAR), WS), |_res: ( 19 | String, 20 | char, 21 | String 22 | )| { SEP_CHAR }); 23 | 24 | const P_EMAIL: Parser = or!(P_GROUP, WS); 25 | 26 | const P_DATE: Parser = map!(and!(UNSIGNED_INT, WS, SIGNED_INT), |res: ( 27 | String, 28 | String, 29 | String 30 | )| { 31 | DateResult { 32 | ms: res.0.parse::().unwrap_or_default() * 1000, 33 | adjustment: res.2.parse().unwrap_or_default(), 34 | } 35 | }); 36 | 37 | const P_PARENTS: Parser> = rep_parser_sep!(ANY_WORD, WS); 38 | 39 | const P_MESSAGE: Parser = until_str!(END); 40 | 41 | type PCommitResult = ( 42 | /* 0 */ String, 43 | /* 1 */ char, 44 | /* 2 */ String, 45 | /* 3 */ char, 46 | /* 4 */ DateResult, 47 | /* 5 */ char, 48 | /* 6 */ String, 49 | /* 7 */ char, 50 | /* 8 */ Vec, 51 | /* 9 */ char, 52 | /* 10 */ String, 53 | /* 11 */ char, 54 | /* 12 */ Vec, 55 | /* 13 */ String, 56 | ); 57 | 58 | // Don't put a comma on the last one otherwise the macro will complain 59 | pub const P_COMMIT_ROW: Parser = map!( 60 | and!( 61 | /* 0 */ P_GROUP, // author 62 | /* 1 */ P_SEP, 63 | /* 2 */ P_EMAIL, 64 | /* 3 */ P_SEP, 65 | /* 4 */ P_DATE, 66 | /* 5 */ P_SEP, 67 | /* 6 */ P_GROUP, // commit id 68 | /* 7 */ P_SEP, 69 | /* 8 */ P_PARENTS, 70 | /* 9 */ P_SEP, 71 | /* 10 */ P_MESSAGE, 72 | /* 11 */ P_SEP, 73 | /* 12 */ P_OPTIONAL_REFS, 74 | /* 13 */ WS 75 | ), 76 | |result: PCommitResult| { 77 | let refs = result 78 | .12 79 | .into_iter() 80 | .map(|info: RefInfoPart| make_ref_info(info, result.6.to_owned(), result.4.ms)) 81 | .collect::>(); 82 | 83 | let num_parents = result.8.len(); 84 | 85 | CommitInfo { 86 | author: result.0, 87 | email: result.2, 88 | date: result.4, 89 | id: result.6, 90 | index: 0, 91 | parent_ids: result.8, 92 | is_merge: num_parents > 1, 93 | message: result.10, 94 | stash_id: String::new(), 95 | refs, 96 | filtered: false, 97 | num_skipped: 0, 98 | } 99 | } 100 | ); 101 | 102 | pub const 
P_COMMITS: Parser> = many!(P_COMMIT_ROW); 103 | 104 | pub const P_ID_LIST: Parser> = rep_parser_sep!(ANY_WORD, UNTIL_LINE_END); 105 | -------------------------------------------------------------------------------- /src/git/queries/commits_test.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod tests { 3 | use std::env::current_dir; 4 | 5 | use crate::git::queries::commits::load_commits; 6 | use crate::git::queries::commits_parsers::{P_COMMIT_ROW, P_GROUP, P_ID_LIST}; 7 | use crate::parser::{parse_all, parse_part}; 8 | 9 | #[test] 10 | fn test_p_group() { 11 | let result = parse_part(P_GROUP, "omg,"); 12 | 13 | assert!(result.is_some()); 14 | } 15 | 16 | #[test] 17 | fn test_p_commit_row() { 18 | let res = parse_all( 19 | P_COMMIT_ROW, 20 | "Firstname Lastname; sugto555@gmail.com; 1648863350 +1300; \ 21 | dd5733ad96082f0f33164bd1e2d72f7540bf7d9f; 2e8966986f620f491c34e6243a546d85dd2322e0; \ 22 | Write commit row parser. Added necessary new git types. 
4a41380f-a4e8-4251-9ca2-bf55186ed32a\ 23 | ; (HEAD -> refs/heads/master, refs/remotes/origin/master)", 24 | ); 25 | 26 | assert!(res.is_some()); 27 | let c = res.unwrap(); 28 | assert_eq!(c.date.ms, 1648863350000); 29 | assert_eq!(c.author, "Firstname Lastname"); 30 | } 31 | 32 | #[test] 33 | fn test_load_commits() { 34 | let cwd = current_dir().unwrap(); 35 | let repo_path = cwd.into_os_string().into_string().unwrap(); 36 | 37 | let result = load_commits(&repo_path, 5); 38 | 39 | assert!(result.is_ok()); 40 | } 41 | 42 | // #[test] 43 | // fn test_load_commits_and_stashes() { 44 | // let result = load_commits_and_stashes(&ReqCommitsOptions2 { 45 | // repo_path: "/home/toby/Repos/gitfiend-seed/git-fiend".to_string(), 46 | // num_commits: 1000, 47 | // filters: Vec::new(), 48 | // }); 49 | // 50 | // println!("{:?}", result); 51 | // assert!(true); 52 | // } 53 | 54 | #[test] 55 | fn test_p_id_list() { 56 | let a = "febe3a062cc8c4c156a3d869310976173d64c04a"; 57 | let b = "2269bc6b714890412d0c983825cf9e9d340291ce"; 58 | let c = "1b7a69a328a61e9ad54dfb302cf3301448ab0cfe"; 59 | let d = "fd48376ff1e2cb213bd6d1919cf0f07f81a553a6"; 60 | 61 | let ids = format!("{a}\n{b}\n{c}\n{d}"); 62 | 63 | let out = parse_all(P_ID_LIST, &ids); 64 | 65 | assert!(out.is_some()); 66 | assert_eq!(out.unwrap().len(), 4); 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/git/queries/config/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::git::queries::config::config_file_parser::make_config_log; 2 | use crate::git::queries::config::config_output_parser::P_SUBMODULE_NAME; 3 | use crate::git::run_git::{run_git_err, RunGitOptions}; 4 | use crate::git::store::{CONFIG, STORE}; 5 | use crate::parser::{parse_all_err, run_parser, ParseOptions}; 6 | use crate::server::git_request::ReqOptions; 7 | use crate::server::request_util::R; 8 | use config_output_parser::{P_CONFIG, P_REMOTE_NAME}; 9 | 
use serde::Serialize; 10 | use std::collections::HashMap; 11 | use std::fs::read_to_string; 12 | use ts_rs::TS; 13 | 14 | pub(crate) mod config_file_parser; 15 | mod config_output_parser; 16 | 17 | #[derive(Debug, Clone, Serialize, TS)] 18 | #[ts(export)] 19 | pub struct GitConfig { 20 | pub entries: HashMap, 21 | // Key is remote name E.g "origin", value is url or path on disk. 22 | pub remotes: HashMap, 23 | pub submodules: HashMap, 24 | } 25 | 26 | impl GitConfig { 27 | pub fn new() -> GitConfig { 28 | GitConfig { 29 | entries: HashMap::new(), 30 | remotes: HashMap::new(), 31 | submodules: HashMap::new(), 32 | } 33 | } 34 | 35 | // We take short_name because this is the same between remote and local refs. 36 | pub fn get_remote_for_branch(&self, short_name: &str) -> String { 37 | let GitConfig { entries, .. } = self; 38 | 39 | if let Some(push_remote) = entries.get(&format!("branch.{}.pushremote", short_name)) { 40 | return push_remote.clone(); 41 | } 42 | 43 | if let Some(push_default) = entries.get("remote.pushdefault") { 44 | return push_default.clone(); 45 | } 46 | 47 | if let Some(remote) = entries.get(&format!("branch.{}.remote", short_name)) { 48 | return remote.clone(); 49 | } 50 | 51 | String::from("origin") 52 | } 53 | 54 | pub fn get_tracking_branch_name(&self, local_branch: &str) -> String { 55 | let remote = self.get_remote_for_branch(local_branch); 56 | 57 | format!("refs/remotes/{}/{}", remote, local_branch) 58 | } 59 | } 60 | 61 | // Use this version on focus of GitFiend only. Get it from the store otherwise. 62 | pub fn load_full_config(options: &ReqOptions) -> R { 63 | let ReqOptions { repo_path } = options; 64 | 65 | let repo = STORE.get_repo_path(repo_path)?; 66 | let config_path = repo.git_path.join("config"); 67 | 68 | let result_text = if let Ok(text) = read_to_string(config_path) { 69 | make_config_log(&text) 70 | } else { 71 | // If new config parser fails, fallback to the old one. 
72 | Ok( 73 | run_git_err(RunGitOptions { 74 | repo_path, 75 | args: ["config", "--list"], 76 | })? 77 | .stdout, 78 | ) 79 | }; 80 | 81 | let config_result = parse_all_err(P_CONFIG, result_text?.as_str()); 82 | let entries = config_result?; 83 | let mut remotes = HashMap::new(); 84 | let mut submodules = HashMap::new(); 85 | 86 | for (key, value) in entries.iter() { 87 | if key.starts_with("remote") { 88 | let name = run_parser( 89 | P_REMOTE_NAME, 90 | key, 91 | ParseOptions { 92 | must_parse_all: true, 93 | print_error: false, 94 | }, 95 | ); 96 | 97 | if let Some(name) = name { 98 | remotes.insert(name, value.clone()); 99 | } 100 | } else if key.starts_with("submodule") { 101 | let name = run_parser( 102 | P_SUBMODULE_NAME, 103 | key, 104 | ParseOptions { 105 | must_parse_all: true, 106 | print_error: false, 107 | }, 108 | ); 109 | 110 | if let Some(name) = name { 111 | submodules.insert(name, value.clone()); 112 | } 113 | } 114 | } 115 | 116 | let config = GitConfig { 117 | entries, 118 | remotes, 119 | submodules, 120 | }; 121 | 122 | CONFIG.insert(repo_path.clone(), config.clone()); 123 | 124 | Ok(config) 125 | } 126 | -------------------------------------------------------------------------------- /src/git/queries/config/test/submodule1.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GitFiend/gitfiend-core/2d522525a92efe8b121b1320f6539e7264b6dc87/src/git/queries/config/test/submodule1.zip -------------------------------------------------------------------------------- /src/git/queries/hunks/html_code_split.rs: -------------------------------------------------------------------------------- 1 | use crate::f; 2 | use crate::git::git_types::{Hunk, HunkLine, HunkLineStatus}; 3 | use crate::git::queries::hunks::html_code::{ 4 | add_line, div, get_margin_width, make_spaces, pad_left, s, 5 | }; 6 | use crate::git::queries::syntax_colouring::ColourLine; 7 | 8 | pub fn generate_lines_split( 9 | 
hl_left: &[HunkLine],
  hl_right: &[HunkLine],
  hunks: &[Hunk],
  colour: &mut ColourLine,
) -> String {
  // Width of the longest line on either side, used to pad both panes equally.
  // (A leftover debug println! of the slice lengths was removed here.)
  let char_width = get_longest_line(hl_left, hl_right);

  let (left_margin, left_lines) =
    gen_side(hl_left, hunks, colour, Side::Left, char_width);
  let (right_margin, right_lines) =
    gen_side(hl_right, hunks, colour, Side::Right, char_width);

  let mut left = div("margin", &left_margin);
  left += &div("code", &left_lines);

  let mut right = div("margin", &right_margin);
  right += &div("code", &right_lines);

  // A pure addition or pure deletion has one empty side: render only the other.
  if hl_left.is_empty() {
    return div("codeRight", &right);
  } else if hl_right.is_empty() {
    return div("codeLeft", &left);
  }

  f!("{}{}", div("codeLeft", &left), div("codeRight", &right))
}

/// Builds the (margin html, code html) pair for one side of the split view.
fn gen_side(
  hunk_lines: &[HunkLine],
  hunks: &[Hunk],
  colour: &mut ColourLine,
  side: Side,
  char_width: usize,
) -> (String, String) {
  use HunkLineStatus::*;

  let margin_width = get_margin_width(hunk_lines);

  let mut margin = String::new();
  let mut lines = String::new();

  for line in hunk_lines {
    let hunk = line.get_hunk(hunks);

    // Syntax colouring state resets across hunk header boundaries.
    match &line.status {
      HeaderStart => {
        colour.end_fragment();
      }
      HeaderEnd => {
        colour.start_fragment();
      }
      _ => {}
    }

    add_margin_line(&mut margin, line, margin_width, side);
    add_line(&mut lines, hunk, line, colour);
  }

  // Pad the pane to the shared width so both sides align.
  lines += &div("fillWidth", &make_spaces(char_width));

  (margin, lines)
}

#[derive(Clone, Copy, PartialEq)]
enum Side {
  Left,
  Right,
}

/// Appends the margin cell (line number / +/- marker) for one hunk line.
fn add_margin_line(
  margin: &mut String,
  line: &HunkLine,
  margin_width: usize,
  side: Side,
) {
  use HunkLineStatus::*;

  let HunkLine { status, .. } = line;

  match status {
    Added => {
      *margin += &div("added", &f!(" {:>margin_width$} ", s(line.new_num, "+")));
    }
    Removed => {
      *margin += &div("removed", &f!(" {:>margin_width$} ", s(line.old_num, "-")));
    }
    Unchanged => {
      // Unchanged lines show the old number on the left pane and the new
      // number on the right pane.
      if side == Side::Left {
        *margin += &pad_left(s(line.old_num, ""), margin_width + 1);
      } else {
        *margin += &pad_left(s(line.new_num, ""), margin_width + 1);
      }
      *margin += " \n";
    }
    HeaderStart => {
      *margin += &div("headerStart", "");
    }
    HeaderEnd => {
      *margin += &div("headerEnd", "");
    }
    Skip => {
      *margin += &div("empty", "");
    }
  }
}

/// Length (in chars) of the longest line text on either side; 0 when both empty.
fn get_longest_line(left: &[HunkLine], right: &[HunkLine]) -> usize {
  left
    .iter()
    .chain(right.iter())
    .map(|hl| hl.text.len())
    .max()
    .unwrap_or(0)
}
-------------------------------------------------------------------------------- /src/git/queries/hunks/hunk_parsers.rs: --------------------------------------------------------------------------------
use crate::git::git_types::{Hunk, HunkLine, HunkLineStatus, HunkRange};
use crate::git::queries::hunks::hunk_line_parsers::{
  Line, P_HUNK_LINES, P_HUNK_LINE_RANGES,
};
use crate::parser::standard_parsers::{UNTIL_LINE_END, WS};
use crate::parser::Parser;
use crate::{and, many, map, map2, or, word};

type IgnoredLine<'a> = (&'a str, String);

const P_DIFF_LINE: Parser<IgnoredLine> = and!(word!("diff"), UNTIL_LINE_END);

// Optional lines after "diff --git": deleted / new file / rename header.
const P_OPTIONAL_HEADER: Parser<String> = or!(
  map2!(and!(word!("deleted"), UNTIL_LINE_END), __, {
    String::from("deleted")
  }),
  map2!(and!(word!("new file"), UNTIL_LINE_END), __, {
    String::from("new file")
  }),
  P_RENAME_HEADER,
  WS
);

/*
Parse something like:

similarity index 88%
rename from BetterName.txt
rename to BetterName2.txt
30 | */ 31 | const P_RENAME_HEADER: Parser = map2!( 32 | and!( 33 | and!(word!("similarity"), UNTIL_LINE_END), 34 | and!(word!("rename"), UNTIL_LINE_END), 35 | and!(word!("rename"), UNTIL_LINE_END) 36 | ), 37 | __, 38 | String::from("rename") 39 | ); 40 | 41 | const P_INDEX_LINE: Parser = and!(word!("index"), UNTIL_LINE_END); 42 | 43 | const P_OLD_FILE: Parser = and!(word!("---"), UNTIL_LINE_END); 44 | 45 | const P_NEW_FILE: Parser = and!(word!("+++"), UNTIL_LINE_END); 46 | 47 | const P_BINARY_INFO: Parser = and!(word!("Binary"), UNTIL_LINE_END); 48 | 49 | struct FileInfo { 50 | is_binary: bool, 51 | } 52 | const P_FILE_INFO: Parser = or!( 53 | map2!( 54 | and!(P_OLD_FILE, P_NEW_FILE), 55 | __, 56 | FileInfo { is_binary: false } 57 | ), 58 | map2!(P_BINARY_INFO, __, FileInfo { is_binary: true }) 59 | ); 60 | 61 | const P_DIFF_HEADER: Parser = map2!( 62 | and!(P_DIFF_LINE, P_OPTIONAL_HEADER, P_INDEX_LINE, P_FILE_INFO), 63 | res, 64 | res.3 65 | ); 66 | 67 | const P_HUNK: Parser = map2!( 68 | and!(P_HUNK_LINE_RANGES, UNTIL_LINE_END, P_HUNK_LINES), 69 | res, 70 | { 71 | let old_line_range = res.0 .0; 72 | let new_line_range = res.0 .1; 73 | 74 | let old_num = old_line_range.start; 75 | let new_num = new_line_range.start; 76 | 77 | Hunk { 78 | old_line_range, 79 | new_line_range, 80 | context_line: String::from(""), 81 | lines: get_hunk_lines(old_num, new_num, res.2), 82 | index: -1, 83 | } 84 | } 85 | ); 86 | 87 | pub const P_HUNKS: Parser> = map!( 88 | and!(P_DIFF_HEADER, many!(P_HUNK)), 89 | |res: (FileInfo, Vec)| { 90 | if res.0.is_binary { 91 | return vec![Hunk { 92 | old_line_range: HunkRange { 93 | start: 0, 94 | length: 0, 95 | }, 96 | new_line_range: HunkRange { 97 | start: 0, 98 | length: 0, 99 | }, 100 | context_line: String::from(""), 101 | lines: Vec::new(), 102 | index: 0, 103 | }]; 104 | } 105 | 106 | res 107 | .1 108 | .into_iter() 109 | .enumerate() 110 | .map(|(i, mut hunk)| { 111 | let index = i as i32; 112 | 113 | hunk.index = index; 114 | 115 | 
hunk.lines = hunk 116 | .lines 117 | .into_iter() 118 | .map(|mut line| { 119 | line.hunk_index = index; 120 | line 121 | }) 122 | .collect(); 123 | 124 | hunk 125 | }) 126 | .collect() 127 | } 128 | ); 129 | 130 | fn get_hunk_lines(old_num: i32, new_num: i32, lines: Vec) -> Vec { 131 | let mut old_num = old_num; 132 | let mut new_num = new_num; 133 | 134 | let mut hunk_lines: Vec = Vec::new(); 135 | 136 | for (i, line) in lines.into_iter().enumerate() { 137 | match line.status { 138 | HunkLineStatus::Unchanged => { 139 | hunk_lines.push(HunkLine::from_line( 140 | line, 141 | i as u32, 142 | -1, 143 | Some(old_num), 144 | Some(new_num), 145 | )); 146 | old_num += 1; 147 | new_num += 1; 148 | } 149 | HunkLineStatus::Added => { 150 | hunk_lines.push(HunkLine::from_line(line, i as u32, -1, None, Some(new_num))); 151 | new_num += 1; 152 | } 153 | HunkLineStatus::Removed => { 154 | hunk_lines.push(HunkLine::from_line(line, i as u32, -1, Some(old_num), None)); 155 | old_num += 1; 156 | } 157 | _ => {} 158 | }; 159 | } 160 | 161 | hunk_lines 162 | } 163 | 164 | #[cfg(test)] 165 | mod tests { 166 | use crate::git::queries::hunks::hunk_parsers::{P_DIFF_HEADER, P_HUNK}; 167 | use crate::parser::parse_all; 168 | 169 | #[test] 170 | fn test_p_diff_header() { 171 | let diff_header = "diff --git a/src2/renderer-process/redux-store/repo-state/commits/commits-reducer.test.ts b/src2/renderer-process/redux-store/repo-state/commits/commits-reducer.test.ts 172 | index 4296fe4..5b0d387 100644 173 | --- a/src2/renderer-process/redux-store/repo-state/commits/commits-reducer.test.ts 174 | +++ b/src2/renderer-process/redux-store/repo-state/commits/commits-reducer.test.ts"; 175 | 176 | let result = parse_all(P_DIFF_HEADER, diff_header); 177 | 178 | assert!(result.is_some()); 179 | } 180 | 181 | #[test] 182 | fn test_p_hunk() { 183 | let line_range = "@@ -1,19 +1,17 @@"; 184 | let hunk_line1 = " describe('test commits state', () => {\r\n"; 185 | let hunk_line2 = "\n"; 186 | let hunk_line3 = 
"- it(`can load ${pathToThisRepo}`, async () => {\r\n"; 187 | let hunk_line4 = "+ it('todo', () => {"; 188 | let hunk_lines = format!("{hunk_line1}{hunk_line2}{hunk_line3}{hunk_line4}"); 189 | 190 | let hunk_text = format!("{}\n{}", line_range, hunk_lines); 191 | 192 | let out = parse_all(P_HUNK, &hunk_text); 193 | 194 | assert!(out.is_some()); 195 | } 196 | } 197 | -------------------------------------------------------------------------------- /src/git/queries/hunks/images.rs: -------------------------------------------------------------------------------- 1 | use serde::{Deserialize, Serialize}; 2 | use std::fs; 3 | use ts_rs::TS; 4 | 5 | use crate::git::run_git::{run_git_buffer, RunGitOptions}; 6 | 7 | #[derive(Debug, Deserialize, Serialize, TS)] 8 | #[serde(rename_all = "camelCase")] 9 | #[ts(export)] 10 | pub struct ReqImageOptions { 11 | pub repo_path: String, 12 | pub commit_id: String, 13 | pub original_image_path: String, 14 | pub temp_image_path: String, 15 | } 16 | 17 | pub fn load_commit_image(options: &ReqImageOptions) -> bool { 18 | let ReqImageOptions { 19 | repo_path, 20 | commit_id, 21 | original_image_path, 22 | temp_image_path, 23 | } = options; 24 | 25 | if let Some(buffer) = run_git_buffer(RunGitOptions { 26 | repo_path, 27 | args: ["show", &format!("{}:{}", commit_id, original_image_path)], 28 | }) { 29 | return fs::write(temp_image_path, buffer).is_ok(); 30 | } 31 | false 32 | } 33 | -------------------------------------------------------------------------------- /src/git/queries/hunks/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod html_code; 2 | pub mod html_code_split; 3 | mod hunk_line_parsers; 4 | pub mod hunk_parsers; 5 | pub mod images; 6 | pub mod load_hunks; 7 | -------------------------------------------------------------------------------- /src/git/queries/mod.rs: -------------------------------------------------------------------------------- 1 | use 
crate::git::git_types::RefInfo; 2 | use crate::git::queries::refs::P_OPTIONAL_REFS; 3 | 4 | pub(crate) mod commits; 5 | mod commits_parsers; 6 | mod commits_test; 7 | 8 | pub(crate) mod commit_calcs; 9 | mod commit_filters; 10 | pub(crate) mod config; 11 | pub(crate) mod hunks; 12 | pub(crate) mod patches; 13 | pub(crate) mod refs; 14 | pub(crate) mod run; 15 | pub(crate) mod scan_workspace; 16 | pub(crate) mod search; 17 | pub(crate) mod stashes; 18 | mod stashes_test; 19 | mod syntax_colouring; 20 | pub(crate) mod unpushed_commits; 21 | pub(crate) mod wip; 22 | pub mod workspace; 23 | 24 | pub const COMMIT_0_ID: &str = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"; 25 | -------------------------------------------------------------------------------- /src/git/queries/patches/cache.rs: -------------------------------------------------------------------------------- 1 | extern crate directories; 2 | 3 | use std::collections::HashMap; 4 | use std::error::Error; 5 | use std::fs::{create_dir_all, remove_dir_all, File}; 6 | use std::io::{BufReader, Read, Write}; 7 | use std::path::{Path, PathBuf}; 8 | 9 | use crate::config::{APPLICATION, ORGANISATION, QUALIFIER}; 10 | use crate::dprintln; 11 | use directories::ProjectDirs; 12 | use loggers::elapsed; 13 | 14 | use crate::git::git_types::Patch; 15 | use crate::git::store::STORE; 16 | 17 | pub fn write_patches_cache( 18 | repo_path: &str, 19 | patches: &HashMap>, 20 | ) -> Option<()> { 21 | let cache_dir = get_cache_dir()?; 22 | let file_name = generate_file_name(repo_path); 23 | 24 | let full_path = cache_dir.join(file_name); 25 | 26 | STORE.insert_patches(repo_path, patches); 27 | 28 | write_patches_to_file(full_path, patches).ok() 29 | } 30 | 31 | pub fn load_patches_cache(repo_path: &str) -> Option>> { 32 | if let Some(patches) = STORE.get_patches(repo_path) { 33 | return Some(patches); 34 | } 35 | 36 | let cache_dir = get_cache_dir()?; 37 | create_dir_all(&cache_dir).ok()?; 38 | 39 | let file_name = 
/// Builds a cache file name from the repo path, e.g.
/// c:\user\something\thing -> cusersomethingthing.json
fn generate_file_name(repo_path: &str) -> String {
  // Concatenate every path component, then drop the separator characters
  // so the result is a flat, filesystem-safe identifier.
  let mut id = String::new();

  for part in Path::new(repo_path).iter() {
    id.push_str(part.to_str().unwrap_or(""));
  }

  id.retain(|c| !matches!(c, '\\' | ':' | '/'));

  format!("{}.json", id)
}
// Extensions treated as images for preview purposes.
const IMAGE_EXTENSIONS: [&str; 10] = [
  ".apng", ".bmp", ".gif", ".ico", ".cur", ".jpg", ".jpeg", ".png", ".svg", ".webp",
];

/// True when the file name ends with a known image extension
/// (case-insensitive).
pub fn file_is_image(file_name: &str) -> bool {
  let lower = file_name.to_lowercase();

  IMAGE_EXTENSIONS.iter().any(|ext| lower.ends_with(ext))
}
= result; 25 | 26 | ( 27 | commit_id.clone(), 28 | patches 29 | .into_iter() 30 | .map(|data| map_data_to_patch(data, commit_id.clone())) 31 | .collect(), 32 | ) 33 | } 34 | ); 35 | 36 | pub const P_MANY_PATCHES_WITH_COMMIT_IDS: Parser)>> = 37 | many!(P_PATCHES_WITH_COMMIT_ID); 38 | 39 | pub fn map_data_to_patch(data: PatchData, commit_id: String) -> Patch { 40 | let PatchData { 41 | patch_type, 42 | old_file, 43 | new_file, 44 | id, 45 | } = data; 46 | 47 | let is_image = patches::file_is_image(&new_file); 48 | 49 | Patch { 50 | patch_type, 51 | old_file, 52 | new_file, 53 | id, 54 | commit_id, 55 | is_image, 56 | } 57 | } 58 | 59 | pub const P_PATCHES: Parser> = 60 | many!(or!(P_RENAME_PATCH, P_COPY_PATCH, P_OTHER_PATCH)); 61 | 62 | const P_RENAME_PATCH: Parser = map!( 63 | and!( 64 | and!(character!('R'), UNSIGNED_INT), 65 | UNTIL_NUL, 66 | UNTIL_NUL, 67 | UNTIL_NUL 68 | ), 69 | |result: ((char, String), String, String, String)| { 70 | PatchData { 71 | patch_type: PatchType::R, 72 | old_file: result.2.clone(), 73 | new_file: result.3.clone(), 74 | id: format!("{}-{}{}", result.3, result.0 .0, result.0 .1), 75 | } 76 | } 77 | ); 78 | 79 | const P_COPY_PATCH: Parser = map!( 80 | and!( 81 | and!(character!('C'), UNSIGNED_INT), 82 | UNTIL_NUL, 83 | UNTIL_NUL, 84 | UNTIL_NUL 85 | ), 86 | |result: ((char, String), String, String, String)| { 87 | PatchData { 88 | patch_type: PatchType::C, 89 | old_file: result.2.clone(), 90 | new_file: result.3.clone(), 91 | id: format!("{}-{}{}", result.3, result.0 .0, result.0 .1), 92 | } 93 | } 94 | ); 95 | 96 | const P_OTHER_PATCH: Parser = map!( 97 | and!(P_STATUS, UNTIL_NUL, UNTIL_NUL), 98 | |result: (PatchType, String, String)| { 99 | let (t, _, n) = result; 100 | let type_str = t.to_string(); 101 | 102 | PatchData { 103 | patch_type: t, 104 | old_file: n.clone(), 105 | new_file: n.clone(), 106 | id: format!("{}-{}", n, type_str), 107 | } 108 | } 109 | ); 110 | 111 | const P_STATUS: Parser = map!( 112 | or!( 113 | 
character!('A'), 114 | character!('B'), 115 | character!('C'), 116 | character!('D'), 117 | character!('M'), 118 | character!('T'), 119 | character!('U'), 120 | character!('X') 121 | ), 122 | |result: char| { 123 | match result { 124 | 'A' => PatchType::A, 125 | 'B' => PatchType::B, 126 | 'C' => PatchType::C, 127 | 'D' => PatchType::D, 128 | 'M' => PatchType::M, 129 | 'T' => PatchType::T, 130 | 'U' => PatchType::U, 131 | _ => PatchType::X, 132 | } 133 | } 134 | ); 135 | 136 | #[cfg(test)] 137 | mod tests { 138 | use crate::git::git_types::PatchType; 139 | use crate::git::queries::patches::patch_parsers::{ 140 | PatchData, P_COPY_PATCH, P_OTHER_PATCH, P_RENAME_PATCH, 141 | }; 142 | use crate::parser::parse_all; 143 | 144 | const P1: &str = "src2/parser-lib/input.ts"; 145 | const P2: &str = 146 | "src2/renderer-process/redux-store/repo-state/commits/commits-reducer.test.ts"; 147 | // const P3: &str = "\"src2/Parser Lib/input.ts\""; 148 | 149 | #[test] 150 | fn test_p_rename_patch() { 151 | let log = &format!("R100\0{}\0{}\0", P1, P2); 152 | let res = parse_all(P_RENAME_PATCH, log); 153 | 154 | assert!(res.is_some()); 155 | assert_eq!( 156 | res.unwrap(), 157 | PatchData { 158 | patch_type: PatchType::R, 159 | old_file: P1.to_string(), 160 | new_file: P2.to_string(), 161 | id: format!("{P2}-R100") 162 | } 163 | ) 164 | } 165 | 166 | #[test] 167 | fn test_p_copy_patch() { 168 | let log = &format!("C100\0{}\0{}\0", P1, P2); 169 | let res = parse_all(P_COPY_PATCH, log); 170 | 171 | assert!(res.is_some()); 172 | assert_eq!( 173 | res.unwrap(), 174 | PatchData { 175 | patch_type: PatchType::C, 176 | old_file: P1.to_string(), 177 | new_file: P2.to_string(), 178 | id: format!("{P2}-C100") 179 | } 180 | ) 181 | } 182 | 183 | #[test] 184 | fn test_p_other_patch() { 185 | let log = &format!("M\0{P2}\0"); 186 | let res = parse_all(P_OTHER_PATCH, log); 187 | 188 | assert!(res.is_some()); 189 | assert_eq!( 190 | res.unwrap(), 191 | PatchData { 192 | patch_type: PatchType::M, 193 | 
old_file: P2.to_string(), 194 | new_file: P2.to_string(), 195 | id: format!("{P2}-M") 196 | } 197 | ) 198 | } 199 | } 200 | -------------------------------------------------------------------------------- /src/git/queries/patches/patches_for_commit.rs: -------------------------------------------------------------------------------- 1 | use crate::git::git_types::Patch; 2 | use crate::git::queries::patches::patches::load_patches; 3 | use crate::git::store::STORE; 4 | use crate::server::request_util::{ES, R}; 5 | use serde::{Deserialize, Serialize}; 6 | use ts_rs::TS; 7 | 8 | #[derive(Debug, Deserialize, Serialize, TS)] 9 | #[serde(rename_all = "camelCase")] 10 | #[ts(export)] 11 | pub struct ReqPatchesForCommitOpts { 12 | pub repo_path: String, 13 | pub commit_id: String, 14 | } 15 | 16 | pub fn load_patches_for_commit(options: &ReqPatchesForCommitOpts) -> R> { 17 | let ReqPatchesForCommitOpts { 18 | repo_path, 19 | commit_id, 20 | } = options; 21 | 22 | let (commits, _) = STORE 23 | .get_commits_and_refs(repo_path) 24 | .ok_or(ES::from("load_patches_for_commit: Couldn't get commits."))?; 25 | 26 | let all_patches = load_patches(repo_path, &commits)?; 27 | 28 | Ok( 29 | all_patches 30 | .get(commit_id) 31 | .ok_or(ES::from( 32 | "load_patches_for_commit: Missing patches for commit.", 33 | ))? 
34 | .clone(), 35 | ) 36 | } 37 | -------------------------------------------------------------------------------- /src/git/queries/refs/ref_diffs.rs: -------------------------------------------------------------------------------- 1 | use ahash::AHashMap; 2 | use rayon::prelude::*; 3 | use serde::{Deserialize, Serialize}; 4 | use std::collections::HashMap; 5 | use ts_rs::TS; 6 | 7 | use crate::git::git_types::{ 8 | Commit, LocalRefCommitDiff, RefCommitDiff, RefInfo, RefLocation, 9 | }; 10 | use crate::git::queries::commit_calcs::count_commits_between_commit_ids; 11 | use crate::git::queries::config::GitConfig; 12 | use crate::git::store::{CONFIG, STORE}; 13 | 14 | #[derive(Debug, Deserialize, Serialize, TS)] 15 | #[serde(rename_all = "camelCase")] 16 | #[ts(export)] 17 | pub struct RefDiffOptions { 18 | pub repo_path: String, 19 | pub head_commit_id: String, 20 | } 21 | 22 | pub fn calc_ref_diffs( 23 | options: &RefDiffOptions, 24 | ) -> Option<( 25 | HashMap, 26 | HashMap, 27 | )> { 28 | let RefDiffOptions { 29 | repo_path, 30 | head_commit_id, 31 | .. 32 | } = options; 33 | 34 | let (commits, refs) = STORE.get_commits_and_refs(repo_path)?; 35 | let config = CONFIG.get_by_key(repo_path).unwrap_or_else(GitConfig::new); 36 | 37 | Some(calc_ref_diffs_inner( 38 | &commits, 39 | &refs, 40 | &config, 41 | head_commit_id, 42 | )) 43 | } 44 | 45 | // We need to pass in head as it may not be found in provided commits in some cases. 
46 | pub fn calc_ref_diffs_inner( 47 | commits: &[Commit], 48 | refs: &[RefInfo], 49 | config: &GitConfig, 50 | head_commit_id: &String, 51 | ) -> ( 52 | HashMap, 53 | HashMap, 54 | ) { 55 | let ref_map = refs.iter().map(|r| (r.id.clone(), r.clone())).collect(); 56 | // let refs = get_ref_info_map_from_commits(commits); 57 | let pairs = get_ref_pairs(&ref_map, config); 58 | 59 | let commit_map: AHashMap = 60 | commits.iter().map(|c| (c.id.clone(), c.clone())).collect(); 61 | 62 | let local_ref_diffs = calc_local_ref_diffs(head_commit_id, pairs, &commit_map); 63 | let remote_ref_diffs = calc_remote_ref_diffs(head_commit_id, &ref_map, &commit_map); 64 | 65 | (local_ref_diffs, remote_ref_diffs) 66 | } 67 | 68 | pub fn calc_remote_ref_diffs( 69 | head_commit_id: &String, 70 | refs: &AHashMap, 71 | commits: &AHashMap, 72 | ) -> HashMap { 73 | refs 74 | .par_iter() 75 | .map(|(_, info)| { 76 | ( 77 | info.id.clone(), 78 | calc_remote_ref_diff(head_commit_id, info, commits), 79 | ) 80 | }) 81 | .collect() 82 | } 83 | 84 | fn calc_remote_ref_diff( 85 | head_commit_id: &String, 86 | info: &RefInfo, 87 | commits: &AHashMap, 88 | ) -> RefCommitDiff { 89 | let ref_commit_id = &info.commit_id; 90 | 91 | let ahead_of_head = 92 | count_commits_between_commit_ids(ref_commit_id, head_commit_id, commits); 93 | let behind_head = 94 | count_commits_between_commit_ids(head_commit_id, ref_commit_id, commits); 95 | 96 | RefCommitDiff { 97 | ahead_of_head, 98 | behind_head, 99 | } 100 | } 101 | 102 | fn calc_local_ref_diffs( 103 | head_commit_id: &String, 104 | pairs: Vec<(RefInfo, Option)>, 105 | commits: &AHashMap, 106 | ) -> HashMap { 107 | pairs 108 | .into_par_iter() 109 | .map(|(local, remote)| { 110 | ( 111 | local.id.clone(), 112 | calc_local_ref_diff(head_commit_id, local, remote, commits), 113 | ) 114 | }) 115 | .collect() 116 | } 117 | 118 | fn calc_local_ref_diff( 119 | head_commit_id: &String, 120 | local: RefInfo, 121 | remote: Option, 122 | commits: &AHashMap, 123 | ) -> 
LocalRefCommitDiff { 124 | let local_id = &local.commit_id; 125 | 126 | let ahead_of_head = count_commits_between_commit_ids(local_id, head_commit_id, commits); 127 | let behind_head = count_commits_between_commit_ids(head_commit_id, local_id, commits); 128 | 129 | if let Some(remote) = remote { 130 | let remote_id = &remote.commit_id; 131 | 132 | let ahead_of_remote = count_commits_between_commit_ids(local_id, remote_id, commits); 133 | let behind_remote = count_commits_between_commit_ids(remote_id, local_id, commits); 134 | 135 | LocalRefCommitDiff { 136 | ahead_of_remote, 137 | behind_remote, 138 | ahead_of_head, 139 | behind_head, 140 | } 141 | } else { 142 | LocalRefCommitDiff { 143 | ahead_of_remote: 0, 144 | behind_remote: 0, 145 | ahead_of_head, 146 | behind_head, 147 | } 148 | } 149 | } 150 | 151 | fn get_ref_pairs( 152 | refs: &AHashMap, 153 | config: &GitConfig, 154 | ) -> Vec<(RefInfo, Option)> { 155 | refs 156 | .iter() 157 | .map(|(_, r)| r) 158 | .filter(|r| r.location == RefLocation::Local) 159 | .map(|r| (r.clone(), get_sibling(r, config, refs))) 160 | .collect() 161 | } 162 | 163 | fn get_sibling( 164 | ref_info: &RefInfo, 165 | config: &GitConfig, 166 | refs: &AHashMap, 167 | ) -> Option { 168 | let RefInfo { 169 | sibling_id, 170 | short_name, 171 | .. 
172 | } = ref_info; 173 | 174 | if !sibling_id.is_empty() { 175 | if let Some(sibling) = refs.get(sibling_id) { 176 | let remote = config.get_remote_for_branch(short_name); 177 | 178 | if let Some(name) = &sibling.remote_name { 179 | if remote == *name { 180 | return Some(sibling.clone()); 181 | } 182 | } 183 | } 184 | } 185 | 186 | None 187 | } 188 | 189 | // pub fn get_ref_info_map_from_commits(commits: &[Commit]) -> AHashMap { 190 | // let mut refs: AHashMap = AHashMap::new(); 191 | // 192 | // for c in commits.iter() { 193 | // for r in c.refs.iter() { 194 | // if !r.full_name.contains("HEAD") { 195 | // refs.insert(r.id.clone(), r.clone()); 196 | // } 197 | // } 198 | // } 199 | // 200 | // refs 201 | // } 202 | -------------------------------------------------------------------------------- /src/git/queries/run.rs: -------------------------------------------------------------------------------- 1 | use crate::git::run_git::{run_git_err, RunGitOptions}; 2 | use crate::server::request_util::R; 3 | use serde::Deserialize; 4 | use ts_rs::TS; 5 | 6 | #[derive(Debug, Deserialize, TS)] 7 | #[serde(rename_all = "camelCase")] 8 | #[ts(export)] 9 | pub struct RunOptions { 10 | pub repo_path: String, 11 | pub args: Vec, 12 | } 13 | 14 | pub fn run(options: &RunOptions) -> R { 15 | Ok( 16 | run_git_err(RunGitOptions { 17 | repo_path: &options.repo_path, 18 | args: &options.args, 19 | })? 
20 | .stdout, 21 | ) 22 | } 23 | -------------------------------------------------------------------------------- /src/git/queries/scan_workspace.rs: -------------------------------------------------------------------------------- 1 | use crate::git::queries::config::config_file_parser::{ 2 | parse_config_file, ConfigFile, ConfigSection, Row, 3 | }; 4 | use crate::git::store::{RepoPath, STORE}; 5 | use crate::server::request_util::{ES, R}; 6 | use serde::Deserialize; 7 | use std::fs::{read_dir, read_to_string}; 8 | use std::path::{Path, PathBuf}; 9 | use ahash::HashSet; 10 | use ts_rs::TS; 11 | use crate::dprintln; 12 | 13 | const MAX_SCAN_DEPTH: u8 = 5; 14 | const MAX_DIR_SIZE: usize = 50; 15 | 16 | #[derive(Debug, Deserialize, TS)] 17 | #[serde(rename_all = "camelCase")] 18 | #[ts(export)] 19 | pub struct ScanOptions { 20 | pub repo_path: String, 21 | pub workspaces_enabled: bool, 22 | } 23 | 24 | pub fn scan_workspace(options: &ScanOptions) -> HashSet { 25 | let dir = PathBuf::from(&options.repo_path); 26 | 27 | let repo_paths = if !options.workspaces_enabled { 28 | scan_single_repo(dir) 29 | } else { 30 | let mut repo_paths: Vec = Vec::new(); 31 | scan_workspace_inner(dir, &mut repo_paths, 0); 32 | repo_paths 33 | }; 34 | 35 | dprintln!("repo_paths: {:?}", repo_paths); 36 | 37 | let result = repo_paths.iter().map(|r| r.path.clone()).collect(); 38 | 39 | // We don't continue opening a repo if empty. 
Don't clobber REPO_PATHS 40 | if !repo_paths.is_empty() { 41 | STORE.set_repo_paths(repo_paths); 42 | } 43 | 44 | result 45 | } 46 | 47 | fn scan_single_repo(dir: PathBuf) -> Vec { 48 | get_git_repo(&dir).into_iter().collect() 49 | } 50 | 51 | fn scan_workspace_inner(dir: PathBuf, repo_paths: &mut Vec, depth: u8) { 52 | if let Some(repo_path) = get_git_repo(&dir) { 53 | repo_paths.push(repo_path); 54 | } 55 | 56 | if depth < MAX_SCAN_DEPTH { 57 | let entries = get_dir_entries(&dir); 58 | 59 | if let Some(path) = entries.iter().find(|p| p.ends_with(".gitmodules")) { 60 | if let Ok(submodules) = read_git_modules(path) { 61 | println!("submodules: {:?}", submodules); 62 | repo_paths.extend(submodules); 63 | } 64 | } 65 | if entries.len() < MAX_DIR_SIZE || depth == 0 { 66 | for e in entries { 67 | if e.is_dir() && !is_hidden(&e) { 68 | scan_workspace_inner(e, repo_paths, depth + 1); 69 | } 70 | } 71 | } 72 | } 73 | } 74 | 75 | fn read_git_modules(file_path: &PathBuf) -> R> { 76 | let text = read_to_string(file_path)?; 77 | let config: Vec = parse_config_file(&text)?; 78 | 79 | let mut submodules: Vec = Vec::new(); 80 | let parent_repo_dir = file_path.parent().ok_or(ES::from("No parent dir"))?; 81 | 82 | for c in config { 83 | if let ConfigFile::Section(section) = c { 84 | let ConfigSection(heading, rows) = section; 85 | if heading.0 == "submodule" { 86 | if let Some(Row::Data(_, path)) = rows.iter().find(|row| match row { 87 | Row::Data(path, _) => path == "path", 88 | Row::Other(_) => false, 89 | }) { 90 | let submodule_path = parent_repo_dir.join(path); 91 | if let Some(repo) = get_git_repo(&submodule_path) { 92 | submodules.push(repo); 93 | } 94 | } 95 | } 96 | } 97 | } 98 | 99 | Ok(submodules) 100 | } 101 | 102 | fn get_dir_entries(dir: &PathBuf) -> Vec { 103 | if let Ok(entries) = read_dir(dir) { 104 | let paths: Vec = entries 105 | .filter(|e| e.is_ok()) 106 | .map(|e| e.unwrap().path()) 107 | .collect(); 108 | 109 | return paths; 110 | } 111 | 112 | vec![] 113 | 
/// Extracts the git dir path from a submodule's `.git` file text, e.g.
/// "gitdir: ../.git/modules/foo" -> "../.git/modules/foo".
///
/// Returns None when the text contains no ':'.
fn parse_submodule_git_file(text: &str) -> Option<String> {
  // `str::find` returns a *byte* offset, which is what slicing requires.
  // The previous `chars().position(..)` returned a character index — a
  // wrong (and potentially panicking) slice offset whenever a multi-byte
  // character precedes the ':'.
  let i = text.find(':')?;

  Some(text[(i + 1)..].trim().to_string())
}
crate::server::request_util::R; 10 | use crate::util::global::Global; 11 | use crate::util::short_cache::ShortCache; 12 | 13 | static SHORT_HUNK_CACHE: Global>> = global!(ShortCache::new( 14 | "Hunk Cache".to_string(), 15 | Duration::from_secs(10) 16 | )); 17 | 18 | // This should match "gSearchResultDiff" 19 | pub fn get_matching_hunk_lines( 20 | repo_path: &String, 21 | commit: &Commit, 22 | patch: &Patch, 23 | search_text: &str, 24 | ) -> R> { 25 | let cache_id = format!("{}{}", commit.id, patch.id); 26 | 27 | if let Some(hunks) = get_hunks_from_cache(&cache_id) { 28 | return Ok(get_matching_lines_in_hunks(hunks, search_text)); 29 | } 30 | 31 | let out = run_git_err(RunGitOptions { 32 | repo_path, 33 | args: load_hunks_args(commit, patch), 34 | })?; 35 | 36 | let hunks = parse_all_err(P_HUNKS, &out.stdout)?; 37 | store_hunk_in_cache(&cache_id, hunks.clone()); 38 | 39 | let hunk_lines = get_matching_lines_in_hunks(hunks, search_text); 40 | 41 | Ok(hunk_lines) 42 | } 43 | 44 | fn get_hunks_from_cache(key: &str) -> Option> { 45 | if let Some(mut cached) = SHORT_HUNK_CACHE.get() { 46 | return Some(cached.get(key)?.clone()); 47 | } 48 | None 49 | } 50 | 51 | fn store_hunk_in_cache(key: &str, hunks: Vec) { 52 | if let Some(mut cache) = SHORT_HUNK_CACHE.get() { 53 | cache.insert(key, hunks); 54 | } 55 | } 56 | 57 | fn get_matching_lines_in_hunks(hunks: Vec, search_text: &str) -> Vec { 58 | let mut hunk_lines: Vec = Vec::new(); 59 | 60 | for hunk in hunks { 61 | for line in hunk.lines { 62 | let HunkLine { status, text, .. 
} = &line; 63 | 64 | if (*status == HunkLineStatus::Added || *status == HunkLineStatus::Removed) 65 | && text.contains(search_text) 66 | { 67 | hunk_lines.push(line); 68 | } 69 | } 70 | } 71 | 72 | hunk_lines 73 | } 74 | -------------------------------------------------------------------------------- /src/git/queries/search/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::util::global::Global; 2 | use crate::{dprintln, global}; 3 | use serde::Deserialize; 4 | use ts_rs::TS; 5 | 6 | pub(crate) mod matching_hunk_lines; 7 | mod search_code; 8 | pub(crate) mod search_commits; 9 | pub(crate) mod search_request; 10 | 11 | #[derive(Debug, Clone, Deserialize, TS)] 12 | #[serde(rename_all = "camelCase")] 13 | #[ts(export)] 14 | pub struct SearchOptions { 15 | pub repo_path: String, 16 | pub search_text: String, 17 | pub num_results: usize, 18 | } 19 | 20 | static CURRENT_SEARCH: Global = global!(0); 21 | 22 | pub fn get_next_search_id() -> u32 { 23 | if let Some(id) = CURRENT_SEARCH.get() { 24 | let new_id = id + 1; 25 | CURRENT_SEARCH.set(new_id); 26 | new_id 27 | } else { 28 | CURRENT_SEARCH.set(0); 29 | 0 30 | } 31 | } 32 | 33 | fn search_cancelled(search_id: u32) -> bool { 34 | if let Some(id) = CURRENT_SEARCH.get() { 35 | dprintln!("current: {id}, this: {search_id}"); 36 | search_id != id 37 | } else { 38 | false 39 | } 40 | } 41 | 42 | #[cfg(test)] 43 | mod tests { 44 | use crate::git::git_types::Patch; 45 | use crate::git::queries::patches::patch_parsers::P_MANY_PATCHES_WITH_COMMIT_IDS; 46 | use std::time::{Duration, Instant}; 47 | use std::{assert_eq, println, thread}; 48 | 49 | use crate::git::queries::search::get_next_search_id; 50 | use crate::git::queries::search::search_code::{search_code_command, CodeSearchOpts}; 51 | use crate::parser::parse_all; 52 | 53 | #[test] 54 | fn test_get_next_search_id() { 55 | assert_eq!(get_next_search_id(), 1); 56 | assert_eq!(get_next_search_id(), 2); 57 | 
assert_eq!(get_next_search_id(), 3); 58 | 59 | let now = Instant::now(); 60 | 61 | while get_next_search_id() < 1_000 {} 62 | 63 | println!("Took {}us", now.elapsed().as_micros()); 64 | } 65 | 66 | // We can't run tests in parallel as they will be killed. 67 | /* 68 | TODO: Write better tests and re-enable. 69 | 70 | These tests where written to get our search working, but depend on the repo being in a certain 71 | state so need to be disabled. 72 | 73 | */ 74 | #[ignore] 75 | #[test] 76 | fn test_thing() { 77 | let t1 = thread::spawn(move || { 78 | search_diffs(&CodeSearchOpts { 79 | num_results: 500, 80 | search_text: "this".to_string(), 81 | repo_path: ".".to_string(), 82 | start_commit_index: 0, 83 | }) 84 | }); 85 | 86 | thread::sleep(Duration::from_millis(10)); 87 | 88 | let t2 = thread::spawn(move || { 89 | search_diffs(&CodeSearchOpts { 90 | num_results: 500, 91 | search_text: "this".to_string(), 92 | repo_path: ".".to_string(), 93 | start_commit_index: 0, 94 | }) 95 | }); 96 | 97 | thread::sleep(Duration::from_millis(10)); 98 | 99 | let t3 = thread::spawn(move || { 100 | search_diffs(&CodeSearchOpts { 101 | num_results: 5, 102 | search_text: "this".to_string(), 103 | repo_path: ".".to_string(), 104 | start_commit_index: 0, 105 | }) 106 | }); 107 | 108 | let r1 = t1.join().unwrap(); 109 | let r2 = t2.join().unwrap(); 110 | let r3 = t3.join().unwrap(); 111 | 112 | println!("{:?}, {:?}, {:?}", r1, r2, r3); 113 | 114 | assert!(r1.is_none()); 115 | assert!(r2.is_none()); 116 | assert!(r3.is_some()); 117 | } 118 | 119 | pub fn search_diffs(options: &CodeSearchOpts) -> Option)>> { 120 | let search_id = get_next_search_id(); 121 | let result = search_code_command(options, search_id)?; 122 | 123 | parse_all(P_MANY_PATCHES_WITH_COMMIT_IDS, &result) 124 | } 125 | } 126 | -------------------------------------------------------------------------------- /src/git/queries/search/search_code.rs: 
-------------------------------------------------------------------------------- 1 | use crate::dprintln; 2 | use crate::git::git_settings::GIT_PATH; 3 | use crate::git::git_types::{HunkLine, Patch}; 4 | use crate::git::queries::patches::patch_parsers::P_MANY_PATCHES_WITH_COMMIT_IDS; 5 | use crate::git::queries::search::matching_hunk_lines::get_matching_hunk_lines; 6 | use crate::git::queries::search::search_cancelled; 7 | use crate::git::store::STORE; 8 | use crate::parser::parse_all; 9 | use serde::{Deserialize, Serialize}; 10 | use std::io::Read; 11 | use std::process::{Command, Stdio}; 12 | use std::thread; 13 | use std::time::Duration; 14 | use ts_rs::TS; 15 | 16 | #[derive(Debug, Clone, Deserialize, TS)] 17 | #[serde(rename_all = "camelCase")] 18 | #[ts(export)] 19 | pub struct CodeSearchOpts { 20 | pub repo_path: String, 21 | pub search_text: String, 22 | pub num_results: usize, 23 | pub start_commit_index: usize, 24 | } 25 | 26 | #[derive(Debug, Clone, Serialize, Eq, PartialEq, TS)] 27 | #[ts(export)] 28 | pub struct FileMatch { 29 | patch: Patch, 30 | lines: Vec, 31 | } 32 | 33 | // None result means either no results or cancelled. 34 | pub fn search_commits_for_code( 35 | options: &CodeSearchOpts, 36 | search_id: u32, 37 | ) -> Option)>> { 38 | let result_text = search_code_command(options, search_id)?; 39 | 40 | let commit_patches = parse_all(P_MANY_PATCHES_WITH_COMMIT_IDS, &result_text)?; 41 | 42 | let CodeSearchOpts { 43 | repo_path, 44 | search_text, 45 | .. 
46 | } = options; 47 | 48 | let (commits, _) = STORE.get_commits_and_refs(repo_path)?; 49 | 50 | Some( 51 | commit_patches 52 | .into_iter() 53 | .flat_map(|(id, patches)| { 54 | let commit = commits.iter().find(|c| c.id == id)?; 55 | 56 | let matches = patches 57 | .into_iter() 58 | .flat_map(|patch| { 59 | Some(FileMatch { 60 | lines: get_matching_hunk_lines(repo_path, commit, &patch, search_text) 61 | .ok()?, 62 | patch, 63 | }) 64 | }) 65 | .collect::>(); 66 | 67 | Some((commit.id.clone(), matches)) 68 | }) 69 | .collect::)>>(), 70 | ) 71 | } 72 | 73 | // Just returns the raw text result from Git. 74 | pub fn search_code_command(options: &CodeSearchOpts, search_id: u32) -> Option { 75 | dprintln!( 76 | "Search for text: {}, id: {}, num: {}", 77 | options.search_text, 78 | search_id, 79 | options.num_results 80 | ); 81 | 82 | let CodeSearchOpts { 83 | repo_path, 84 | search_text, 85 | num_results, 86 | start_commit_index, 87 | } = options; 88 | 89 | let mut cmd = Command::new(GIT_PATH.as_path()) 90 | .args([ 91 | "log", 92 | // &format!("{}..{}", last_commit_id, first_commit_id), 93 | &format!("--skip={}", start_commit_index), 94 | // &format!("-S\"{}\"", search_text), 95 | "-S", 96 | search_text, 97 | "--name-status", 98 | "--branches", 99 | "--remotes", 100 | "--pretty=format:%H,", 101 | &format!("-n{}", num_results), 102 | "-z", 103 | ]) 104 | .stdout(Stdio::piped()) 105 | .current_dir(repo_path) 106 | .spawn() 107 | .ok()?; 108 | 109 | while let Ok(None) = cmd.try_wait() { 110 | if search_cancelled(search_id) { 111 | dprintln!("Killing search {search_id} \"{search_text}\""); 112 | 113 | if let Err(_e) = cmd.kill() { 114 | dprintln!("{}", _e); 115 | } 116 | return None; 117 | } 118 | 119 | thread::sleep(Duration::from_millis(50)); 120 | } 121 | 122 | if cmd.wait().ok()?.success() { 123 | let mut text = String::new(); 124 | 125 | let len = cmd.stdout?.read_to_string(&mut text).ok()?; 126 | 127 | if len > 0 { 128 | return Some(text); 129 | } 130 | } 131 | 132 | 
None 133 | } 134 | -------------------------------------------------------------------------------- /src/git/queries/search/search_commits.rs: -------------------------------------------------------------------------------- 1 | use std::collections::{HashMap, HashSet}; 2 | 3 | use serde::Serialize; 4 | use ts_rs::TS; 5 | 6 | use crate::git::git_types::Patch; 7 | use crate::git::queries::patches::patches::load_patches; 8 | use crate::git::queries::search::search_code::FileMatch; 9 | use crate::git::queries::search::SearchOptions; 10 | use crate::git::store::STORE; 11 | use crate::server::request_util::{ES, R}; 12 | 13 | #[derive(Debug, Eq, PartialEq, Clone, Serialize, Hash, TS)] 14 | #[ts(export)] 15 | pub enum SearchMatchType { 16 | CommitId, 17 | CommitMessage, 18 | Email, 19 | Author, 20 | } 21 | 22 | #[derive(Debug, Clone, Serialize, TS)] 23 | #[serde(rename_all = "camelCase")] 24 | #[ts(export)] 25 | pub struct CoreSearchResult { 26 | commit_id: String, 27 | matches: HashSet, 28 | patches: Vec, 29 | diffs: Vec, 30 | ref_ids: Vec, 31 | } 32 | 33 | pub fn search_commits(options: &SearchOptions) -> R> { 34 | let SearchOptions { 35 | repo_path, 36 | search_text, 37 | num_results, 38 | } = options; 39 | 40 | let (commits, refs) = STORE 41 | .get_commits_and_refs(repo_path) 42 | .ok_or(ES::from("search_commits: Couldn't get commits and refs."))?; 43 | let patches = load_patches(repo_path, &commits)?; 44 | let search_text = search_text.to_lowercase(); 45 | let mut results: Vec = Vec::new(); 46 | 47 | let ref_ids = refs 48 | .iter() 49 | .filter(|r| r.full_name.to_lowercase().contains(&search_text)) 50 | .map(|r| r.id.clone()) 51 | .collect::>(); 52 | 53 | for commit in commits { 54 | let mut matches: HashSet = HashSet::new(); 55 | 56 | if commit.id.to_lowercase().contains(&search_text) { 57 | matches.insert(SearchMatchType::CommitId); 58 | } 59 | if commit.email.to_lowercase().contains(&search_text) { 60 | matches.insert(SearchMatchType::Email); 61 | } 62 | if 
commit.author.to_lowercase().contains(&search_text) { 63 | matches.insert(SearchMatchType::Author); 64 | } 65 | if commit.message.to_lowercase().contains(&search_text) { 66 | matches.insert(SearchMatchType::CommitMessage); 67 | } 68 | 69 | let matching_patches = get_matching_patches(&search_text, &commit.id, &patches); 70 | 71 | let ref_ids: Vec = commit 72 | .refs 73 | .iter() 74 | .filter(|id| ref_ids.contains(id)) 75 | .cloned() 76 | .collect(); 77 | 78 | if !matches.is_empty() || !matching_patches.is_empty() || !ref_ids.is_empty() { 79 | results.push(CoreSearchResult { 80 | commit_id: commit.id.clone(), 81 | matches, 82 | patches: matching_patches, 83 | diffs: Vec::new(), 84 | ref_ids, 85 | }); 86 | } 87 | 88 | if results.len() > *num_results { 89 | break; 90 | } 91 | } 92 | 93 | Ok(results) 94 | } 95 | 96 | fn get_matching_patches( 97 | search_text: &str, 98 | commit_id: &str, 99 | patches: &HashMap>, 100 | ) -> Vec { 101 | if let Some(files) = patches.get(commit_id) { 102 | return files 103 | .iter() 104 | .filter(|p| { 105 | p.old_file.to_lowercase().contains(search_text) 106 | || p.new_file.to_lowercase().contains(search_text) 107 | }) 108 | .cloned() 109 | .collect::>(); 110 | } 111 | 112 | Vec::new() 113 | } 114 | -------------------------------------------------------------------------------- /src/git/queries/search/search_request.rs: -------------------------------------------------------------------------------- 1 | use std::thread; 2 | use std::time::Instant; 3 | 4 | use ahash::AHashMap; 5 | use serde::{Deserialize, Serialize}; 6 | use ts_rs::TS; 7 | 8 | use crate::git::queries::search::get_next_search_id; 9 | use crate::git::queries::search::search_code::{ 10 | search_commits_for_code, CodeSearchOpts, FileMatch, 11 | }; 12 | use crate::global; 13 | use crate::util::global::Global; 14 | 15 | #[derive(Debug, Clone, Eq, PartialEq)] 16 | pub struct DiffSearch { 17 | pub repo_path: String, 18 | pub search_text: String, 19 | pub search_id: u32, 20 | pub 
search_result: Option)>>, 21 | pub time: Instant, 22 | pub completed: bool, 23 | } 24 | 25 | impl DiffSearch { 26 | fn new(repo_path: String, search_text: String, search_id: u32) -> Self { 27 | Self { 28 | repo_path, 29 | search_text, 30 | search_id, 31 | search_result: None, 32 | time: Instant::now(), 33 | completed: false, 34 | } 35 | } 36 | } 37 | 38 | static DIFF_SEARCHES: Global> = global!(AHashMap::new()); 39 | 40 | /* 41 | This begins a search and returns the search_id. We return before completing so 42 | we don't block the server, and new searches can cancel the stale ones. 43 | */ 44 | pub fn start_diff_search(options: &CodeSearchOpts) -> u32 { 45 | let CodeSearchOpts { 46 | repo_path, 47 | search_text, 48 | .. 49 | } = options.clone(); 50 | 51 | let search = DiffSearch::new(repo_path, search_text, get_next_search_id()); 52 | 53 | DIFF_SEARCHES.insert(search.search_id, search.clone()); 54 | 55 | let o = options.clone(); 56 | 57 | thread::spawn(move || { 58 | let result = search_commits_for_code(&o, search.search_id); 59 | 60 | if let Some(searches) = DIFF_SEARCHES.get() { 61 | if let Some(initial_search) = searches.get(&search.search_id) { 62 | let mut updated_search = initial_search.clone(); 63 | 64 | updated_search.search_result = result; 65 | // This needs to be set regardless of whether we get a result. 
66 | updated_search.completed = true; 67 | 68 | DIFF_SEARCHES.insert(updated_search.search_id, updated_search); 69 | } 70 | } 71 | }); 72 | 73 | search.search_id 74 | } 75 | 76 | #[derive(Debug, Clone, Deserialize, TS)] 77 | #[serde(rename_all = "camelCase")] 78 | #[ts(export)] 79 | pub struct PollSearchOpts { 80 | pub search_id: u32, 81 | } 82 | 83 | #[derive(Debug, Clone, Serialize, TS)] 84 | #[serde(rename_all = "camelCase")] 85 | #[ts(export)] 86 | pub struct PollSearchResult { 87 | pub search_id: u32, 88 | pub complete: bool, 89 | pub results: Option)>>, 90 | } 91 | 92 | pub fn poll_diff_search(options: &PollSearchOpts) -> PollSearchResult { 93 | if let Some(result) = poll_diff_search_inner(options) { 94 | return result; 95 | } 96 | 97 | // Search not found. We should return complete? 98 | PollSearchResult { 99 | search_id: options.search_id, 100 | complete: false, 101 | results: None, 102 | } 103 | } 104 | 105 | fn poll_diff_search_inner(options: &PollSearchOpts) -> Option { 106 | let searches = DIFF_SEARCHES.get()?; 107 | let search = searches.get(&options.search_id)?; 108 | 109 | if search.completed { 110 | return Some(PollSearchResult { 111 | search_id: options.search_id, 112 | complete: true, 113 | results: search.search_result.clone(), 114 | }); 115 | } 116 | 117 | None 118 | } 119 | 120 | // Be careful with this as all searches may be completed before client has polled and gotten the 121 | // last result yet. 
122 | pub fn clear_completed_searches() { 123 | if let Ok(mut searches) = DIFF_SEARCHES.data.write() { 124 | *searches = (*searches) 125 | .clone() 126 | .into_iter() 127 | .filter(|search| !search.1.completed) 128 | .collect(); 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /src/git/queries/stashes.rs: -------------------------------------------------------------------------------- 1 | use loggers::elapsed; 2 | 3 | use crate::git::git_types::CommitInfo; 4 | use crate::git::queries::commits_parsers::{PRETTY_FORMATTED, P_COMMITS}; 5 | use crate::git::run_git; 6 | use crate::git::run_git::RunGitOptions; 7 | use crate::git::store::PathString; 8 | use crate::parser::parse_all_err; 9 | use crate::server::request_util::R; 10 | 11 | #[elapsed] 12 | pub fn load_stashes(repo_path: &PathString) -> R> { 13 | let out = run_git::run_git_err(RunGitOptions { 14 | args: [ 15 | "reflog", 16 | "show", 17 | "stash", 18 | // "-z", 19 | "--decorate=full", 20 | PRETTY_FORMATTED, 21 | "--date=raw", 22 | ], 23 | repo_path, 24 | })? 
25 | .stdout; 26 | 27 | let mut commits = parse_all_err(P_COMMITS, out.as_str())?; 28 | 29 | for (i, c) in commits.iter_mut().enumerate() { 30 | c.stash_id = format!("refs/stash@{{{}}}", i); 31 | c.is_merge = false; 32 | c.refs.clear(); 33 | 34 | while c.parent_ids.len() > 1 { 35 | c.parent_ids.pop(); 36 | } 37 | 38 | c.message = tidy_commit_message(&c.message) 39 | } 40 | 41 | Ok(commits) 42 | } 43 | 44 | fn tidy_commit_message(message: &str) -> String { 45 | message 46 | .split(':') 47 | .next() 48 | .unwrap_or("Stash") 49 | .replace("WIP", "Stash") 50 | } 51 | -------------------------------------------------------------------------------- /src/git/queries/stashes_test.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod tests { 3 | use crate::git::queries::stashes::load_stashes; 4 | 5 | #[test] 6 | fn test_load_stashes() { 7 | let result = load_stashes(&"/home/toby/Repos/gitfiend-seed/git-fiend".to_string()); 8 | 9 | println!("{:?}", result); 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /src/git/queries/syntax_colouring.rs: -------------------------------------------------------------------------------- 1 | use crate::util::global::Glo; 2 | use crate::{dprintln, f, glo}; 3 | use serde::{Deserialize, Serialize}; 4 | use syntect::easy::HighlightLines; 5 | use syntect::highlighting::{Color, Style, ThemeSet}; 6 | use syntect::parsing::SyntaxSet; 7 | use ts_rs::TS; 8 | 9 | pub static COLOURING: Glo = glo!(Colouring::new()); 10 | 11 | pub struct Colouring { 12 | pub syntax_set: SyntaxSet, 13 | pub theme_set: ThemeSet, 14 | pub theme: ThemeColour, 15 | } 16 | 17 | #[derive(Debug, Eq, PartialEq, Copy, Clone, Deserialize, Serialize, TS)] 18 | #[serde(rename_all = "camelCase")] 19 | #[ts(export)] 20 | pub enum ThemeColour { 21 | Light, 22 | Dark, 23 | } 24 | 25 | impl Colouring { 26 | pub fn new() -> Self { 27 | Self { 28 | syntax_set: 
SyntaxSet::load_defaults_newlines(), 29 | theme_set: ThemeSet::load_defaults(), 30 | theme: ThemeColour::Light, 31 | } 32 | } 33 | 34 | pub fn set_theme(&mut self, theme: &ThemeColour) { 35 | self.theme = *theme; 36 | } 37 | 38 | pub fn get_colour_line( 39 | &mut self, 40 | theme: &ThemeColour, 41 | file_extension: &str, 42 | ) -> ColourLine { 43 | self.set_theme(theme); 44 | let h = self.get_highlighter(file_extension); 45 | 46 | dprintln!( 47 | "file_ext: {}, highlighter loaded: {}", 48 | file_extension, 49 | h.is_some() 50 | ); 51 | 52 | ColourLine { 53 | colouring: self, 54 | highlight: h, 55 | // extension: file_extension.to_string(), 56 | } 57 | } 58 | 59 | // HighlightLines isn't thread safe, so can't be stored in a global. 60 | pub fn get_highlighter(&self, file_extension: &str) -> Option { 61 | let ext = match file_extension { 62 | "ts" => "js", 63 | "tsx" => "js", 64 | "iml" => "xml", 65 | _ => file_extension, 66 | }; 67 | 68 | let syntax = self.syntax_set.find_syntax_by_extension(ext)?; 69 | 70 | let theme_str = if self.theme == ThemeColour::Dark { 71 | "base16-ocean.dark" 72 | } else { 73 | "base16-ocean.light" 74 | }; 75 | 76 | Some(HighlightLines::new( 77 | syntax, 78 | &self.theme_set.themes[theme_str], 79 | )) 80 | } 81 | 82 | pub fn _get_supported_things(self) -> (Vec, Vec) { 83 | let themes = self.theme_set.themes.keys().cloned().collect(); 84 | 85 | let extensions = self 86 | .syntax_set 87 | .syntaxes() 88 | .iter() 89 | .flat_map(|s| s.file_extensions.clone()) 90 | .collect(); 91 | 92 | (themes, extensions) 93 | } 94 | } 95 | 96 | pub struct ColourLine<'a> { 97 | pub colouring: &'a Colouring, 98 | pub highlight: Option>, 99 | // pub extension: String, 100 | } 101 | 102 | impl<'a> ColourLine<'a> { 103 | pub fn colour<'b>(&mut self, line: &'b str) -> Result, String> { 104 | if let Some(ref mut h) = self.highlight { 105 | return h 106 | .highlight_line(line, &self.colouring.syntax_set) 107 | .map_err(|e| e.to_string()); 108 | } 109 | 110 | 
Err(String::from("Highlighter isn't loaded for this file")) 111 | } 112 | 113 | // We are trying to highlight fragments of code the have missing context. Fake up the context. 114 | pub fn start_fragment(&mut self) { 115 | let _ = self.colour("{\n"); 116 | } 117 | 118 | pub fn end_fragment(&mut self) { 119 | let _ = self.colour("}\n"); 120 | } 121 | } 122 | 123 | // pub fn scale_colour(colour: Color, theme: &ThemeColour) -> Color { 124 | // let Color { r, g, b, .. } = colour; 125 | // 126 | // let sum: u32 = r as u32 + g as u32 + b as u32; 127 | // 128 | // let max = r.max(g).max(b); 129 | // let min = r.min(g).min(b); 130 | // 131 | // println!("sum: {}, max: {}", sum, r.max(g).max(b)); 132 | // 133 | // if theme == &ThemeColour::Light { 134 | // if min > 0 { 135 | // let scale = 255.0 / min as f32; 136 | // println!("diff: {}", min); 137 | // 138 | // return Color { 139 | // r: ((r - min) as f32 * scale) as u8, 140 | // g: ((g - min) as f32 * scale) as u8, 141 | // b: ((b - min) as f32 * scale) as u8, 142 | // a: 255, 143 | // }; 144 | // } 145 | // } else if max < 255 { 146 | // let scale = 255.0 / max as f32; 147 | // println!("scale: {}", scale); 148 | // 149 | // return Color { 150 | // r: (r as f32 * scale).round() as u8, 151 | // g: (g as f32 * scale).round() as u8, 152 | // b: (b as f32 * scale).round() as u8, 153 | // a: 255, 154 | // }; 155 | // } 156 | // 157 | // colour 158 | // } 159 | 160 | pub fn colour_to_style(colour: Color, theme: &ThemeColour) -> String { 161 | if *theme == ThemeColour::Light { 162 | f!("hsl({}, 100%, 30%)", colour_to_hue(colour)) 163 | } else { 164 | f!("hsl({}, 100%, 80%)", colour_to_hue(colour)) 165 | } 166 | } 167 | 168 | pub fn colour_to_hue(colour: Color) -> f32 { 169 | let Color { r, g, b, .. 
} = colour; 170 | 171 | let r = r as f32; 172 | let g = g as f32; 173 | let b = b as f32; 174 | 175 | let c_min = r.min(g).min(b); 176 | let c_max = r.max(g).max(b); 177 | let delta = c_max - c_min; 178 | 179 | let mut hue = if delta == 0. { 180 | 0. 181 | } else if c_max == r { 182 | 60. * (((g - b) / delta) % 6.) 183 | } else if c_max == g { 184 | 60. * (((b - r) / delta) + 2.) 185 | } else { 186 | 60. * (((r - g) / delta) + 4.) 187 | } 188 | .round(); 189 | 190 | if hue < 0. { 191 | hue += 360.; 192 | } 193 | 194 | hue 195 | } 196 | -------------------------------------------------------------------------------- /src/git/queries/unpushed_commits.rs: -------------------------------------------------------------------------------- 1 | use crate::git::git_types::{Commit, RefInfo, RefLocation, RefType}; 2 | use crate::git::queries::commit_calcs::get_commit_ids_between_commit_ids; 3 | use crate::git::queries::commits_parsers::P_ID_LIST; 4 | use crate::git::run_git::{run_git_err, RunGitOptions}; 5 | use crate::git::store::STORE; 6 | use crate::parser::parse_all_err; 7 | use crate::server::git_request::ReqOptions; 8 | use crate::{dprintln, time_result}; 9 | use ahash::{AHashMap, AHashSet}; 10 | use serde::Serialize; 11 | use ts_rs::TS; 12 | 13 | #[derive(Debug, Clone, Serialize, TS)] 14 | #[serde(rename_all = "camelCase")] 15 | #[ts(export)] 16 | pub struct UnPushedCommits { 17 | // Commits that are un-pushed on this branch, but pushed on another. 18 | pub this_branch: Vec, 19 | // Commits that haven't been pushed period. These have more edit options available. 
20 | pub all_branches: Vec, 21 | } 22 | 23 | pub fn get_un_pushed_commits(options: &ReqOptions) -> UnPushedCommits { 24 | if let Some(ids) = get_un_pushed_commits_computed(options) { 25 | let all = get_unique_un_pushed_commits(&options.repo_path, &ids).unwrap_or_default(); 26 | 27 | return UnPushedCommits { 28 | this_branch: ids, 29 | all_branches: all, 30 | }; 31 | } else { 32 | dprintln!( 33 | "get_un_pushed_commits: Refs not found in commits, fall back to git request." 34 | ); 35 | } 36 | 37 | if let Ok(out) = run_git_err(RunGitOptions { 38 | repo_path: &options.repo_path, 39 | args: ["log", "HEAD", "--not", "--remotes", "--pretty=format:%H"], 40 | }) { 41 | if let Ok(ids) = parse_all_err(P_ID_LIST, &out.stdout) { 42 | return UnPushedCommits { 43 | // This branch is probably far behind the remote. 44 | // TODO: Do we include all commits then? 45 | this_branch: Vec::new(), 46 | all_branches: ids, 47 | }; 48 | } 49 | } 50 | 51 | UnPushedCommits { 52 | this_branch: Vec::new(), 53 | all_branches: Vec::new(), 54 | } 55 | } 56 | 57 | // Assumes head has some commits remote ref doesn't. If remote is ahead of ref then, could be misleading. 
58 | fn get_unique_un_pushed_commits( 59 | repo_path: &String, 60 | un_pushed_ids: &[String], 61 | ) -> Option> { 62 | let (commits, refs) = STORE.get_commits_and_refs(repo_path)?; 63 | 64 | let un_pushed_ids: AHashSet = un_pushed_ids.iter().cloned().collect(); 65 | let ref_map: AHashMap = 66 | refs.iter().map(|r| (r.id.clone(), r.clone())).collect(); 67 | let commit_map: AHashMap = 68 | commits.iter().map(|c| (c.id.clone(), c.clone())).collect(); 69 | 70 | let head_ref = get_head_ref(&refs)?; 71 | let remote = find_sibling_ref(head_ref, &refs)?; 72 | let head = commits.iter().find(|c| c.id == head_ref.commit_id)?; 73 | 74 | let mut unique: Vec = Vec::new(); 75 | let mut checked: AHashSet = AHashSet::new(); 76 | 77 | un_pushed( 78 | head, 79 | &remote.commit_id, 80 | &commit_map, 81 | &ref_map, 82 | &un_pushed_ids, 83 | &mut checked, 84 | &mut unique, 85 | ); 86 | 87 | Some(unique) 88 | } 89 | 90 | fn un_pushed( 91 | current: &Commit, 92 | remote_id: &str, 93 | commits: &AHashMap, 94 | refs: &AHashMap, 95 | un_pushed_ids: &AHashSet, 96 | checked: &mut AHashSet, 97 | unique: &mut Vec, 98 | ) { 99 | if checked.contains(&*current.id) { 100 | return; 101 | } 102 | checked.insert(current.id.clone()); 103 | 104 | if current.id == remote_id 105 | || current.refs.iter().any(|ref_id| { 106 | if let Some(r) = refs.get(ref_id) { 107 | r.ref_type == RefType::Branch && r.location == RefLocation::Remote 108 | } else { 109 | false 110 | } 111 | }) 112 | { 113 | return; 114 | } else if un_pushed_ids.contains(¤t.id) { 115 | unique.push(current.id.clone()); 116 | } 117 | 118 | for id in ¤t.parent_ids { 119 | if let Some(commit) = commits.get(id) { 120 | un_pushed( 121 | commit, 122 | remote_id, 123 | commits, 124 | refs, 125 | un_pushed_ids, 126 | checked, 127 | unique, 128 | ); 129 | } 130 | } 131 | } 132 | 133 | // This will return none if head ref or remote ref can't be found in provided commits. 
134 | fn get_un_pushed_commits_computed(options: &ReqOptions) -> Option> { 135 | time_result!("get_un_pushed_commits_computed", { 136 | let (commits, refs) = STORE.get_commits_and_refs(&options.repo_path)?; 137 | 138 | let commit_map: AHashMap = 139 | commits.into_iter().map(|c| (c.id.clone(), c)).collect(); 140 | 141 | let head_ref = get_head_ref(&refs)?; 142 | let remote = find_sibling_ref(head_ref, &refs)?; 143 | 144 | get_commit_ids_between_commit_ids(&head_ref.commit_id, &remote.commit_id, &commit_map) 145 | }) 146 | } 147 | 148 | fn get_head_ref(refs: &[RefInfo]) -> Option<&RefInfo> { 149 | refs.iter().find(|r| r.head) 150 | } 151 | 152 | fn find_sibling_ref<'a>(ri: &RefInfo, refs: &'a [RefInfo]) -> Option<&'a RefInfo> { 153 | if !ri.sibling_id.is_empty() { 154 | return refs.iter().find(|r| r.id == ri.sibling_id); 155 | } 156 | None 157 | } 158 | -------------------------------------------------------------------------------- /src/git/queries/wip.rs: -------------------------------------------------------------------------------- 1 | mod create_hunks; 2 | pub(crate) mod wip_diff; 3 | mod wip_patch_parsers; 4 | pub(crate) mod wip_patches; 5 | use crate::git::store::STORE; 6 | use crate::server::git_request::ReqOptions; 7 | use std::fs::read_to_string; 8 | 9 | pub fn is_rebase_in_progress(options: &ReqOptions) -> bool { 10 | if let Ok(path) = STORE.get_repo_path(&options.repo_path) { 11 | return path.git_path.join("rebase-merge").exists(); 12 | } 13 | false 14 | } 15 | 16 | // // Returns the commit id of the branch we tried to merge 17 | // // into our current if we have a conflict. 
18 | // pub fn is_merge_in_progress(options: &ReqOptions) -> Option { 19 | // let ReqOptions { repo_path } = options; 20 | // 21 | // read_merge_head(repo_path) 22 | // } 23 | 24 | pub fn read_merge_head(repo_path: &str) -> Option { 25 | let path = STORE.get_repo_path(repo_path).ok()?; 26 | 27 | if let Ok(text) = read_to_string(path.git_path.join("MERGE_HEAD")) { 28 | return Some(text.trim().to_string()); 29 | } 30 | 31 | // This seems to happen when there's a conflict from un-stashing. Returns "special ref". 32 | if let Ok(text) = read_to_string(path.git_path.join("AUTO_MERGE")) { 33 | return Some(text.trim().to_string()); 34 | } 35 | 36 | None 37 | } 38 | -------------------------------------------------------------------------------- /src/git/queries/wip/create_hunks.rs: -------------------------------------------------------------------------------- 1 | use crate::git::git_types::{Hunk, HunkLine, HunkLineStatus, HunkRange}; 2 | use std::cmp::{max, min}; 3 | use HunkLineStatus::{Added, Removed}; 4 | 5 | pub fn convert_lines_to_hunks(lines: Vec) -> (Vec, u32) { 6 | let mut hunks = Vec::::new(); 7 | let mut current_hunk = Hunk::new(); 8 | let mut started_making_hunk = false; 9 | 10 | // Hunks should be joined if there's only 6 unchanged lines between them. 11 | let mut gap_count = 0; 12 | let mut patch_size: u32 = 0; 13 | 14 | for (i, line) in lines.iter().enumerate() { 15 | let HunkLine { status, .. 
} = line; 16 | 17 | if *status == Added || *status == Removed { 18 | patch_size += 1; 19 | gap_count = 0; 20 | 21 | if !started_making_hunk { 22 | started_making_hunk = true; 23 | 24 | let start_i = max(0, (i as i32) - 3) as usize; 25 | let slice = &lines[start_i..i]; 26 | 27 | current_hunk.lines.extend_from_slice(slice); 28 | } 29 | current_hunk.lines.push(line.clone()); 30 | } else if started_making_hunk { 31 | if gap_count < 6 { 32 | gap_count += 1; 33 | current_hunk.lines.push(line.clone()); 34 | } else { 35 | if gap_count < 3 { 36 | current_hunk 37 | .lines 38 | .extend_from_slice(&lines[i..min(i + (3 - gap_count), lines.len())]); 39 | } else { 40 | for _ in 0..(gap_count - 3) { 41 | current_hunk.lines.pop(); 42 | } 43 | } 44 | 45 | set_line_ranges(&mut current_hunk); 46 | hunks.push(current_hunk.clone()); 47 | current_hunk = Hunk::new(); 48 | started_making_hunk = false; 49 | gap_count = 0; 50 | } 51 | } 52 | } 53 | 54 | if started_making_hunk { 55 | set_line_ranges(&mut current_hunk); 56 | hunks.push(current_hunk.clone()); 57 | } 58 | 59 | set_indices(&mut hunks); 60 | 61 | (hunks, patch_size) 62 | } 63 | 64 | fn set_line_ranges(hunk: &mut Hunk) { 65 | let Hunk { lines, .. 
} = hunk; 66 | 67 | if lines.is_empty() { 68 | return; 69 | } 70 | 71 | let first = &lines[0]; 72 | let last = &lines[lines.len() - 1]; 73 | 74 | hunk.old_line_range = HunkRange { 75 | start: first.old_num.unwrap_or(0), 76 | length: max( 77 | last.old_num.unwrap_or(0) - first.old_num.unwrap_or(0) + 1, 78 | 0, 79 | ), 80 | }; 81 | hunk.new_line_range = HunkRange { 82 | start: first.new_num.unwrap_or(0), 83 | length: max( 84 | last.new_num.unwrap_or(0) - first.new_num.unwrap_or(0) + 1, 85 | 0, 86 | ), 87 | }; 88 | } 89 | 90 | fn set_indices(hunks: &mut [Hunk]) { 91 | for (i, hunk) in hunks.iter_mut().enumerate() { 92 | hunk.index = i as i32; 93 | for line in &mut hunk.lines { 94 | line.hunk_index = i as i32; 95 | } 96 | } 97 | } 98 | 99 | #[cfg(test)] 100 | mod tests { 101 | use crate::git::queries::wip::create_hunks::convert_lines_to_hunks; 102 | use crate::git::queries::wip::wip_diff::calc_hunk_line_from_text; 103 | use std::cmp::max; 104 | 105 | #[test] 106 | fn test_create_hunks() { 107 | let text = "import {ThemeName} from '../views/theme/theming' 108 | 109 | export const maxNumberOfCommits = 1000 110 | export const maxNumberOfCommits = 100 111 | 112 | export const bgSize = 500 113 | 114 | export const font = `13px -apple-system,BlinkMacSystemFont,Segoe UI,Helvetica,Arial,sans-serif,Apple Color Emoji,Segoe UI Emoji` 115 | 116 | export const monoFont = `13px 'Menlo', 'Ubuntu Mono', 'Consolas', monospace` 117 | 118 | export const defaultTheme: ThemeName = 'dark' 119 | 120 | export const defaultAnimationTime: AnimationTime = { 121 | short: 150, 122 | medium: 300, 123 | long: 400, 124 | } 125 | 126 | export const animationTimeDisabled: AnimationTime = { 127 | short: 0, 128 | medium: 0, 129 | long: 0, 130 | } 131 | 132 | export interface AnimationTime { 133 | short: number 134 | medium: number 135 | long: number 136 | } 137 | "; 138 | 139 | let lines = calc_hunk_line_from_text("", text); 140 | 141 | assert_eq!(lines.len(), 30); 142 | 143 | let hunks = 
convert_lines_to_hunks(lines); 144 | 145 | assert_eq!(hunks.0.len(), 1); 146 | 147 | // This is a bit dumb. All lines are added 148 | assert_eq!(hunks.0[0].lines.len(), 30); 149 | } 150 | 151 | #[test] 152 | fn test_max_behaviour() { 153 | assert_eq!(max(0, -3) as u32, 0); 154 | 155 | assert_eq!([1, 2][0..0].len(), 0); 156 | 157 | // let i: usize = 0; 158 | // let n: i32 = i - 3; 159 | // 160 | // assert_eq!(i - 3, -3 as i32); 161 | } 162 | } 163 | -------------------------------------------------------------------------------- /src/git/queries/wip/wip_patches.rs: -------------------------------------------------------------------------------- 1 | use crate::git::git_types::{WipPatch, WipPatchType}; 2 | use crate::git::queries::patches::file_is_image; 3 | use crate::git::queries::wip::read_merge_head; 4 | use crate::git::queries::wip::wip_patch_parsers::P_WIP_PATCHES; 5 | use crate::git::run_git::RunGitOptions; 6 | use crate::git::run_git::{run_git_err, GitOut}; 7 | use crate::parser::parse_all_err; 8 | use crate::server::git_request::ReqOptions; 9 | use crate::server::request_util::R; 10 | use serde::Serialize; 11 | use ts_rs::TS; 12 | 13 | #[derive(Debug, PartialEq, Eq)] 14 | pub struct WipPatchInfo { 15 | pub old_file: String, 16 | pub new_file: String, 17 | pub staged: WipPatchType, 18 | pub un_staged: WipPatchType, 19 | } 20 | 21 | #[derive(Debug, Clone, Serialize, TS)] 22 | #[ts(export)] 23 | pub struct WipPatches { 24 | pub patches: Vec, 25 | pub conflict_commit_id: Option, 26 | } 27 | 28 | pub fn load_wip_patches(options: &ReqOptions) -> R { 29 | let GitOut { stdout, .. 
} = run_git_err(RunGitOptions { 30 | repo_path: &options.repo_path, 31 | args: ["status", "--porcelain", "-uall", "-z"], 32 | })?; 33 | 34 | let info = parse_all_err(P_WIP_PATCHES, &stdout)?; 35 | 36 | let (patches, conflicted) = get_patches_from_info(info); 37 | 38 | if conflicted { 39 | if let Some(id) = read_merge_head(&options.repo_path) { 40 | return Ok(WipPatches { 41 | patches, 42 | conflict_commit_id: Some(id), 43 | }); 44 | } 45 | } 46 | 47 | Ok(WipPatches { 48 | patches, 49 | conflict_commit_id: None, 50 | }) 51 | } 52 | 53 | fn get_patches_from_info(info: Vec) -> (Vec, bool) { 54 | let mut patches: Vec = Vec::new(); 55 | let mut have_conflict = false; 56 | 57 | for WipPatchInfo { 58 | old_file, 59 | new_file, 60 | un_staged, 61 | staged, 62 | } in info 63 | { 64 | let conflicted = is_conflicted(&staged, &un_staged); 65 | 66 | if conflicted { 67 | have_conflict = true; 68 | } 69 | let patch_type = pick_type_from_patch(&un_staged, &staged); 70 | 71 | patches.push(WipPatch { 72 | old_file: old_file.clone(), 73 | new_file: new_file.clone(), 74 | patch_type: patch_type.clone(), 75 | staged_type: staged, 76 | un_staged_type: un_staged, 77 | conflicted, 78 | id: format!("{}{}", &new_file, patch_type), 79 | is_image: file_is_image(&new_file), 80 | }) 81 | } 82 | 83 | patches.sort_by_key(|p| p.new_file.to_lowercase()); 84 | 85 | if have_conflict { 86 | // We aren't interested in any other patches when there's a conflict. 
87 | return (patches.into_iter().filter(|p| p.conflicted).collect(), true); 88 | } 89 | 90 | (patches, false) 91 | } 92 | 93 | fn is_conflicted(left: &WipPatchType, right: &WipPatchType) -> bool { 94 | *left == WipPatchType::U 95 | || *right == WipPatchType::U 96 | || (*left == WipPatchType::A && *right == WipPatchType::A) 97 | || (*left == WipPatchType::D && *right == WipPatchType::D) 98 | } 99 | 100 | fn pick_type_from_patch(un_staged: &WipPatchType, staged: &WipPatchType) -> WipPatchType { 101 | if un_staged != &WipPatchType::Empty { 102 | if un_staged == &WipPatchType::Question { 103 | return WipPatchType::A; 104 | } 105 | return un_staged.clone(); 106 | } 107 | if staged == &WipPatchType::Question { 108 | return WipPatchType::A; 109 | } 110 | 111 | staged.clone() 112 | } 113 | -------------------------------------------------------------------------------- /src/git/queries/workspace/load_current_branch.rs: -------------------------------------------------------------------------------- 1 | use crate::f; 2 | use crate::git::store::STORE; 3 | use crate::server::request_util::{ES, R}; 4 | use std::collections::HashSet; 5 | use std::fs::{read_dir, read_to_string}; 6 | use std::path::{Path, PathBuf}; 7 | 8 | pub fn load_current_branch(repo_path: &str) -> R<(String, String)> { 9 | let repo = STORE.get_repo_path(repo_path)?; 10 | let head = repo.git_path.join("HEAD"); 11 | 12 | if let Ok(text) = read_to_string(head) { 13 | return if let Some(branch) = text.split(':').last() { 14 | let id = branch.trim(); 15 | let name = id.replace("refs/heads/", ""); 16 | Ok((id.to_string(), name)) 17 | } else { 18 | Err(ES::from( 19 | "Failed to load current branch. Failed to parse .git/HEAD. Could be a detached head?", 20 | )) 21 | }; 22 | } 23 | 24 | Err(ES::from( 25 | "Failed to load current branch. 
Failed to read .git/HEAD", 26 | )) 27 | } 28 | 29 | // Some info 30 | // Branches in git can contain /, but not \ 31 | // We read them from disk as / on Linux and Mac, but as \ on Windows. 32 | pub fn read_refs(repo_path: &str, branch_name: &str) -> R { 33 | let mut refs = Refs { 34 | local_id: None, 35 | remote_id: None, 36 | others: HashSet::new(), 37 | }; 38 | 39 | let repo = STORE.get_repo_path(repo_path)?; 40 | let path = repo.git_path.join("refs"); 41 | 42 | let heads_dir = path.join("heads"); 43 | 44 | read_local_refs(&heads_dir, &heads_dir, branch_name, &mut refs)?; 45 | 46 | let remotes_dir = path.join("remotes"); 47 | 48 | // Sometimes remotes folder doesn't exist. 49 | if let Ok(remotes) = read_dir(remotes_dir) { 50 | for item in remotes { 51 | let p = item?.path(); 52 | let _ = read_remote_refs(&p, &p, branch_name, &mut refs); 53 | } 54 | } 55 | 56 | Ok(refs) 57 | } 58 | 59 | #[derive(Debug, Eq, PartialEq)] 60 | pub struct Refs { 61 | pub local_id: Option, 62 | pub remote_id: Option, 63 | pub others: HashSet, 64 | } 65 | 66 | fn read_local_refs( 67 | current_path: &PathBuf, 68 | start_path: &PathBuf, 69 | branch_name: &str, 70 | refs_result: &mut Refs, 71 | ) -> R<()> { 72 | for item in read_dir(current_path)? 
{ 73 | let path = item?.path(); 74 | 75 | if path.is_dir() { 76 | read_local_refs(&path, start_path, branch_name, refs_result)?; 77 | } 78 | 79 | let file_name = path.file_name().unwrap().to_str().unwrap(); 80 | if !file_name.starts_with(".") && file_name != "HEAD" { 81 | let found_ref = get_ref_name_from_path(&path, start_path); 82 | if found_ref == branch_name { 83 | refs_result.local_id = Some(read_id_from_ref_file(&path)?); 84 | } else { 85 | refs_result.others.insert(found_ref); 86 | } 87 | } 88 | } 89 | 90 | Ok(()) 91 | } 92 | 93 | fn get_ref_name_from_path(file: &Path, start_dir: &PathBuf) -> String { 94 | let ref_path = file.strip_prefix(start_dir).unwrap(); 95 | 96 | ref_path 97 | .iter() 98 | .filter_map(|p| p.to_str()) 99 | .collect::>() 100 | .join("/") 101 | } 102 | 103 | fn read_remote_refs( 104 | current_dir: &PathBuf, 105 | start_dir: &PathBuf, 106 | branch_name: &str, 107 | refs: &mut Refs, 108 | ) -> R<()> { 109 | for item in read_dir(current_dir)? { 110 | let p = item?.path(); 111 | 112 | if p.is_dir() { 113 | read_remote_refs(&p, start_dir, branch_name, refs)?; 114 | } 115 | 116 | let file_name = p.file_name().unwrap().to_str().unwrap(); 117 | if file_name.starts_with('.') { 118 | continue; 119 | } 120 | if file_name == "HEAD" { 121 | let head_branch_full_name = read_head_file(&p)?; 122 | 123 | if head_branch_full_name.ends_with(branch_name) { 124 | refs.remote_id = refs.local_id.clone(); 125 | } 126 | } else { 127 | let found = get_ref_name_from_path(&p, start_dir); 128 | if found == branch_name { 129 | refs.remote_id = read_id_from_ref_file(&p).ok(); 130 | } else { 131 | refs.others.insert(found); 132 | } 133 | } 134 | } 135 | 136 | Ok(()) 137 | } 138 | 139 | // E.g. 
"ref: refs/remotes/origin/develop" 140 | fn read_head_file(head_path: &PathBuf) -> R { 141 | let text = read_to_string(head_path)?; 142 | 143 | if let Some(i) = text.chars().position(|c| c == ':') { 144 | let path = &text[(i + 1)..]; 145 | 146 | return Ok(path.trim().to_string()); 147 | } 148 | 149 | Err(ES::from(&f!("Failed to parse {:?}", head_path))) 150 | } 151 | 152 | fn read_id_from_ref_file(file: &PathBuf) -> R { 153 | Ok(read_to_string(file)?.trim().to_string()) 154 | } 155 | 156 | #[cfg(test)] 157 | mod tests { 158 | use crate::git::queries::workspace::load_current_branch::get_ref_name_from_path; 159 | use std::path::PathBuf; 160 | 161 | #[test] 162 | fn test_get_ref_name() { 163 | let start: PathBuf = ["aa", "bb"].iter().collect(); 164 | let ref_parts: PathBuf = ["aa", "bb", "cc", "dd"].iter().collect(); 165 | 166 | let res = get_ref_name_from_path(&ref_parts, &start); 167 | 168 | assert_eq!(res, "cc/dd") 169 | } 170 | } 171 | -------------------------------------------------------------------------------- /src/git/queries/workspace/load_packed_refs.rs: -------------------------------------------------------------------------------- 1 | use crate::git::store::STORE; 2 | use crate::parser::standard_parsers::{ANY_WORD, UNTIL_LINE_END}; 3 | use crate::parser::{parse_all_err, Parser}; 4 | use crate::server::request_util::R; 5 | use crate::{and, character, many, map2, or, word}; 6 | use std::fs::read_to_string; 7 | 8 | pub fn load_packed_refs(repo_path: &str) -> R> { 9 | let repo = STORE.get_repo_path(repo_path)?; 10 | let path = repo.git_path.join("packed-refs"); 11 | 12 | let text = read_to_string(path)?; 13 | 14 | parse_all_err(P_LINES, &text) 15 | } 16 | 17 | #[derive(Debug, Eq, PartialEq)] 18 | pub enum PackedRef { 19 | Local(PackedLocalRef), 20 | Remote(PackedRemoteRef), 21 | Unknown, 22 | } 23 | 24 | #[derive(Debug, Eq, PartialEq)] 25 | pub struct PackedRemoteRef { 26 | pub commit_id: String, 27 | pub remote_name: String, 28 | pub name: String, 29 | } 
30 | 31 | #[derive(Debug, Eq, PartialEq)] 32 | pub struct PackedLocalRef { 33 | pub commit_id: String, 34 | pub name: String, 35 | } 36 | 37 | const P_LOCAL: Parser = map2!( 38 | and!( 39 | ANY_WORD, 40 | character!(' '), 41 | word!("refs/heads/"), 42 | UNTIL_LINE_END 43 | ), 44 | res, 45 | PackedRef::Local(PackedLocalRef { 46 | commit_id: res.0, 47 | name: res.3 48 | }) 49 | ); 50 | 51 | const P_REMOTE: Parser = map2!( 52 | and!( 53 | ANY_WORD, 54 | character!(' '), 55 | word!("refs/remotes/"), 56 | UNTIL_LINE_END 57 | ), 58 | res, 59 | { 60 | let (remote_name, name) = remove_remote(res.3); 61 | 62 | PackedRef::Remote(PackedRemoteRef { 63 | commit_id: res.0, 64 | remote_name, 65 | name, 66 | }) 67 | } 68 | ); 69 | 70 | const P_OTHER: Parser = map2!(UNTIL_LINE_END, __, PackedRef::Unknown); 71 | 72 | const P_LINE: Parser = or!(P_LOCAL, P_REMOTE, P_OTHER); 73 | 74 | const P_LINES: Parser> = many!(P_LINE); 75 | 76 | fn remove_remote(ref_part: String) -> (String, String) { 77 | if let Some((remote, tail)) = ref_part.split_once('/') { 78 | return (remote.to_string(), tail.to_string()); 79 | } 80 | (String::new(), ref_part) 81 | } 82 | -------------------------------------------------------------------------------- /src/git/queries/workspace/mod.rs: -------------------------------------------------------------------------------- 1 | mod load_current_branch; 2 | mod load_packed_refs; 3 | pub mod repo_status; 4 | -------------------------------------------------------------------------------- /src/git/queries/workspace/repo_status.rs: -------------------------------------------------------------------------------- 1 | use crate::git::queries::commit_calcs::count_commits_between_fallback; 2 | use crate::git::queries::config::load_full_config; 3 | use crate::git::queries::config::GitConfig; 4 | use crate::git::queries::wip::wip_patches::{load_wip_patches, WipPatches}; 5 | use crate::git::queries::workspace::load_current_branch::{ 6 | load_current_branch, read_refs, Refs, 7 | 
}; 8 | use crate::git::queries::workspace::load_packed_refs::{load_packed_refs, PackedRef}; 9 | use crate::server::git_request::ReqOptions; 10 | use crate::server::request_util::R; 11 | use serde::Serialize; 12 | use std::collections::HashSet; 13 | use ts_rs::TS; 14 | 15 | #[derive(Debug, Serialize, TS)] 16 | #[serde(rename_all = "camelCase")] 17 | #[ts(export)] 18 | pub struct RepoStatus { 19 | patches: WipPatches, 20 | config: GitConfig, 21 | // These are just short names. Don't include remote name or whether local. 22 | branches: HashSet, 23 | branch_name: String, 24 | head_ref_id: String, 25 | local_commit_id: Option, 26 | remote_commit_id: Option, 27 | remote_ahead: u32, 28 | remote_behind: u32, 29 | state: BranchState, 30 | } 31 | 32 | #[derive(Debug, Serialize, TS)] 33 | #[ts(export)] 34 | pub enum BranchState { 35 | Local, 36 | Remote, 37 | Both, 38 | } 39 | 40 | pub fn load_repo_status(options: &ReqOptions) -> R { 41 | let ReqOptions { repo_path } = options; 42 | 43 | let patches = load_wip_patches(options)?; 44 | let config = load_full_config(options)?; 45 | 46 | let (head_id, current_branch) = load_current_branch(repo_path)?; 47 | 48 | let Refs { 49 | mut local_id, 50 | mut remote_id, 51 | mut others, 52 | } = read_refs(repo_path, ¤t_branch)?; 53 | 54 | let packed_refs = load_packed_refs(repo_path).unwrap_or_else(|_| Vec::new()); 55 | 56 | if local_id.is_none() { 57 | for r in packed_refs.iter() { 58 | if let PackedRef::Local(local) = r { 59 | if local.name == current_branch { 60 | local_id = Some(local.commit_id.clone()); 61 | break; 62 | } 63 | } 64 | } 65 | } 66 | 67 | if remote_id.is_none() { 68 | for r in packed_refs.iter() { 69 | if let PackedRef::Remote(remote) = r { 70 | if remote.name == current_branch { 71 | remote_id = Some(remote.commit_id.clone()); 72 | break; 73 | } 74 | } 75 | } 76 | } 77 | 78 | others.extend(packed_refs.iter().flat_map(|r| match r { 79 | PackedRef::Local(l) => Some(l.name.clone()), 80 | PackedRef::Remote(r) => 
Some(r.name.clone()), 81 | PackedRef::Unknown => None, 82 | })); 83 | 84 | if let Some(local_id) = local_id.clone() { 85 | if let Some(remote_id) = remote_id { 86 | let remote_ahead = count_commits_between_fallback(repo_path, &local_id, &remote_id); 87 | let remote_behind = 88 | count_commits_between_fallback(repo_path, &remote_id, &local_id); 89 | 90 | return Ok(RepoStatus { 91 | patches, 92 | config, 93 | branches: others, 94 | branch_name: current_branch, 95 | head_ref_id: head_id, 96 | local_commit_id: Some(local_id), 97 | remote_commit_id: Some(remote_id), 98 | remote_ahead, 99 | remote_behind, 100 | state: BranchState::Both, 101 | }); 102 | } 103 | } 104 | 105 | let state = match (local_id.is_some(), remote_id.is_some()) { 106 | (true, true) => BranchState::Both, 107 | (true, false) => { 108 | if others.contains("HEAD") { 109 | BranchState::Remote 110 | } else { 111 | BranchState::Local 112 | } 113 | } 114 | (false, true) => BranchState::Remote, 115 | (false, false) => BranchState::Local, 116 | }; 117 | 118 | Ok(RepoStatus { 119 | patches, 120 | config, 121 | branches: others, 122 | branch_name: current_branch, 123 | head_ref_id: head_id, 124 | local_commit_id: local_id, 125 | remote_commit_id: remote_id, 126 | remote_ahead: 0, 127 | remote_behind: 0, 128 | state, 129 | }) 130 | } 131 | -------------------------------------------------------------------------------- /src/git/run_git.rs: -------------------------------------------------------------------------------- 1 | use crate::dprintln; 2 | use chardetng::EncodingDetector; 3 | use std::ffi::OsStr; 4 | use std::path::Path; 5 | use std::process::{Command, Output}; 6 | 7 | use crate::git::git_settings::GIT_PATH; 8 | use crate::server::request_util::R; 9 | 10 | #[derive(Clone, Debug)] 11 | pub struct RunGitOptions<'a, I, S> 12 | where 13 | I: IntoIterator, 14 | S: AsRef, 15 | { 16 | pub args: I, 17 | pub repo_path: &'a str, 18 | } 19 | 20 | #[allow(dead_code)] 21 | pub struct GitOut { 22 | pub stdout: String, 
pub stderr: String,
}

// Runs git with the given args in `repo_path`, capturing stdout/stderr and
// decoding each with a detected encoding (git output is not always UTF-8).
pub fn run_git_err<I, S>(options: RunGitOptions<I, S>) -> R<GitOut>
where
  I: IntoIterator<Item = S>,
  S: AsRef<OsStr>,
{
  let out = Command::new(Path::new(GIT_PATH.as_path()))
    .args(options.args)
    .current_dir(options.repo_path)
    .output()?;

  let Output { stdout, stderr, .. } = &out;

  Ok(GitOut {
    stdout: read_buffer_to_string(stdout),
    stderr: read_buffer_to_string(stderr),
  })
}

// Best-effort text decode: guess the encoding from the bytes themselves, then
// decode (replacing anything that doesn't fit the guessed encoding).
fn read_buffer_to_string(bytes: &[u8]) -> String {
  let mut decoder = EncodingDetector::new();
  decoder.feed(bytes, true);
  let encoding = decoder.guess(None, true);
  let content = encoding.decode(bytes).0;

  content.into_owned()
}

// Like run_git_err, but returns raw stdout bytes, or None when stdout is
// empty or the command failed to launch.
pub fn run_git_buffer<I, S>(options: RunGitOptions<I, S>) -> Option<Vec<u8>>
where
  I: IntoIterator<Item = S>,
  S: AsRef<OsStr>,
{
  let result = Command::new(Path::new(GIT_PATH.as_path()))
    .args(options.args)
    .current_dir(options.repo_path)
    .output();

  if let Ok(out) = result {
    let Output { stdout, stderr, ..
} = out; 65 | 66 | if !stdout.is_empty() { 67 | return Some(stdout); 68 | } else if !stderr.is_empty() { 69 | dprintln!("StdErr: {:?}", String::from_utf8_lossy(&stderr).to_string()); 70 | } 71 | } 72 | 73 | None 74 | } 75 | 76 | #[cfg(test)] 77 | mod tests { 78 | use std::path::Path; 79 | 80 | use crate::git::run_git; 81 | use crate::git::run_git::RunGitOptions; 82 | 83 | #[test] 84 | fn test_run_git() { 85 | let text = run_git::run_git_err(RunGitOptions { 86 | args: ["--help"], 87 | repo_path: ".", 88 | }); 89 | 90 | assert!(text.is_ok()); 91 | assert!(!text.unwrap().stdout.is_empty()); 92 | } 93 | 94 | #[test] 95 | fn test_git_path() { 96 | let p = Path::new("git"); 97 | 98 | assert_eq!(p.to_str().unwrap(), "git"); 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /src/git/store.rs: -------------------------------------------------------------------------------- 1 | use crate::git::git_types::{Commit, Patch, RefInfo}; 2 | use crate::git::git_version::GitVersion; 3 | use crate::git::queries::config::GitConfig; 4 | use crate::git::queries::patches::cache::clear_patch_cache; 5 | use crate::git::queries::search::search_request::clear_completed_searches; 6 | use crate::server::git_request::ReqOptions; 7 | use crate::server::request_util::{ES, R}; 8 | use crate::util::global::{Glo, Global}; 9 | use crate::{dprintln, glo, global, time_block}; 10 | use ahash::AHashMap; 11 | use std::collections::HashMap; 12 | use std::env; 13 | use std::path::PathBuf; 14 | 15 | #[derive(Debug, Clone)] 16 | pub struct RepoPath { 17 | pub path: PathBuf, 18 | pub git_path: PathBuf, 19 | } 20 | 21 | pub type PathString = String; 22 | type PatchPath = String; 23 | type CommitsAndRefs = (Vec, Vec); 24 | 25 | static REPO_PATHS: Glo> = glo!(Vec::new()); 26 | 27 | static COMMITS_AND_REFS: Glo> = 28 | glo!(AHashMap::new()); 29 | static PATCHES: Glo<(PathString, HashMap>)> = 30 | glo!((PathString::new(), HashMap::new())); 31 | // Key is 2 commit 
ids joined. 32 | pub static REF_DIFFS: Glo> = glo!(AHashMap::new()); 33 | // This probably needs to be per repo. We could then watch for changes? 34 | pub static CONFIG: Global> = global!(AHashMap::new()); 35 | pub static GIT_VERSION: Glo = glo!(GitVersion::new()); 36 | 37 | pub const STORE: Store = Store {}; 38 | pub struct Store {} 39 | impl Store { 40 | // Assumes git is installed. 41 | pub fn get_git_version(&self) -> GitVersion { 42 | if let Ok(version) = GIT_VERSION.read() { 43 | return (*version).to_owned(); 44 | } 45 | GitVersion::new() 46 | } 47 | 48 | pub fn insert_commits( 49 | &self, 50 | repo_path: &PathString, 51 | commits: &Vec, 52 | refs: &Vec, 53 | ) { 54 | if let Ok(mut cr) = COMMITS_AND_REFS.write() { 55 | (*cr).insert(repo_path.to_string(), (commits.to_owned(), refs.to_owned())); 56 | } 57 | } 58 | 59 | pub fn get_commits_and_refs(&self, repo_path: &PathString) -> Option { 60 | if let Ok(cr) = COMMITS_AND_REFS.read() { 61 | return Some((*cr).get(repo_path)?.to_owned()); 62 | } 63 | 64 | None 65 | } 66 | 67 | fn get_all_commits_and_refs(&self) -> Option> { 68 | let cr = COMMITS_AND_REFS.read().ok()?; 69 | 70 | Some((*cr).to_owned()) 71 | } 72 | 73 | pub fn clear_unwatched_repos_from_commits( 74 | &self, 75 | watched_repos: &HashMap, 76 | ) -> Option<()> { 77 | let commits = self 78 | .get_all_commits_and_refs()? 79 | .into_iter() 80 | .filter(|(repo_path, _)| watched_repos.contains_key(repo_path)) 81 | .collect(); 82 | 83 | if let Ok(mut cr) = COMMITS_AND_REFS.write() { 84 | *cr = commits 85 | } 86 | 87 | let configs = CONFIG 88 | .get()? 
89 | .into_iter() 90 | .filter(|(repo_path, _)| watched_repos.contains_key(repo_path)) 91 | .collect(); 92 | 93 | CONFIG.set(configs); 94 | 95 | Some(()) 96 | } 97 | 98 | pub fn insert_patches(&self, repo_path: &str, patches: &HashMap>) { 99 | time_block!("insert_patches", { 100 | if let Ok(mut saved_patches) = PATCHES.write() { 101 | *saved_patches = (repo_path.to_owned(), patches.to_owned()); 102 | } 103 | }); 104 | } 105 | 106 | pub fn get_patches(&self, repo_path: &str) -> Option>> { 107 | if let Ok(stored) = PATCHES.read() { 108 | if stored.0 == repo_path && !stored.1.is_empty() { 109 | return Some(stored.1.clone()); 110 | } 111 | } 112 | 113 | None 114 | } 115 | 116 | pub fn set_repo_paths(&self, repo_paths: Vec) { 117 | if let Ok(mut rp) = REPO_PATHS.write() { 118 | *rp = repo_paths; 119 | } 120 | } 121 | 122 | pub fn get_repo_path(&self, path_string: &str) -> R { 123 | if let Some(p) = REPO_PATHS 124 | .read()? 125 | .iter() 126 | .find(|p| p.path.ends_with(path_string)) 127 | { 128 | return Ok(p.clone()); 129 | } 130 | 131 | Err(ES::from("Repo path not found")) 132 | } 133 | } 134 | 135 | pub fn clear_cache(_: &ReqOptions) { 136 | clear_completed_searches(); 137 | 138 | dprintln!("Cleared cache."); 139 | } 140 | 141 | pub fn clear_all_caches(_: &ReqOptions) { 142 | clear_completed_searches(); 143 | clear_patch_cache(); 144 | 145 | dprintln!("Cleared all caches."); 146 | } 147 | 148 | pub fn override_git_home(options: &ReqOptions) { 149 | dprintln!("HOME before override: {:?}", env::var("HOME")); 150 | 151 | env::set_var("HOME", &options.repo_path); 152 | } 153 | -------------------------------------------------------------------------------- /src/index/ac_index.rs: -------------------------------------------------------------------------------- 1 | use crate::index::ac_node::ACNode; 2 | use ahash::{HashMap, HashMapExt}; 3 | 4 | #[derive(Debug, Clone)] 5 | pub struct ACIndex { 6 | nodes: HashMap, 7 | } 8 | impl ACIndex { 9 | pub fn new() -> Self { 10 | Self 
{ 11 | nodes: HashMap::new(), 12 | } 13 | } 14 | 15 | pub fn add_word(&mut self, word: &str) { 16 | let mut chars = word.chars(); 17 | 18 | if let Some(c) = chars.next() { 19 | if let Some(n) = self.nodes.get_mut(&c) { 20 | n.add_word(&mut chars); 21 | } else { 22 | self.nodes.insert(c, ACNode::new(c, &mut chars)); 23 | } 24 | } 25 | } 26 | 27 | pub fn find_matching(&self, word_prefix: &str) -> Vec { 28 | let chars = word_prefix.chars(); 29 | 30 | let mut nodes = &self.nodes; 31 | let mut endings = Vec::new(); 32 | 33 | for c in chars { 34 | if let Some(n) = nodes.get(&c) { 35 | nodes = &n.nodes; 36 | } else { 37 | // failed to match. 38 | return endings; 39 | } 40 | } 41 | 42 | for n in nodes.values() { 43 | let e = n.get_word_endings(); 44 | 45 | endings.extend(e); 46 | } 47 | 48 | endings.sort(); 49 | 50 | endings 51 | .into_iter() 52 | .map(|suffix| format!("{}{}", word_prefix, suffix)) 53 | .collect() 54 | } 55 | } 56 | 57 | #[cfg(test)] 58 | mod tests { 59 | use crate::index::ac_index::ACIndex; 60 | 61 | #[test] 62 | fn add_word_abcd() { 63 | let mut index = ACIndex::new(); 64 | 65 | index.add_word("abcd"); 66 | 67 | assert_eq!(index.find_matching("a"), ["abcd".to_string()]); 68 | } 69 | 70 | #[test] 71 | fn add_multiple_words() { 72 | let mut index = ACIndex::new(); 73 | 74 | index.add_word("abcd"); 75 | index.add_word("abcd"); 76 | index.add_word("aaaa"); 77 | index.add_word("abbb"); 78 | index.add_word("bbbb"); 79 | index.add_word("dddd"); 80 | 81 | assert_eq!( 82 | index.find_matching("a"), 83 | ["aaaa".to_string(), "abbb".to_string(), "abcd".to_string()] 84 | ); 85 | 86 | assert_eq!(index.find_matching("b"), ["bbbb".to_string()]); 87 | } 88 | 89 | #[test] 90 | fn add_word_abcd_with_endings() { 91 | let mut index = ACIndex::new(); 92 | 93 | index.add_word("ab"); 94 | index.add_word("abcd"); 95 | 96 | assert_eq!( 97 | index.find_matching("a"), 98 | ["ab".to_string(), "abcd".to_string()] 99 | ); 100 | } 101 | 102 | #[test] 103 | fn char_boundaries() { 104 | 
let test: Vec = "test".chars().collect(); 105 | 106 | assert_eq!(test[1..].len(), 3); 107 | 108 | let text = "🍝️test"; 109 | 110 | let mut it = text.chars(); 111 | assert_eq!(it.next(), Some('🍝')); 112 | 113 | // U+FE0F Variation Selector-16 114 | assert_eq!(it.next(), Some('\u{fe0f}')); 115 | 116 | let test: Vec = text.chars().collect(); 117 | 118 | assert_eq!(test[0].to_string(), "🍝"); 119 | assert_eq!(test[0], '🍝'); 120 | assert_eq!(test[0..1], ['🍝']); 121 | assert_eq!(test[1], '\u{fe0f}'); 122 | assert_eq!(test[0..].len(), 6); 123 | assert_eq!(test[1..].len(), 5); 124 | 125 | let s = String::from(text); 126 | 127 | assert_eq!(s.len(), 11); 128 | assert_eq!(&s[7..9], "te"); 129 | 130 | assert!(s.is_char_boundary(0)); 131 | assert!(!s.is_char_boundary(1)); 132 | assert!(!s.is_char_boundary(2)); 133 | assert!(!s.is_char_boundary(3)); 134 | assert!(s.is_char_boundary(4)); 135 | assert!(!s.is_char_boundary(5)); 136 | assert!(!s.is_char_boundary(6)); 137 | assert!(s.is_char_boundary(7)); 138 | 139 | assert_eq!(&s[..4], "🍝"); 140 | assert_eq!(&s[..7], "🍝\u{fe0f}"); 141 | } 142 | } 143 | -------------------------------------------------------------------------------- /src/index/ac_node.rs: -------------------------------------------------------------------------------- 1 | use ahash::{HashMap, HashMapExt}; 2 | use std::str::Chars; 3 | 4 | #[derive(Debug, Clone)] 5 | pub struct ACNode { 6 | pub char: char, 7 | pub nodes: HashMap, 8 | pub end_of_word: bool, 9 | } 10 | impl ACNode { 11 | pub fn new(char: char, remaining: &mut Chars) -> ACNode { 12 | let mut n = ACNode { 13 | char, 14 | nodes: HashMap::new(), 15 | end_of_word: false, 16 | }; 17 | 18 | n.add_word(remaining); 19 | 20 | n 21 | } 22 | 23 | pub fn add_word(&mut self, remaining: &mut Chars) { 24 | if let Some(c) = remaining.next() { 25 | if let Some(n) = self.nodes.get_mut(&c) { 26 | n.add_word(remaining); 27 | } else { 28 | self.nodes.insert(c, ACNode::new(c, remaining)); 29 | } 30 | } else { 31 | 
self.end_of_word = true; 32 | } 33 | } 34 | 35 | pub fn get_word_endings(&self) -> Vec { 36 | let mut matches = Vec::new(); 37 | 38 | if self.end_of_word { 39 | matches.push(self.char.to_string()); 40 | } 41 | 42 | for n in self.nodes.values() { 43 | for s in n.get_word_endings() { 44 | let end = format!("{}{}", self.char, s); 45 | 46 | matches.push(end); 47 | } 48 | } 49 | 50 | matches 51 | } 52 | } 53 | 54 | #[cfg(test)] 55 | mod tests { 56 | use crate::index::ac_node::ACNode; 57 | 58 | #[test] 59 | fn add_word_abcd() { 60 | let remaining = String::from("bcd"); 61 | let mut chars = remaining.chars(); 62 | 63 | let node = ACNode::new('a', &mut chars); 64 | 65 | let endings = node.get_word_endings(); 66 | 67 | assert_eq!(endings.len(), 1); 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /src/index/auto_complete.rs: -------------------------------------------------------------------------------- 1 | use crate::index::commit_message_ac::commit_message_ac; 2 | use crate::index::create_branch_ac::create_branch_ac; 3 | use crate::server::request_util::R; 4 | use serde::Deserialize; 5 | use ts_rs::TS; 6 | 7 | #[derive(Debug, Deserialize, TS)] 8 | #[serde(rename_all = "camelCase")] 9 | #[ts(export)] 10 | pub struct MessageAC { 11 | pub current_word: String, 12 | pub repo_path: String, 13 | pub max_num: usize, 14 | pub kind: ACType, 15 | } 16 | 17 | #[derive(Debug, Deserialize, TS)] 18 | #[ts(export)] 19 | pub enum ACType { 20 | CommitMessage, 21 | CreateBranch, 22 | } 23 | 24 | pub fn auto_complete(options: &MessageAC) -> R> { 25 | let MessageAC { 26 | current_word, 27 | repo_path, 28 | max_num, 29 | kind, 30 | } = options; 31 | 32 | match kind { 33 | ACType::CommitMessage => commit_message_ac(repo_path, current_word, *max_num), 34 | ACType::CreateBranch => create_branch_ac(repo_path, current_word, *max_num), 35 | } 36 | } 37 | -------------------------------------------------------------------------------- 
/src/index/commit_message_ac.rs: -------------------------------------------------------------------------------- 1 | use crate::git::git_types::Commit; 2 | use crate::git::queries::patches::patches::load_patches; 3 | use crate::git::store::STORE; 4 | use crate::global; 5 | use crate::index::ac_index::ACIndex; 6 | use crate::server::request_util::{ES, R}; 7 | use crate::util::global::Global; 8 | 9 | pub fn commit_message_ac( 10 | repo_path: &String, 11 | current_word: &String, 12 | max_num: usize, 13 | ) -> R> { 14 | if current_word.is_empty() { 15 | return Ok(Vec::new()); 16 | } 17 | 18 | let index = get_index(repo_path)?; 19 | 20 | let words = index.find_matching(current_word); 21 | 22 | Ok(words.into_iter().take(max_num).collect()) 23 | } 24 | 25 | #[derive(Clone)] 26 | struct CommitMessageAC { 27 | pub repo_path: String, 28 | pub index: ACIndex, 29 | } 30 | 31 | impl CommitMessageAC { 32 | fn new() -> Self { 33 | Self { 34 | repo_path: String::from(""), 35 | index: ACIndex::new(), 36 | } 37 | } 38 | } 39 | 40 | static INDEX: Global = global!(CommitMessageAC::new()); 41 | 42 | fn get_index(repo_path: &String) -> R { 43 | if let Some(index) = INDEX.get() { 44 | if &index.repo_path == repo_path { 45 | return Ok(index.index); 46 | } 47 | } 48 | 49 | let index = build_index(repo_path)?; 50 | 51 | INDEX.set(CommitMessageAC { 52 | repo_path: repo_path.clone(), 53 | index: index.clone(), 54 | }); 55 | 56 | Ok(index) 57 | } 58 | 59 | fn build_index(repo_path: &String) -> R { 60 | let (commits, refs) = STORE 61 | .get_commits_and_refs(repo_path) 62 | .ok_or(ES::from("build_index: Couldn't get commits and refs."))?; 63 | let patches = load_patches(repo_path, &commits)?; 64 | 65 | let mut index = ACIndex::new(); 66 | 67 | for c in commits { 68 | index.add_word(&c.email); 69 | index.add_word(&c.author); 70 | 71 | let message_words = get_words_in_commit_message(&c); 72 | 73 | for w in message_words { 74 | index.add_word(&w); 75 | } 76 | } 77 | 78 | for r in refs { 79 | 
index.add_word(&r.short_name);
  }

  for c in patches.values() {
    for p in c {
      index.add_word(&p.new_file);
      index.add_word(&p.old_file);
    }
  }

  Ok(index)
}

// Splits a commit message into words worth indexing for auto-complete.
// Only words longer than 6 chars are kept (short words aren't worth
// completing).
fn get_words_in_commit_message(commit: &Commit) -> Vec<String> {
  let mut words: Vec<String> = Vec::new();
  let mut word: Vec<char> = Vec::new();

  for c in commit.message.chars() {
    if !char::is_whitespace(c) {
      word.push(c);
    } else {
      if word.len() > 6 {
        words.push(word.iter().collect());
      }
      word.clear();
    }
  }

  // Flush the trailing word: messages rarely end in whitespace, and without
  // this the last word of every message was silently dropped.
  if word.len() > 6 {
    words.push(word.iter().collect());
  }

  words
}
--------------------------------------------------------------------------------
/src/index/create_branch_ac.rs:
--------------------------------------------------------------------------------
use crate::git::store::STORE;
use crate::index::ac_index::ACIndex;
use crate::server::request_util::{ES, R};

// Auto-completes a branch name being typed, from the short names of all known
// refs in the repo. Returns at most `max_num` matches.
pub fn create_branch_ac(
  repo_path: &String,
  current_word: &str,
  max_num: usize,
) -> R<Vec<String>> {
  let (_, refs) = STORE
    .get_commits_and_refs(repo_path)
    .ok_or(ES::from("create_branch_ac: Couldn't get refs."))?;
  let mut index = ACIndex::new();

  for r in refs {
    index.add_word(&r.short_name);
  }

  Ok(
    index
      .find_matching(current_word)
      .into_iter()
      .take(max_num)
      .collect(),
  )
}
--------------------------------------------------------------------------------
/src/index/mod.rs:
--------------------------------------------------------------------------------
mod ac_index;
mod ac_node;
pub(crate) mod auto_complete;
mod commit_message_ac;
mod create_branch_ac;
--------------------------------------------------------------------------------
/src/main.rs:
--------------------------------------------------------------------------------
use crate::git::git_settings::set_git_env;
use crate::git::git_version::load_git_version;
use
crate::server::requests::start_async_server; 4 | 5 | mod config; 6 | pub(crate) mod git; 7 | mod index; 8 | mod parser; 9 | mod server; 10 | mod util; 11 | 12 | fn main() { 13 | set_git_env(); 14 | load_git_version(); 15 | start_async_server(); 16 | } 17 | -------------------------------------------------------------------------------- /src/parser/input.rs: -------------------------------------------------------------------------------- 1 | // 2 | 3 | pub struct Input { 4 | pub code: Vec, 5 | pub position: usize, 6 | pub attempted_position: usize, 7 | } 8 | 9 | impl Input { 10 | pub fn new(code: &str) -> Input { 11 | Input { 12 | code: code.chars().collect(), 13 | position: 0, 14 | attempted_position: 0, 15 | } 16 | } 17 | 18 | pub fn advance(&mut self) { 19 | self.set_position(self.position + 1); 20 | } 21 | 22 | // pub fn advance_by(&mut self, num: usize) { 23 | // self.set_position(self.position + num); 24 | // } 25 | 26 | pub fn next_char(&mut self) -> char { 27 | self.code[self.position] 28 | } 29 | 30 | pub fn set_position(&mut self, pos: usize) { 31 | if pos > self.attempted_position { 32 | self.attempted_position = pos; 33 | } 34 | self.position = pos; 35 | } 36 | 37 | pub fn end(&self) -> bool { 38 | self.position >= self.code.len() 39 | } 40 | 41 | pub fn successfully_parsed(&self) -> String { 42 | String::from_iter(&self.code[..self.attempted_position]) 43 | } 44 | 45 | pub fn unparsed(&self) -> String { 46 | String::from_iter(&self.code[self.attempted_position..]) 47 | } 48 | } 49 | 50 | #[cfg(test)] 51 | mod tests { 52 | use super::*; 53 | 54 | #[test] 55 | fn test_advance() { 56 | let mut input = Input::new("wowowoowowow"); 57 | 58 | assert_eq!(input.next_char(), 'w'); 59 | 60 | input.advance(); 61 | 62 | assert_eq!(input.position, 1); 63 | assert_eq!(input.next_char(), 'o'); 64 | } 65 | 66 | #[test] 67 | fn test_end() { 68 | let input = Input::new(""); 69 | 70 | assert_eq!(input.end(), true); 71 | } 72 | } 73 | 
-------------------------------------------------------------------------------- /src/parser/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::dprintln; 2 | use crate::parser::input::Input; 3 | use crate::server::request_util::{ES, R}; 4 | 5 | pub(crate) mod input; 6 | mod parser_types; 7 | pub(crate) mod standard_parsers; 8 | 9 | pub type Parser = fn(&mut Input) -> Option; 10 | 11 | pub fn parse_all(parser: Parser, text: &str) -> Option { 12 | run_parser( 13 | parser, 14 | text, 15 | ParseOptions { 16 | must_parse_all: true, 17 | print_error: true, 18 | }, 19 | ) 20 | } 21 | 22 | pub fn parse_all_err(parser: Parser, text: &str) -> R { 23 | run_parser_err( 24 | parser, 25 | text, 26 | ParseOptions { 27 | must_parse_all: true, 28 | print_error: false, 29 | }, 30 | ) 31 | } 32 | 33 | // Beware: Doesn't complain. 34 | pub fn parse_part(parser: Parser, text: &str) -> Option { 35 | run_parser( 36 | parser, 37 | text, 38 | ParseOptions { 39 | must_parse_all: false, 40 | print_error: false, 41 | }, 42 | ) 43 | } 44 | 45 | pub struct ParseOptions { 46 | pub must_parse_all: bool, 47 | pub print_error: bool, 48 | } 49 | 50 | pub fn run_parser(parser: Parser, text: &str, options: ParseOptions) -> Option { 51 | let mut input = Input::new(text); 52 | 53 | let result = parser(&mut input); 54 | 55 | if options.must_parse_all && !input.end() { 56 | if options.print_error { 57 | eprintln!("{}", get_error_message(&input)); 58 | } 59 | 60 | return None; 61 | } 62 | 63 | result 64 | } 65 | 66 | pub fn run_parser_err(parser: Parser, text: &str, options: ParseOptions) -> R { 67 | let mut input = Input::new(text); 68 | 69 | if let Some(res) = parser(&mut input) { 70 | if options.must_parse_all && !input.end() { 71 | let message = get_error_message(&input); 72 | 73 | if options.print_error { 74 | dprintln!("{}", message); 75 | } 76 | 77 | return Err(ES::Text(message)); 78 | } 79 | 80 | return Ok(res); 81 | } 82 | 83 | let message = 
get_error_message(&input); 84 | 85 | if options.print_error { 86 | dprintln!("{}", message); 87 | } 88 | 89 | Err(ES::Text(message)) 90 | } 91 | 92 | fn get_error_message(input: &Input) -> String { 93 | format!( 94 | r#" 95 | PARSE FAILURE AT POSITION {}: 96 | SUCCESSFULLY PARSED: 97 | "{}" 98 | 99 | FAILED AT: 100 | "{}" 101 | "#, 102 | input.attempted_position, 103 | input.successfully_parsed(), 104 | input.unparsed() 105 | ) 106 | } 107 | 108 | #[macro_export] 109 | macro_rules! map { 110 | ($parser:expr, $function:expr) => { 111 | |input: &mut $crate::parser::input::Input| { 112 | if let Some(result) = $parser(input) { 113 | Some($function(result)) 114 | } else { 115 | None 116 | } 117 | } 118 | }; 119 | } 120 | 121 | #[macro_export] 122 | macro_rules! map2 { 123 | ($parser: expr, $name: ident, $map: expr) => { 124 | |input: &mut $crate::parser::input::Input| { 125 | if let Some($name) = $parser(input) { 126 | Some($map) 127 | } else { 128 | None 129 | } 130 | } 131 | }; 132 | } 133 | 134 | #[cfg(test)] 135 | mod tests { 136 | use super::*; 137 | use crate::{character, word}; 138 | 139 | #[test] 140 | fn test_map() { 141 | let my_parser = map!(word!("omg"), String::from); 142 | 143 | let res = parse_all(my_parser, "omg"); 144 | 145 | assert_eq!(res.unwrap(), String::from("omg")); 146 | } 147 | 148 | #[test] 149 | fn test_map2() { 150 | let my_parser = map!(character!('c'), String::from); 151 | 152 | let res = parse_all(my_parser, "c"); 153 | 154 | assert_eq!(res.unwrap(), String::from("c")); 155 | } 156 | } 157 | -------------------------------------------------------------------------------- /src/parser/standard_parsers.rs: -------------------------------------------------------------------------------- 1 | use crate::parser::Parser; 2 | use crate::{ 3 | and, character, conditional_char2, map, map2, optional_take_char_while, or, 4 | take_char_while, until_parser, word, 5 | }; 6 | 7 | pub const ANY_WORD: Parser = take_char_while!(|c: char| { c.is_alphanumeric() 
}); 8 | pub const UNSIGNED_INT: Parser = take_char_while!(|c: char| { c.is_numeric() }); 9 | pub const SIGNED_INT: Parser = map!( 10 | and!(or!(word!("-"), word!("+"), WS_STR), UNSIGNED_INT), 11 | |res: (&str, String)| { res.0.to_string() + &res.1 } 12 | ); 13 | 14 | // TODO: Handle more cases. 15 | pub const STRING_LITERAL: Parser = map2!( 16 | and!( 17 | character!('"'), 18 | take_char_while!(|c: char| c != '"'), 19 | character!('"') 20 | ), 21 | res, 22 | res.1 23 | ); 24 | 25 | // const NUL: Parser = conditional_char2!(c, c.is_control() && !c.is_whitespace()); 26 | 27 | pub const WS: Parser = optional_take_char_while!(|c: char| { c.is_whitespace() }); 28 | pub const WS_STR: Parser<&str> = map2!(WS, _result, ""); 29 | 30 | pub const LINE_END: Parser<&str> = or!(word!("\n"), word!("\r\n")); 31 | pub const UNTIL_LINE_END: Parser = until_parser!(LINE_END); 32 | // pub const UNTIL_LINE_END_KEEP: Parser<(String, &str)> = 33 | // and!(until_parser_keep!(LINE_END), LINE_END); 34 | 35 | pub const UNTIL_NUL: Parser = 36 | until_parser!(conditional_char2!(c, c.is_control() && !c.is_whitespace())); 37 | 38 | // pub const UNTIL_END: Parser = optional_take_char_while!(|c: char| { c != char::from(0) }); 39 | 40 | #[cfg(test)] 41 | mod tests { 42 | use crate::parser::standard_parsers::{ 43 | ANY_WORD, SIGNED_INT, UNSIGNED_INT, UNTIL_LINE_END, UNTIL_NUL, WS, 44 | }; 45 | use crate::parser::{parse_all, parse_part, run_parser, ParseOptions}; 46 | use crate::take_char_while; 47 | 48 | #[test] 49 | fn test_take_while() { 50 | let parser = take_char_while!(|c: char| { c.is_alphanumeric() }); 51 | 52 | let result = parse_all(parser, "abcd55"); 53 | 54 | assert_eq!(result.unwrap(), "abcd55"); 55 | } 56 | 57 | #[test] 58 | fn is_this_whitespace() { 59 | let c = '\r'; 60 | 61 | assert!(c.is_whitespace()); 62 | assert!(c.is_control()); 63 | 64 | let c = '\0'; 65 | 66 | assert!(!c.is_whitespace()); 67 | assert!(c.is_control()); 68 | } 69 | 70 | #[test] 71 | fn test_any_word() { 72 | let 
result = parse_all(ANY_WORD, "abcd55"); 73 | 74 | assert_eq!(result.unwrap(), "abcd55"); 75 | 76 | // Should fail for non alpha-numeric. 77 | let result = parse_all(ANY_WORD, "@@@@@"); 78 | 79 | assert_eq!(result, None); 80 | } 81 | 82 | #[test] 83 | fn test_uint() { 84 | let result = parse_all(UNSIGNED_INT, "1234"); 85 | 86 | assert_eq!(result.unwrap(), "1234"); 87 | 88 | // Should fail for non alpha-numeric. 89 | let result = parse_all(ANY_WORD, "@@@@@"); 90 | 91 | assert_eq!(result, None); 92 | } 93 | 94 | #[test] 95 | fn test_signed_int() { 96 | let result = parse_all(SIGNED_INT, "1234"); 97 | 98 | assert_eq!(result.unwrap(), "1234"); 99 | 100 | // Should fail for non alpha-numeric. 101 | let result = run_parser( 102 | ANY_WORD, 103 | "@@@@@", 104 | ParseOptions { 105 | must_parse_all: true, 106 | print_error: false, 107 | }, 108 | ); 109 | 110 | assert_eq!(result, None); 111 | 112 | assert_eq!(parse_all(SIGNED_INT, "-1234").unwrap(), "-1234") 113 | } 114 | 115 | #[test] 116 | fn test_ws_parser() { 117 | let result = parse_all(WS, " "); 118 | 119 | assert!(result.is_some()); 120 | 121 | // Expect success even when nothing parsed. 
122 | assert!(parse_all(WS, "").is_some()); 123 | 124 | assert!(parse_all(WS, "\t").is_some()); 125 | 126 | assert!(parse_all(WS, "\n\n").is_some()); 127 | 128 | assert!(run_parser( 129 | WS, 130 | "ab", 131 | ParseOptions { 132 | must_parse_all: true, 133 | print_error: false 134 | } 135 | ) 136 | .is_none()); 137 | } 138 | 139 | #[test] 140 | fn test_until_line_end_parser() { 141 | let result = parse_part(UNTIL_LINE_END, "asdfsdf&^HF JC\tasd !@\nasdf"); 142 | 143 | assert!(result.is_some()); 144 | assert_eq!(result.unwrap(), "asdfsdf&^HF JC\tasd !@"); 145 | } 146 | 147 | #[test] 148 | fn test_until_nul() { 149 | let result = parse_all(UNTIL_NUL, "omg\0"); 150 | 151 | assert!(result.is_some()); 152 | assert_eq!(result.unwrap(), "omg"); 153 | } 154 | } 155 | -------------------------------------------------------------------------------- /src/server/custom_server/http.rs: -------------------------------------------------------------------------------- 1 | // use crate::parser::standard_parsers::{LINE_END, UNTIL_END, UNTIL_LINE_END, WS}; 2 | // use crate::parser::{Parser, _parse_part}; 3 | // use crate::{and, character, many, map, take_char_while}; 4 | // use std::collections::HashMap; 5 | // 6 | // pub fn parse_http_request(request_text: String) -> Option { 7 | // _parse_part(HTTP_REQUEST, &request_text) 8 | // } 9 | // 10 | // const NOT_WS: Parser = take_char_while!(|c: char| { !c.is_whitespace() }); 11 | // 12 | // const METHOD_LINE: Parser<(String, String, String)> = map!( 13 | // and!(NOT_WS, WS, NOT_WS, WS, NOT_WS, LINE_END), 14 | // |result: (String, String, String, String, String, &str)| { 15 | // let (method, _, url, _, protocol, _) = result; 16 | // 17 | // (method, url, protocol) 18 | // } 19 | // ); 20 | // 21 | // const HEADER: Parser<(String, String)> = map!( 22 | // and!( 23 | // take_char_while!(|c: char| { !c.is_whitespace() && c != ':' }), 24 | // character!(':'), 25 | // WS, 26 | // UNTIL_LINE_END 27 | // ), 28 | // |res: (String, char, String, 
String)| { (res.0, res.3) } 29 | // ); 30 | // 31 | // const HTTP_REQUEST: Parser = map!( 32 | // and!(METHOD_LINE, many!(HEADER), LINE_END, UNTIL_END), 33 | // |result: ( 34 | // (String, String, String), 35 | // Vec<(String, String)>, 36 | // &str, 37 | // String 38 | // )| { 39 | // let ((method, url, protocol), headers, _, body) = result; 40 | // 41 | // HttpRequest { 42 | // method, 43 | // url, 44 | // protocol, 45 | // headers: headers.into_iter().collect(), 46 | // body, 47 | // } 48 | // } 49 | // ); 50 | // 51 | // #[derive(Debug)] 52 | // pub struct HttpRequest { 53 | // pub method: String, 54 | // pub url: String, 55 | // pub protocol: String, 56 | // pub headers: HashMap, 57 | // pub body: String, 58 | // } 59 | // 60 | // #[cfg(test)] 61 | // mod tests { 62 | // use crate::parser::{_parse_part, parse_all}; 63 | // use crate::server::http::{HTTP_REQUEST, METHOD_LINE}; 64 | // 65 | // const REQ_TEXT: &str = "POST / HTTP/1.1 66 | // HOST: 127.0.0.1:29996 67 | // content-type: application/json 68 | // content-length: 23 69 | // 70 | // { 71 | // \"repoPath\": \".\" 72 | // }"; 73 | // 74 | // #[test] 75 | // fn test_parse_header() { 76 | // let result = _parse_part(METHOD_LINE, REQ_TEXT); 77 | // 78 | // assert!(result.is_some()); 79 | // assert_eq!( 80 | // result.unwrap(), 81 | // ("POST".to_string(), "/".to_string(), "HTTP/1.1".to_string()) 82 | // ); 83 | // } 84 | // 85 | // #[test] 86 | // fn test_start_server() { 87 | // let result = parse_all(HTTP_REQUEST, REQ_TEXT); 88 | // 89 | // assert!(result.is_some()); 90 | // } 91 | // } 92 | 93 | // pub fn _handle_sync_request<'a, O: Deserialize<'a>, R: Serialize>( 94 | // body: &'a str, 95 | // mut stream: TcpStream, 96 | // handler: fn(&O) -> R, 97 | // ) -> Result<(), Box> { 98 | // let options = from_str(body)?; 99 | // 100 | // let handler_result = handler(&options); 101 | // let serialized = serde_json::to_string(&handler_result)?; 102 | // 103 | // let response = format!( 104 | // "HTTP/1.1 200 
OK\r\nContent-Length: {}\r\n\r\n{}", 105 | // serialized.len(), 106 | // serialized 107 | // ); 108 | // 109 | // stream.write(response.as_bytes())?; 110 | // stream.flush()?; 111 | // 112 | // Ok(()) 113 | // } 114 | 115 | // #[macro_export] 116 | // macro_rules! requests { 117 | // ($request:expr, $stream:expr, $($handler:ident),*) => {{ 118 | // let url = $request.url.as_str(); 119 | // let body = $request.body.as_str(); 120 | // 121 | // match url { 122 | // $( 123 | // concat!("/", stringify!($handler)) => { 124 | // if let Err(e) = crate::server::git_request::handle_sync_request(body, $stream, $handler) { 125 | // println!("{}", e); 126 | // } 127 | // }, 128 | // )* 129 | // unknown_url => { 130 | // println!("Unknown url {}", unknown_url); 131 | // } 132 | // } 133 | // }}; 134 | // } 135 | -------------------------------------------------------------------------------- /src/server/custom_server/server.rs: -------------------------------------------------------------------------------- 1 | // use crate::server::http::{parse_http_request, HttpRequest}; 2 | // 3 | // use std::io::Read; 4 | // use std::net::{TcpListener, TcpStream}; 5 | // 6 | // #[cfg(debug_assertions)] 7 | // const _PORT: u16 = 29997; 8 | // #[cfg(not(debug_assertions))] 9 | // // const PORT: u16 = 0; 10 | // const _PORT: u16 = 29997; 11 | // 12 | // const _ADDRESS: fn() -> String = || format!("127.0.0.1:{}", _PORT); 13 | // 14 | // pub fn _start_sync_server() { 15 | // let listener = TcpListener::bind(_ADDRESS()).unwrap(); 16 | // 17 | // if let Ok(r) = listener.local_addr() { 18 | // println!("Port: {}", r.port()) 19 | // }; 20 | // 21 | // for stream in listener.incoming() { 22 | // let stream = stream.unwrap(); 23 | // 24 | // _handle_connection(stream); 25 | // } 26 | // } 27 | // 28 | // fn _handle_connection(stream: TcpStream) { 29 | // let result = _get_request_from_stream(&stream); 30 | // 31 | // if result.is_some() { 32 | // let request = result.unwrap(); 33 | // println!("Body: 
/// Generic request options for endpoints that only need to know which
/// repository to operate on. Deserialized from camelCase JSON
/// (`{"repoPath": "..."}`) and exported as a TypeScript type via ts-rs.
#[derive(Debug, Deserialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub struct ReqOptions {
  pub repo_path: String,
}
request_util; 3 | pub(crate) mod requests; 4 | pub(crate) mod static_files; 5 | -------------------------------------------------------------------------------- /src/server/request_util.rs: -------------------------------------------------------------------------------- 1 | use crate::f; 2 | use serde::Serialize; 3 | use ts_rs::TS; 4 | 5 | pub type R = Result; 6 | 7 | #[derive(Debug, Clone, TS, Serialize)] 8 | #[ts(export)] 9 | pub enum ES { 10 | Text(String), 11 | } 12 | 13 | impl ES { 14 | pub fn from(text: &str) -> Self { 15 | Self::Text(text.to_string()) 16 | } 17 | } 18 | 19 | impl From> for ES { 20 | fn from(err: std::sync::PoisonError) -> Self { 21 | ES::Text(err.to_string()) 22 | } 23 | } 24 | 25 | impl From for ES { 26 | fn from(err: std::io::Error) -> Self { 27 | ES::Text(err.to_string()) 28 | } 29 | } 30 | 31 | impl From> for ES { 32 | fn from(err: Box) -> Self { 33 | ES::Text(f!("{:?}", err)) 34 | } 35 | } 36 | 37 | impl From for ES { 38 | fn from(err: std::path::StripPrefixError) -> Self { 39 | ES::Text(err.to_string()) 40 | } 41 | } 42 | 43 | #[macro_export] 44 | macro_rules! parse_json { 45 | ($request: expr) => {{ 46 | let mut content = String::new(); 47 | 48 | if let Err(_e) = $request.as_reader().read_to_string(&mut content) { 49 | dprintln!("{}", _e); 50 | return; 51 | } 52 | 53 | match serde_json::from_str(&content) { 54 | Ok(options) => options, 55 | Err(_e) => { 56 | dprintln!("{}", _e); 57 | None 58 | } 59 | } 60 | }}; 61 | } 62 | 63 | #[macro_export] 64 | macro_rules! send_response { 65 | ($request: expr, $result: expr) => {{ 66 | let result = serde_json::to_string(&$result); 67 | 68 | match result { 69 | Ok(serialized) => { 70 | match $request.respond(Response::from_string(serialized)) { 71 | Ok(_) => {} 72 | Err(_e) => { 73 | dprintln!("{}", _e); 74 | } 75 | }; 76 | } 77 | Err(_e) => { 78 | dprintln!("{}", _e); 79 | } 80 | } 81 | }}; 82 | } 83 | 84 | #[macro_export] 85 | macro_rules! 
handle_request { 86 | ($request:expr, $handler: ident) => {{ 87 | match $crate::parse_json!($request) { 88 | Some(options) => { 89 | $crate::time_block!(stringify!($handler), { 90 | $crate::send_response!($request, $handler(&options)); 91 | }); 92 | } 93 | None => {} 94 | }; 95 | }}; 96 | } 97 | 98 | #[macro_export] 99 | macro_rules! handle_function_request { 100 | ($request:expr, $($handler:ident),*) => {{ 101 | match $request.url() { 102 | $( 103 | concat!("/f/", stringify!($handler)) => { 104 | $crate::handle_request!($request, $handler); 105 | }, 106 | )* 107 | _unknown_url => { 108 | dprintln!("Unknown url {}", _unknown_url); 109 | } 110 | } 111 | }}; 112 | } 113 | -------------------------------------------------------------------------------- /src/server/requests.rs: -------------------------------------------------------------------------------- 1 | use std::process::exit; 2 | 3 | use tiny_http::{Response, Server}; 4 | 5 | use crate::git::actions::add::git_add_files; 6 | use crate::git::actions::clone::clone_repo; 7 | use crate::git::actions::command::command; 8 | use crate::git::actions::create_repo::create_repo; 9 | use crate::git::actions::credentials::set_credentials; 10 | use crate::git::actions::fetch::fetch_all; 11 | use crate::git::actions::stash::{stash_changes, stash_staged}; 12 | use crate::git::conflicts::api::load_conflicted_file; 13 | use crate::git::git_version::git_version; 14 | use crate::git::queries::commits::{ 15 | commit_ids_between_commits, commit_is_ancestor, commit_is_on_branch, 16 | get_all_commits_on_current_branch, load_commits_and_refs, 17 | }; 18 | use crate::git::queries::hunks::html_code::get_patch_as_html; 19 | use crate::git::queries::hunks::images::load_commit_image; 20 | use crate::git::queries::hunks::load_hunks::{load_hunks, load_hunks_split}; 21 | use crate::git::queries::patches::patches_for_commit::load_patches_for_commit; 22 | use crate::git::queries::refs::ref_diffs::calc_ref_diffs; 23 | use 
crate::git::queries::run::run; 24 | use crate::git::queries::scan_workspace::scan_workspace; 25 | use crate::git::queries::search::search_commits::search_commits; 26 | use crate::git::queries::search::search_request::{poll_diff_search, start_diff_search}; 27 | use crate::git::queries::unpushed_commits::get_un_pushed_commits; 28 | use crate::git::queries::wip::is_rebase_in_progress; 29 | use crate::git::queries::wip::wip_diff::{ 30 | load_wip_hunk_lines, load_wip_hunks, load_wip_hunks_split, 31 | }; 32 | use crate::git::queries::wip::wip_patches::load_wip_patches; 33 | use crate::git::queries::workspace::repo_status::load_repo_status; 34 | use crate::git::run_git_action::poll_action2; 35 | use crate::git::store::{clear_all_caches, clear_cache, override_git_home}; 36 | use crate::index::auto_complete::auto_complete; 37 | use crate::server::static_files::{ 38 | file_size, handle_resource_request, path_exists, temp_dir, write_file, 39 | }; 40 | use crate::util::data_store::{get_data_store, set_data_store}; 41 | use crate::{dprintln, handle_function_request}; 42 | 43 | #[cfg(debug_assertions)] 44 | const PORT: u16 = 29997; 45 | #[cfg(not(debug_assertions))] 46 | const PORT: u16 = 0; 47 | 48 | const ADDRESS: fn() -> String = || format!("127.0.0.1:{}", PORT); 49 | 50 | pub fn start_async_server() { 51 | let server = Server::http(ADDRESS()).expect("Started server"); 52 | 53 | print_port( 54 | server 55 | .server_addr() 56 | .to_ip() 57 | .expect("Get port for printing") 58 | .port(), 59 | ); 60 | 61 | for mut request in server.incoming_requests() { 62 | match &request.url()[..3] { 63 | "/r/" => { 64 | handle_resource_request(request); 65 | } 66 | "/pi" => { 67 | let _ = request.respond(Response::from_string("gitfiend")); 68 | } 69 | "/ex" => { 70 | let _ = request.respond(Response::from_string("GitFiend core exiting...")); 71 | exit(0); 72 | } 73 | "/f/" => { 74 | handle_function_request! 
{ 75 | request, 76 | 77 | // Queries 78 | git_version, 79 | run, 80 | 81 | scan_workspace, 82 | load_repo_status, 83 | 84 | is_rebase_in_progress, 85 | load_commits_and_refs, 86 | 87 | load_hunks, 88 | load_hunks_split, 89 | load_wip_hunks, 90 | load_wip_hunk_lines, 91 | load_wip_hunks_split, 92 | load_conflicted_file, 93 | get_patch_as_html, 94 | 95 | load_wip_patches, 96 | load_patches_for_commit, 97 | load_commit_image, 98 | 99 | commit_ids_between_commits, 100 | get_un_pushed_commits, 101 | calc_ref_diffs, 102 | commit_is_ancestor, 103 | commit_is_on_branch, 104 | get_all_commits_on_current_branch, 105 | 106 | search_commits, 107 | start_diff_search, 108 | poll_diff_search, 109 | auto_complete, 110 | 111 | // TODO: Will this work in a sand-boxed mac app? 112 | path_exists, 113 | temp_dir, 114 | file_size, 115 | write_file, 116 | 117 | // Core messages 118 | clear_cache, 119 | clear_all_caches, 120 | set_credentials, 121 | poll_action2, 122 | override_git_home, 123 | get_data_store, 124 | set_data_store, 125 | 126 | // Actions 127 | command, 128 | git_add_files, 129 | stash_changes, 130 | fetch_all, 131 | clone_repo, 132 | create_repo, 133 | stash_staged 134 | } 135 | } 136 | _ => { 137 | dprintln!("Unhandled url {}", request.url()); 138 | } 139 | } 140 | } 141 | } 142 | 143 | fn print_port(port: u16) { 144 | // This is required by the renderer. Expected to be formatted like: 145 | // PORT:12345 146 | // We pad the width so we can read a specific number of chars from the stream. 
147 | println!("PORT:{:<12}", port); 148 | } 149 | -------------------------------------------------------------------------------- /src/server/static_files.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use std::fs::File; 3 | use std::io::Write; 4 | use std::path::{Path, PathBuf}; 5 | use std::str::FromStr; 6 | 7 | use serde::Deserialize; 8 | use tiny_http::{Header, Request, Response}; 9 | use ts_rs::TS; 10 | 11 | use crate::dprintln; 12 | use crate::server::request_util::{ES, R}; 13 | 14 | // TODO: If there's an error then a response won't be sent. This probably leaks memory. 15 | pub fn handle_resource_request(request: Request) -> Option<()> { 16 | let dir = get_server_dir()?; 17 | 18 | // Remove any extra query part. 19 | let url = request.url().split('?').next()?; 20 | let file_path = dir.join(&url[3..]); 21 | 22 | dprintln!("file_path {:?}, exists: {}", file_path, file_path.exists()); 23 | 24 | let file = File::open(&file_path).ok()?; 25 | let mut response = Response::from_file(file); 26 | 27 | let content_type = get_content_type(&file_path.to_string_lossy())?; 28 | 29 | let header = Header::from_str(&content_type).ok()?; 30 | response.add_header(header); 31 | 32 | let _ = request.respond(response); 33 | 34 | Some(()) 35 | } 36 | 37 | fn get_content_type(file_path: &str) -> Option { 38 | let guess = mime_guess::from_path(file_path); 39 | 40 | Some(format!("Content-Type: {}", guess.first()?)) 41 | } 42 | 43 | fn get_server_dir() -> Option { 44 | #[cfg(debug_assertions)] 45 | return Some(env::current_dir().ok()?.parent()?.join("git-fiend")); 46 | 47 | // TODO: This is tested for native mac app, not electron production build. 48 | // TODO: May need to unpack all from asar? 49 | #[cfg(not(debug_assertions))] 50 | Some( 51 | env::current_exe() 52 | .ok()? 53 | .parent()? 54 | .parent()? 55 | .parent()? 
56 | .to_path_buf(), 57 | ) 58 | } 59 | 60 | pub fn path_exists(file_path: &String) -> bool { 61 | Path::new(file_path).exists() 62 | } 63 | 64 | pub fn temp_dir(_: &String) -> R { 65 | Ok(String::from( 66 | env::temp_dir() 67 | .to_str() 68 | .ok_or(ES::from("temp_dir: Couldn't convert to str."))?, 69 | )) 70 | } 71 | 72 | pub fn file_size(file_path: &String) -> R { 73 | Ok(Path::new(file_path).metadata()?.len()) 74 | } 75 | 76 | #[derive(Debug, Deserialize, TS)] 77 | #[serde(rename_all = "camelCase")] 78 | #[ts(export)] 79 | pub struct WriteFileOpts { 80 | pub file_path: String, 81 | pub content: String, 82 | } 83 | 84 | pub fn write_file(options: &WriteFileOpts) -> R { 85 | let WriteFileOpts { file_path, content } = options; 86 | 87 | let mut file = File::create(file_path)?; 88 | file.write_all(content.as_ref())?; 89 | 90 | Ok(true) 91 | } 92 | -------------------------------------------------------------------------------- /src/util/data_store.rs: -------------------------------------------------------------------------------- 1 | use crate::config::{APPLICATION, ORGANISATION, QUALIFIER}; 2 | use crate::dprintln; 3 | use crate::server::git_request::ReqOptions; 4 | use directories::ProjectDirs; 5 | use serde::{Deserialize, Serialize}; 6 | use std::collections::HashMap; 7 | use std::fs::{create_dir_all, File, OpenOptions}; 8 | use std::io::{BufReader, Read, Write}; 9 | use std::path::PathBuf; 10 | use ts_rs::TS; 11 | 12 | pub fn get_data_store(_: &ReqOptions) -> UserConfigResult { 13 | load_config() 14 | } 15 | 16 | #[derive(Debug, Deserialize, TS)] 17 | #[serde(rename_all = "camelCase")] 18 | #[ts(export)] 19 | pub struct DataStoreValues { 20 | pub data: HashMap, 21 | } 22 | 23 | #[derive(Debug, Serialize, TS)] 24 | #[serde(rename_all = "camelCase")] 25 | #[ts(export)] 26 | pub struct ResultStatus { 27 | pub success: bool, 28 | pub message: String, 29 | } 30 | 31 | impl ResultStatus { 32 | pub fn success(message: &str) -> ResultStatus { 33 | ResultStatus { 34 | 
/// Persists the given key/value data to "data_store.json", replacing any
/// existing content. Returns a status (never an error) so the result can
/// be serialised straight back to the front-end.
pub fn set_data_store(o: &DataStoreValues) -> ResultStatus {
  let DataStoreValues { data } = o;

  match get_config_file_path() {
    None => ResultStatus::failure("Failed to get config file path"),
    Some(config_file_path) => {
      dprintln!("config_file_path: {:?}", config_file_path);

      // `truncate(true)` so old content can't survive past the end of the
      // newly written (possibly shorter) JSON.
      match OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .truncate(true)
        .open(config_file_path)
      {
        Err(e) => ResultStatus::failure(&format!("Failed to open config file: {}", e)),
        Ok(mut config_file) => match serde_json::to_string_pretty(&data) {
          Err(e) => ResultStatus::failure(&format!("Failed to serialize data: {}", e)),
          Ok(config_text) => match config_file.write_all(config_text.as_bytes()) {
            Err(e) => {
              ResultStatus::failure(&format!("Failed to write to config file: {}", e))
            }
            Ok(_) => ResultStatus::success("Data store updated"),
          },
        },
      }
    }
  }
}

/// Result of loading the user config: either the parsed key/value map or a
/// human-readable error message. Serialised to the front-end as-is.
#[derive(Debug, Serialize, TS)]
#[serde(rename_all = "camelCase")]
#[ts(export)]
pub enum UserConfigResult {
  Error(String),
  Config(HashMap<String, String>),
}

/// Reads and parses "data_store.json", mapping every failure to a
/// `UserConfigResult::Error` with a descriptive message.
fn load_config() -> UserConfigResult {
  match get_config_file_path() {
    None => UserConfigResult::Error("Failed to get config file path".to_string()),
    Some(config_file_path) => match File::open(config_file_path) {
      Err(e) => UserConfigResult::Error(format!("Failed to open config file: {}", e)),
      Ok(file) => {
        let mut reader = BufReader::new(file);
        let mut text = String::new();
        match reader.read_to_string(&mut text) {
          Err(e) => UserConfigResult::Error(format!("Failed to read config file: {}", e)),
          Ok(_) => match serde_json::from_str::<HashMap<String, String>>(&text) {
            Err(e) => {
              UserConfigResult::Error(format!("Failed to parse config file: {}", e))
            }
            Ok(config) => UserConfigResult::Config(config),
          },
        }
      }
    },
  }
}

/// Platform config dir (via `directories::ProjectDirs`) joined with
/// "data_store.json", creating the directory if needed. `None` when no
/// valid home directory can be determined.
fn get_config_file_path() -> Option<PathBuf> {
  if let Some(proj_dirs) = ProjectDirs::from(QUALIFIER, ORGANISATION, APPLICATION) {
    let dir = proj_dirs.config_dir();

    create_dir_all(dir).ok()?;

    Some(dir.join("data_store.json"))
  } else {
    None
  }
}

// Debug-only println: compiles to nothing in release builds.
#[cfg(not(debug_assertions))]
#[macro_export]
macro_rules! dprintln {
  ($( $args:expr ),*) => {};
}
#[cfg(debug_assertions)]
#[macro_export]
macro_rules! dprintln {
  ($( $args:expr ),*) => { println!( $( $args ),* ); }
}

// Times a block of code in debug builds, logging when it took more than
// 1ms. In release builds the block runs untimed.
#[cfg(not(debug_assertions))]
#[macro_export]
macro_rules! time_block {
  ($name:expr, $code:block) => {
    $code
  };
}
#[cfg(debug_assertions)]
#[macro_export]
macro_rules! time_block {
  ($name:expr, $code:block) => {
    let now = std::time::Instant::now();

    $code

    let ms = now.elapsed().as_millis();

    if (ms > 1) {
      $crate::dprintln!("{}ms for {}", now.elapsed().as_millis(), $name);
    }
  }
}

// Release version of `time_result!`: just evaluates the expression.
#[cfg(not(debug_assertions))]
#[macro_export]
macro_rules! time_result {
  ($name:expr, $code:expr) => {{
    $code
  }};
}
/// Creates a `Global` whose value is lazily initialised with `$value` on
/// first access.
#[macro_export]
macro_rules! global {
  ($value: expr) => {
    Global {
      data: once_cell::sync::Lazy::new(|| std::sync::RwLock::new($value)),
    }
  };
}

/// A lazily initialised, process-wide value guarded by an `RwLock`.
/// Construct with the `global!` macro.
pub struct Global<T> {
  pub data: Lazy<RwLock<T>>,
}
impl<K, V> Global<AHashMap<K, V>>
where
  K: Hash + Clone + Eq,
  V: Clone,
{
  /// Inserts into the wrapped map. Silently a no-op if the lock is poisoned.
  pub fn insert(&self, key: K, value: V) {
    if let Ok(mut data) = self.data.write() {
      data.insert(key, value);
    }
  }

  /// Clones the value for `key` out of the map; `None` if the key is
  /// missing or the lock is poisoned.
  pub fn get_by_key(&self, key: &K) -> Option<V> {
    if let Ok(data) = self.data.read() {
      return Some(data.get(key)?.clone());
    }
    None
  }

  /// Removes and returns the value for `key`; `None` if missing or the
  /// lock is poisoned.
  pub fn remove(&self, key: &K) -> Option<V> {
    if let Ok(mut data) = self.data.write() {
      return data.remove(key);
    }
    None
  }
}

#[cfg(test)]
mod tests {
  use crate::util::global::Global;

  static MY_GLOBAL: Global<Vec<i32>> = global!(Vec::new());

  #[test]
  fn test_global() {
    assert_eq!(MY_GLOBAL.get(), Some(Vec::new()));

    MY_GLOBAL.set(vec![1, 2, 3]);

    assert_eq!(MY_GLOBAL.get(), Some(vec![1, 2, 3]));
  }

  static OPTIONAL: Global<Option<i32>> = global!(None);

  #[test]
  fn test_optional() {
    assert_eq!(OPTIONAL.get(), Some(None));
  }
}

pub(crate) mod data_store;
pub(crate) mod debug_print;
pub(crate) mod global;
pub(crate) mod short_cache;

/// Shorthand for `format!`.
#[macro_export]
macro_rules! f {
  ($($arg:tt)*) => {{
    let res = std::fmt::format(format_args!($($arg)*));
    res
  }}
}

use ahash::AHashMap;
use std::time::{Duration, Instant};

/// A string-keyed cache whose entire contents expire `duration` after the
/// last insert.
#[derive(Clone)]
pub struct ShortCache<V> {
  map: AHashMap<String, V>,
  // Just for testing.
  _name: String,
  duration: Duration,
  last_access: Instant,
}

impl<V> ShortCache<V>
where
  V: Clone,
{
  pub fn new(name: String, duration: Duration) -> Self {
    Self {
      map: AHashMap::new(),
      _name: name,
      duration,
      last_access: Instant::now(),
    }
  }

  /// Returns the cached value, or `None` (after clearing everything) when
  /// more than `duration` has passed since `last_access`.
  // NOTE(review): `get` does not refresh `last_access` — only `insert`
  // does — so reads alone won't keep the cache alive. Presumably
  // intentional for a "short" cache; confirm.
  pub fn get(&mut self, key: &str) -> Option<&V> {
    let now = Instant::now();

    let duration_since = now - self.last_access;

    if duration_since > self.duration {
      self.map.clear();

      return None;
    }

    self.map.get(key)
  }

  /// Inserts `value` under `key` and refreshes the expiry timer.
  pub fn insert(&mut self, key: &str, value: V) {
    self.last_access = Instant::now();

    self.map.insert(String::from(key), value);
  }
}