├── .github └── workflows │ └── release.yml ├── .gitignore ├── CHANNELOG.md ├── Cargo.lock ├── Cargo.toml ├── LICENCE.md ├── README.md ├── dist-workspace.toml ├── src ├── catter.rs ├── concater.rs ├── converter.rs ├── main.rs ├── markitdown │ ├── docx.rs │ ├── mod.rs │ ├── opendoc.rs │ ├── pdf.rs │ ├── pptx.rs │ └── sheets.rs ├── prompter.rs ├── rasteroid │ ├── image_extended.rs │ ├── iterm_encoder.rs │ ├── kitty_encoder.rs │ ├── mod.rs │ ├── sixel_encoder.rs │ └── term_misc.rs └── scrapy.rs └── styles ├── default.css └── makurai.css /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | # This file was autogenerated by dist: https://opensource.axo.dev/cargo-dist/ 2 | # 3 | # Copyright 2022-2024, axodotdev 4 | # SPDX-License-Identifier: MIT or Apache-2.0 5 | # 6 | # CI that: 7 | # 8 | # * checks for a Git Tag that looks like a release 9 | # * builds artifacts with dist (archives, installers, hashes) 10 | # * uploads those artifacts to temporary workflow zip 11 | # * on success, uploads the artifacts to a GitHub Release 12 | # 13 | # Note that the GitHub Release will be created with a generated 14 | # title/body based on your changelogs. 15 | 16 | name: Release 17 | permissions: 18 | "contents": "write" 19 | 20 | # This task will run whenever you push a git tag that looks like a version 21 | # like "1.0.0", "v0.1.0-prerelease.1", "my-app/0.1.0", "releases/v1.0.0", etc. 22 | # Various formats will be parsed into a VERSION and an optional PACKAGE_NAME, where 23 | # PACKAGE_NAME must be the name of a Cargo package in your workspace, and VERSION 24 | # must be a Cargo-style SemVer Version (must have at least major.minor.patch). 25 | # 26 | # If PACKAGE_NAME is specified, then the announcement will be for that 27 | # package (erroring out if it doesn't have the given version or isn't dist-able). 28 | # 29 | # If PACKAGE_NAME isn't specified, then the announcement will be for all 30 | # (dist-able) packages in the workspace with that version (this mode is 31 | # intended for workspaces with only one dist-able package, or with all dist-able 32 | # packages versioned/released in lockstep). 33 | # 34 | # If you push multiple tags at once, separate instances of this workflow will 35 | # spin up, creating an independent announcement for each one. However, GitHub 36 | # will hard limit this to 3 tags per commit, as it will assume more tags is a 37 | # mistake. 38 | # 39 | # If there's a prerelease-style suffix to the version, then the release(s) 40 | # will be marked as a prerelease. 41 | on: 42 | pull_request: 43 | push: 44 | tags: 45 | - '**[0-9]+.[0-9]+.[0-9]+*' 46 | 47 | jobs: 48 | # Run 'dist plan' (or host) to determine what tasks we need to do 49 | plan: 50 | runs-on: "ubuntu-latest" 51 | outputs: 52 | val: ${{ steps.plan.outputs.manifest }} 53 | tag: ${{ !github.event.pull_request && github.ref_name || '' }} 54 | tag-flag: ${{ !github.event.pull_request && format('--tag={0}', github.ref_name) || '' }} 55 | publishing: ${{ !github.event.pull_request }} 56 | env: 57 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 58 | steps: 59 | - uses: actions/checkout@v4 60 | with: 61 | submodules: recursive 62 | - name: Install dist 63 | # we specify bash to get pipefail; it guards against the `curl` command 64 | # failing. 
otherwise `sh` won't catch that `curl` returned non-0 65 | shell: bash 66 | run: "curl --proto '=https' --tlsv1.2 -LsSf https://github.com/axodotdev/cargo-dist/releases/download/v0.28.0/cargo-dist-installer.sh | sh" 67 | - name: Cache dist 68 | uses: actions/upload-artifact@v4 69 | with: 70 | name: cargo-dist-cache 71 | path: ~/.cargo/bin/dist 72 | # sure would be cool if github gave us proper conditionals... 73 | # so here's a doubly-nested ternary-via-truthiness to try to provide the best possible 74 | # functionality based on whether this is a pull_request, and whether it's from a fork. 75 | # (PRs run on the *source* but secrets are usually on the *target* -- that's *good* 76 | # but also really annoying to build CI around when it needs secrets to work right.) 77 | - id: plan 78 | run: | 79 | dist ${{ (!github.event.pull_request && format('host --steps=create --tag={0}', github.ref_name)) || 'plan' }} --output-format=json > plan-dist-manifest.json 80 | echo "dist ran successfully" 81 | cat plan-dist-manifest.json 82 | echo "manifest=$(jq -c "." plan-dist-manifest.json)" >> "$GITHUB_OUTPUT" 83 | - name: "Upload dist-manifest.json" 84 | uses: actions/upload-artifact@v4 85 | with: 86 | name: artifacts-plan-dist-manifest 87 | path: plan-dist-manifest.json 88 | 89 | # Build and packages all the platform-specific things 90 | build-local-artifacts: 91 | name: build-local-artifacts (${{ join(matrix.targets, ', ') }}) 92 | # Let the initial task tell us to not run (currently very blunt) 93 | needs: 94 | - plan 95 | if: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix.include != null && (needs.plan.outputs.publishing == 'true' || fromJson(needs.plan.outputs.val).ci.github.pr_run_mode == 'upload') }} 96 | strategy: 97 | fail-fast: false 98 | # Target platforms/runners are computed by dist in create-release. 99 | # Each member of the matrix has the following arguments: 100 | # 101 | # - runner: the github runner 102 | # - dist-args: cli flags to pass to dist 103 | # - install-dist: expression to run to install dist on the runner 104 | # 105 | # Typically there will be: 106 | # - 1 "global" task that builds universal installers 107 | # - N "local" tasks that build each platform's binaries and platform-specific installers 108 | matrix: ${{ fromJson(needs.plan.outputs.val).ci.github.artifacts_matrix }} 109 | runs-on: ${{ matrix.runner }} 110 | container: ${{ matrix.container && matrix.container.image || null }} 111 | env: 112 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 113 | BUILD_MANIFEST_NAME: target/distrib/${{ join(matrix.targets, '-') }}-dist-manifest.json 114 | steps: 115 | - name: enable windows longpaths 116 | run: | 117 | git config --global core.longpaths true 118 | - uses: actions/checkout@v4 119 | with: 120 | submodules: recursive 121 | - name: Install Rust non-interactively if not already installed 122 | if: ${{ matrix.container }} 123 | run: | 124 | if ! 
command -v cargo > /dev/null 2>&1; then 125 | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y 126 | echo "$HOME/.cargo/bin" >> $GITHUB_PATH 127 | fi 128 | - name: Install dist 129 | run: ${{ matrix.install_dist.run }} 130 | # Get the dist-manifest 131 | - name: Fetch local artifacts 132 | uses: actions/download-artifact@v4 133 | with: 134 | pattern: artifacts-* 135 | path: target/distrib/ 136 | merge-multiple: true 137 | - name: Install dependencies 138 | run: | 139 | ${{ matrix.packages_install }} 140 | - name: Build artifacts 141 | run: | 142 | # Actually do builds and make zips and whatnot 143 | dist build ${{ needs.plan.outputs.tag-flag }} --print=linkage --output-format=json ${{ matrix.dist_args }} > dist-manifest.json 144 | echo "dist ran successfully" 145 | - id: cargo-dist 146 | name: Post-build 147 | # We force bash here just because github makes it really hard to get values up 148 | # to "real" actions without writing to env-vars, and writing to env-vars has 149 | # inconsistent syntax between shell and powershell. 150 | shell: bash 151 | run: | 152 | # Parse out what we just built and upload it to scratch storage 153 | echo "paths<> "$GITHUB_OUTPUT" 154 | dist print-upload-files-from-manifest --manifest dist-manifest.json >> "$GITHUB_OUTPUT" 155 | echo "EOF" >> "$GITHUB_OUTPUT" 156 | 157 | cp dist-manifest.json "$BUILD_MANIFEST_NAME" 158 | - name: "Upload artifacts" 159 | uses: actions/upload-artifact@v4 160 | with: 161 | name: artifacts-build-local-${{ join(matrix.targets, '_') }} 162 | path: | 163 | ${{ steps.cargo-dist.outputs.paths }} 164 | ${{ env.BUILD_MANIFEST_NAME }} 165 | 166 | # Build and package all the platform-agnostic(ish) things 167 | build-global-artifacts: 168 | needs: 169 | - plan 170 | - build-local-artifacts 171 | runs-on: "ubuntu-latest" 172 | env: 173 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 174 | BUILD_MANIFEST_NAME: target/distrib/global-dist-manifest.json 175 | steps: 176 | - uses: actions/checkout@v4 177 | with: 178 | submodules: recursive 179 | - name: Install cached dist 180 | uses: actions/download-artifact@v4 181 | with: 182 | name: cargo-dist-cache 183 | path: ~/.cargo/bin/ 184 | - run: chmod +x ~/.cargo/bin/dist 185 | # Get all the local artifacts for the global tasks to use (for e.g. 
checksums) 186 | - name: Fetch local artifacts 187 | uses: actions/download-artifact@v4 188 | with: 189 | pattern: artifacts-* 190 | path: target/distrib/ 191 | merge-multiple: true 192 | - id: cargo-dist 193 | shell: bash 194 | run: | 195 | dist build ${{ needs.plan.outputs.tag-flag }} --output-format=json "--artifacts=global" > dist-manifest.json 196 | echo "dist ran successfully" 197 | 198 | # Parse out what we just built and upload it to scratch storage 199 | echo "paths<> "$GITHUB_OUTPUT" 200 | jq --raw-output ".upload_files[]" dist-manifest.json >> "$GITHUB_OUTPUT" 201 | echo "EOF" >> "$GITHUB_OUTPUT" 202 | 203 | cp dist-manifest.json "$BUILD_MANIFEST_NAME" 204 | - name: "Upload artifacts" 205 | uses: actions/upload-artifact@v4 206 | with: 207 | name: artifacts-build-global 208 | path: | 209 | ${{ steps.cargo-dist.outputs.paths }} 210 | ${{ env.BUILD_MANIFEST_NAME }} 211 | # Determines if we should publish/announce 212 | host: 213 | needs: 214 | - plan 215 | - build-local-artifacts 216 | - build-global-artifacts 217 | # Only run if we're "publishing", and only if local and global didn't fail (skipped is fine) 218 | if: ${{ always() && needs.plan.outputs.publishing == 'true' && (needs.build-global-artifacts.result == 'skipped' || needs.build-global-artifacts.result == 'success') && (needs.build-local-artifacts.result == 'skipped' || needs.build-local-artifacts.result == 'success') }} 219 | env: 220 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 221 | runs-on: "ubuntu-latest" 222 | outputs: 223 | val: ${{ steps.host.outputs.manifest }} 224 | steps: 225 | - uses: actions/checkout@v4 226 | with: 227 | submodules: recursive 228 | - name: Install cached dist 229 | uses: actions/download-artifact@v4 230 | with: 231 | name: cargo-dist-cache 232 | path: ~/.cargo/bin/ 233 | - run: chmod +x ~/.cargo/bin/dist 234 | # Fetch artifacts from scratch-storage 235 | - name: Fetch artifacts 236 | uses: actions/download-artifact@v4 237 | with: 238 | pattern: artifacts-* 239 | path: target/distrib/ 240 | merge-multiple: true 241 | - id: host 242 | shell: bash 243 | run: | 244 | dist host ${{ needs.plan.outputs.tag-flag }} --steps=upload --steps=release --output-format=json > dist-manifest.json 245 | echo "artifacts uploaded and released successfully" 246 | cat dist-manifest.json 247 | echo "manifest=$(jq -c "." 
dist-manifest.json)" >> "$GITHUB_OUTPUT" 248 | - name: "Upload dist-manifest.json" 249 | uses: actions/upload-artifact@v4 250 | with: 251 | # Overwrite the previous copy 252 | name: artifacts-dist-manifest 253 | path: dist-manifest.json 254 | # Create a GitHub Release while uploading all files to it 255 | - name: "Download GitHub Artifacts" 256 | uses: actions/download-artifact@v4 257 | with: 258 | pattern: artifacts-* 259 | path: artifacts 260 | merge-multiple: true 261 | - name: Cleanup 262 | run: | 263 | # Remove the granular manifests 264 | rm -f artifacts/*-dist-manifest.json 265 | - name: Create GitHub Release 266 | env: 267 | PRERELEASE_FLAG: "${{ fromJson(steps.host.outputs.manifest).announcement_is_prerelease && '--prerelease' || '' }}" 268 | ANNOUNCEMENT_TITLE: "${{ fromJson(steps.host.outputs.manifest).announcement_title }}" 269 | ANNOUNCEMENT_BODY: "${{ fromJson(steps.host.outputs.manifest).announcement_github_body }}" 270 | RELEASE_COMMIT: "${{ github.sha }}" 271 | run: | 272 | # Write and read notes from a file to avoid quoting breaking things 273 | echo "$ANNOUNCEMENT_BODY" > $RUNNER_TEMP/notes.txt 274 | 275 | gh release create "${{ needs.plan.outputs.tag }}" --target "$RELEASE_COMMIT" $PRERELEASE_FLAG --title "$ANNOUNCEMENT_TITLE" --notes-file "$RUNNER_TEMP/notes.txt" artifacts/* 276 | 277 | announce: 278 | needs: 279 | - plan 280 | - host 281 | # use "always() && ..." to allow us to wait for all publish jobs while 282 | # still allowing individual publish jobs to skip themselves (for prereleases). 283 | # "host" however must run to completion, no skipping allowed! 284 | if: ${{ always() && needs.host.result == 'success' }} 285 | runs-on: "ubuntu-latest" 286 | env: 287 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} 288 | steps: 289 | - uses: actions/checkout@v4 290 | with: 291 | submodules: recursive 292 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | .images 3 | -------------------------------------------------------------------------------- /CHANNELOG.md: -------------------------------------------------------------------------------- 1 | ## V0.1.4 2 | now closing kitty animations when interrupted mid way 3 | 4 | ## V0.1.3 5 | removes feature that requires native-tls (for cross compile) 6 | 7 | ## V0.1.2 8 | #### new features 9 | * concatenate images (vertical or horizontal) 10 | * concatenate videos (time based, must be same format) 11 | * scale image while maintaining center via --inline-options "scale=" 12 | #### improved 13 | * text based concatenate 14 | 15 | ## V0.1.1 16 | now accepts multi input: 17 | mcat file.docx file.pptx file.odt .. 
18 | 19 | ## V0.1.0 20 | First Release 21 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "mcat" 3 | version = "0.1.4" 4 | authors = ["Meron Bossin"] 5 | description = "a powerfull extended cat command, to cat all the things you couldn't before" 6 | keywords = ["mcat", "inline", "markitdown", "terminal", "rasterm"] 7 | categories = ["command-line-utilities"] 8 | repository = "https://github.com/Skardyy/mcat" 9 | homepage = "https://github.com/Skardyy/mcat" 10 | documentation = "https://github.com/Skardyy/mcat" 11 | license = "MIT" 12 | readme = "README.md" 13 | edition = "2024" 14 | 15 | [dependencies] 16 | clap = "4.5.36" 17 | comrak = "0.38.0" 18 | crossterm = "0.29.0" 19 | ignore = "0.4.23" 20 | image = "0.25.6" 21 | inquire = "0.7.5" 22 | lazy_static = "1.5.0" 23 | tempfile = "3.19.1" 24 | windows = { version = "0.60.0", features = ["Win32_Foundation", "Win32_UI_WindowsAndMessaging"] } 25 | color_quant = "1.1.0" 26 | base64 = "0.22.1" 27 | fast_image_resize = { version = "5.1.2", features = ["image"] } 28 | ffmpeg-sidecar = "2.0.5" 29 | flate2 = "1.1.0" 30 | rand = "0.9.0" 31 | syntect = "5.2.0" 32 | resvg = "0.45.1" 33 | fontdb = "0.23.0" 34 | urlencoding = "2.1.3" 35 | # chromiumoxide = { git = "https://github.com/mattsse/chromiumoxide", features = ["tokio-runtime", "_fetcher-rusttls-tokio"], default-features = false, rev = "6f2392f"} 36 | chromiumoxide = { version = "0.7.0", features = ["tokio-runtime", "_fetcher-rusttls-tokio"], default-features = false} 37 | tokio = "1.44.2" 38 | futures = "0.3.31" 39 | dirs = "6.0.0" 40 | csv = "1.3.1" 41 | zip = "2.6.1" 42 | calamine = "0.25.0" 43 | lopdf = "0.36.0" 44 | quick-xml = "0.37.4" 45 | scraper = "0.23.1" 46 | reqwest = { version = "0.12.15", default-features = false, features = ["rustls-tls"] } 47 | signal-hook = "0.3.17" 48 | 49 | # The profile that 'dist' will build with 50 | [profile.dist] 51 | inherits = "release" 52 | lto = "thin" 53 | -------------------------------------------------------------------------------- /LICENCE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Meron Bossin 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 | 4 | 5 | 6 | # Mcat 7 | ![Downloads](https://img.shields.io/crates/d/mcat?style=for-the-badge) ![Version](https://img.shields.io/crates/v/mcat?style=for-the-badge) 8 | 9 | [Installation](#%EF%B8%8F-installation) • [Examples](#%EF%B8%8F-example-usage) • [CHANNELOG](./CHANNELOG.md) 10 | 11 | ![mcat_demo](https://github.com/user-attachments/assets/b47aa276-f0e4-4259-b2c5-1525d7d9d6cb) 12 |
13 | 14 | ## ✨ Features 15 | * 📄 **File to Markdown/HTML** 16 | Convert structured content like CSVs, directories, and rich document formats (e.g., DOCX) into clean Markdown/HTML. 17 | 18 | * 🏞️ **Markdown/HTML to Image** 19 | Render Markdown or HTML files into images. 20 | 21 | * 🖼️ **Inline Image/Videos** 22 | Display images/videos *inside* your terminal using protocols like Kitty, iTerm, or Sixel. 23 | 24 | * 🌐 **URL to Inline Image/Video** 25 | View Images/Videos from a URL in your terminal 26 | 27 | * 🔗 **Concatenate Images and Video too!** 28 | Concatenate videos of the same format (time concat) 29 | and Concatenate images by stacking them horizontal or vertical(default) 30 | 31 | * 💃🏻 **Automatic Styling for HTML** 32 | automatically inject styles into the HTML to make the image cooler! 33 | 34 | ## ⬇️ Installation 35 | ```sh 36 | cargo install mcat 37 | ``` 38 | or ~ 39 | ```sh 40 | git clone https://github.com/Skardyy/mcat 41 | cd mcat 42 | cargo install --path . 43 | ``` 44 | or prebuilt from the [latest release](https://github.com/Skardyy/mcat/releases/latest) 45 | 46 | ## 🏋️ Example Usage 47 | ```sh 48 | # View a PDF as Markdown 49 | mcat resume.pdf 50 | 51 | # Render Markdown to an image 52 | mcat notes.md -i 53 | 54 | # Show an image inline in your terminal 55 | mcat diagram.png -i 56 | 57 | # Save a document as image 58 | mcat document.docx -o image > img.png 59 | 60 | # Show a document as image in the terminal 61 | mcat readme.md -i 62 | 63 | # Show a document as image in the terminal with dark theme 64 | mcat readme.md -im 65 | 66 | # Show a document as image in the terminal with your own css 67 | mcat document.pdf -it "path/to/your/file.css" 68 | 69 | # from a url 70 | mcat "https://giphy.com/gifs/..." 71 | 72 | # Concat images (stacks vertical) 73 | mcat SomeImage.png AnotherImage.bmp 74 | 75 | # Or save it (stacks horizontal) 76 | mcat someimage.png anotherimage.bmp --hori -o image > save.png 77 | 78 | # Concat Videos (must be same format: codec,audio..) 79 | mcat part1.mp4 anothervideo.mp4 -o video > save.mp4 80 | ``` 81 | 82 | ## ⚙️ Supported Formats 83 | | Input Type | Output Options | 84 | |---|---| 85 | | DOCX, PDF, CSV, ODT, PPTX, and more.. | Markdown, HTML, Image, Inline | 86 | | Markdown / HTML | Image, Inline Image | 87 | | Images, Videos | Inline Display | 88 | | URLs | Image/Video Fetch + Inline View | 89 | 90 | ## 🛐 Dependencies 91 | Mcat tries to have as little dependencies as possible. 92 | #### chromium (for rendering HTML to image): 93 | 1. exists on every windows machine through msedge. 94 | 2. auto installs the binaries if missing 95 | #### ffmpeg (for videos) 96 | 1. auto installs binaries if missing 97 | 98 | ## 🆘 Help 99 | ```txt 100 | mcat --help 101 | Usage: mcat.exe [OPTIONS] ... 102 | 103 | Arguments: 104 | ... 
file / dir 105 | 106 | Options: 107 | -o the format to output [possible values: html, md, image, video, inline] 108 | -t alternative css file for images, valid options: [default, makurai, ] [default: default] 109 | -s add style to html too (when html is the output) 110 | --kitty makes the inline image encoded to kitty 111 | --iterm makes the inline image encoded to iterm 112 | --sixel makes the inline image encoded to sixel 113 | -r, --raw allows raw html to run (put only on your content) 114 | -i shortcut for putting --output inline 115 | -m shortcut for putting --theme makurai 116 | --hori concat images horizontal instead of vertical 117 | --inline-options options for the --output inline 118 | * center= 119 | * width= [only for images] 120 | * height= [only for images] 121 | * scale= 122 | * spx= 123 | * sc= 124 | * zoom= [doesn't work yet] 125 | * x= [doesn't work yet] 126 | * y= [doesn't work yet] 127 | * exmp: --inline-options 'center=false,width=80%,height=20c,scale=0.5,spx=1920x1080,sc=100x20,zoom=2,x=16,y=8' 128 | -h, --help Print help 129 | -V, --version Print version 130 | ``` 131 | 132 | ## 🚧 Roadmap 133 | - [ ] mcat.nvim: a neovim plugin to use mcat inside neovim 134 | 135 | ## 📎 License 136 | MIT License 137 | -------------------------------------------------------------------------------- /dist-workspace.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | members = ["cargo:."] 3 | 4 | # Config for 'dist' 5 | [dist] 6 | # The preferred dist version to use in CI (Cargo.toml SemVer syntax) 7 | cargo-dist-version = "0.28.0" 8 | # CI backends to support 9 | ci = "github" 10 | # The installers to generate for each app 11 | installers = ["shell", "powershell"] 12 | # Target platforms to build apps for (Rust target-triple syntax) 13 | targets = ["aarch64-apple-darwin", "aarch64-unknown-linux-gnu", "x86_64-unknown-linux-gnu", "x86_64-pc-windows-msvc"] 14 | # Path that installers should place binaries in 15 | install-path = "CARGO_HOME" 16 | # Whether to install an updater program 17 | install-updater = false 18 | 19 | [dist.github-custom-runners] 20 | global = "ubuntu-latest" 21 | 22 | [dist.github-custom-runners.x86_64-unknown-linux-gnu] 23 | runner = "ubuntu-latest" 24 | 25 | [dist.github-custom-runners.aarch64-unknown-linux-gnu] 26 | runner = "ubuntu-latest" 27 | -------------------------------------------------------------------------------- /src/catter.rs: -------------------------------------------------------------------------------- 1 | use std::{ 2 | fs::{self, File}, 3 | io::Write, 4 | path::Path, 5 | }; 6 | 7 | use image::{DynamicImage, ImageFormat}; 8 | 9 | use crate::{ 10 | converter::{self}, 11 | markitdown, 12 | rasteroid::{self, image_extended::InlineImage}, 13 | }; 14 | 15 | pub enum CatType { 16 | Markdown, 17 | Html, 18 | Image, 19 | Video, 20 | InlineImage, 21 | InlineVideo, 22 | } 23 | 24 | #[derive(Clone, Copy)] 25 | pub struct EncoderForce { 26 | pub kitty: bool, 27 | pub iterm: bool, 28 | pub sixel: bool, 29 | } 30 | 31 | #[derive(Clone, Copy)] 32 | pub struct CatOpts<'a> { 33 | pub to: Option<&'a str>, 34 | pub encoder: Option, 35 | pub style: Option<&'a str>, 36 | pub width: Option<&'a str>, 37 | pub height: Option<&'a str>, 38 | pub style_html: bool, 39 | pub raw_html: bool, 40 | pub center: bool, 41 | } 42 | impl<'a> CatOpts<'a> { 43 | pub fn default() -> Self { 44 | CatOpts { 45 | to: None, 46 | encoder: None, 47 | width: Some("80%"), 48 | height: Some("80%"), 49 | style: None, 50 | style_html: false, 
51 | raw_html: false, 52 | center: false, 53 | } 54 | } 55 | } 56 | 57 | pub fn cat( 58 | path: &Path, 59 | out: &mut impl Write, 60 | opts: Option, 61 | ) -> Result> { 62 | if !path.exists() { 63 | return Err(format!("invalid path: {}", path.display()).into()); 64 | } 65 | 66 | let opts = match opts { 67 | Some(o) => o, 68 | None => CatOpts::default(), 69 | }; 70 | let encoder = opts.encoder.unwrap_or(EncoderForce { 71 | kitty: false, 72 | iterm: false, 73 | sixel: false, 74 | }); 75 | let inline_encoder = 76 | &rasteroid::InlineEncoder::auto_detect(encoder.kitty, encoder.iterm, encoder.sixel); 77 | let ext = path 78 | .extension() 79 | .unwrap_or_default() 80 | .to_string_lossy() 81 | .into_owned(); 82 | let mut image_result: Option = None; 83 | let mut string_result: Option = None; 84 | let mut from: &str = "unknown"; 85 | let to = opts.to.unwrap_or("unknown"); 86 | 87 | //video 88 | if is_video(&ext) { 89 | if to == "video" { 90 | let content = fs::read(path)?; 91 | out.write_all(&content)?; 92 | return Ok(CatType::Video); 93 | } 94 | converter::inline_a_video( 95 | path.to_string_lossy().into_owned(), 96 | out, 97 | &inline_encoder, 98 | opts.center, 99 | )?; 100 | return Ok(CatType::InlineVideo); 101 | } 102 | //svg 103 | (image_result, from) = if ext == "svg" { 104 | let file = File::open(path)?; 105 | let dyn_img = converter::svg_to_image(file, opts.width, opts.height)?; 106 | (Some(dyn_img), "image") 107 | } else { 108 | (image_result, from) 109 | }; 110 | //image 111 | (image_result, from) = if ImageFormat::from_extension(&ext).is_some() { 112 | let buf = fs::read(path)?; 113 | let dyn_img = image::load_from_memory(&buf)?; 114 | (Some(dyn_img), "image") 115 | } else { 116 | (image_result, from) 117 | }; 118 | // local file or dir 119 | if from == "unknown" { 120 | (string_result, from) = { 121 | match ext.as_ref() { 122 | "md" | "html" => { 123 | let r = fs::read_to_string(path)?; 124 | (Some(r), ext.as_ref()) 125 | } 126 | _ => { 127 | let f = markitdown::convert(&path, None)?; 128 | (Some(f), "md") 129 | } 130 | } 131 | }; 132 | } 133 | 134 | // converting 135 | match (from.as_ref(), to.as_ref()) { 136 | ("md", "html") => { 137 | let html = converter::md_to_html(&string_result.unwrap(), if opts.style_html {opts.style} else {None}, opts.raw_html); 138 | out.write_all(&html.as_bytes().to_vec())?; 139 | return Ok(CatType::Html); 140 | }, 141 | ("md", "image") => { 142 | let html = converter::md_to_html(&string_result.unwrap(), opts.style, opts.raw_html); 143 | let image = converter::html_to_image(&html)?; 144 | out.write_all(&image)?; 145 | return Ok(CatType::Image); 146 | }, 147 | ("md", "inline") => { 148 | let html = converter::md_to_html(&string_result.unwrap(), opts.style, opts.raw_html); 149 | let image = converter::html_to_image(&html)?; 150 | let dyn_img = image::load_from_memory(&image)?; 151 | let (img, center) = dyn_img.resize_plus(opts.width, opts.height)?; 152 | rasteroid::inline_an_image(&img, out, if opts.center {Some(center)} else {None}, inline_encoder)?; 153 | return Ok(CatType::InlineImage) 154 | }, 155 | ("html", "image") => { 156 | let image = converter::html_to_image(&string_result.unwrap())?; 157 | out.write_all(&image)?; 158 | return Ok(CatType::Image); 159 | }, 160 | ("html", "inline") => { 161 | let image = converter::html_to_image(&string_result.unwrap())?; 162 | let dyn_img = image::load_from_memory(&image)?; 163 | let (img, center) = dyn_img.resize_plus(opts.width, opts.height)?; 164 | rasteroid::inline_an_image(&img, out, if opts.center {Some(center)} 
else {None}, inline_encoder)?; 165 | return Ok(CatType::InlineImage) 166 | }, 167 | ("image", "image") => { 168 | let buf = fs::read(path)?; 169 | out.write_all(&buf)?; 170 | return Ok(CatType::Image) 171 | }, 172 | ("md", _) => { 173 | //default for md 174 | out.write_all(&string_result.unwrap().as_bytes())?; 175 | return Ok(CatType::Markdown); 176 | } 177 | ("html", _) => { 178 | // default for html 179 | out.write_all(&string_result.unwrap().as_bytes())?; 180 | return Ok(CatType::Html); 181 | }, 182 | ("image", _) => { 183 | // default for image 184 | let (img, center) = image_result.unwrap().resize_plus(opts.width, opts.height)?; 185 | rasteroid::inline_an_image(&img, out, if opts.center {Some(center)} else {None}, inline_encoder)?; 186 | return Ok(CatType::InlineImage) 187 | }, 188 | _ => return Err(format!( 189 | "converting: {} to: {}, is not supported.\nsupported pipeline is: md -> html -> image -> inline_image", 190 | from, to 191 | ).into()), 192 | }; 193 | } 194 | 195 | pub fn is_video(input: &str) -> bool { 196 | matches!( 197 | input, 198 | "mp4" | "mov" | "avi" | "mkv" | "webm" | "wmv" | "flv" | "m4v" | "ts" | "gif" 199 | ) 200 | } 201 | -------------------------------------------------------------------------------- /src/concater.rs: -------------------------------------------------------------------------------- 1 | use std::{fs::File, io::Write, path::PathBuf}; 2 | 3 | use ffmpeg_sidecar::command::FfmpegCommand; 4 | use image::{GenericImage, ImageFormat}; 5 | use tempfile::{NamedTempFile, TempDir}; 6 | 7 | use crate::{catter, converter, markitdown}; 8 | 9 | pub fn concat_text(paths: Vec<(&PathBuf, Option)>) -> NamedTempFile { 10 | let mut markdown = String::new(); 11 | for (path, name) in paths { 12 | if let Ok(md) = markitdown::convert(&path, name.as_ref()) { 13 | markdown.push_str(&format!("{}\n\n", md)); 14 | } else { 15 | markdown.push_str("**[Failed Reading]**\n\n"); 16 | } 17 | } 18 | 19 | let mut tmp_file = NamedTempFile::with_suffix(".md").expect("failed to create tmp file"); 20 | tmp_file 21 | .write_all(markdown.trim().as_bytes()) 22 | .expect("failed writing to tmp file"); 23 | 24 | tmp_file 25 | } 26 | 27 | pub fn concat_images( 28 | image_paths: Vec, 29 | horizontal: bool, 30 | ) -> Result> { 31 | // Load all images 32 | let mut images = Vec::new(); 33 | for path in &image_paths { 34 | if !path.exists() { 35 | return Err(format!("{} is invalid path", path.display()).into()); 36 | } 37 | if path.extension().is_some_and(|e| e == "svg") { 38 | let file = File::open(path)?; 39 | let dyn_img = converter::svg_to_image(file, None, None)?; 40 | images.push(dyn_img); 41 | continue; 42 | } 43 | let img = image::open(path)?; 44 | images.push(img); 45 | } 46 | 47 | // Calculate dimensions of the output image 48 | let (width, height) = if horizontal { 49 | // For horizontal concatenation, sum widths and take max height 50 | let total_width: u32 = images.iter().map(|img| img.width()).sum(); 51 | let max_height: u32 = images.iter().map(|img| img.height()).max().unwrap_or(0); 52 | (total_width, max_height) 53 | } else { 54 | // For vertical concatenation, sum heights and take max width 55 | let max_width: u32 = images.iter().map(|img| img.width()).max().unwrap_or(0); 56 | let total_height: u32 = images.iter().map(|img| img.height()).sum(); 57 | (max_width, total_height) 58 | }; 59 | 60 | // Create a new image with the calculated dimensions 61 | let mut output = image::RgbaImage::new(width, height); 62 | 63 | // Place each image in the output 64 | let mut x_offset = 0; 65 | 
let mut y_offset = 0; 66 | 67 | for img in images { 68 | output.copy_from(&img, x_offset, y_offset)?; 69 | 70 | if horizontal { 71 | x_offset += img.width(); 72 | } else { 73 | y_offset += img.height(); 74 | } 75 | } 76 | 77 | // Create a temporary file with .png extension 78 | let temp_file = NamedTempFile::with_suffix(".png")?; 79 | output.save_with_format(temp_file.path(), image::ImageFormat::Png)?; 80 | Ok(temp_file) 81 | } 82 | 83 | pub fn concat_video( 84 | paths: &Vec, 85 | ) -> Result<(TempDir, PathBuf), Box> { 86 | let mut concat_list_file = NamedTempFile::new()?; 87 | 88 | for path in paths { 89 | if !path.exists() { 90 | return Err(format!("{} is invalid path", path.display()).into()); 91 | } 92 | let path_dis = path 93 | .canonicalize()? 94 | .to_string_lossy() 95 | .into_owned() 96 | .replace("\\\\?\\", ""); 97 | writeln!(concat_list_file, "file '{}'", path_dis)?; 98 | } 99 | 100 | let first_path = &paths[0]; 101 | let suffix = first_path 102 | .extension() 103 | .unwrap_or_default() 104 | .to_string_lossy() 105 | .into_owned(); 106 | 107 | if !ffmpeg_sidecar::command::ffmpeg_is_installed() { 108 | eprintln!("ffmpeg isn't installed, installing.. it may take a little"); 109 | ffmpeg_sidecar::download::auto_download()?; 110 | } 111 | 112 | let random_temp_dir = tempfile::tempdir()?; 113 | let output_path = random_temp_dir 114 | .path() 115 | .join(format!("concat_output.{}", suffix)); 116 | let output_path_string = output_path.to_string_lossy().into_owned(); 117 | 118 | let mut command = FfmpegCommand::new(); 119 | command 120 | .hwaccel("auto") 121 | .format("concat") 122 | .arg("-safe") 123 | .arg("0") 124 | .input(concat_list_file.path().to_string_lossy().into_owned()) 125 | .arg("-c") 126 | .arg("copy") 127 | .output(&output_path_string); 128 | 129 | let mut child = command.spawn()?; 130 | 131 | let status = child.wait()?; 132 | if status.success() { 133 | Ok((random_temp_dir, output_path)) 134 | } else { 135 | Err(format!( 136 | "FFmpeg failed with code {:?}, make sure the videos are the same format", 137 | status.code(), 138 | ) 139 | .into()) 140 | } 141 | } 142 | 143 | pub fn check_unified_format(paths: &[PathBuf]) -> &'static str { 144 | if paths.is_empty() { 145 | return "text"; // Default if no files 146 | } 147 | 148 | let mut detected_format: Option<&'static str> = None; 149 | 150 | for path in paths { 151 | if let Some(extension) = path.extension() { 152 | if let Some(ext_str) = extension.to_str() { 153 | let ext = ext_str.to_lowercase(); 154 | 155 | let current_format = if catter::is_video(&ext) { 156 | "video" 157 | } else if ImageFormat::from_extension(&ext).is_some() || ext == "svg" { 158 | "image" 159 | } else { 160 | "text" 161 | }; 162 | 163 | if let Some(prev_format) = detected_format { 164 | if prev_format != current_format { 165 | // Found conflicting formats 166 | eprintln!( 167 | "Error: Cannot have 2 different formats [text / images / videos]" 168 | ); 169 | std::process::exit(1); 170 | } 171 | } else { 172 | // First file, set the format 173 | detected_format = Some(current_format); 174 | } 175 | } 176 | } else { 177 | // Files with no extension are considered text 178 | if detected_format.is_some() && detected_format.unwrap() != "text" { 179 | eprintln!("Error: Cannot have 2 different formats"); 180 | std::process::exit(1); 181 | } 182 | detected_format = Some("text"); 183 | } 184 | } 185 | detected_format.unwrap_or("text") 186 | } 187 | 188 | pub fn assign_names<'a>( 189 | paths: &'a [PathBuf], 190 | base_dir: Option<&'a String>, 191 | ) -> Vec<(&'a 
PathBuf, Option)> { 192 | let is_one_element = paths.len() == 1; 193 | let result: Vec<(&PathBuf, Option)> = paths 194 | .iter() 195 | .map(|path| { 196 | let name = if is_one_element { 197 | None 198 | } else { 199 | match base_dir { 200 | Some(base) => { 201 | let rel_path = path.strip_prefix(base).unwrap_or(path); 202 | Some(rel_path.to_string_lossy().into_owned()) 203 | } 204 | None => { 205 | let name = path 206 | .file_name() 207 | .unwrap_or_default() 208 | .to_string_lossy() 209 | .into_owned(); 210 | Some(name) 211 | } 212 | } 213 | }; 214 | (path, name) 215 | }) 216 | .collect(); 217 | 218 | result 219 | } 220 | -------------------------------------------------------------------------------- /src/converter.rs: -------------------------------------------------------------------------------- 1 | use base64::{Engine, engine::general_purpose}; 2 | use chromiumoxide::{Browser, BrowserConfig, BrowserFetcher, BrowserFetcherOptions}; 3 | use ffmpeg_sidecar::{command::FfmpegCommand, event::OutputVideoFrame}; 4 | use futures::stream::StreamExt; 5 | use image::{DynamicImage, ImageBuffer, Rgba}; 6 | use resvg::{ 7 | tiny_skia, 8 | usvg::{self, Options, Tree}, 9 | }; 10 | use std::{ 11 | error, fs, 12 | io::Read, 13 | path::{Path, PathBuf}, 14 | }; 15 | 16 | use comrak::{ 17 | ComrakOptions, ComrakPlugins, markdown_to_html_with_plugins, plugins::syntect::SyntectAdapter, 18 | }; 19 | use std::io::Write; 20 | 21 | use crate::rasteroid; 22 | 23 | pub fn image_to_base64(img: &Vec) -> String { 24 | general_purpose::STANDARD.encode(&img) 25 | } 26 | 27 | pub fn offset_to_terminal(offset: Option) -> String { 28 | match offset { 29 | Some(offset) => format!("\x1b[{}C", offset), 30 | None => "".to_string(), 31 | } 32 | } 33 | 34 | pub fn svg_to_image( 35 | mut reader: impl Read, 36 | width: Option<&str>, 37 | height: Option<&str>, 38 | ) -> Result> { 39 | let mut svg_data = Vec::new(); 40 | reader.read_to_end(&mut svg_data)?; 41 | 42 | // Create options for parsing SVG 43 | let mut opt = Options::default(); 44 | 45 | // allowing text 46 | let mut fontdb = fontdb::Database::new(); 47 | fontdb.load_system_fonts(); 48 | opt.fontdb = std::sync::Arc::new(fontdb); 49 | opt.text_rendering = usvg::TextRendering::OptimizeLegibility; 50 | 51 | // Parse SVG 52 | let tree = Tree::from_data(&svg_data, &opt)?; 53 | 54 | // Get size of the SVG 55 | let pixmap_size = tree.size(); 56 | let src_width = pixmap_size.width(); 57 | let src_height = pixmap_size.height(); 58 | let width = match width { 59 | Some(w) => rasteroid::term_misc::dim_to_px(w, rasteroid::term_misc::SizeDirection::Width)?, 60 | None => src_width as u32, 61 | }; 62 | let height = match height { 63 | Some(h) => rasteroid::term_misc::dim_to_px(h, rasteroid::term_misc::SizeDirection::Height)?, 64 | None => src_height as u32, 65 | }; 66 | let (target_width, target_height) = 67 | rasteroid::image_extended::calc_fit(src_width as u32, src_height as u32, width, height); 68 | let scale_x = target_width as f32 / src_width; 69 | let scale_y = target_height as f32 / src_height; 70 | let scale = scale_x.min(scale_y); 71 | 72 | // Create a Pixmap to render to 73 | let mut pixmap = tiny_skia::Pixmap::new(target_width, target_height) 74 | .ok_or("Failed to create pixmap for svg")?; 75 | let transform = tiny_skia::Transform::from_scale(scale, scale); 76 | 77 | // Render SVG to Pixmap 78 | resvg::render(&tree, transform, &mut pixmap.as_mut()); 79 | 80 | // Convert Pixmap to ImageBuffer 81 | let image_buffer = ImageBuffer::, _>::from_raw( 82 | target_width as u32, 83 | 
target_height as u32, 84 | pixmap.data().to_vec(), 85 | ) 86 | .ok_or("Failed to create image buffer for svg")?; 87 | 88 | // Convert ImageBuffer to DynamicImage 89 | Ok(DynamicImage::ImageRgba8(image_buffer)) 90 | } 91 | 92 | fn get_chromium_install_path() -> PathBuf { 93 | let base_dir = dirs::cache_dir() 94 | .or_else(dirs::data_dir) 95 | .unwrap_or_else(|| std::env::temp_dir()); 96 | 97 | let p = base_dir.join("chromiumoxide").join("chromium"); 98 | if !p.exists() { 99 | eprintln!("couldn't find chromium installed, trying to install.. it may take a little."); 100 | let _ = fs::create_dir_all(p.clone()); 101 | } 102 | p 103 | } 104 | 105 | pub fn html_to_image(html: &str) -> Result, Box> { 106 | let encoded_html = urlencoding::encode(&html); 107 | let data_uri = format!("data:text/html;charset=utf-8,{}", encoded_html); 108 | let data = screenshot_uri(&data_uri)?; 109 | 110 | Ok(data) 111 | } 112 | fn screenshot_uri(data_uri: &str) -> Result, Box> { 113 | let rt = tokio::runtime::Builder::new_current_thread() 114 | .enable_all() 115 | .build()?; 116 | 117 | rt.block_on(async { 118 | let config = match BrowserConfig::builder().new_headless_mode().build() { 119 | Ok(c) => c, 120 | Err(_) => { 121 | let download_path = get_chromium_install_path(); 122 | let fetcher = BrowserFetcher::new( 123 | BrowserFetcherOptions::builder() 124 | .with_path(&download_path) 125 | .build()?, 126 | ); 127 | let info = fetcher.fetch().await?; 128 | BrowserConfig::builder() 129 | .chrome_executable(info.executable_path) 130 | .new_headless_mode() 131 | .build()? 132 | } 133 | }; 134 | let (browser, mut handler) = Browser::launch(config).await.map_err(|e| format!("failed to launch chromium\neiter you need to kill chrome/edge process or\nremove: {} and rerun. or install chrome\noriginal error: {}", get_chromium_install_path().display(), e))?; 135 | tokio::spawn(async move { while let Some(_) = handler.next().await {} }); 136 | 137 | let page = browser.new_page(data_uri).await?; 138 | 139 | let mut prms = chromiumoxide::page::ScreenshotParams::default(); 140 | prms.full_page = Some(true); 141 | prms.omit_background = Some(true); 142 | let screenshot = page.screenshot(prms).await?; 143 | 144 | Ok(screenshot) 145 | }) 146 | } 147 | 148 | pub fn md_to_html(markdown: &str, css_path: Option<&str>, raw_html: bool) -> String { 149 | let mut options = ComrakOptions::default(); 150 | 151 | let mut plugins = ComrakPlugins::default(); 152 | let adapter = SyntectAdapter::new(None); 153 | plugins.render.codefence_syntax_highlighter = Some(&adapter); 154 | 155 | // ➕ Enable extensions 156 | options.extension.strikethrough = true; 157 | options.extension.tagfilter = true; 158 | options.extension.table = true; 159 | options.extension.autolink = true; 160 | options.extension.tasklist = true; 161 | options.extension.footnotes = true; 162 | options.extension.description_lists = true; 163 | 164 | // 🎯 Parsing options 165 | options.parse.smart = true; // fancy quotes, dashes, ellipses 166 | 167 | // 💄 Render options 168 | options.render.unsafe_ = raw_html; 169 | options.render.hardbreaks = false; 170 | options.render.github_pre_lang = true; //
171 |     options.render.full_info_string = true;
172 | 
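    |     // Resolve the stylesheet: the two bundled themes are embedded at compile time
    |     // via include_str!, and any other value is treated as a path to a user css file.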
173 |     let css_content = match css_path {
174 |         Some("makurai") => Some(include_str!("../styles/makurai.css").to_string()),
175 |         Some("default") => Some(include_str!("../styles/default.css").to_string()),
176 |         Some(path) => std::fs::read_to_string(path).ok(),
177 |         None => None,
178 |     };
179 | 
180 |     let html = markdown_to_html_with_plugins(markdown, &options, &plugins);
181 |     match css_content {
182 |         Some(css) => format!(
183 |             r#"
184 | <!DOCTYPE html>
185 | <html>
186 | <head>
187 |   <meta charset="UTF-8">
188 |   <style>{}</style>
189 | </head>
190 | <body>
191 |   {}
192 | </body>
193 | </html>
194 | "#,
195 |             css, html
196 |         ),
197 |         None => html,
198 |     }
199 | }
200 | 
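    | /// Plays a video inline in the terminal: Kitty gets the raw decoded frames,
    | /// iTerm gets an animated GIF, and Sixel is rejected since video output is not
    | /// supported for it.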
201 | pub fn inline_a_video(
202 |     input: impl AsRef<str>,
203 |     out: &mut impl Write,
204 |     inline_encoder: &rasteroid::InlineEncoder,
205 |     center: bool,
206 | ) -> Result<(), Box<dyn error::Error>> {
207 |     match inline_encoder {
208 |         rasteroid::InlineEncoder::Kitty => {
209 |             let frames = video_to_frames(input)?;
210 |             let id = rand::random::<u32>();
211 |             rasteroid::kitty_encoder::encode_frames(frames, out, id, center)?;
212 |             Ok(())
213 |         }
214 |         rasteroid::InlineEncoder::Iterm => {
215 |             let gif = video_to_gif(input)?;
216 |             let dyn_img = image::load_from_memory_with_format(&gif, image::ImageFormat::Gif)?;
217 |             let offset = match center {
218 |                 true => Some(rasteroid::term_misc::center_image(dyn_img.width() as u16)),
219 |                 false => None,
220 |             };
221 |             rasteroid::iterm_encoder::encode_image(&gif, out, offset)?;
222 |             Ok(())
223 |         }
224 |         rasteroid::InlineEncoder::Sixel => return Err("Cannot view videos in sixel".into()),
225 |     }
226 | }
227 | 
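    | /// Converts a video into GIF bytes with ffmpeg; `.gif` inputs are returned as-is,
    | /// and ffmpeg is auto-downloaded when it is not installed.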
228 | fn video_to_gif(input: impl AsRef<str>) -> Result<Vec<u8>, Box<dyn error::Error>> {
229 |     let input = input.as_ref();
230 |     if input.ends_with(".gif") {
231 |         let path = Path::new(input);
232 |         let bytes = fs::read(path)?;
233 |         return Ok(bytes);
234 |     }
235 |     if !ffmpeg_sidecar::command::ffmpeg_is_installed() {
236 |         eprintln!("ffmpeg isn't installed, installing.. it may take a little");
237 |         ffmpeg_sidecar::download::auto_download()?;
238 |     }
239 | 
240 |     let mut command = FfmpegCommand::new();
241 |     command
242 |         .hwaccel("auto")
243 |         .input(input)
244 |         .format("gif")
245 |         .output("-");
246 | 
247 |     let mut child = command.spawn()?;
248 |     let mut stdout = child
249 |         .take_stdout()
250 |         .ok_or("failed to get stdout for ffmpeg")?;
251 | 
252 |     let mut output_bytes = Vec::new();
253 |     stdout.read_to_end(&mut output_bytes)?;
254 | 
255 |     child.wait()?; // ensure process finishes cleanly
256 | 
257 |     Ok(output_bytes)
258 | }
259 | 
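    | /// Decodes a video into an iterator of raw frames via ffmpeg-sidecar,
    | /// auto-downloading ffmpeg first when it is missing.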
260 | fn video_to_frames(
261 |     input: impl AsRef<str>,
262 | ) -> Result<Box<dyn Iterator<Item = OutputVideoFrame>>, Box<dyn error::Error>> {
263 |     let input = input.as_ref();
264 |     if !ffmpeg_sidecar::command::ffmpeg_is_installed() {
265 |         eprintln!("ffmpeg isn't installed, installing.. it may take a little");
266 |         ffmpeg_sidecar::download::auto_download()?;
267 |     }
268 | 
269 |     let mut command = FfmpegCommand::new();
270 |     command.hwaccel("auto").input(input).rawvideo();
271 | 
272 |     let mut child = command.spawn()?;
273 |     let frames = child.iter()?.filter_frames();
274 | 
275 |     Ok(Box::new(frames))
276 | }
277 | 


--------------------------------------------------------------------------------
/src/main.rs:
--------------------------------------------------------------------------------
  1 | mod catter;
  2 | mod concater;
  3 | mod converter;
  4 | mod markitdown;
  5 | mod prompter;
  6 | mod rasteroid;
  7 | mod scrapy;
  8 | 
  9 | use std::{
 10 |     collections::HashMap,
 11 |     io::{BufWriter, Write},
 12 |     path::Path,
 13 | };
 14 | 
 15 | #[macro_use]
 16 | extern crate lazy_static;
 17 | 
 18 | use catter::{CatOpts, EncoderForce};
 19 | use clap::{
 20 |     Arg, ColorChoice, Command,
 21 |     builder::{Styles, styling::AnsiColor},
 22 | };
 23 | use rasteroid::term_misc;
 24 | 
 25 | fn main() {
 26 |     let opts = Command::new("mcat")
 27 |         .version(env!("CARGO_PKG_VERSION"))
 28 |         .author(env!("CARGO_PKG_AUTHORS"))
 29 |         .about(env!("CARGO_PKG_DESCRIPTION"))
 30 |         .color(ColorChoice::Always)
 31 |         .styles(
 32 |             Styles::styled()
 33 |                 .header(AnsiColor::Green.on_default().bold())
 34 |                 .literal(AnsiColor::Blue.on_default()),
 35 |         )
 36 |         .arg(Arg::new("input").index(1).num_args(1..).help("file / dir").required(true))
 37 |         .arg(
 38 |             Arg::new("output")
 39 |                 .short('o')
 40 |                 .help("the format to output")
 41 |                 .value_parser(["html", "md", "image", "video", "inline"]),
 42 |         )
 43 |         .arg(
 44 |             Arg::new("theme")
 45 |                 .short('t')
 46 |                 .help("alternative css file for images, valid options: [default, makurai, <file>]",)
 47 |                 .default_value("default")
 48 |         )
 49 |         .arg(
 50 |             Arg::new("style-html")
 51 |                 .short('s')
 52 |                 .help("add style to html too (when html is the output)")
 53 |                 .action(clap::ArgAction::SetTrue)
 54 |         )
 55 |         .arg(
 56 |             Arg::new("kitty")
 57 |                 .long("kitty")
 58 |                 .help("makes the inline image encoded to kitty")
 59 |                 .action(clap::ArgAction::SetTrue)
 60 |         )
 61 |         .arg(
 62 |             Arg::new("iterm")
 63 |                 .long("iterm")
 64 |                 .help("makes the inline image encoded to iterm")
 65 |                 .action(clap::ArgAction::SetTrue)
 66 |         )
 67 |         .arg(
 68 |             Arg::new("sixel")
 69 |                 .long("sixel")
 70 |                 .help("makes the inline image encoded to sixel")
 71 |                 .action(clap::ArgAction::SetTrue)
 72 |         )
 73 |         .arg(
 74 |             Arg::new("raw")
 75 |                 .long("raw")
 76 |                 .short('r')
 77 |                 .help("allows raw html to run (put only on your content)")
 78 |                 .action(clap::ArgAction::SetTrue)
 79 |         )
 80 |         .arg(
 81 |             Arg::new("inline")
 82 |                 .short('i')
 83 |                 .help("shortcut for putting --output inline")
 84 |                 .action(clap::ArgAction::SetTrue)
 85 |         )
 86 |         .arg(
 87 |             Arg::new("makurai-theme")
 88 |                 .short('m')
 89 |                 .help("shortcut for putting --theme makurai")
 90 |                 .action(clap::ArgAction::SetTrue)
 91 |         )
 92 |         .arg(
 93 |             Arg::new("horizontal")
 94 |                 .long("hori")
 95 |                 .action(clap::ArgAction::SetTrue)
 96 |                 .help("concat images horizontal instead of vertical"))
 97 |         .arg(
 98 |             Arg::new("inline-options")
 99 |                 .long("inline-options")
100 |                 .help("options for the --output inline\n*  center=\n*  width= [only for images]\n*  height= [only for images]\n*  scale=\n*  spx=\n*  sc=\n*  zoom= [doesn't work yet]\n*  x= [doesn't work yet]\n*  y= [doesn't work yet]\n*  exmp: --inline-options 'center=false,width=80%,height=20c,scale=0.5,spx=1920x1080,sc=100x20,zoom=2,x=16,y=8'\n")
101 |         )
102 |         .get_matches();
103 | 
104 |     // main
105 |     let input: Vec<String> = opts.get_many::<String>("input").unwrap().cloned().collect();
106 |     let output = opts.get_one::<String>("output");
107 |     let style = opts.get_one::<String>("theme").unwrap();
108 |     let style_html = *opts.get_one::<bool>("style-html").unwrap();
109 |     let raw_html = *opts.get_one::<bool>("raw").unwrap();
110 |     let hori = *opts.get_one::<bool>("horizontal").unwrap();
111 |     let inline_options = opts.get_one::<String>("inline-options").map(|s| s.as_str());
112 |     let inline_options = InlineOptions::from_string(inline_options.unwrap_or_default());
113 |     let _ = term_misc::init_winsize(
114 |         &term_misc::break_size_string(inline_options.spx.unwrap_or_default()).unwrap_or_exit(),
115 |         &term_misc::break_size_string(inline_options.sc.unwrap_or_default()).unwrap_or_exit(),
116 |         inline_options.scale,
117 |     );
118 | 
119 |     // shortcuts
120 |     let makurai = *opts.get_one::<bool>("makurai-theme").unwrap();
121 |     let style: &str = if makurai { "makurai" } else { style };
122 | 
123 |     let inline = *opts.get_one::<bool>("inline").unwrap();
124 |     let output: Option<&str> = if inline {
125 |         Some("inline".as_ref())
126 |     } else {
127 |         match output {
128 |             Some(o) => Some(o.as_ref()),
129 |             None => None,
130 |         }
131 |     };
132 | 
133 |     // encoders
134 |     let kitty = *opts.get_one::<bool>("kitty").unwrap();
135 |     let iterm = *opts.get_one::<bool>("iterm").unwrap();
136 |     let sixel = *opts.get_one::<bool>("sixel").unwrap();
137 |     let encoder = EncoderForce {
138 |         kitty,
139 |         iterm,
140 |         sixel,
141 |     };
142 | 
143 |     let opts = CatOpts {
144 |         to: output,
145 |         width: inline_options.width,
146 |         height: inline_options.height,
147 |         center: inline_options.center,
148 |         encoder: Some(encoder),
149 |         style: Some(style),
150 |         style_html,
151 |         raw_html,
152 |     };
153 | 
154 |     let mut tmp_files = Vec::new(); //for lifetime
155 |     let mut path_bufs = Vec::new();
156 |     let mut base_dir = None;
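    |     // Gather inputs: URLs are scraped for their biggest media file, a directory
    |     // replaces the list with an interactive file selection, and plain paths are
    |     // used as-is.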
157 |     for i in input {
158 |         let path = Path::new(&i);
159 |         if i.starts_with("https://") {
160 |             if let Ok(tmp) = scrapy::scrape_biggest_media(&i) {
161 |                 let path = tmp.path().to_path_buf();
162 |                 tmp_files.push(tmp);
163 |                 path_bufs.push(path);
164 |             } else {
165 |                 eprintln!("{} didn't contain any supported media", i);
166 |             }
167 |         } else {
168 |             if path.is_dir() {
169 |                 path_bufs.clear();
170 |                 let selected_files = prompter::prompt_for_files(path).unwrap_or_default();
171 |                 path_bufs.extend_from_slice(&selected_files);
172 |                 base_dir = Some(path.to_string_lossy().into_owned());
173 |                 break;
174 |             } else {
175 |                 path_bufs.push(path.to_path_buf());
176 |             }
177 |         }
178 |     }
179 | 
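    |     // Dispatch on the unified input kind: text inputs are concatenated into one
    |     // Markdown temp file, while multiple videos or images are first concatenated
    |     // (ffmpeg time-concat / pixel stacking) before being catted.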
180 |     let stdout = std::io::stdout();
181 |     let mut out = BufWriter::new(stdout);
182 |     let main_format = concater::check_unified_format(&path_bufs);
183 |     match main_format {
184 |         "text" => {
185 |             let mut path_bufs = concater::assign_names(&path_bufs, base_dir.as_ref());
186 |             path_bufs.sort_by_key(|(path, _)| *path);
187 |             let tmp = concater::concat_text(path_bufs);
188 |             catter::cat(tmp.path(), &mut out, Some(opts)).unwrap_or_exit();
189 |         }
190 |         "video" => {
191 |             if path_bufs.len() == 1 {
192 |                 catter::cat(&path_bufs[0], &mut out, Some(opts)).unwrap_or_exit();
193 |             } else {
194 |                 #[allow(unused_variables)] //for lifetime
195 |                 let (dir, path) = concater::concat_video(&path_bufs).unwrap_or_exit();
196 |                 catter::cat(&path, &mut out, Some(opts)).unwrap_or_exit();
197 |             }
198 |         }
199 |         "image" => {
200 |             if path_bufs.len() == 1 {
201 |                 catter::cat(&path_bufs[0], &mut out, Some(opts)).unwrap_or_exit();
202 |             } else {
203 |                 let img = concater::concat_images(path_bufs, hori).unwrap_or_exit();
204 |                 catter::cat(&img.path(), &mut out, Some(opts)).unwrap_or_exit();
205 |             }
206 |         }
207 |         _ => {}
208 |     }
209 |     out.flush().unwrap();
210 | }
211 | 
212 | #[derive(Debug)]
213 | struct InlineOptions<'a> {
214 |     width: Option<&'a str>,
215 |     height: Option<&'a str>,
216 |     spx: Option<&'a str>,
217 |     sc: Option<&'a str>,
218 |     scale: Option<f32>,
219 |     zoom: Option<u32>,
220 |     x: Option<u32>,
221 |     y: Option<u32>,
222 |     center: bool,
223 | }
224 | 
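    | // Parses the comma-separated `--inline-options` string, e.g.
    | // "center=false,width=80%,height=20c,scale=0.5,spx=1920x1080,sc=100x20",
    | // keeping the defaults below for any key that is missing or fails to parse.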
225 | impl<'a> InlineOptions<'a> {
226 |     pub fn from_string(s: &'a str) -> Self {
227 |         let mut options = InlineOptions {
228 |             width: Some("80%"),
229 |             height: Some("80%"),
230 |             spx: Some("1920x1080"),
231 |             sc: Some("100x20"),
232 |             scale: Some(1.0),
233 |             zoom: Some(1),
234 |             x: Some(0),
235 |             y: Some(0),
236 |             center: true,
237 |         };
238 |         let map: HashMap<_, _> = s
239 |             .split(',')
240 |             .filter_map(|pair| {
241 |                 let mut split = pair.splitn(2, '=');
242 |                 let key = split.next()?.trim();
243 |                 let value = split.next()?.trim();
244 |                 Some((key, value))
245 |             })
246 |             .collect();
247 | 
248 |         if let Some(&val) = map.get("width") {
249 |             options.width = Some(val);
250 |         }
251 |         if let Some(&val) = map.get("height") {
252 |             options.height = Some(val);
253 |         }
254 |         if let Some(&val) = map.get("spx") {
255 |             options.spx = Some(val);
256 |         }
257 |         if let Some(&val) = map.get("sc") {
258 |             options.sc = Some(val);
259 |         }
260 |         if let Some(&val) = map.get("scale") {
261 |             options.scale = val.parse().ok();
262 |         }
263 |         if let Some(&val) = map.get("zoom") {
264 |             options.zoom = val.parse().ok();
265 |         }
266 |         if let Some(&val) = map.get("x") {
267 |             options.x = val.parse().ok();
268 |         }
269 |         if let Some(&val) = map.get("y") {
270 |             options.y = val.parse().ok();
271 |         }
272 |         if let Some(&val) = map.get("center") {
273 |             options.center = val == "true" || val == "1";
274 |         }
275 | 
276 |         options
277 |     }
278 | }
279 | 
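    | // Convenience trait for the CLI: unwrap a Result, or print the error and exit
    | // with status 1 instead of panicking.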
280 | trait UnwrapOrExit<T> {
281 |     fn unwrap_or_exit(self) -> T;
282 | }
283 | 
284 | impl<T, E: std::fmt::Display> UnwrapOrExit<T> for Result<T, E> {
285 |     fn unwrap_or_exit(self) -> T {
286 |         match self {
287 |             Ok(value) => value,
288 |             Err(err) => {
289 |                 eprintln!("{}", err);
290 |                 std::process::exit(1);
291 |             }
292 |         }
293 |     }
294 | }
295 | 


--------------------------------------------------------------------------------
/src/markitdown/docx.rs:
--------------------------------------------------------------------------------
  1 | use super::sheets;
  2 | use quick_xml::events::Event;
  3 | use quick_xml::reader::Reader;
  4 | use std::io::{Cursor, Read};
  5 | use std::path::Path;
  6 | use zip::ZipArchive;
  7 | 
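    | /// Formatting state collected from the w:* markers while walking
    | /// word/document.xml; each flag mirrors the element noted beside it.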
  8 | struct Styles {
  9 |     title: bool,     // w:pStyle (empty) with w:val containing "title"
 10 |     header: bool,    // w:pStyle (empty) with w:val containing "heading"
 11 |     bold: bool,      // w:b (empty)
 12 |     strike: bool,    // w:strike
 13 |     underline: bool, // w:u
 14 |     italics: bool,   // w:i
 15 |     indent: i8,      // w:ilvl w:val + 1; -1 marks a just-emitted indented list item
 16 |     table: bool,     // w:tbl
 17 | }
 18 | 
 19 | impl Styles {
 20 |     pub fn default() -> Self {
 21 |         Styles {
 22 |             title: false,
 23 |             header: false,
 24 |             strike: false,
 25 |             italics: false,
 26 |             underline: false,
 27 |             bold: false,
 28 |             indent: 0,
 29 |             table: false,
 30 |         }
 31 |     }
 32 | }
 33 | 
 34 | fn get_attr<'a>(e: &'a quick_xml::events::BytesStart<'a>, key: &[u8]) -> Option<String> {
 35 |     for attr in e.attributes().with_checks(false) {
 36 |         if let Ok(attr) = attr {
 37 |             if attr.key.as_ref() == key {
 38 |                 return Some(attr.unescape_value().ok()?.into_owned());
 39 |             }
 40 |         }
 41 |     }
 42 |     None
 43 | }
 44 | 
 45 | pub fn docx_convert(path: &Path) -> Result<String, Box<dyn std::error::Error>> {
 46 |     let data = std::fs::read(path)?;
 47 |     let cursor = Cursor::new(data);
 48 | 
 49 |     let mut archive = ZipArchive::new(cursor)?;
 50 |     let mut xml_content = String::new();
 51 | 
 52 |     for i in 0..archive.len() {
 53 |         let mut file = archive.by_index(i)?;
 54 |         if file.name() == "word/document.xml" {
 55 |             file.read_to_string(&mut xml_content)?;
 56 |             break;
 57 |         }
 58 |     }
 59 | 
 60 |     let mut reader = Reader::from_str(&xml_content);
 61 |     let mut buf = Vec::new();
 62 |     let mut markdown = String::new();
 63 | 
 64 |     let mut table_rows: Vec<Vec<String>> = Vec::new();
 65 |     let mut current_row: Vec<String> = Vec::new();
 66 |     let mut styles = Styles::default();
 67 | 
 68 |     loop {
 69 |         match reader.read_event_into(&mut buf) {
 70 |             Ok(Event::Start(e)) => match e.name().as_ref() {
 71 |                 b"w:tbl" => styles.table = true,
 72 |                 _ => {
 73 |                     continue;
 74 |                 }
 75 |             },
 76 |             Ok(Event::Empty(e)) => match e.name().as_ref() {
 77 |                 b"w:b" => {
 78 |                     if let Some(val) = get_attr(&e, b"w:val") {
 79 |                         if val == "true" {
 80 |                             styles.bold = true;
 81 |                         }
 82 |                     } else {
 83 |                         styles.bold = true;
 84 |                     }
 85 |                 }
 86 |                 b"w:i" => {
 87 |                     if let Some(val) = get_attr(&e, b"w:val") {
 88 |                         if val == "true" {
 89 |                             styles.italics = true;
 90 |                         }
 91 |                     } else {
 92 |                         styles.italics = true;
 93 |                     }
 94 |                 }
 95 |                 b"w:strike" => {
 96 |                     if let Some(val) = get_attr(&e, b"w:val") {
 97 |                         if val == "true" {
 98 |                             styles.strike = true;
 99 |                         }
100 |                     } else {
101 |                         styles.strike = true;
102 |                     }
103 |                 }
104 |                 b"w:u" => {
105 |                     styles.underline = true;
106 |                 }
107 |                 b"w:pStyle" => {
108 |                     if let Some(val) = get_attr(&e, b"w:val") {
109 |                         if val.to_lowercase().contains("title") {
110 |                             styles.title = true;
111 |                             styles.indent = 0;
112 |                         } else if val.to_lowercase().contains("heading") {
113 |                             styles.header = true;
114 |                             styles.indent = 0;
115 |                         }
116 |                     }
117 |                 }
118 |                 b"w:ilvl" => {
119 |                     if styles.header || styles.title {
120 |                         continue;
121 |                     }
122 |                     if let Some(val) = get_attr(&e, b"w:val") {
123 |                         if let Ok(val) = val.parse::<i8>() {
124 |                             styles.indent = val + 1
125 |                         }
126 |                     }
127 |                 }
128 |                 _ => {}
129 |             },
130 |             Ok(Event::Text(e)) => {
131 |                 let mut text = e.unescape()?.into_owned();
132 |                 if styles.bold {
133 |                     text = format!("__{}__ ", text.trim());
134 |                     styles.bold = false;
135 |                 }
136 |                 if styles.underline {
137 |                     text = format!("<u>{}</u> ", text.trim());
138 |                     styles.underline = false;
139 |                 }
140 |                 if styles.strike {
141 |                     text = format!("~~{}~~ ", text.trim());
142 |                     styles.strike = false;
143 |                 }
144 |                 if styles.italics {
145 |                     text = format!("_{}_ ", text.trim());
146 |                     styles.italics = false;
147 |                 }
148 | 
149 |                 if styles.table {
150 |                     current_row.push(text.into());
151 |                     continue;
152 |                 }
153 |                 if styles.title {
154 |                     markdown.push_str(&format!("## {}", text));
155 |                     styles.title = false;
156 |                     continue;
157 |                 }
158 |                 if styles.header {
159 |                     markdown.push_str(&format!("### {}", text));
160 |                     styles.header = false;
161 |                     continue;
162 |                 }
163 |                 if styles.indent > 0 {
164 |                     let indent = "  ".repeat(styles.indent as usize);
165 |                     markdown.push_str(&format!("{}* {}", indent, text));
166 |                     styles.indent = -1;
167 |                     continue;
168 |                 }
169 |                 markdown.push_str(&text);
170 |             }
171 |             Ok(Event::End(e)) => match e.name().as_ref() {
172 |                 b"w:tbl" => {
173 |                     if !table_rows.is_empty() {
174 |                         let headers = table_rows[0].clone();
175 |                         let data_rows = if table_rows.len() > 1 {
176 |                             table_rows[1..].to_vec()
177 |                         } else {
178 |                             Vec::new()
179 |                         };
180 |                         markdown.push_str(&sheets::to_markdown_table(&headers, &data_rows));
181 |                         markdown.push_str("\n");
182 |                         table_rows = Vec::new();
183 |                         styles = Styles::default();
184 |                     }
185 |                 }
186 |                 b"w:tr" => {
187 |                     table_rows.push(current_row);
188 |                     current_row = Vec::new();
189 |                 }
190 |                 b"w:p" => {
191 |                     if styles.indent == -1 {
192 |                         styles.indent = 0;
193 |                         markdown.push_str("  \n");
194 |                     } else {
195 |                         markdown.push_str("\n\n");
196 |                     }
197 |                 }
198 |                 _ => {}
199 |             },
200 |             Ok(Event::Eof) => break,
201 |             Err(e) => {
202 |                 return Err(
203 |                     format!("Error at position {}: {:?}", reader.buffer_position(), e).into(),
204 |                 );
205 |             }
206 |             _ => {}
207 |         }
208 |         buf.clear();
209 |     }
210 | 
211 |     Ok(format(&markdown))
212 | }
213 | 
214 | // Same format function as in the ODT implementation
215 | fn format(input: &str) -> String {
216 |     let mut result = String::with_capacity(input.len());
217 |     let mut newline_count = 0;
218 |     let mut spaces_count = 0;
219 | 
220 |     for line in input.lines() {
221 |         if line.trim() == "" {
222 |             result.push_str("\n");
223 |         } else {
224 |             result.push_str(&format!("{}\n", line));
225 |         }
226 |     }
227 |     let input = &result;
228 |     let mut result = String::with_capacity(input.len());
229 | 
230 |     for c in input.chars() {
231 |         if c == ' ' {
232 |             spaces_count += 1;
233 |         }
234 |         if c == '\n' {
235 |             newline_count += 1;
236 |             if spaces_count >= 2 {
237 |                 newline_count += 1;
238 |             }
239 |             spaces_count = 0;
240 |             if newline_count <= 2 {
241 |                 result.push(c);
242 |             }
243 |         } else {
244 |             newline_count = 0;
245 |             spaces_count = 0;
246 |             result.push(c);
247 |         }
248 |     }
249 | 
250 |     result
251 | }
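// Illustrative example (not from the original source) of what `format` does:
// runs of three or more newlines are collapsed so at most one blank line
// separates paragraphs, e.g.
//
//     assert_eq!(format("a\n\n\n\nb"), "a\n\nb\n");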
252 | 


--------------------------------------------------------------------------------
/src/markitdown/mod.rs:
--------------------------------------------------------------------------------
 1 | mod docx;
 2 | mod opendoc;
 3 | mod pdf;
 4 | mod pptx;
 5 | mod sheets;
 6 | 
 7 | use std::{
 8 |     fs::{self, File},
 9 |     path::Path,
10 | };
11 | 
12 | use tempfile::Builder;
13 | use zip::ZipArchive;
14 | 
15 | pub fn convert(
16 |     path: &Path,
17 |     name_header: Option<&String>,
18 | ) -> Result<String, Box<dyn std::error::Error>> {
19 |     let path = Path::new(path);
20 |     if !path.exists() {
21 |         return Err(format!("{} doesn't exist", path.display()).into());
22 |     }
23 |     if !path.is_file() {
24 |         return Err(format!("Unknown path type for {}", path.display()).into());
25 |     }
26 | 
27 |     let ext = path
28 |         .extension()
29 |         .unwrap_or_default()
30 |         .to_string_lossy()
31 |         .to_lowercase();
32 | 
33 |     let result = match ext.as_str() {
34 |         "csv" => sheets::csv_converter(path)?,
35 |         "docx" => docx::docx_convert(path)?,
36 |         "pdf" => pdf::pdf_convert(path)?,
37 |         "pptx" => pptx::pptx_converter(path)?,
38 |         "xlsx" | "xls" | "xlsm" | "xlsb" | "xla" | "xlam" | "ods" => sheets::sheets_convert(path)?,
39 |         "zip" => zip_convert(path)?,
40 |         "odt" => opendoc::opendoc_convert(path)?,
41 |         "odp" => opendoc::opendoc_convert(path)?,
42 |         "md" | "html" => {
43 |             let res = fs::read_to_string(path)?;
44 |             match name_header {
45 |                 Some(name) => format!("# {}\n\n{}\n\n", name, res),
46 |                 None => format!("{}\n\n", res),
47 |             }
48 |         }
49 |         _ => {
50 |             let content = fs::read_to_string(path)?;
51 |             markitdown_fallback(&content, name_header, &ext)
52 |         }
53 |     };
54 | 
55 |     Ok(result)
56 | }
57 | 
58 | fn zip_convert(path: &Path) -> Result<String, Box<dyn std::error::Error>> {
59 |     let file = File::open(path)?;
60 |     let mut archive = ZipArchive::new(file)?;
61 | 
62 |     let mut output = String::new();
63 |     for i in 0..archive.len() {
64 |         let mut entry = archive.by_index(i)?;
65 |         let name = entry.name().to_string();
66 |         let extension = Path::new(&name)
67 |             .extension()
68 |             .and_then(|e| e.to_str())
69 |             .unwrap_or("");
70 | 
71 |         if entry.is_dir() {
72 |             continue;
73 |         }
74 | 
75 |         let mut temp = Builder::new()
76 |             .suffix(&format!(".{}", extension))
77 |             .tempfile()?;
78 |         std::io::copy(&mut entry, &mut temp)?;
79 |         let temp_path = temp.path().to_path_buf(); // clone path before move
80 | 
81 |         // convert using original convert function
82 |         let md = convert(&temp_path, None).unwrap_or("**[Failed Reading]**".into());
83 |         output += &format!("# `{}`\n\n{}\n\n", name, md);
84 |     }
85 | 
86 |     Ok(output)
87 | }
88 | 
89 | fn markitdown_fallback(content: &String, name: Option<&String>, ext: &String) -> String {
90 |     let md = format!("```{}\n{}\n```", ext, content);
91 | 
92 |     match name {
93 |         Some(name) => format!("# `{}`\n\n{}", name, md),
94 |         None => md,
95 |     }
96 | }
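// Illustrative usage sketch (the file name is hypothetical, not from the
// original source): callers pick the converter purely by extension, and unknown
// extensions fall through to `markitdown_fallback`, which wraps the raw
// contents in a fenced code block tagged with that extension.
//
//     let md = convert(Path::new("report.xlsx"), Some(&"report".to_string()))?;
//     println!("{}", md);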
97 | 


--------------------------------------------------------------------------------
/src/markitdown/opendoc.rs:
--------------------------------------------------------------------------------
  1 | use quick_xml::events::Event;
  2 | use quick_xml::reader::Reader;
  3 | use std::io::{Cursor, Read};
  4 | use std::path::Path;
  5 | use zip::ZipArchive;
  6 | 
  7 | use super::sheets;
  8 | 
  9 | pub fn opendoc_convert(path: &Path) -> Result<String, Box<dyn std::error::Error>> {
 10 |     let data = std::fs::read(path)?;
 11 |     let cursor = Cursor::new(data);
 12 |     let mut archive = ZipArchive::new(cursor)?;
 13 |     let mut xml_content = String::new();
 14 | 
 15 |     for i in 0..archive.len() {
 16 |         let mut file = archive.by_index(i)?;
 17 |         if file.name() == "content.xml" {
 18 |             file.read_to_string(&mut xml_content)?;
 19 |             break;
 20 |         }
 21 |     }
 22 | 
 23 |     let mut reader = Reader::from_str(&xml_content);
 24 |     let mut buf = Vec::new();
 25 |     let mut markdown = String::new();
 26 |     let mut table_rows: Vec<Vec<String>> = Vec::new();
 27 |     let mut current_row: Vec<String> = Vec::new();
 28 |     let mut is_table = false;
 29 |     let mut is_list_item = 0;
 30 | 
 31 |     loop {
 32 |         match reader.read_event_into(&mut buf) {
 33 |             Ok(Event::Start(e)) => match e.name().as_ref() {
 34 |                 b"text:p" => continue,
 35 |                 b"text:h" => {
 36 |                     is_list_item = 0;
 37 |                     markdown.push_str("### ");
 38 |                 }
 39 |                 b"text:span" => continue,
 40 |                 b"table:table" => is_table = true,
 41 |                 b"table:table-row" => continue,
 42 |                 b"table:table-cell" => continue,
 43 |                 b"text:list" => markdown.push_str(""),
 44 |                 b"text:list-item" => is_list_item = 1,
 45 |                 b"text:a" => continue,
 46 |                 _ => {
 47 |                     // eprintln!("start {}", String::from_utf8(e.name().0.to_vec())?)
 48 |                 }
 49 |             },
 50 |             Ok(Event::Text(e)) => {
 51 |                 let text = &e.unescape()?.into_owned();
 52 |                 if is_table {
 53 |                     current_row.push(text.into());
 54 |                 } else if is_list_item == 1 {
 55 |                     markdown.push_str(&format!(" * {}", text));
 56 |                     is_list_item = 2;
 57 |                 } else {
 58 |                     markdown.push_str(text);
 59 |                 }
 60 |             }
 61 |             Ok(Event::End(e)) => match e.name().as_ref() {
 62 |                 b"table:table" => {
 63 |                     let headers = table_rows.first().cloned().unwrap_or_default();
 64 |                     let data_rows = if table_rows.len() > 1 {
 65 |                         table_rows[1..].to_vec()
 66 |                     } else {
 67 |                         Vec::new()
 68 |                     };
 69 |                     is_table = false;
 70 |                     markdown.push_str(&sheets::to_markdown_table(&headers, &data_rows));
 71 |                     markdown.push_str("\n");
 72 |                     table_rows = Vec::new();
 73 |                 }
 74 |                 b"table:table-row" => {
 75 |                     table_rows.push(current_row);
 76 |                     current_row = Vec::new();
 77 |                 }
 78 |                 b"text:p" => {
 79 |                     if is_list_item != 2 {
 80 |                         markdown.push_str("\n\n");
 81 |                     }
 82 |                 }
 83 |                 b"text:h" => markdown.push_str("\n\n"),
 84 |                 b"text:span" => continue,
 85 |                 b"table:table-cell" => continue,
 86 |                 b"text:list" => markdown.push_str("\n"),
 87 |                 b"text:list-item" => {
 88 |                     is_list_item = 0;
 89 |                     markdown.push_str("  \n");
 90 |                 }
 91 |                 b"text:a" => continue,
 92 |                 _ => {}
 93 |             },
 94 |             Ok(Event::Eof) => break,
 95 |             Err(e) => {
 96 |                 return Err(
 97 |                     format!("Error at position {}: {:?}", reader.buffer_position(), e).into(),
 98 |                 );
 99 |             }
100 |             _ => {}
101 |         }
102 |         buf.clear();
103 |     }
104 | 
105 |     Ok(format(&markdown))
106 | }
107 | 
108 | fn format(input: &str) -> String {
109 |     let mut result = String::with_capacity(input.len());
110 |     let mut newline_count = 0;
111 |     let mut spaces_count = 0;
112 | 
113 |     for line in input.lines() {
114 |         if line.trim() == "" {
115 |             result.push_str("\n");
116 |         } else {
117 |             result.push_str(&format!("{}\n", line));
118 |         }
119 |     }
120 |     let input = &result;
121 |     let mut result = String::with_capacity(input.len());
122 | 
123 |     for c in input.chars() {
124 |         if c == ' ' {
125 |             spaces_count += 1;
126 |         }
127 |         if c == '\n' {
128 |             newline_count += 1;
129 |             if spaces_count >= 2 {
130 |                 newline_count += 1;
131 |             }
132 |             spaces_count = 0;
133 |             if newline_count <= 2 {
134 |                 result.push(c);
135 |             }
136 |         } else {
137 |             newline_count = 0;
138 |             spaces_count = 0;
139 |             result.push(c);
140 |         }
141 |     }
142 | 
143 |     result
144 | }
145 | 


--------------------------------------------------------------------------------
/src/markitdown/pdf.rs:
--------------------------------------------------------------------------------
 1 | use std::path::Path;
 2 | 
 3 | pub fn pdf_convert(path: &Path) -> Result<String, Box<dyn std::error::Error>> {
 4 |     let doc = lopdf::Document::load(path)?;
 5 |     let mut result = String::new();
 6 | 
 7 |     let num_pages = doc.get_pages().len();
 8 |     for i in 1..=num_pages {
 9 |         let page_text = doc.extract_text(&[i as u32])?.replace("  ", " ");
10 | 
11 |         let mut output = String::with_capacity(page_text.len());
12 |         for line in page_text.lines() {
13 |             output.push_str(line.trim());
14 |             output.push('\n');
15 |         }
16 |         let page_text = output;
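        // The chain below stashes paragraph gaps (three consecutive newlines)
        // behind a '\0' sentinel, joins the remaining line breaks into spaces,
        // then restores the sentinel, so wrapped lines merge into a single
        // paragraph while real paragraph breaks survive.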
17 |         let page_text = page_text.replace("\n\n\n", "\0");
18 |         let page_text = page_text.replace("\n\n", " ");
19 |         let page_text = page_text.replace("\n", " ");
20 |         let page_text = page_text.replace("\0", "\n\n\n");
21 | 
22 |         result.push_str(&format!("## Page {}\n\n", i));
23 | 
24 |         result.push_str(&page_text);
25 |         result.push_str("\n\n");
26 |     }
27 | 
28 |     Ok(result)
29 | }
30 | 


--------------------------------------------------------------------------------
/src/markitdown/pptx.rs:
--------------------------------------------------------------------------------
  1 | use std::{
  2 |     fs,
  3 |     io::{Cursor, Read},
  4 |     path::Path,
  5 | };
  6 | 
  7 | use quick_xml::events::Event;
  8 | use zip::ZipArchive;
  9 | 
 10 | use super::sheets;
 11 | 
 12 | pub fn pptx_converter(path: &Path) -> Result<String, Box<dyn std::error::Error>> {
 13 |     let data = fs::read(path)?;
 14 |     let cursor = Cursor::new(data);
 15 |     let mut archive = ZipArchive::new(cursor)?;
 16 |     let mut markdown = String::new();
 17 |     let mut slide_num = 1;
 18 | 
 19 |     for i in 0..archive.len() {
 20 |         let mut file = archive.by_index(i)?;
 21 |         let file_name = file.name().to_string();
 22 | 
 23 |         if file_name.starts_with("ppt/slides/") && file_name.ends_with(".xml") {
 24 |             markdown.push_str(&format!("\n\n<!-- Slide {} -->\n", slide_num));
 25 |             slide_num += 1;
 26 | 
 27 |             let mut content = String::new();
 28 |             file.read_to_string(&mut content)?;
 29 |             let mut reader = quick_xml::Reader::from_str(&content);
 30 |             let mut buf = Vec::new();
 31 |             let mut table_rows: Vec<Vec<String>> = Vec::new();
 32 |             let mut current_row: Vec<String> = Vec::new();
 33 |             let mut cell_text = String::new();
 34 |             let mut in_text_body = false;
 35 |             let mut in_title = false;
 36 |             let mut in_table = false;
 37 |             let mut in_row = false;
 38 |             let mut in_cell = false;
 39 | 
 40 |             loop {
 41 |                 match reader.read_event_into(&mut buf) {
 42 |                     Ok(Event::Start(ref e)) => match e.name().as_ref() {
 43 |                         b"p:txBody" => {
 44 |                             in_text_body = true;
 45 |                         }
 46 |                         b"p:title" => {
 47 |                             in_title = true;
 48 |                         }
 49 |                         b"a:tbl" => {
 50 |                             in_table = true;
 51 |                             table_rows.clear();
 52 |                         }
 53 |                         b"a:tr" => {
 54 |                             if in_table {
 55 |                                 in_row = true;
 56 |                                 current_row = Vec::new();
 57 |                             }
 58 |                         }
 59 |                         b"a:br" => {
 60 |                             if in_text_body {
 61 |                                 markdown.push_str("  \n");
 62 |                             }
 63 |                         }
 64 |                         b"a:tc" => {
 65 |                             if in_row {
 66 |                                 in_cell = true;
 67 |                                 cell_text.clear();
 68 |                             }
 69 |                         }
 70 |                         _ => {}
 71 |                     },
 72 |                     Ok(Event::Text(e)) => {
 73 |                         if in_text_body {
 74 |                             let text = e.unescape().unwrap_or_default().to_string();
 75 | 
 76 |                             if !text.trim().is_empty() {
 77 |                                 if in_title {
 78 |                                     markdown.push_str(&format!("# {}", text.trim()));
 79 |                                 } else {
 80 |                                     markdown.push_str(&format!("{} ", text.trim()));
 81 |                                 }
 82 |                             }
 83 |                         }
 84 |                         if in_cell {
 85 |                             cell_text.push_str(&e.unescape().unwrap_or_default());
 86 |                         }
 87 |                     }
 88 |                     Ok(Event::End(ref e)) => match e.name().as_ref() {
 89 |                         b"p:txBody" => {
 90 |                             in_text_body = false;
 91 |                             markdown.push_str("  \n");
 92 |                         }
 93 |                         b"p:title" => {
 94 |                             in_title = false;
 95 |                             markdown.push_str("  \n");
 96 |                         }
 97 |                         b"a:tbl" => {
 98 |                             in_table = false;
 99 |                             if !table_rows.is_empty() {
100 |                                 let headers = table_rows[0].clone();
101 |                                 let data_rows = if table_rows.len() > 1 {
102 |                                     table_rows[1..].to_vec()
103 |                                 } else {
104 |                                     Vec::new()
105 |                                 };
106 |                                 markdown.push_str(&sheets::to_markdown_table(&headers, &data_rows));
107 |                                 markdown.push_str("\n");
108 |                             }
109 |                         }
110 |                         b"a:tr" => {
111 |                             in_row = false;
112 |                             if !current_row.is_empty() {
113 |                                 table_rows.push(current_row.clone());
114 |                             }
115 |                         }
116 |                         b"a:tc" => {
117 |                             in_cell = false;
118 |                             if in_row {
119 |                                 current_row.push(cell_text.trim().to_string());
120 |                             }
121 |                         }
122 |                         _ => {}
123 |                     },
124 |                     Ok(Event::Eof) => break,
125 |                     Err(e) => return Err(Box::new(e)),
126 |                     _ => {}
127 |                 }
128 |                 buf.clear();
129 |             }
130 |         }
131 |     }
132 | 
133 |     Ok(markdown.trim().to_string())
134 | }
135 | 


--------------------------------------------------------------------------------
/src/markitdown/sheets.rs:
--------------------------------------------------------------------------------
 1 | use std::{
 2 |     fs::File,
 3 |     io::{self, BufRead},
 4 |     path::Path,
 5 | };
 6 | 
 7 | use calamine::Reader;
 8 | 
 9 | fn detect_delimiter(line: &str) -> u8 {
10 |     let candidates = [',', ';', '\t', '|'];
11 |     candidates
12 |         .iter()
13 |         .map(|&c| (c, line.matches(c).count()))
14 |         .max_by_key(|&(_, count)| count)
15 |         .map(|(c, _)| c as u8)
16 |         .unwrap_or(b',') // fallback to comma
17 | }
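// Example (illustrative, not from the original source): detect_delimiter("name;age;city")
// returns b';' because ';' occurs most often among the candidates; a line with
// no candidate hits falls back to b','.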
18 | 
19 | pub fn to_markdown_table(headers: &[String], rows: &[Vec<String>]) -> String {
20 |     let mut output = String::new();
21 |     output += &format!("| {} |\n", headers.join(" | "));
22 |     output += &format!("|{}|\n", vec!["---"; headers.len()].join("|"));
23 | 
24 |     for row in rows {
25 |         output += &format!("| {} |\n", row.join(" | "));
26 |     }
27 | 
28 |     output
29 | }
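// Example (illustrative, not from the original source):
//
//     to_markdown_table(&["a".into(), "b".into()], &[vec!["1".into(), "2".into()]])
//
// produces:
//
//     | a | b |
//     |---|---|
//     | 1 | 2 |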
30 | 
31 | pub fn sheets_convert(path: &Path) -> Result<String, Box<dyn std::error::Error>> {
32 |     let mut workbook = calamine::open_workbook_auto(path)?;
33 |     let mut output = String::new();
34 | 
35 |     for sheet_name in workbook.sheet_names().to_owned() {
36 |         if let Ok(range) = workbook.worksheet_range(&sheet_name) {
37 |             let mut rows = range.rows();
38 |             if let Some(header_row) = rows.next() {
39 |                 let headers = header_row
40 |                     .iter()
41 |                     .map(|cell| cell.to_string())
42 |                     .collect::<Vec<String>>();
43 |                 let body = rows
44 |                     .map(|r| r.iter().map(|cell| cell.to_string()).collect::<Vec<String>>())
45 |                     .collect::<Vec<Vec<String>>>();
46 | 
47 |                 output += &format!("# {}\n\n", sheet_name);
48 |                 output += &to_markdown_table(&headers, &body);
49 |                 output += "\n";
50 |             }
51 |         }
52 |     }
53 | 
54 |     if output.is_empty() {
55 |         Err("No readable sheets found.".into())
56 |     } else {
57 |         Ok(output)
58 |     }
59 | }
60 | 
61 | pub fn csv_converter(path: &Path) -> Result<String, Box<dyn std::error::Error>> {
62 |     let mut file = File::open(path)?;
63 |     let mut first_line = String::new();
64 |     let _ = io::BufReader::new(&mut file).read_line(&mut first_line)?;
65 | 
66 |     let delimiter = detect_delimiter(&first_line);
67 |     let mut reader = csv::ReaderBuilder::new()
68 |         .delimiter(delimiter)
69 |         .from_path(path)?;
70 | 
71 |     let headers = reader
72 |         .headers()?
73 |         .iter()
74 |         .map(|s| s.to_string())
75 |         .collect::<Vec<String>>();
76 | 
77 |     let rows = reader
78 |         .records()
79 |         .map(|r| r.map(|rec| rec.iter().map(|s| s.to_string()).collect::<Vec<String>>()))
80 |         .collect::<Result<Vec<Vec<String>>, _>>()?;
81 | 
82 |     Ok(to_markdown_table(&headers, &rows))
83 | }
84 | 


--------------------------------------------------------------------------------
/src/prompter.rs:
--------------------------------------------------------------------------------
  1 | use ignore::WalkBuilder;
  2 | use inquire::MultiSelect;
  3 | use std::collections::{HashMap, HashSet};
  4 | use std::path::{Path, PathBuf};
  5 | 
  6 | pub fn prompt_for_files(dir: &Path) -> Result<Vec<PathBuf>, String> {
  7 |     let mut all_paths = collect_gitignored_paths(dir)?;
  8 |     all_paths.sort(); // Ensures folders come before contents
  9 | 
 10 |     let tree_view = format_file_list(&all_paths, dir);
 11 | 
 12 |     let index_map: HashMap<String, PathBuf> = tree_view
 13 |         .iter()
 14 |         .cloned()
 15 |         .zip(all_paths.iter().cloned())
 16 |         .collect();
 17 | 
 18 |     let selected = MultiSelect::new("Select files or folders", tree_view)
 19 |         .with_page_size(20)
 20 |         .with_vim_mode(true)
 21 |         .prompt()
 22 |         .map_err(|e| e.to_string())?;
 23 | 
 24 |     let selected_paths: HashSet<PathBuf> = selected
 25 |         .into_iter()
 26 |         .filter_map(|label| index_map.get(&label).cloned())
 27 |         .collect();
 28 | 
 29 |     // Avoid duplicates: if a folder is selected, skip its inner files
 30 |     let mut final_files = HashSet::new();
 31 | 
 32 |     for path in &selected_paths {
 33 |         if path.is_file() {
 34 |             // Only include files not covered by a selected folder
 35 |             let covered = selected_paths
 36 |                 .iter()
 37 |                 .any(|other| other.is_dir() && path.starts_with(other));
 38 |             if !covered {
 39 |                 final_files.insert(path.clone());
 40 |             }
 41 |         } else if path.is_dir() {
 42 |             for file in all_paths
 43 |                 .iter()
 44 |                 .filter(|p| p.is_file() && p.starts_with(path))
 45 |             {
 46 |                 final_files.insert(file.clone());
 47 |             }
 48 |         }
 49 |     }
 50 | 
 51 |     Ok(final_files.into_iter().collect())
 52 | }
 53 | 
 54 | fn collect_gitignored_paths(dir: &Path) -> Result<Vec<PathBuf>, String> {
 55 |     let walker = WalkBuilder::new(dir)
 56 |         .follow_links(true)
 57 |         .hidden(true)
 58 |         .git_ignore(true)
 59 |         .git_global(true)
 60 |         .git_exclude(true)
 61 |         .max_depth(None)
 62 |         .build();
 63 | 
 64 |     let mut paths = vec![];
 65 | 
 66 |     for result in walker {
 67 |         match result {
 68 |             Ok(entry) => {
 69 |                 let path = entry.path().to_path_buf();
 70 |                 if path != dir {
 71 |                     paths.push(path);
 72 |                 }
 73 |             }
 74 |             Err(err) => return Err(format!("Error walking directory: {}", err)),
 75 |         }
 76 |     }
 77 | 
 78 |     Ok(paths)
 79 | }
 80 | 
 81 | fn format_file_list(paths: &[PathBuf], base: &Path) -> Vec<String> {
 82 |     let mut formatted = vec![];
 83 | 
 84 |     for (i, path) in paths.iter().enumerate() {
 85 |         let rel = path.strip_prefix(base).unwrap_or(path);
 86 |         let depth = rel.components().count().saturating_sub(1);
 87 |         let name = path.file_name().unwrap_or_default().to_string_lossy();
 88 | 
 89 |         let mut line = String::new();
 90 |         if depth > 0 {
 91 |             line.push_str(&"│   ".repeat(depth - 1));
 92 |             let is_last = paths
 93 |                 .get(i + 1)
 94 |                 .map(|next| {
 95 |                     let next_rel = next.strip_prefix(base).unwrap_or(next);
 96 |                     next_rel.components().count().saturating_sub(1) < depth
 97 |                 })
 98 |                 .unwrap_or(true);
 99 |             line.push_str(if is_last { "└── " } else { "├── " });
100 |         }
101 | 
102 |         line.push_str(&name);
103 |         if path.is_dir() {
104 |             line.push('/');
105 |         }
106 | 
107 |         formatted.push(line);
108 |     }
109 | 
110 |     formatted
111 | }
112 | 


--------------------------------------------------------------------------------
/src/rasteroid/image_extended.rs:
--------------------------------------------------------------------------------
 1 | use std::{error, io::Cursor};
 2 | 
 3 | use fast_image_resize::{IntoImageView, Resizer, images::Image};
 4 | use image::{DynamicImage, GenericImageView, ImageEncoder, codecs::png::PngEncoder};
 5 | 
 6 | use super::term_misc::{self, dim_to_px};
 7 | 
 8 | pub trait InlineImage {
 9 |     fn resize_plus(
10 |         &self,
11 |         width: Option<&str>,
12 |         height: Option<&str>,
13 |     ) -> Result<(Vec<u8>, u16), Box<dyn error::Error>>;
14 | }
15 | 
16 | impl InlineImage for DynamicImage {
17 |     fn resize_plus(
18 |         &self,
19 |         width: Option<&str>,
20 |         height: Option<&str>,
21 |     ) -> Result<(Vec<u8>, u16), Box<dyn error::Error>> {
22 |         let (src_width, src_height) = self.dimensions();
23 |         let width = match width {
24 |             Some(w) => dim_to_px(w, term_misc::SizeDirection::Width)?,
25 |             None => src_width,
26 |         };
27 |         let height = match height {
28 |             Some(h) => dim_to_px(h, term_misc::SizeDirection::Height)?,
29 |             None => src_height,
30 |         };
31 | 
32 |         let (new_width, new_height) = calc_fit(src_width, src_height, width, height);
33 |         let center = term_misc::center_image(new_width as u16);
34 | 
35 |         let mut dst_image = Image::new(
36 |             new_width.into(),
37 |             new_height.into(),
38 |             self.pixel_type().ok_or("image is invalid")?,
39 |         );
40 |         let mut resizer = Resizer::new();
41 |         resizer.resize(self, &mut dst_image, None)?;
42 | 
43 |         let mut buffer = Vec::new();
44 |         let mut cursor = Cursor::new(&mut buffer);
45 |         let encoder = PngEncoder::new(&mut cursor);
46 |         encoder.write_image(
47 |             dst_image.buffer(),
48 |             dst_image.width(),
49 |             dst_image.height(),
50 |             self.color().into(),
51 |         )?;
52 | 
53 |         return Ok((buffer, center));
54 |     }
55 | }
56 | 
57 | pub fn calc_fit(src_width: u32, src_height: u32, dst_width: u32, dst_height: u32) -> (u32, u32) {
58 |     let src_ar = src_width as f32 / src_height as f32;
59 |     let dst_ar = dst_width as f32 / dst_height as f32;
60 | 
61 |     if src_ar > dst_ar {
62 |         // Image is wider than target: scale by width
63 |         let scaled_height = (dst_width as f32 / src_ar).round() as u32;
64 |         (dst_width, scaled_height)
65 |     } else {
66 |         // Image is taller than target: scale by height
67 |         let scaled_width = (dst_height as f32 * src_ar).round() as u32;
68 |         (scaled_width, dst_height)
69 |     }
70 | }
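// Worked example (illustrative, not from the original source): fitting a
// 4000x3000 image into 1920x1080. src_ar ≈ 1.33 is smaller than dst_ar ≈ 1.78,
// so the image is scaled by height: width = round(1080 * 1.33) = 1440, giving
// (1440, 1080) and preserving the aspect ratio.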
71 | 


--------------------------------------------------------------------------------
/src/rasteroid/iterm_encoder.rs:
--------------------------------------------------------------------------------
 1 | use crate::{converter, rasteroid::term_misc::EnvIdentifiers};
 2 | use std::io::Write;
 3 | 
 4 | pub fn encode_image(
 5 |     img: &Vec<u8>,
 6 |     mut out: impl Write,
 7 |     offset: Option<u16>,
 8 | ) -> Result<(), Box<dyn std::error::Error>> {
 9 |     let base64_encoded = converter::image_to_base64(img);
10 | 
11 |     let center = converter::offset_to_terminal(offset);
12 |     out.write_all(center.as_ref())?;
13 | 
14 |     out.write_all(b"\x1b]1337;File=inline=1;size=")?;
15 |     write!(out, "{}", base64_encoded.len())?;
16 |     out.write_all(&[b':'])?;
17 |     out.write_all(base64_encoded.as_bytes())?;
18 |     out.write_all(&[b'\x07'])?;
19 | 
20 |     Ok(())
21 | }
22 | 
23 | pub fn is_iterm_capable(env: &EnvIdentifiers) -> bool {
24 |     env.term_contains("mintty")
25 |         || env.term_contains("wezterm")
26 |         || env.term_contains("iterm2")
27 |         || env.term_contains("rio")
28 |         || env.term_contains("warp")
29 |         || env.has_key("KONSOLE_VERSION")
30 | }
31 | 


--------------------------------------------------------------------------------
/src/rasteroid/kitty_encoder.rs:
--------------------------------------------------------------------------------
  1 | use signal_hook::consts::signal::*;
  2 | use signal_hook::flag;
  3 | use std::{
  4 |     cmp::min,
  5 |     collections::HashMap,
  6 |     error::Error,
  7 |     io::Write,
  8 |     sync::{
  9 |         Arc,
 10 |         atomic::{AtomicBool, Ordering},
 11 |     },
 12 | };
 13 | 
 14 | use base64::{Engine, engine::general_purpose};
 15 | use ffmpeg_sidecar::event::OutputVideoFrame;
 16 | use flate2::{Compression, write::ZlibEncoder};
 17 | 
 18 | use crate::{
 19 |     converter,
 20 |     rasteroid::term_misc::{self, EnvIdentifiers},
 21 | };
 22 | 
 23 | fn chunk_base64(
 24 |     base64: &str,
 25 |     mut out: impl Write,
 26 |     size: usize,
 27 |     first_opts: HashMap<String, String>,
 28 |     sub_opts: HashMap<String, String>,
 29 | ) -> Result<(), std::io::Error> {
 30 |     // first block
 31 |     let mut first_opts_string = Vec::with_capacity(first_opts.len() * 8);
 32 |     for (key, value) in first_opts {
 33 |         if !first_opts_string.is_empty() {
 34 |             first_opts_string.push(b',');
 35 |         }
 36 |         write!(first_opts_string, "{}={}", key, value)?;
 37 |     }
 38 |     if !first_opts_string.is_empty() {
 39 |         first_opts_string.push(b',');
 40 |     }
 41 | 
 42 |     // all other blocks
 43 |     let mut sub_opts_string = Vec::with_capacity(sub_opts.len() * 8);
 44 |     for (key, value) in sub_opts {
 45 |         if !sub_opts_string.is_empty() {
 46 |             sub_opts_string.push(b',');
 47 |         }
 48 |         write!(sub_opts_string, "{}={}", key, value)?;
 49 |     }
 50 |     if !sub_opts_string.is_empty() {
 51 |         sub_opts_string.push(b',');
 52 |     }
 53 | 
 54 |     let total_bytes = base64.len();
 55 |     let mut start = 0;
 56 | 
 57 |     while start < total_bytes {
 58 |         let end = min(start + size, total_bytes);
 59 |         let chunk_data = &base64[start..end];
 60 |         let more_chunks = (end != total_bytes) as u8;
 61 | 
 62 |         let opts = if start == 0 {
 63 |             &first_opts_string
 64 |         } else {
 65 |             &sub_opts_string
 66 |         };
 67 | 
 68 |         out.write_all(b"\x1b_G")?;
 69 |         out.write_all(opts)?;
 70 |         write!(out, "m={};{}", more_chunks, chunk_data)?;
 71 |         out.write_all(b"\x1b\\")?;
 72 | 
 73 |         start = end;
 74 |     }
 75 | 
 76 |     Ok(())
 77 | }
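// Illustrative output sketch (not from the original source): for a payload that
// splits into two chunks with first_opts {f: 100, a: T}, the writer emits
// roughly (key order depends on HashMap iteration)
//
//     \x1b_Gf=100,a=T,m=1;<chunk 1>\x1b\\
//     \x1b_Gm=0;<chunk 2>\x1b\\
//
// i.e. the control keys ride only on the first chunk and `m` tells the terminal
// whether more chunks follow, per the kitty graphics protocol.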
 78 | 
 79 | pub fn encode_image(
 80 |     img: &Vec<u8>,
 81 |     mut out: impl Write,
 82 |     offset: Option<u16>,
 83 | ) -> Result<(), Box<dyn Error>> {
 84 |     let center_string = converter::offset_to_terminal(offset);
 85 |     let base64 = converter::image_to_base64(img);
 86 | 
 87 |     out.write_all(center_string.as_bytes())?;
 88 |     chunk_base64(
 89 |         &base64,
 90 |         out,
 91 |         4096,
 92 |         HashMap::from([
 93 |             ("f".to_string(), "100".to_string()),
 94 |             ("a".to_string(), "T".to_string()),
 95 |         ]),
 96 |         HashMap::new(),
 97 |     )?;
 98 | 
 99 |     Ok(())
100 | }
101 | 
102 | fn process_frame(
103 |     data: &Vec<u8>,
104 |     out: &mut impl Write,
105 |     first_opts: HashMap<String, String>,
106 |     sub_opts: HashMap<String, String>,
107 | ) -> Result<(), Box<dyn Error>> {
108 |     let mut encoder = ZlibEncoder::new(Vec::new(), Compression::fast());
109 |     encoder.write_all(data)?;
110 |     let compressed = encoder.finish()?;
111 | 
112 |     let base64 = general_purpose::STANDARD.encode(compressed);
113 |     chunk_base64(&base64, out, 4096, first_opts, sub_opts)?;
114 | 
115 |     Ok(())
116 | }
117 | 
118 | fn setup_signal_handler() -> Arc<AtomicBool> {
119 |     let shutdown = Arc::new(AtomicBool::new(false));
120 | 
121 |     // Register signal handlers
122 |     flag::register(SIGINT, Arc::clone(&shutdown)).unwrap();
123 |     flag::register(SIGTERM, Arc::clone(&shutdown)).unwrap();
124 |     #[cfg(windows)]
125 |     {
126 |         flag::register(SIGBREAK, Arc::clone(&shutdown)).unwrap();
127 |     }
128 |     #[cfg(unix)]
129 |     {
130 |         flag::register(SIGHUP, Arc::clone(&shutdown)).unwrap();
131 |         flag::register(SIGQUIT, Arc::clone(&shutdown)).unwrap();
132 |     }
133 | 
134 |     shutdown
135 | }
136 | 
137 | pub fn encode_frames(
138 |     frames: Box<dyn Iterator<Item = OutputVideoFrame>>,
139 |     out: &mut impl Write,
140 |     id: u32,
141 |     center: bool,
142 | ) -> Result<(), Box<dyn Error>> {
143 |     let mut frames = frames.into_iter();
144 | 
145 |     // getting the first frame
146 |     let first = frames.next().ok_or("video doesn't contain any frames")?;
147 |     let offset = term_misc::center_image(first.width as u16);
148 |     if center {
149 |         let center = converter::offset_to_terminal(Some(offset));
150 |         out.write_all(center.as_bytes())?;
151 |     }
152 |     let mut pre_timestamp = 0.0;
153 | 
154 |     // adding the root image
155 |     let i = id.to_string();
156 |     let s = first.width.to_string();
157 |     let v = first.height.to_string();
158 |     let f = "24".to_string();
159 |     let o = "z".to_string();
160 |     let q = "2".to_string();
161 |     process_frame(
162 |         &first.data,
163 |         out,
164 |         HashMap::from([
165 |             ("a".to_string(), "T".to_string()),
166 |             ("f".to_string(), f),
167 |             ("o".to_string(), o),
168 |             ("I".to_string(), i),
169 |             ("s".to_string(), s),
170 |             ("v".to_string(), v),
171 |             ("q".to_string(), q),
172 |         ]),
173 |         HashMap::new(),
174 |     )?;
175 | 
176 |     // starting the animation
177 |     let z = 100;
178 |     write!(out, "\x1b_Ga=a,s=2,v=1,r=1,I={},z={}\x1b\\", id, z)?;
179 | 
180 |     let shutdown = setup_signal_handler();
181 | 
182 |     for (c, frame) in frames.enumerate() {
183 |         if shutdown.load(Ordering::SeqCst) {
184 |             break; // clean exit
185 |         }
186 |         let s = frame.width.to_string();
187 |         let v = frame.height.to_string();
188 |         let i = id.to_string();
189 |         let f = "24".to_string();
190 |         let o = "z".to_string();
191 |         let z = ((frame.timestamp - pre_timestamp) * 1000.0) as u32;
192 |         pre_timestamp = frame.timestamp;
193 | 
194 |         let first_opts = HashMap::from([
195 |             ("a".to_string(), "f".to_string()),
196 |             ("f".to_string(), f),
197 |             ("o".to_string(), o),
198 |             ("I".to_string(), i),
199 |             ("c".to_string(), c.to_string()),
200 |             ("s".to_string(), s),
201 |             ("v".to_string(), v),
202 |             ("z".to_string(), z.to_string()),
203 |         ]);
204 |         let sub_opts = HashMap::from([("a".to_string(), "f".to_string())]);
205 | 
206 |         process_frame(&frame.data, out, first_opts, sub_opts)?;
207 |     }
208 | 
209 |     write!(out, "\x1b_Ga=a,s=3,v=1,r=1,I={},z={}\x1b\\", id, z)?;
210 |     Ok(())
211 | }
212 | 
213 | pub fn is_kitty_capable(env: &EnvIdentifiers) -> bool {
214 |     env.has_key("KITTY_WINDOW_ID") || env.term_contains("kitty") || env.term_contains("ghostty")
215 | }
216 | 


--------------------------------------------------------------------------------
/src/rasteroid/mod.rs:
--------------------------------------------------------------------------------
 1 | use std::io::Write;
 2 | 
 3 | pub mod image_extended;
 4 | pub mod iterm_encoder;
 5 | pub mod kitty_encoder;
 6 | pub mod sixel_encoder;
 7 | pub mod term_misc;
 8 | 
 9 | pub fn inline_an_image(
10 |     img: &Vec<u8>,
11 |     out: impl Write,
12 |     offset: Option<u16>,
13 |     inline_encoder: &InlineEncoder,
14 | ) -> Result<(), Box<dyn std::error::Error>> {
15 |     match inline_encoder {
16 |         InlineEncoder::Kitty => kitty_encoder::encode_image(img, out, offset),
17 |         InlineEncoder::Iterm => iterm_encoder::encode_image(img, out, offset),
18 |         InlineEncoder::Sixel => sixel_encoder::encode_image(img, out, offset),
19 |     }
20 | }
21 | 
22 | pub enum InlineEncoder {
23 |     Kitty,
24 |     Iterm,
25 |     Sixel,
26 | }
27 | impl InlineEncoder {
28 |     pub fn auto_detect(force_kitty: bool, force_iterm: bool, force_sixel: bool) -> Self {
29 |         if force_kitty {
30 |             return Self::Kitty;
31 |         }
32 |         if force_iterm {
33 |             return Self::Iterm;
34 |         }
35 |         if force_sixel {
36 |             return Self::Sixel;
37 |         }
38 | 
39 |         let env = term_misc::EnvIdentifiers::new();
40 |         if kitty_encoder::is_kitty_capable(&env) {
41 |             return Self::Kitty;
42 |         }
43 |         if iterm_encoder::is_iterm_capable(&env) {
44 |             return Self::Iterm;
45 |         }
46 |         if sixel_encoder::is_sixel_capable(&env) {
47 |             return Self::Sixel;
48 |         }
49 | 
50 |         return Self::Iterm;
51 |     }
52 | }
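// Illustrative usage sketch (the file name is hypothetical, not from the
// original source). Note the Sixel path expects PNG bytes, since
// sixel_encoder::encode_image decodes the buffer as ImageFormat::Png.
//
//     let encoder = InlineEncoder::auto_detect(false, false, false);
//     let png_bytes = std::fs::read("image.png")?;
//     inline_an_image(&png_bytes, std::io::stdout(), None, &encoder)?;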
53 | 


--------------------------------------------------------------------------------
/src/rasteroid/sixel_encoder.rs:
--------------------------------------------------------------------------------
  1 | use crate::{converter, rasteroid::term_misc::EnvIdentifiers};
  2 | use color_quant::NeuQuant;
  3 | use image::{ImageBuffer, Rgb};
  4 | use std::{
  5 |     error::Error,
  6 |     io::{self, Write},
  7 | };
  8 | 
  9 | const SIXEL_MIN: u8 = 0x3f; // '?'
 10 | 
 11 | pub fn encode_image(
 12 |     img: &Vec<u8>,
 13 |     mut out: impl Write,
 14 |     offset: Option<u16>,
 15 | ) -> Result<(), Box<dyn Error>> {
 16 |     let dyn_img = image::load_from_memory_with_format(&img, image::ImageFormat::Png)?;
 17 |     let rgb_img = dyn_img.to_rgb8();
 18 | 
 19 |     let center = converter::offset_to_terminal(offset);
 20 |     out.write_all(center.as_bytes())?;
 21 | 
 22 |     encode_sixel(&rgb_img, out)?;
 23 | 
 24 |     Ok(())
 25 | }
 26 | 
 27 | pub fn is_sixel_capable(env: &EnvIdentifiers) -> bool {
 28 |     // has way more support, i just think sixel is bad
 29 |     env.term_contains("foot") 
 30 |         || env.has_key("WT_PROFILE_ID") // windows-terminal
 31 |         || env.term_contains("sixel-tmux")
 32 | }
 33 | 
 34 | pub fn encode_sixel(
 35 |     img: &ImageBuffer<Rgb<u8>, Vec<u8>>,
 36 |     mut out: impl Write,
 37 | ) -> Result<(), Box<dyn Error>> {
 38 |     let width = img.width() as usize;
 39 |     let height = img.height() as usize;
 40 | 
 41 |     if width == 0 || height == 0 {
 42 |         return Err("image is empty".into());
 43 |     }
 44 | 
 45 |     write_sixel(&mut out, img)?;
 46 |     Ok(())
 47 | }
 48 | 
 49 | fn write_sixel<W: Write>(out: &mut W, img: &ImageBuffer<Rgb<u8>, Vec<u8>>) -> io::Result<()> {
 50 |     let width = img.width() as usize;
 51 |     let height = img.height() as usize;
 52 | 
 53 |     // DECSIXEL introducer and raster attributes
 54 |     write!(out, "\x1bP0;1q\"1;1;{};{}", width, height)?;
 55 | 
 56 |     // median quant works the best through testing
 57 |     let pixels: Vec<u8> = img.pixels().flat_map(|p| p.0[..3].to_vec()).collect();
 58 |     let nq = NeuQuant::new(10, 256, &pixels);
 59 |     let palette_vec: Vec<(u8, u8, u8)> = nq
 60 |         .color_map_rgb()
 61 |         .chunks(3)
 62 |         .map(|c| (c[0], c[1], c[2]))
 63 |         .collect();
 64 |     let palette = &palette_vec;
 65 |     let color_indices = map_to_palette(img, palette);
 66 | 
 67 |     // Write palette
 68 |     for (i, &(r, g, b)) in palette.iter().enumerate() {
 69 |         // Convert RGB to percentages (0-100)
 70 |         let r_pct = (r as f32 / 255.0 * 100.0) as u8;
 71 |         let g_pct = (g as f32 / 255.0 * 100.0) as u8;
 72 |         let b_pct = (b as f32 / 255.0 * 100.0) as u8;
 73 | 
 74 |         write!(out, "#{};2;{};{};{}", i, r_pct, g_pct, b_pct)?;
 75 |     }
 76 |     let palette_size = palette.len();
 77 |     let mut color_used = vec![false; palette_size];
 78 |     let mut sixel_data = vec![0u8; width * palette_size];
 79 | 
 80 |     // Process the image in 6-pixel strips
 81 |     let sixel_rows = (height + 5) / 6;
 82 |     for row in 0..sixel_rows {
 83 |         // Graphics NL (new sixel line)
 84 |         if row > 0 {
 85 |             write!(out, "-")?;
 86 |         }
 87 | 
 88 |         // Reset color usage flags and sixel data
 89 |         color_used.fill(false);
 90 |         sixel_data.fill(0);
 91 | 
 92 |         // Buffer sixel row, track used colors
 93 |         for p in 0..6 {
 94 |             let y = (row * 6) + p;
 95 |             if y >= height {
 96 |                 break;
 97 |             }
 98 | 
 99 |             for x in 0..width {
100 |                 let color_idx = color_indices[y * width + x] as usize;
101 |                 color_used[color_idx] = true;
102 |                 sixel_data[(width * color_idx) + x] |= 1 << p;
103 |             }
104 |         }
105 | 
106 |         // Render sixel row for each palette entry
107 |         let mut first_color_written = false;
108 |         for n in 0..palette_size {
109 |             if !color_used[n] {
110 |                 continue;
111 |             }
112 | 
113 |             // Graphics CR
114 |             if first_color_written {
115 |                 write!(out, "$")?;
116 |             }
117 | 
118 |             // Color Introducer
119 |             write!(out, "#{}", n)?;
120 | 
121 |             let mut rle_count = 0;
122 |             let mut prev_sixel = 255; // Sentinel value
123 | 
124 |             for x in 0..width {
125 |                 let next_sixel = sixel_data[(n * width) + x];
126 | 
127 |                 // RLE encode, write on value change
128 |                 if prev_sixel != 255 && next_sixel != prev_sixel {
129 |                     write_gri(out, rle_count, prev_sixel)?;
130 |                     rle_count = 0;
131 |                 }
132 | 
133 |                 prev_sixel = next_sixel;
134 |                 rle_count += 1;
135 |             }
136 | 
137 |             // Write last sixel in line
138 |             write_gri(out, rle_count, prev_sixel)?;
139 | 
140 |             first_color_written = true;
141 |         }
142 |     }
143 | 
144 |     // SIXEL terminator
145 |     write!(out, "\x1b\\")?;
146 | 
147 |     Ok(())
148 | }
149 | 
150 | // Map image pixels to the fixed palette
151 | fn map_to_palette(img: &ImageBuffer<Rgb<u8>, Vec<u8>>, palette: &[(u8, u8, u8)]) -> Vec<u8> {
152 |     let width = img.width() as usize;
153 |     let height = img.height() as usize;
154 |     let mut indices = Vec::with_capacity(width * height);
155 | 
156 |     for y in 0..height {
157 |         for x in 0..width {
158 |             let pixel = img.get_pixel(x as u32, y as u32);
159 |             let rgb = (pixel[0], pixel[1], pixel[2]);
160 | 
161 |             // Find closest color in palette
162 |             let idx = find_closest_color(palette, &rgb);
163 |             indices.push(idx);
164 |         }
165 |     }
166 | 
167 |     indices
168 | }
169 | 
170 | // Graphics Repeat Introducer encoding
171 | fn write_gri<W: Write>(out: &mut W, repeat_count: usize, sixel: u8) -> io::Result<()> {
172 |     if repeat_count == 0 {
173 |         return Ok(());
174 |     }
175 | 
176 |     // Mask with valid sixel bits, apply offset
177 |     let sixel = SIXEL_MIN + (sixel & 0b111111);
178 | 
179 |     if repeat_count > 3 {
180 |         // Graphics Repeat Introducer
181 |         write!(out, "!{}{}", repeat_count, sixel as char)?;
182 |     } else {
183 |         // Just repeat the character
184 |         for _ in 0..repeat_count {
185 |             write!(out, "{}", sixel as char)?;
186 |         }
187 |     }
188 | 
189 |     Ok(())
190 | }
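// Example (illustrative, not from the original source): write_gri(out, 12, 0b000001)
// emits "!12@": '!' is the Graphics Repeat Introducer, 12 the run length, and
// '@' the sixel character (0x3f + 1); runs of 3 or fewer are written literally.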
191 | 
192 | // Find the closest color in the palette
193 | fn find_closest_color(palette: &[(u8, u8, u8)], color: &(u8, u8, u8)) -> u8 {
194 |     let mut closest = 0;
195 |     let mut min_dist = u32::MAX;
196 | 
197 |     for (i, pal_color) in palette.iter().enumerate() {
198 |         let dr = color.0 as i32 - pal_color.0 as i32;
199 |         let dg = color.1 as i32 - pal_color.1 as i32;
200 |         let db = color.2 as i32 - pal_color.2 as i32;
201 | 
202 |         let dist = (dr * dr + dg * dg + db * db) as u32;
203 |         if dist < min_dist {
204 |             min_dist = dist;
205 |             closest = i;
206 |         }
207 |     }
208 | 
209 |     closest as u8
210 | }
211 | 


--------------------------------------------------------------------------------
/src/rasteroid/term_misc.rs:
--------------------------------------------------------------------------------
  1 | use std::{collections::HashMap, env, f32, sync::OnceLock};
  2 | 
  3 | use crossterm::terminal::{size, window_size};
  4 | 
  5 | pub struct Winsize {
  6 |     pub sc_width: u16,
  7 |     pub sc_height: u16,
  8 |     pub spx_width: u16,
  9 |     pub spx_height: u16,
 10 | }
 11 | 
 12 | lazy_static! {
 13 |     static ref WINSIZE: OnceLock<Winsize> = OnceLock::new();
 14 | }
 15 | 
 16 | #[derive(Clone)]
 17 | pub struct Size {
 18 |     pub width: u16,
 19 |     pub height: u16,
 20 |     force: bool,
 21 | }
 22 | 
 23 | impl Winsize {
 24 |     fn new(spx_fallback: &Size, sc_fallback: &Size, scale: Option<f32>) -> Self {
 25 |         let mut spx_width = 0;
 26 |         let mut spx_height = 0;
 27 |         if let Ok(res) = window_size() {
 28 |             // ioctl for unix
 29 |             spx_width = res.width;
 30 |             spx_height = res.height;
 31 |         } else {
 32 |             // do windows api here
 33 |             #[cfg(windows)]
 34 |             if let Some(size) = get_size_windows() {
 35 |                 spx_width = size.0;
 36 |                 spx_height = size.1;
 37 |             }
 38 |         }
 39 |         let (mut sc_width, mut sc_height) = size().unwrap_or((0, 0));
 40 | 
 41 |         // fallback or forcing
 42 |         if spx_fallback.force || spx_width == 0 || spx_height == 0 {
 43 |             spx_width = spx_fallback.width;
 44 |             spx_height = spx_fallback.height;
 45 |         }
 46 |         if sc_fallback.force || sc_width == 0 || sc_height == 0 {
 47 |             sc_width = sc_fallback.width;
 48 |             sc_height = sc_fallback.height;
 49 |         }
 50 | 
 51 |         let scale = scale.unwrap_or(1.0);
 52 | 
 53 |         Winsize {
 54 |             sc_height,
 55 |             sc_width: (sc_width as f32 * scale) as u16,
 56 |             spx_height,
 57 |             spx_width: (spx_width as f32 * scale) as u16,
 58 |         }
 59 |     }
 60 | }
 61 | 
 62 | pub fn init_winsize(spx: &Size, sc: &Size, scale: Option<f32>) -> Result<(), &'static str> {
 63 |     WINSIZE
 64 |         .set(Winsize::new(spx, sc, scale))
 65 |         .map_err(|_| "Winsize already initialized")?;
 66 |     Ok(())
 67 | }
 68 | 
 69 | pub enum SizeDirection {
 70 |     Width,
 71 |     Height,
 72 | }
 73 | 
 74 | /// call init_winsize beforehand if you need custom fallbacks;
 75 | /// otherwise a 1920x1080 / 100x20 fallback is used when querying the terminal size fails
 76 | pub fn get_winsize() -> &'static Winsize {
 77 |     WINSIZE.get_or_init(|| {
 78 |         let spx = Size {
 79 |             width: 1920,
 80 |             height: 1080,
 81 |             force: false,
 82 |         };
 83 |         let sc = Size {
 84 |             width: 100,
 85 |             height: 20,
 86 |             force: false,
 87 |         };
 88 |         Winsize::new(&spx, &sc, None)
 89 |     })
 90 | }
 91 | 
 92 | /// returns the offset (in terminal cells) needed to center the image
 93 | pub fn center_image(image_width: u16) -> u16 {
 94 |     let winsize = get_winsize();
 95 |     let offset_x = (winsize.spx_width as f32 - image_width as f32) / 2.0;
 96 |     let offset_x = offset_x / (winsize.spx_width as f32 / winsize.sc_width as f32);
 97 | 
 98 |     let offset = offset_x.round() as u16;
 99 |     offset
100 | }
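// Worked example (illustrative, not from the original source): with a 1920px /
// 100-column terminal and an 800px-wide image, the pixel offset is
// (1920 - 800) / 2 = 560; dividing by the 19.2 px-per-cell ratio and rounding
// gives a 29-cell offset.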
101 | 
102 | /// convert any format of width / height into pixels.
103 | /// for instance 80% would be converted to the size of screen in the direction specified * 0.8.
104 | /// accepted formats are % (percent) / c (cells) / px (pixels) / or just number
105 | pub fn dim_to_px(dim: &str, direction: SizeDirection) -> Result<u32, String> {
106 |     if let Ok(num) = dim.parse::<u32>() {
107 |         return Ok(num);
108 |     }
109 | 
110 |     // only query the terminal size when the dimension is relative (cells or percent)
111 |     let not_px = dim.ends_with("c") || dim.ends_with("%");
112 |     let (px_size, cell_size) = if not_px {
113 |         let winsize = get_winsize();
114 |         match direction {
115 |             SizeDirection::Width => (winsize.spx_width, winsize.sc_width),
116 |             SizeDirection::Height => (winsize.spx_height, winsize.sc_height),
117 |         }
118 |     } else {
119 |         (1, 1)
120 |     };
121 | 
122 |     if dim.ends_with("px") {
123 |         if let Ok(num) = dim.trim_end_matches("px").parse::<u32>() {
124 |             return Ok(num);
125 |         }
126 |     } else if dim.ends_with("c") {
127 |         if let Ok(num) = dim.trim_end_matches("c").parse::<u16>() {
128 |             let value = px_size / cell_size * num; // pixels per cell * number of cells
129 |             return Ok(value.into());
130 |         }
131 |     } else if dim.ends_with("%") {
132 |         if let Ok(num) = dim.trim_end_matches("%").parse::<f32>() {
133 |             let normalized_percent = num / 100.0;
134 |             let value = (px_size as f32 * normalized_percent).round() as u32;
135 |             return Ok(value);
136 |         }
137 |     }
138 | 
139 |     Err(format!("Invalid dimension format: {}", dim))
140 | }
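
// A minimal usage sketch, assuming the fallback Winsize of 1920x1080 px and 100x20
// cells (i.e. the terminal size could not be queried); the expected values follow
// directly from the arithmetic in dim_to_px above. The helper name is illustrative,
// not part of the module's API.
#[allow(dead_code)]
fn dim_to_px_examples() -> Result<(), String> {
    assert_eq!(dim_to_px("500", SizeDirection::Width)?, 500); // bare number: taken as pixels
    assert_eq!(dim_to_px("250px", SizeDirection::Height)?, 250); // explicit pixels
    assert_eq!(dim_to_px("50%", SizeDirection::Width)?, 960); // 0.5 * 1920 px
    assert_eq!(dim_to_px("10c", SizeDirection::Width)?, 190); // 10 cells * (1920 px / 100 cells)
    Ok(())
}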
141 | 
142 | // rough estimate of the terminal window's pixel size on Windows (client area minus frame size)
143 | #[cfg(windows)]
144 | fn get_size_windows() -> Option<(u16, u16)> {
145 |     use windows::Win32::UI::WindowsAndMessaging::{
146 |         AdjustWindowRect, GWL_STYLE, GetWindowLongW, WINDOW_STYLE,
147 |     };
148 |     use windows::Win32::{
149 |         Foundation::{HWND, RECT},
150 |         UI::WindowsAndMessaging::{GetClientRect, GetForegroundWindow},
151 |     };
152 | 
153 |     let foreground_window: HWND = unsafe { GetForegroundWindow() };
154 |     if foreground_window.is_invalid() {
155 |         return None;
156 |     }
157 | 
158 |     let mut client_rect = RECT::default();
159 |     unsafe { GetClientRect(foreground_window, &mut client_rect) }.ok()?;
160 | 
161 |     let style = unsafe { GetWindowLongW(foreground_window, GWL_STYLE) };
162 |     let mut frame_rect = RECT {
163 |         left: 0,
164 |         right: 0,
165 |         bottom: 0,
166 |         top: 0,
167 |     };
168 |     unsafe {
169 |         let _ = AdjustWindowRect(&mut frame_rect, WINDOW_STYLE(style as u32), false);
170 |     }
171 |     let frame_width = frame_rect.right - frame_rect.left;
172 |     let frame_height = frame_rect.bottom - frame_rect.top;
173 | 
174 |     let width = (client_rect.right - client_rect.left - frame_width) as u16;
175 |     let height = (client_rect.bottom - client_rect.top - frame_height) as u16;
176 | 
177 |     Some((width, height))
178 | }
179 | 
180 | pub struct EnvIdentifiers {
181 |     pub data: HashMap<String, String>,
182 | }
183 | 
184 | impl EnvIdentifiers {
185 |     pub fn new() -> Self {
186 |         let keys = vec![
187 |             "TERM",
188 |             "TERM_PROGRAM",
189 |             "LC_TERMINAL",
190 |             "VIM_TERMINAL",
191 |             "KITTY_WINDOW_ID",
192 |             "KONSOLE_VERSION",
193 |             "WT_PROFILE_ID",
194 |         ];
195 |         let mut result = HashMap::new();
196 | 
197 |         for &key in &keys {
198 |             if let Ok(value) = env::var(key) {
199 |                 result.insert(key.to_string(), value.to_lowercase());
200 |             }
201 |         }
202 | 
203 |         result.insert("OS".to_string(), env::consts::OS.to_string());
204 | 
205 |         EnvIdentifiers { data: result }
206 |     }
207 | 
208 |     pub fn has_key(&self, key: &str) -> bool {
209 |         self.data.contains_key(key)
210 |     }
211 | 
212 |     /// all values are normalized to lowercase,
213 |     /// so pass the substring in lowercase
214 |     pub fn contains(&self, key: &str, substr: &str) -> bool {
215 |         if self.has_key(key) {
216 |             return self.data.get(key).is_some_and(|f| f.contains(substr));
217 |         }
218 |         false
219 |     }
220 | 
221 |     /// all values are normalized to lowercase,
222 |     /// so pass the term in lowercase
223 |     pub fn term_contains(&self, term: &str) -> bool {
224 |         ["TERM_PROGRAM", "TERM", "LC_TERMINAL"]
225 |             .iter()
226 |             .any(|key| self.contains(key, term))
227 |     }
228 | }
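
// A small sketch of how these probes can be combined for terminal detection; the
// helper name is illustrative, not part of the module's API.
#[allow(dead_code)]
fn looks_like_kitty(env: &EnvIdentifiers) -> bool {
    // KITTY_WINDOW_ID is one of the keys gathered in new(); term_contains checks
    // TERM, TERM_PROGRAM and LC_TERMINAL for the (lowercase) substring "kitty"
    env.has_key("KITTY_WINDOW_ID") || env.term_contains("kitty")
}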
229 | 
230 | pub fn break_size_string(s: &str) -> Result<Size, Box<dyn std::error::Error>> {
231 |     let mut parts = s.split("x");
232 |     let width = parts.next().ok_or("missing width")?.parse::<u16>()?;
233 |     let height = parts.next().ok_or("missing height")?.parse::<u16>()?;
234 |     let force = s.contains("force");
235 | 
236 |     Ok(Size {
237 |         width,
238 |         height,
239 |         force,
240 |     })
241 | }
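
// A short sketch wiring break_size_string into init_winsize; the input strings and
// the 0.9 scale are illustrative, while the "WIDTHxHEIGHT" shape and the optional
// "force" marker follow the parsing above.
#[allow(dead_code)]
fn init_from_strings() -> Result<(), Box<dyn std::error::Error>> {
    let spx = break_size_string("1920x1080")?; // pixel size, used only if querying fails
    let sc = break_size_string("100x20xforce")?; // cell size, "force" overrides the queried value
    init_winsize(&spx, &sc, Some(0.9))?; // scale the resolved widths down to 90%
    Ok(())
}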
242 | 


--------------------------------------------------------------------------------
/src/scrapy.rs:
--------------------------------------------------------------------------------
  1 | use reqwest::Client;
  2 | use scraper::Html;
  3 | use tempfile::NamedTempFile;
  4 | use tokio::runtime::Builder;
  5 | 
  6 | use std::io::Write;
  7 | 
  8 | use crate::catter;
  9 | 
 10 | pub fn scrape_biggest_media(url: &str) -> Result<NamedTempFile, Box<dyn std::error::Error>> {
 11 |     let client = Client::builder()
 12 |         .user_agent("Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/124.0.0.0 Safari/537.36")
 13 |         .build()?;
 14 | 
 15 |     let rt = Builder::new_current_thread().enable_all().build()?;
 16 | 
 17 |     rt.block_on(async {
 18 |         let response = client.get(url).send().await?;
 19 | 
 20 |         if !response.status().is_success() {
 21 |             return Err(format!("Failed to retrieve URL: {}", response.status()).into());
 22 |         }
 23 | 
 24 |         let content_type = response
 25 |             .headers()
 26 |             .get("Content-Type")
 27 |             .and_then(|h| h.to_str().ok());
 28 | 
 29 |         if let Some(ct) = content_type {
 30 |             if ct.contains("image/svg+xml") {
 31 |                 let svg_bytes = response.bytes().await?;
 32 |                 let mut tmp_file = NamedTempFile::with_suffix(".svg")?;
 33 |                 tmp_file.write_all(&svg_bytes)?;
 34 |                 return Ok(tmp_file);
 35 |             }
 36 |         }
 37 | 
 38 |         let html_content = response.text().await?;
 39 |         let html_size = html_content.len();
 40 |         let document = Html::parse_document(&html_content);
 41 | 
 42 |         let mut potential_media: Vec<(String, String)> = Vec::new();
 43 | 
 44 |         // Find image tags
 45 |         let image_selector = scraper::Selector::parse("img[src]").unwrap();
 46 |         for element in document.select(&image_selector) {
 47 |             if let Some(src) = element.value().attr("src") {
 48 |                 potential_media.push((src.to_string(), "image".to_string()));
 49 |             }
 50 |         }
 51 | 
 52 |         // Find video tags
 53 |         let video_selector = scraper::Selector::parse("video[src]").unwrap();
 54 |         for element in document.select(&video_selector) {
 55 |             if let Some(src) = element.value().attr("src") {
 56 |                 potential_media.push((src.to_string(), "video".to_string()));
 57 |             }
 58 |         }
 59 | 
 60 |         // Find source tags within video tags
 61 |         let video_source_selector = scraper::Selector::parse("video source[src]").unwrap();
 62 |         for element in document.select(&video_source_selector) {
 63 |             if let Some(src) = element.value().attr("src") {
 64 |                 potential_media.push((src.to_string(), "video".to_string()));
 65 |             }
 66 |         }
 67 | 
 68 |         // Find object and embed tags for SVGs
 69 |         let svg_selectors = vec![
 70 |             scraper::Selector::parse("object[type='image/svg+xml'][data]").unwrap(),
 71 |             scraper::Selector::parse("embed[type='image/svg+xml'][src]").unwrap(),
 72 |             scraper::Selector::parse("img[src$='.svg']").unwrap(),
 73 |         ];
 74 |         for selector in svg_selectors {
 75 |             for element in document.select(&selector) {
 76 |                 if let Some(src) = element
 77 |                     .value()
 78 |                     .attr("data")
 79 |                     .or_else(|| element.value().attr("src"))
 80 |                 {
 81 |                     potential_media.push((src.to_string(), "svg".to_string()));
 82 |                 }
 83 |             }
 84 |         }
 85 | 
 86 |         let mut biggest_media: Option<(usize, Vec<u8>, String)> = None;
 87 | 
 88 |         for (media_url, media_type) in potential_media {
 89 |             if let Ok(resolved_url) =
 90 |                 reqwest::Url::parse(url).and_then(|base| base.join(&media_url))
 91 |             {
 92 |                 if let Ok(media_response) = client.get(resolved_url.as_str()).send().await {
 93 |                     if media_response.status().is_success() {
 94 |                         if let Ok(media_bytes) = media_response.bytes().await {
 95 |                             let media_size = media_bytes.len();
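                            // keep only media that is at least 30% of the HTML payload size,
                            // a rough filter against small assets such as icons and spacers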
 96 |                             if media_size > (html_size as f64 * 0.3) as usize {
 97 |                                 let extension = resolved_url
 98 |                                     .path_segments()
 99 |                                     .and_then(|segments| segments.last())
100 |                                     .and_then(|filename| filename.split('.').last())
101 |                                     .map(|ext| ext.to_lowercase())
102 |                                     .unwrap_or_default();
103 | 
104 |                                 let is_valid = match media_type.as_str() {
105 |                                     "svg" => extension == "svg",
106 |                                     "video" => catter::is_video(&extension),
107 |                                     "image" => {
108 |                                         image::ImageFormat::from_extension(&extension).is_some()
109 |                                     }
110 |                                     _ => false,
111 |                                 };
112 | 
113 |                                 if is_valid {
114 |                                     if biggest_media.is_none()
115 |                                         || media_size > biggest_media.as_ref().unwrap().0
116 |                                     {
117 |                                         biggest_media =
118 |                                             Some((media_size, media_bytes.to_vec(), extension));
119 |                                     }
120 |                                 }
121 |                             }
122 |                         }
123 |                     }
124 |                 }
125 |             }
126 |         }
127 | 
128 |         match biggest_media {
129 |             Some((_, data, ext)) => {
130 |                 let mut tmp_file = NamedTempFile::with_suffix(format!(".{}", ext))?;
131 |                 tmp_file.write_all(&data)?;
132 |                 Ok(tmp_file)
133 |             }
134 |             None => Err("No significant and valid media found.".into()),
135 |         }
136 |     })
137 | }
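
// A minimal caller sketch (the URL is illustrative): the returned NamedTempFile is
// deleted when its handle is dropped, so keep it alive for as long as the path is used.
#[allow(dead_code)]
fn scrape_example() -> Result<(), Box<dyn std::error::Error>> {
    let media = scrape_biggest_media("https://example.com/some-article")?;
    println!("biggest media saved to {}", media.path().display());
    Ok(())
}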
138 | 


--------------------------------------------------------------------------------
/styles/default.css:
--------------------------------------------------------------------------------
  1 | body {
  2 |   background-color: #FAFAFA;
  3 |   color: #1B1C21;
  4 |   font-family: "Segoe UI Emoji", "Noto Color Emoji", "Apple Color Emoji", -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif;
  5 |   min-width: 1200px;
  6 |   line-height: 1.6;
  7 |   padding: 2rem;
  8 | }
  9 | 
 10 | h1,
 11 | h2,
 12 | h3,
 13 | h4,
 14 | h5,
 15 | h6 {
 16 |   color: #2D3640;
 17 |   margin-top: 1.5em;
 18 | }
 19 | 
 20 | h1 {
 21 |   font-size: 2.2em;
 22 |   border-bottom: 2px solid #D0D7DE;
 23 |   padding-bottom: 0.3em;
 24 | }
 25 | 
 26 | h2 {
 27 |   font-size: 1.8em;
 28 |   border-bottom: 1px solid #D0D7DE;
 29 |   padding-bottom: 0.2em;
 30 | }
 31 | 
 32 | h3 {
 33 |   font-size: 1.4em;
 34 | }
 35 | 
 36 | h4,
 37 | h5,
 38 | h6 {
 39 |   color: #6C4BB1;
 40 | }
 41 | 
 42 | a {
 43 |   color: #0066cc;
 44 |   text-decoration: none;
 45 | }
 46 | 
 47 | a:hover {
 48 |   text-decoration: underline;
 49 | }
 50 | 
 51 | code {
 52 |   background-color: #F0F0F0;
 53 |   color: #8B4F00;
 54 |   padding: 0.2em 0.4em;
 55 |   border-radius: 4px;
 56 | }
 57 | 
 58 | pre {
 59 |   background-color: #F0F0F0;
 60 |   color: #000;
 61 |   padding: 1em;
 62 |   overflow-x: auto;
 63 |   border: 1px solid #D0D7DE;
 64 |   border-radius: 6px;
 65 |   white-space: pre-wrap;
 66 |   word-wrap: break-word;
 67 |   overflow-y: hidden;
 68 | }
 69 | 
 70 | pre code {
 71 |   background: none;
 72 |   padding: 0;
 73 |   color: inherit;
 74 |   display: block;
 75 | }
 76 | 
 77 | pre::-webkit-scrollbar {
 78 |   display: none;
 79 | }
 80 | 
 81 | blockquote {
 82 |   border-left: 4px solid #3A7BD5;
 83 |   background-color: #F5F5F5;
 84 |   padding: 0.8em 1em;
 85 |   color: #555;
 86 |   margin: 1em 0;
 87 | }
 88 | 
 89 | ul,
 90 | ol {
 91 |   padding-left: 2em;
 92 | }
 93 | 
 94 | li::marker {
 95 |   color: #6C4BB1;
 96 | }
 97 | 
 98 | table {
 99 |   border-collapse: collapse;
100 |   width: 100%;
101 |   margin: 1em 0;
102 | }
103 | 
104 | th,
105 | td {
106 |   border: 1px solid #C4C9D1;
107 |   padding: 0.6em 1em;
108 | }
109 | 
110 | th {
111 |   background-color: #EFEFEF;
112 |   color: #4B3F1B;
113 | }
114 | 
115 | tr:nth-child(even) {
116 |   background-color: #F9F9F9;
117 | }
118 | 
119 | hr {
120 |   border: none;
121 |   border-top: 1px solid #D0D7DE;
122 |   margin: 2em 0;
123 | }
124 | 
125 | .keyword,
126 | .storage.modifier,
127 | .storage.type {
128 |   color: #a728a5;
129 | }
130 | 
131 | .entity.name.function,
132 | .support.function,
133 | .variable.function {
134 |   color: #4078f2;
135 | }
136 | 
137 | .module,
138 | .struct,
139 | .enum,
140 | .generic,
141 | .path {
142 |   color: #e45649;
143 | }
144 | 
145 | .string,
146 | .punctuation.string {
147 |   color: #50a14f;
148 | }
149 | 
150 | .constant,
151 | .support.type {
152 |   color: #986800;
153 | }
154 | 
155 | .comment,
156 | .punctuation.comment {
157 |   color: #a0a1a7;
158 |   font-style: italic;
159 | }
160 | 
161 | .variable,
162 | .operator,
163 | .punctuation,
164 | .block {
165 |   color: #000000;
166 | }
167 | 


--------------------------------------------------------------------------------
/styles/makurai.css:
--------------------------------------------------------------------------------
  1 | body {
  2 |   background-color: #15161B;
  3 |   color: #FFFFFF;
  4 |   font-family: "Segoe UI Emoji", "Noto Color Emoji", "Apple Color Emoji", -apple-system, BlinkMacSystemFont, "Segoe UI", "Noto Sans", Helvetica, Arial, sans-serif;
  5 |   line-height: 1.6;
  6 |   min-width: 1200px;
  7 |   padding: 2rem;
  8 | }
  9 | 
 10 | h1,
 11 | h2,
 12 | h3,
 13 | h4,
 14 | h5,
 15 | h6 {
 16 |   color: #FF7733;
 17 |   margin-top: 1.5em;
 18 | }
 19 | 
 20 | h1 {
 21 |   font-size: 2.2em;
 22 |   border-bottom: 2px solid #2D3640;
 23 |   padding-bottom: 0.3em;
 24 | }
 25 | 
 26 | h2 {
 27 |   font-size: 1.8em;
 28 |   border-bottom: 1px solid #2D3640;
 29 |   padding-bottom: 0.2em;
 30 | }
 31 | 
 32 | h3 {
 33 |   font-size: 1.4em;
 34 | }
 35 | 
 36 | h4,
 37 | h5,
 38 | h6 {
 39 |   color: #D2A6FF;
 40 | }
 41 | 
 42 | a {
 43 |   color: #66aaff;
 44 |   text-decoration: none;
 45 | }
 46 | 
 47 | a:hover {
 48 |   text-decoration: underline;
 49 | }
 50 | 
 51 | code {
 52 |   background-color: #1B1C21;
 53 |   color: #FFEE99;
 54 |   padding: 0.2em 0.4em;
 55 |   border-radius: 4px;
 56 | }
 57 | 
 58 | pre {
 59 |   background-color: #1B1C21;
 60 |   color: #FFF;
 61 |   padding: 1em;
 62 |   overflow-x: auto;
 63 |   border: 1px solid #2D3640;
 64 |   border-radius: 6px;
 65 |   white-space: pre-wrap;
 66 |   /* Wrap long lines */
 67 |   word-wrap: break-word;
 68 |   overflow-y: hidden;
 69 |   /* Disable vertical scrolling */
 70 | }
 71 | 
 72 | pre code {
 73 |   background: none;
 74 |   padding: 0;
 75 |   color: inherit;
 76 |   display: block;
 77 | }
 78 | 
 79 | pre::-webkit-scrollbar {
 80 |   display: none;
 81 |   /* Hide scrollbar */
 82 | }
 83 | 
 84 | blockquote {
 85 |   border-left: 4px solid #82AAFF;
 86 |   background-color: #1B1C21;
 87 |   padding: 0.8em 1em;
 88 |   color: #5C6773;
 89 |   margin: 1em 0;
 90 | }
 91 | 
 92 | ul,
 93 | ol {
 94 |   padding-left: 2em;
 95 | }
 96 | 
 97 | li::marker {
 98 |   color: #D2A6FF;
 99 | }
100 | 
101 | table {
102 |   border-collapse: collapse;
103 |   width: 100%;
104 |   margin: 1em 0;
105 | }
106 | 
107 | th,
108 | td {
109 |   border: 1px solid #A6ACCD;
110 |   padding: 0.6em 1em;
111 | }
112 | 
113 | th {
114 |   background-color: #1B1C21;
115 |   color: #FFEE99;
116 | }
117 | 
118 | tr:nth-child(even) {
119 |   background-color: #1E1F24;
120 | }
121 | 
122 | hr {
123 |   border: none;
124 |   border-top: 1px solid #2D3640;
125 |   margin: 2em 0;
126 | }
127 | 
128 | .keyword,
129 | .storage.modifier,
130 | .storage.type {
131 |   color: #FF7733;
132 | }
133 | 
134 | .entity.name.function,
135 | .support.function,
136 | .variable.function {
137 |   color: #FFEE99;
138 | }
139 | 
140 | .module,
141 | .struct,
142 | .enum,
143 | .generic,
144 | .path {
145 |   color: #82AAFF;
146 | }
147 | 
148 | .string,
149 | .punctuation.string {
150 |   color: #95FB79;
151 | }
152 | 
153 | .constant,
154 | .support.type {
155 |   color: #D2A6FF;
156 | }
157 | 
158 | .comment,
159 | .punctuation.comment {
160 |   color: #5C6773;
161 |   font-style: italic;
162 | }
163 | 
164 | .variable,
165 | .operator,
166 | .punctuation,
167 | .block {
168 |   color: #FFF;
169 | }
170 | 


--------------------------------------------------------------------------------