├── .github └── workflows │ ├── ci.yml │ ├── publish.yml │ └── release.yml ├── .gitignore ├── .goreleaser.yaml ├── .pre-commit-config.yaml ├── CHANGELOG.md ├── Cargo.lock ├── Cargo.toml ├── README.md ├── install.sh ├── release-plz.toml ├── src ├── cli │ └── mod.rs ├── error.rs ├── lib.rs ├── main.rs ├── migrators │ ├── common.rs │ ├── conda.rs │ ├── detect.rs │ ├── mod.rs │ ├── pipenv.rs │ ├── poetry.rs │ ├── requirements.rs │ └── setup_py.rs ├── models │ ├── dependency.rs │ ├── mod.rs │ └── project.rs └── utils │ ├── author.rs │ ├── build_system.rs │ ├── file_ops.rs │ ├── mod.rs │ ├── pip.rs │ ├── pyproject.rs │ ├── toml.rs │ ├── update.rs │ ├── uv.rs │ └── version.rs └── tests ├── conda_test.rs ├── dependency_format_test.rs ├── file_tracker_test.rs ├── pipenv_test.rs ├── poetry_git_deps_test.rs ├── poetry_package_test.rs ├── poetry_test.rs ├── pyproject_test.rs ├── requirements_text_test.rs ├── setup_py_test.rs └── uv_versions_test.rs /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: ci 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | branches: 9 | - main 10 | 11 | permissions: 12 | contents: read 13 | 14 | env: 15 | CARGO_TERM_COLOR: always 16 | RUST_VERSION_STABLE: 1.87.0 17 | CRATE_PATHS: . 
18 | 19 | jobs: 20 | test: 21 | name: test 22 | permissions: 23 | contents: read 24 | checks: write # Required for test results 25 | runs-on: ubuntu-latest 26 | steps: 27 | - uses: actions/checkout@v4 28 | with: 29 | persist-credentials: false 30 | - uses: dtolnay/rust-toolchain@stable 31 | - run: cargo test 32 | 33 | format: 34 | name: format 35 | runs-on: ubuntu-latest 36 | steps: 37 | - uses: actions/checkout@v4 38 | with: 39 | persist-credentials: false 40 | - uses: dtolnay/rust-toolchain@stable 41 | with: 42 | components: rustfmt 43 | - run: cargo fmt --check 44 | 45 | lint: 46 | name: lint 47 | runs-on: ubuntu-latest 48 | steps: 49 | - uses: actions/checkout@v4 50 | with: 51 | persist-credentials: false 52 | - uses: dtolnay/rust-toolchain@stable 53 | with: 54 | components: clippy 55 | - run: cargo clippy -------------------------------------------------------------------------------- /.github/workflows/publish.yml: -------------------------------------------------------------------------------- 1 | name: publish 2 | 3 | on: 4 | release: 5 | types: [ published ] 6 | 7 | permissions: 8 | contents: write 9 | 10 | jobs: 11 | goreleaser: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - name: Checkout 15 | uses: actions/checkout@v4 16 | with: 17 | fetch-depth: 0 18 | 19 | - name: Set up Rust 20 | uses: dtolnay/rust-toolchain@stable 21 | 22 | - name: Set up Zig 23 | uses: mlugg/setup-zig@v1 24 | with: 25 | version: 0.13.0 26 | 27 | - name: Set up Go 28 | uses: actions/setup-go@v5 29 | with: 30 | go-version: stable 31 | 32 | - name: Run GoReleaser 33 | uses: goreleaser/goreleaser-action@v6 34 | with: 35 | distribution: goreleaser 36 | version: "~> v2" 37 | args: release --clean 38 | env: 39 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 40 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: release 2 | 3 | on: 4 | push: 
5 | branches: 6 | - main 7 | 8 | permissions: 9 | contents: read 10 | 11 | jobs: 12 | release: 13 | name: Release 14 | permissions: 15 | contents: write # For creating releases 16 | id-token: write # Required for release-plz 17 | runs-on: ubuntu-latest 18 | concurrency: 19 | group: release-plz-${{ github.ref }} 20 | cancel-in-progress: false 21 | steps: 22 | - name: Checkout repository 23 | uses: actions/checkout@v4 24 | with: 25 | persist-credentials: true # Change to true to maintain credentials 26 | fetch-depth: 0 27 | token: ${{ secrets.RELEASE_PLZ_TOKEN }} 28 | 29 | - name: Configure Git 30 | run: | 31 | git config --global url."https://${{ secrets.RELEASE_PLZ_TOKEN }}@github.com/".insteadOf "https://github.com/" 32 | 33 | - name: Install Rust toolchain 34 | uses: dtolnay/rust-toolchain@stable 35 | with: 36 | toolchain: 1.87.0 37 | 38 | - name: Run release-plz 39 | uses: MarcoIeni/release-plz-action@v0.5 40 | env: 41 | GITHUB_TOKEN: ${{ secrets.RELEASE_PLZ_TOKEN }} 42 | CARGO_REGISTRY_TOKEN: ${{ secrets.CARGO_REGISTRY_TOKEN }} -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | .idea 3 | .vscode 4 | dist/ 5 | debug/ 6 | target/ 7 | **/*.rs.bk 8 | *.pdb 9 | *~ 10 | .fuse_hidden* 11 | .directory 12 | .Trash-* 13 | .nfs* 14 | rust-project.json 15 | Thumbs.db 16 | Thumbs.db:encryptable 17 | ehthumbs.db 18 | ehthumbs_vista.db 19 | *.stackdump 20 | [Dd]esktop.ini 21 | $RECYCLE.BIN/ 22 | *.cab 23 | *.msi 24 | *.msix 25 | *.msm 26 | *.msp 27 | *.lnk 28 | .DS_Store 29 | .AppleDouble 30 | .LSOverride 31 | Icon 32 | ._* 33 | .DocumentRevisions-V100 34 | .fseventsd 35 | .Spotlight-V100 36 | .TemporaryItems 37 | .Trashes 38 | .VolumeIcon.icns 39 | .com.apple.timemachine.donotpresent 40 | .AppleDB 41 | .AppleDesktop 42 | Network Trash Folder 43 | Temporary Items 44 | .apdisk 45 | *.iml 46 | .intentionally-empty-file.o 
-------------------------------------------------------------------------------- /.goreleaser.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | before: 4 | hooks: 5 | - rustup default stable 6 | - cargo install --locked cargo-zigbuild 7 | - cargo fetch --locked 8 | 9 | builds: 10 | - builder: rust 11 | flags: 12 | - --release 13 | - --features=self_update 14 | targets: 15 | - x86_64-unknown-linux-gnu 16 | - x86_64-apple-darwin 17 | - aarch64-unknown-linux-gnu 18 | - aarch64-apple-darwin 19 | id: "uv-migrator" 20 | binary: uv-migrator 21 | 22 | archives: 23 | - formats: [ 'tar.gz' ] 24 | files: 25 | - none* 26 | name_template: >- 27 | {{ .ProjectName }}-{{ .Target }} 28 | builds: 29 | - uv-migrator 30 | 31 | changelog: 32 | disable: true 33 | 34 | release: 35 | footer: >- 36 | --- 37 | Released by [GoReleaser](https:// 38 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: local 3 | hooks: 4 | - id: lint 5 | name: cargo-clippy 6 | entry: cargo 7 | args: 8 | - clippy 9 | language: system 10 | types: [rust] 11 | pass_filenames: false 12 | always_run: true 13 | - id: format 14 | name: cargo-format 15 | entry: cargo 16 | args: 17 | - fmt 18 | language: system 19 | types: [rust] 20 | pass_filenames: false 21 | always_run: true 22 | - id: test 23 | name: cargo-test 24 | entry: cargo 25 | args: 26 | - test 27 | language: system 28 | types: [rust] 29 | pass_filenames: false 30 | always_run: true 31 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "uv-migrator" 3 | version = "2025.8.0" 4 | edition = "2024" 5 | authors = ["stvnksslr@gmail.com"] 6 | description = "Tool for converting various python package 
soltutions to use the uv solution by astral" 7 | license = "MIT" 8 | repository = "https://github.com/stvnksslr/uv-migrator" 9 | readme = "README.md" 10 | keywords = ["python", "uv"] 11 | 12 | [dependencies] 13 | clap = "4.5.27" 14 | dirs = "6.0.0" 15 | log = "0.4.22" 16 | env_logger = "0.11.6" 17 | self_update = { version = "0.42.0", features = [ 18 | "archive-tar", 19 | "archive-zip", 20 | "compression-flate2", 21 | "rustls", 22 | ], default-features = false, optional = true } 23 | serde = { version = "1.0.216", features = ["derive"] } 24 | toml = "0.8.19" 25 | which = "7.0.1" 26 | semver = "1.0.25" 27 | toml_edit = "0.22.22" 28 | serde_json = "1.0.137" 29 | serde_yml = "0.0.12" 30 | regex = "1.11.1" 31 | 32 | [dev-dependencies] 33 | tempfile = "3.14.0" 34 | 35 | [profile.release] 36 | lto = true 37 | strip = true 38 | codegen-units = 3 39 | 40 | [profile.dev] 41 | codegen-units = 1 42 | 43 | [profile.test] 44 | codegen-units = 1 45 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # UV Migrator 2 | 3 | ## Disclaimer 4 | 5 | This project is not associated with astral or the uv project in anyway 6 | 7 | ## What is it? 8 | 9 | UV Migrator is simple cli tool designed to seamlessly transition Python projects from various dependency management systems to the UV package manager. 10 | It handles the complexities of migration while preserving your project's dependencies and any existing configs. This project currently supports migrating 11 | applications that consume packages, stay tuned for support for migrating packages themselves. 
12 | 13 | ## Installation 14 | 15 | easy install script, source located at [install.sh](https://github.com/stvnksslr/uv-migrator/blob/main/uv-migrator/install.sh) 16 | 17 | ```sh 18 | curl https://files.stvnksslr.com/uv-migrator/install.sh | bash 19 | ``` 20 | 21 | Install via Cargo 22 | 23 | ```sh 24 | cargo install uv-migrator 25 | ``` 26 | 27 | ## Currently Supported 28 | 29 | ✅ Poetry projects 30 | ✅ Pip projects 31 | ✅ Multiple requirements files 32 | ✅ Auto detect development dependencies and dependency groups 33 | ✅ Custom package indexes with named configurations 34 | ✅ Pipenv support 35 | 36 | Package Formats 37 | ✅ setup.py packages 38 | ✅ poetry packages 39 | ✅ anaconda 40 | 41 | ## Usage 42 | 43 | ```sh 44 | ❯ uv-migrator -h 45 | A tool for migrating Python projects to use the uv package manager 46 | 47 | Usage: uv-migrator [OPTIONS] [PATH] 48 | 49 | Arguments: 50 | [PATH] The path to the project directory to migrate [default: .] 51 | 52 | Options: 53 | --merge-groups Merge all dependency groups into the dev group 54 | --import-global-pip-conf Import extra index URLs from ~/.pip/pip.conf 55 | --import-index Additional index URL to import (format: [name@]url) 56 | --disable-restore Disable automatic file restore on error 57 | --self-update Update uv-migrator to the latest version 58 | --check-update Check for updates without installing them 59 | -h, --help Print help (see more with '--help') 60 | -V, --version Print version 61 | 62 | EXAMPLES: 63 | # Migrate a project in the current directory 64 | uv-migrator . 65 | 66 | # Merge all dependency groups into dev dependencies 67 | uv-migrator . --merge-groups 68 | 69 | # Migrate a project with a private package index 70 | uv-migrator . --import-index https://private.pypi.org/simple/ 71 | 72 | # Migrate with named custom indexes 73 | uv-migrator . 
--import-index mycompany@https://pypi.mycompany.com/simple/ \ 74 | --import-index torch@https://download.pytorch.org/whl/cu118 75 | 76 | # Migrate using global pip configuration 77 | uv-migrator . --import-global-pip-conf 78 | 79 | # Migrate without automatic restore on error 80 | uv-migrator . --disable-restore 81 | 82 | # Check for updates without installing them 83 | uv-migrator --check-update 84 | 85 | # Update to the latest version 86 | uv-migrator --self-update 87 | 88 | For more information and documentation, visit: 89 | https://github.com/stvnksslr/uv-migrator 90 | ``` 91 | 92 | ## Custom Index Configuration 93 | 94 | UV Migrator supports custom package indexes with named configurations. You can specify custom names for your indexes using the `[name@]url` format: 95 | 96 | ### Named Indexes 97 | 98 | ```sh 99 | # Add a named index 100 | uv-migrator . --import-index mycompany@https://pypi.mycompany.com/simple/ 101 | 102 | # Add multiple named indexes 103 | uv-migrator . --import-index torch@https://download.pytorch.org/whl/cu118 \ 104 | --import-index internal@https://internal.company.com/pypi/ 105 | ``` 106 | 107 | This will generate: 108 | 109 | ```toml 110 | [tool.uv] 111 | index = [ 112 | { name = "torch", url = "https://download.pytorch.org/whl/cu118" }, 113 | { name = "internal", url = "https://internal.company.com/pypi/" } 114 | ] 115 | ``` 116 | 117 | ### Index Name Format 118 | 119 | - Names can contain letters, numbers, hyphens, and underscores 120 | - The `@` symbol separates the name from the URL 121 | - If no name is provided, or the format is invalid, the URL is treated as unnamed 122 | -------------------------------------------------------------------------------- /install.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -euo pipefail 3 | 4 | # Check if local bin directory exists 5 | [[ ! 
-d "${HOME}/.local/bin" ]] && { 6 | echo "Error: ${HOME}/.local/bin not found" >&2 7 | exit 1 8 | } 9 | 10 | # Check if directory is in PATH 11 | [[ ":$PATH:" != *":$HOME/.local/bin:"* ]] && { 12 | echo "Error: ${HOME}/.local/bin is not in PATH" >&2 13 | exit 1 14 | } 15 | 16 | # Detect system architecture and OS combination 17 | case "$(uname -m)_$(uname -s)" in 18 | "x86_64_Linux") ARCH_OS="x86_64-unknown-linux-gnu" ;; 19 | "x86_64_Darwin") ARCH_OS="x86_64-apple-darwin" ;; 20 | "aarch64_Linux" | "arm64_Linux") ARCH_OS="aarch64-unknown-linux-gnu" ;; 21 | "aarch64_Darwin" | "arm64_Darwin") ARCH_OS="aarch64-apple-darwin" ;; 22 | *) 23 | echo "Unsupported system" >&2 24 | exit 1 25 | ;; 26 | esac 27 | 28 | # Set up temporary directory for download (auto-cleaned on exit) 29 | TMP_DIR=$(mktemp -d) 30 | trap 'rm -rf $TMP_DIR' EXIT 31 | 32 | # Get latest release, download binary, and install to ~/.local/bin 33 | RELEASE=$(curl -s https://api.github.com/repos/stvnksslr/uv-migrator/releases/latest) 34 | curl -sL "$(echo "$RELEASE" | grep -o "\"browser_download_url\": \"[^\"]*uv-migrator-${ARCH_OS}.tar.gz\"" | cut -d'"' -f4)" | tar xz -C "$TMP_DIR" 35 | mv "$TMP_DIR/uv-migrator" "$HOME/.local/bin/" && chmod +x "$HOME/.local/bin/uv-migrator" 36 | -------------------------------------------------------------------------------- /release-plz.toml: -------------------------------------------------------------------------------- 1 | [workspace] 2 | changelog_update = true 3 | semver_check = false 4 | 5 | [changelog] 6 | body = """ 7 | ## [{{ version | trim_start_matches(pat="v") }}]\ 8 | {%- if release_link -%}\ 9 | ({{ release_link }})\ 10 | {% endif %} \ 11 | - {{ timestamp | date(format="%Y-%m-%d") }} 12 | {% for group, commits in commits | group_by(attribute="group") %} 13 | ### {{ group | upper_first }} 14 | {% for commit in commits %} 15 | {%- if commit.scope -%} 16 | - *({{commit.scope}})* {% if commit.breaking %}[**breaking**] {% endif %}\ 17 | {{ commit.message }}{{ 
self::username(commit=commit) }}\ 18 | {%- if commit.links %} \ 19 | ({% for link in commit.links %}[{{link.text}}]({{link.href}}) {% endfor -%})\ 20 | {% endif %} 21 | {% else -%} 22 | - {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message }}{{ self::username(commit=commit) }} 23 | {% endif -%} 24 | {% endfor -%} 25 | {% endfor %} 26 | {%- if remote.contributors %} 27 | ### Contributors 28 | {% for contributor in remote.contributors %} 29 | * @{{ contributor.username }} 30 | {%- endfor %} 31 | {% endif -%} 32 | {%- macro username(commit) -%} 33 | {% if commit.remote.username %} (by @{{ commit.remote.username }}){% endif -%} 34 | {% endmacro -%} 35 | """ -------------------------------------------------------------------------------- /src/cli/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Result; 2 | use crate::migrators::run_migration; 3 | use crate::utils::uv::check_uv_requirements; 4 | use clap::{Arg, ArgAction, Command}; 5 | use log::info; 6 | use std::path::PathBuf; 7 | 8 | /// Command line arguments for UV migrator 9 | #[derive(Debug)] 10 | pub struct Args { 11 | /// Path to the project directory 12 | pub path: PathBuf, 13 | 14 | /// Whether to merge dependency groups 15 | pub merge_groups: bool, 16 | 17 | /// Whether to import global pip.conf 18 | pub import_global_pip_conf: bool, 19 | 20 | /// Additional index URLs to import 21 | pub import_index: Vec, 22 | 23 | /// Whether to disable automatic restore on error 24 | pub disable_restore: bool, 25 | 26 | /// Whether to self-update 27 | #[cfg(feature = "self_update")] 28 | pub self_update: bool, 29 | 30 | /// Whether to check for updates without updating 31 | #[cfg(feature = "self_update")] 32 | pub check_update: bool, 33 | } 34 | 35 | /// Configures and runs the CLI 36 | pub fn run() -> Result { 37 | let mut cmd = Command::new("uv-migrator") 38 | .version(env!("CARGO_PKG_VERSION")) 39 | .about("A tool for migrating Python projects 
to use the uv package manager") 40 | .long_about( 41 | "UV Migrator helps you convert Python projects from various dependency management systems \ 42 | (like Poetry or pip) to use the UV package manager. It preserves your dependencies, \ 43 | development configurations, and project structure while setting up a new UV-based environment." 44 | ); 45 | 46 | cmd = cmd.arg( 47 | Arg::new("PATH") 48 | .help("The path to the project directory to migrate") 49 | .long_help( 50 | "Specifies the directory containing the Python project to migrate. \ 51 | This should be the root directory of your project where pyproject.toml \ 52 | or requirements.txt is located.", 53 | ) 54 | .value_parser(clap::value_parser!(PathBuf)) 55 | .default_value("."), 56 | ); 57 | 58 | cmd = cmd.arg( 59 | Arg::new("merge-groups") 60 | .long("merge-groups") 61 | .help("Merge all dependency groups into the dev group") 62 | .long_help( 63 | "When this flag is set, all dependency groups (including custom groups) \ 64 | will be merged into the dev group. This is useful when you want to \ 65 | simplify your dependency management by having only main and dev dependencies.", 66 | ) 67 | .action(ArgAction::SetTrue), 68 | ); 69 | 70 | cmd = cmd.arg( 71 | Arg::new("import-global-pip-conf") 72 | .long("import-global-pip-conf") 73 | .help("Import extra index URLs from ~/.pip/pip.conf") 74 | .long_help( 75 | "Reads and imports any extra package index URLs defined in your global pip \ 76 | configuration file (~/.pip/pip.conf). This is useful when your project requires \ 77 | packages from private or alternative Python package indexes.", 78 | ) 79 | .action(ArgAction::SetTrue), 80 | ); 81 | 82 | cmd = cmd.arg( 83 | Arg::new("import-index") 84 | .long("import-index") 85 | .help("Additional index URL to import (format: [name@]url)") 86 | .long_help( 87 | "Specifies additional Python package index URLs to use. You can provide this \ 88 | option multiple times to add several index URLs. 
These URLs will be added to \ 89 | your project's pyproject.toml in the [tool.uv] section.\n\n\ 90 | Format: [name@]url\n\ 91 | - name@https://pypi.example.com/simple/ - adds index with name 'name'\n\ 92 | - https://pypi.example.com/simple/ - adds index with auto-generated name 'extra-N'", 93 | ) 94 | .action(ArgAction::Append) 95 | .value_parser(clap::value_parser!(String)), 96 | ); 97 | 98 | cmd = cmd.arg( 99 | Arg::new("disable-restore") 100 | .long("disable-restore") 101 | .help("Disable automatic file restore on error") 102 | .long_help( 103 | "When this flag is set, the migrator will not attempt to restore files to their \ 104 | original state if an error occurs during migration. This can be useful in \ 105 | automated environments or when you want to inspect the partial migration state.", 106 | ) 107 | .action(ArgAction::SetTrue), 108 | ); 109 | 110 | // Add self-update functionality if the feature is enabled 111 | #[cfg(feature = "self_update")] 112 | { 113 | cmd = cmd.arg( 114 | Arg::new("self_update") 115 | .long("self-update") 116 | .help("Update uv-migrator to the latest version") 117 | .long_help( 118 | "Checks for and downloads the latest version of uv-migrator from GitHub releases. \ 119 | The tool will automatically update itself if a newer version is available." 120 | ) 121 | .action(ArgAction::SetTrue) 122 | ); 123 | 124 | cmd = cmd.arg( 125 | Arg::new("check_update") 126 | .long("check-update") 127 | .help("Check for updates without installing them") 128 | .long_help( 129 | "Checks if a newer version of uv-migrator is available on GitHub releases, \ 130 | but does not install the update. Use --self-update to both check and install.", 131 | ) 132 | .action(ArgAction::SetTrue), 133 | ); 134 | } 135 | 136 | let after_help = "EXAMPLES: 137 | # Migrate a project in the current directory 138 | uv-migrator . 139 | 140 | # Merge all dependency groups into dev dependencies 141 | uv-migrator . 
--merge-groups 142 | 143 | # Migrate a project with a private package index 144 | uv-migrator . --import-index https://private.pypi.org/simple/ 145 | 146 | # Migrate with named custom indexes 147 | uv-migrator . --import-index mycompany@https://pypi.mycompany.com/simple/ \\ 148 | --import-index torch@https://download.pytorch.org/whl/cu118 149 | 150 | # Migrate using global pip configuration 151 | uv-migrator . --import-global-pip-conf 152 | 153 | # Migrate without automatic restore on error 154 | uv-migrator . --disable-restore 155 | 156 | # Check for updates without installing them 157 | uv-migrator --check-update 158 | 159 | # Update to the latest version 160 | uv-migrator --self-update 161 | 162 | For more information and documentation, visit: 163 | https://github.com/stvnksslr/uv-migrator"; 164 | 165 | cmd = cmd.after_help(after_help); 166 | 167 | let matches = cmd.get_matches(); 168 | 169 | let args = Args { 170 | path: matches 171 | .get_one::("PATH") 172 | .cloned() 173 | .unwrap_or_else(|| PathBuf::from(".")), 174 | merge_groups: matches.get_flag("merge-groups"), 175 | import_global_pip_conf: matches.get_flag("import-global-pip-conf"), 176 | import_index: matches 177 | .get_many::("import-index") 178 | .unwrap_or_default() 179 | .cloned() 180 | .collect(), 181 | disable_restore: matches.get_flag("disable-restore"), 182 | #[cfg(feature = "self_update")] 183 | self_update: matches.get_flag("self_update"), 184 | #[cfg(feature = "self_update")] 185 | check_update: matches.get_flag("check_update"), 186 | }; 187 | 188 | execute(&args)?; 189 | Ok(args) 190 | } 191 | 192 | /// Execute the migration with the provided arguments 193 | pub fn execute(args: &Args) -> Result<()> { 194 | // If we're only checking for updates or doing a self-update, 195 | // we don't need to run the migration 196 | #[cfg(feature = "self_update")] 197 | if args.self_update || args.check_update { 198 | return Ok(()); 199 | } 200 | 201 | info!("Starting UV migrator..."); 202 | 203 | // Check 
UV requirements before proceeding 204 | check_uv_requirements()?; 205 | 206 | info!("Migrating project at: {}", args.path.display()); 207 | 208 | // Run the migration 209 | run_migration( 210 | &args.path, 211 | args.import_global_pip_conf, 212 | &args.import_index, 213 | args.merge_groups, 214 | !args.disable_restore, 215 | )?; 216 | 217 | info!("Migration completed successfully!"); 218 | Ok(()) 219 | } 220 | -------------------------------------------------------------------------------- /src/error.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | use std::io; 3 | use std::path::PathBuf; 4 | 5 | /// Custom error type for UV Migrator operations 6 | #[derive(Debug)] 7 | pub enum Error { 8 | /// I/O errors (file access, permissions, etc.) 9 | Io(io::Error), 10 | 11 | /// TOML parsing errors 12 | Toml(toml_edit::TomlError), 13 | 14 | /// TOML serialization/deserialization errors 15 | TomlSerde(toml::de::Error), 16 | 17 | /// Errors from UV command execution 18 | UvCommand(String), 19 | 20 | /// Errors related to project detection 21 | ProjectDetection(String), 22 | 23 | /// Errors related to dependency parsing 24 | DependencyParsing(String), 25 | 26 | /// Errors related to file operations 27 | FileOperation { path: PathBuf, message: String }, 28 | 29 | /// General errors 30 | General(String), 31 | } 32 | 33 | impl fmt::Display for Error { 34 | fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { 35 | match self { 36 | Error::Io(err) => write!(f, "I/O error: {}", err), 37 | Error::Toml(err) => write!(f, "TOML parsing error: {}", err), 38 | Error::TomlSerde(err) => write!(f, "TOML serialization error: {}", err), 39 | Error::UvCommand(msg) => write!(f, "UV command failed: {}", msg), 40 | Error::ProjectDetection(msg) => write!(f, "Project detection error: {}", msg), 41 | Error::DependencyParsing(msg) => write!(f, "Dependency parsing error: {}", msg), 42 | Error::FileOperation { path, message } => { 43 | 
write!(f, "File operation error on {}: {}", path.display(), message) 44 | } 45 | Error::General(msg) => write!(f, "{}", msg), 46 | } 47 | } 48 | } 49 | 50 | impl Error { 51 | /// Check if the error message contains a specific string 52 | #[allow(dead_code)] 53 | pub fn contains(&self, needle: &str) -> bool { 54 | match self { 55 | Error::FileOperation { path: _, message } => message.contains(needle), 56 | _ => { 57 | let message = self.to_string(); 58 | message.contains(needle) 59 | } 60 | } 61 | } 62 | } 63 | 64 | impl std::error::Error for Error { 65 | fn source(&self) -> Option<&(dyn std::error::Error + 'static)> { 66 | match self { 67 | Error::Io(err) => Some(err), 68 | Error::Toml(err) => Some(err), 69 | Error::TomlSerde(err) => Some(err), 70 | _ => None, 71 | } 72 | } 73 | } 74 | 75 | // Implement From conversions for common error types 76 | impl From for Error { 77 | fn from(err: io::Error) -> Self { 78 | Error::Io(err) 79 | } 80 | } 81 | 82 | impl From for Error { 83 | fn from(err: toml_edit::TomlError) -> Self { 84 | Error::Toml(err) 85 | } 86 | } 87 | 88 | impl From for Error { 89 | fn from(err: toml::de::Error) -> Self { 90 | Error::TomlSerde(err) 91 | } 92 | } 93 | 94 | impl From for Error { 95 | fn from(err: String) -> Self { 96 | Error::General(err) 97 | } 98 | } 99 | 100 | impl From<&str> for Error { 101 | fn from(err: &str) -> Self { 102 | Error::General(err.to_string()) 103 | } 104 | } 105 | 106 | /// Result type alias for UV Migrator operations 107 | pub type Result = std::result::Result; 108 | -------------------------------------------------------------------------------- /src/lib.rs: -------------------------------------------------------------------------------- 1 | pub mod cli; 2 | pub mod error; 3 | pub mod migrators; 4 | pub mod models; 5 | pub mod utils; 6 | 7 | // Re-export the main entry points 8 | pub use error::{Error, Result}; 9 | pub use migrators::run_migration; 10 | pub use models::dependency::DependencyType; 11 | 
-------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | mod cli; 2 | mod error; 3 | mod migrators; 4 | mod models; 5 | mod utils; 6 | 7 | use env_logger::{Builder, Env}; 8 | use log::error; 9 | use std::process::exit; 10 | 11 | #[cfg(feature = "self_update")] 12 | fn update_check(args: &cli::Args) -> crate::error::Result<()> { 13 | if args.self_update { 14 | if let Err(e) = utils::update() { 15 | eprintln!("Update failed: {}", e); 16 | } 17 | } else if args.check_update { 18 | if let Err(e) = utils::check_for_updates() { 19 | eprintln!("Update check failed: {}", e); 20 | } 21 | } 22 | Ok(()) 23 | } 24 | 25 | #[cfg(not(feature = "self_update"))] 26 | fn update_check(_args: &cli::Args) -> crate::error::Result<()> { 27 | Ok(()) 28 | } 29 | 30 | fn main() { 31 | // Initialize logger with default info level 32 | Builder::from_env(Env::default().default_filter_or("info")) 33 | .format_timestamp(None) 34 | .format_target(false) 35 | .init(); 36 | 37 | if let Err(e) = run() { 38 | error!("Error: {}", e); 39 | exit(1); 40 | } 41 | } 42 | 43 | fn run() -> crate::error::Result<()> { 44 | // Run the CLI and get arguments 45 | let args = cli::run()?; 46 | 47 | // Check for updates if requested via flags 48 | update_check(&args)?; 49 | 50 | Ok(()) 51 | } 52 | -------------------------------------------------------------------------------- /src/migrators/common.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Error; 2 | use crate::error::Result; 3 | use crate::migrators::poetry; 4 | use crate::models::project::PoetryProjectType; 5 | use crate::models::*; 6 | use crate::utils::{ 7 | author::extract_authors_from_poetry, 8 | author::extract_authors_from_setup_py, 9 | file_ops::FileTrackerGuard, 10 | parse_pip_conf, 11 | toml::{read_toml, update_section, write_toml}, 12 | }; 13 | use log::info; 14 | use 
std::path::Path; 15 | use toml_edit::{Array, Formatted, Item, Value}; 16 | 17 | /// Merges all dependency groups into dev dependencies 18 | pub fn merge_dependency_groups(dependencies: Vec) -> Vec { 19 | dependencies 20 | .into_iter() 21 | .map(|mut dep| { 22 | if matches!(dep.dep_type, DependencyType::Group(_)) { 23 | dep.dep_type = DependencyType::Dev; 24 | } 25 | dep 26 | }) 27 | .collect() 28 | } 29 | 30 | /// Performs common migration tasks for all project types 31 | pub fn perform_common_migrations( 32 | project_dir: &Path, 33 | file_tracker: &mut FileTrackerGuard, 34 | import_global_pip_conf: bool, 35 | additional_index_urls: &[String], 36 | ) -> Result<()> { 37 | let pyproject_path = project_dir.join("pyproject.toml"); 38 | 39 | file_tracker.track_file(&pyproject_path)?; 40 | crate::utils::pyproject::update_pyproject_toml(project_dir, &[])?; 41 | 42 | if let Some(version) = crate::utils::version::extract_version(project_dir)? { 43 | info!("Migrating version from setup.py"); 44 | file_tracker.track_file(&pyproject_path)?; 45 | crate::utils::pyproject::update_project_version(project_dir, &version)?; 46 | } 47 | 48 | let mut extra_urls = Vec::new(); 49 | if import_global_pip_conf { 50 | extra_urls.extend(parse_pip_conf()?); 51 | } 52 | 53 | // Explicitly add additional_index_urls to extra_urls 54 | if !additional_index_urls.is_empty() { 55 | info!("Adding custom index URLs: {:?}", additional_index_urls); 56 | extra_urls.extend(additional_index_urls.iter().cloned()); 57 | } 58 | 59 | if !extra_urls.is_empty() { 60 | file_tracker.track_file(&pyproject_path)?; 61 | // Update pyproject.toml with extra URLs 62 | crate::utils::pyproject::update_uv_indices_from_urls(project_dir, &extra_urls)?; 63 | } 64 | 65 | info!("Migrating Tool sections"); 66 | file_tracker.track_file(&pyproject_path)?; 67 | crate::utils::pyproject::append_tool_sections(project_dir)?; 68 | 69 | info!("Reordering pyproject.toml sections"); 70 | file_tracker.track_file(&pyproject_path)?; 71 | 
crate::utils::toml::reorder_toml_sections(project_dir)?; 72 | 73 | Ok(()) 74 | } 75 | 76 | /// Migrates Poetry-specific features 77 | pub fn perform_poetry_migration( 78 | project_dir: &Path, 79 | file_tracker: &mut FileTrackerGuard, 80 | ) -> Result<()> { 81 | let pyproject_path = project_dir.join("pyproject.toml"); 82 | let old_pyproject_path = project_dir.join("old.pyproject.toml"); 83 | 84 | info!("Checking for Poetry package sources to migrate"); 85 | let sources = crate::utils::pyproject::extract_poetry_sources(project_dir)?; 86 | if !sources.is_empty() { 87 | file_tracker.track_file(&pyproject_path)?; 88 | crate::utils::pyproject::update_uv_indices(project_dir, &sources)?; 89 | } 90 | 91 | info!("Migrating Poetry authors"); 92 | let poetry_authors = extract_authors_from_poetry(project_dir)?; 93 | if !poetry_authors.is_empty() { 94 | file_tracker.track_file(&pyproject_path)?; 95 | let mut doc = read_toml(&pyproject_path)?; 96 | let mut authors_array = Array::new(); 97 | for author in &poetry_authors { 98 | let mut table = toml_edit::InlineTable::new(); 99 | table.insert("name", Value::String(Formatted::new(author.name.clone()))); 100 | if let Some(ref email) = author.email { 101 | table.insert("email", Value::String(Formatted::new(email.clone()))); 102 | } 103 | authors_array.push(Value::InlineTable(table)); 104 | } 105 | update_section( 106 | &mut doc, 107 | &["project", "authors"], 108 | Item::Value(Value::Array(authors_array)), 109 | ); 110 | write_toml(&pyproject_path, &mut doc)?; 111 | } 112 | 113 | info!("Migrating Poetry scripts"); 114 | file_tracker.track_file(&pyproject_path)?; 115 | let has_scripts = crate::utils::pyproject::update_scripts(project_dir)?; 116 | 117 | info!("Checking Poetry build system"); 118 | 119 | // Get project type to handle application vs package differently 120 | let project_type = poetry::PoetryMigrationSource::detect_project_type(project_dir)?; 121 | 122 | // Check for packages in original Poetry config 123 | let 
/// Migrates Poetry-specific features: package sources, authors, scripts,
/// packages configuration, and the build system, then git dependencies.
///
/// Reads the pre-migration config from `old.pyproject.toml` (written by an
/// earlier step) to decide how the build system should be configured.
pub fn perform_poetry_migration(
    project_dir: &Path,
    file_tracker: &mut FileTrackerGuard,
) -> Result<()> {
    let pyproject_path = project_dir.join("pyproject.toml");
    let old_pyproject_path = project_dir.join("old.pyproject.toml");

    info!("Checking for Poetry package sources to migrate");
    let sources = crate::utils::pyproject::extract_poetry_sources(project_dir)?;
    if !sources.is_empty() {
        file_tracker.track_file(&pyproject_path)?;
        crate::utils::pyproject::update_uv_indices(project_dir, &sources)?;
    }

    info!("Migrating Poetry authors");
    let poetry_authors = extract_authors_from_poetry(project_dir)?;
    if !poetry_authors.is_empty() {
        file_tracker.track_file(&pyproject_path)?;
        let mut doc = read_toml(&pyproject_path)?;
        // Authors become PEP 621 inline tables: { name = "...", email = "..." }.
        let mut authors_array = Array::new();
        for author in &poetry_authors {
            let mut table = toml_edit::InlineTable::new();
            table.insert("name", Value::String(Formatted::new(author.name.clone())));
            if let Some(ref email) = author.email {
                table.insert("email", Value::String(Formatted::new(email.clone())));
            }
            authors_array.push(Value::InlineTable(table));
        }
        update_section(
            &mut doc,
            &["project", "authors"],
            Item::Value(Value::Array(authors_array)),
        );
        write_toml(&pyproject_path, &mut doc)?;
    }

    info!("Migrating Poetry scripts");
    file_tracker.track_file(&pyproject_path)?;
    let has_scripts = crate::utils::pyproject::update_scripts(project_dir)?;

    info!("Checking Poetry build system");

    // Get project type to handle application vs package differently
    let project_type = poetry::PoetryMigrationSource::detect_project_type(project_dir)?;

    // Check for packages in original Poetry config
    let has_packages_config = if old_pyproject_path.exists() {
        let old_doc = read_toml(&old_pyproject_path)?;

        // Extract and migrate packages configuration
        let packages = crate::utils::pyproject::extract_poetry_packages(&old_doc);
        if !packages.is_empty() {
            file_tracker.track_file(&pyproject_path)?;
            let mut doc = read_toml(&pyproject_path)?;

            let mut packages_array = toml_edit::Array::new();
            for pkg in packages {
                packages_array.push(toml_edit::Value::String(toml_edit::Formatted::new(pkg)));
            }

            // Poetry `packages` maps onto Hatchling's wheel target config.
            update_section(
                &mut doc,
                &["tool", "hatch", "build", "targets", "wheel", "packages"],
                toml_edit::Item::Value(toml_edit::Value::Array(packages_array)),
            );

            write_toml(&pyproject_path, &mut doc)?;
            info!("Migrated Poetry packages configuration to Hatchling");
            true
        } else {
            false
        }
    } else {
        false
    };

    // Check for package-mode setting (Poetry 1.2+); defaults to true when
    // absent, matching Poetry's own default.
    let is_package_mode = if old_pyproject_path.exists() {
        let old_doc = read_toml(&old_pyproject_path)?;
        old_doc
            .get("tool")
            .and_then(|t| t.get("poetry"))
            .and_then(|p| p.get("package-mode"))
            .and_then(|p| p.as_bool())
            .unwrap_or(true)
    } else {
        true
    };

    // For application projects with scripts, we need special handling
    if has_scripts && matches!(project_type, PoetryProjectType::Application) {
        info!("Configuring application project with scripts");
        file_tracker.track_file(&pyproject_path)?;

        // Get or create a sensible package name: project name, else the
        // directory name, else a generic fallback; normalized to snake_case.
        let package_name =
            if let Ok(Some(name)) = crate::utils::pyproject::extract_project_name(project_dir) {
                name.replace('-', "_").to_lowercase()
            } else if let Some(dir_name) = project_dir.file_name().and_then(|n| n.to_str()) {
                dir_name.replace('-', "_").to_lowercase()
            } else {
                "app".to_string()
            };

        // For applications with scripts, use a simpler build backend
        let mut doc = read_toml(&pyproject_path)?;

        // Use setuptools as it's more forgiving for applications with scripts
        let mut build_system_table = toml_edit::Table::new();

        let mut requires_array = toml_edit::Array::new();
        requires_array.push(Value::String(Formatted::new("setuptools>=42".to_string())));
        requires_array.push(Value::String(Formatted::new("wheel".to_string())));

        build_system_table.insert("requires", Item::Value(Value::Array(requires_array)));

        build_system_table.insert(
            "build-backend",
            Item::Value(Value::String(Formatted::new(
                "setuptools.build_meta".to_string(),
            ))),
        );

        doc.insert("build-system", Item::Table(build_system_table));

        // Add a basic py_modules section for scriptability
        update_section(
            &mut doc,
            &["tool", "setuptools", "py-modules"],
            Item::Value(Value::Array(Array::from_iter([Value::String(
                Formatted::new(package_name),
            )]))),
        );

        write_toml(&pyproject_path, &mut doc)?;
        info!("Configured build system for application with scripts");
    } else if !has_packages_config
        && !is_package_mode
        && matches!(project_type, PoetryProjectType::Application)
    {
        // For applications without package config, use setuptools instead of Hatchling
        info!("Configuring application project without package configuration");
        file_tracker.track_file(&pyproject_path)?;

        let mut doc = read_toml(&pyproject_path)?;

        // Use setuptools which is more forgiving for applications
        let mut build_system_table = toml_edit::Table::new();

        let mut requires_array = toml_edit::Array::new();
        requires_array.push(Value::String(Formatted::new("setuptools>=42".to_string())));
        requires_array.push(Value::String(Formatted::new("wheel".to_string())));

        build_system_table.insert("requires", Item::Value(Value::Array(requires_array)));

        build_system_table.insert(
            "build-backend",
            Item::Value(Value::String(Formatted::new(
                "setuptools.build_meta".to_string(),
            ))),
        );

        doc.insert("build-system", Item::Table(build_system_table));

        // Add simple configuration to make it installable
        update_section(
            &mut doc,
            &["tool", "setuptools", "packages", "find"],
            Item::Value(Value::InlineTable(toml_edit::InlineTable::new())),
        );

        write_toml(&pyproject_path, &mut doc)?;
        info!("Configured simple setuptools build for application project");
    } else {
        // For regular packages, use the standard Hatchling configuration
        let mut doc = read_toml(&pyproject_path)?;
        if crate::utils::build_system::update_build_system(&mut doc, project_dir)? {
            info!("Migrated build system from Poetry to Hatchling");
            // NOTE(review): track_file is called after the doc was mutated
            // in memory but before write_toml, so rollback still works —
            // confirm this ordering is intentional.
            file_tracker.track_file(&pyproject_path)?;
            write_toml(&pyproject_path, &mut doc)?;
        }
    }

    info!("Checking for Poetry git dependencies to migrate");
    let poetry_source = poetry::PoetryMigrationSource;
    match poetry_source.extract_git_dependencies(project_dir) {
        Ok(git_dependencies) => {
            if !git_dependencies.is_empty() {
                info!("Migrating {} git dependencies", git_dependencies.len());
                file_tracker.track_file(&pyproject_path)?;
                crate::utils::pyproject::update_git_dependencies(project_dir, &git_dependencies)
                    .map_err(|e| {
                        Error::General(format!("Failed to migrate git dependencies: {}", e))
                    })?;
            }
        }
        Err(e) => {
            // Git-dependency extraction failure is non-fatal: warn and continue.
            log::warn!("Failed to extract git dependencies: {}", e);
        }
    }

    Ok(())
}
pyproject_path = project_dir.join("pyproject.toml"); 287 | 288 | info!("Migrating metadata from setup.py"); 289 | let description = 290 | crate::migrators::setup_py::SetupPyMigrationSource::extract_description(project_dir)?; 291 | if let Some(desc) = description { 292 | file_tracker.track_file(&pyproject_path)?; 293 | crate::utils::pyproject::update_description(project_dir, &desc)?; 294 | } 295 | 296 | info!("Migrating URL from setup.py"); 297 | let project_url = crate::migrators::setup_py::SetupPyMigrationSource::extract_url(project_dir)?; 298 | if let Some(url) = project_url { 299 | file_tracker.track_file(&pyproject_path)?; 300 | crate::utils::pyproject::update_url(project_dir, &url)?; 301 | } 302 | 303 | info!("Migrating authors from setup.py"); 304 | let setup_py_authors = extract_authors_from_setup_py(project_dir)?; 305 | if !setup_py_authors.is_empty() { 306 | file_tracker.track_file(&pyproject_path)?; 307 | let mut doc = read_toml(&pyproject_path)?; 308 | let mut authors_array = Array::new(); 309 | for author in &setup_py_authors { 310 | let mut table = toml_edit::InlineTable::new(); 311 | table.insert("name", Value::String(Formatted::new(author.name.clone()))); 312 | if let Some(ref email) = author.email { 313 | table.insert("email", Value::String(Formatted::new(email.clone()))); 314 | } 315 | authors_array.push(Value::InlineTable(table)); 316 | } 317 | update_section( 318 | &mut doc, 319 | &["project", "authors"], 320 | Item::Value(Value::Array(authors_array)), 321 | ); 322 | write_toml(&pyproject_path, &mut doc)?; 323 | } 324 | 325 | Ok(()) 326 | } 327 | 328 | /// Migrates Pipenv-specific features 329 | pub fn perform_pipenv_migration( 330 | project_dir: &Path, 331 | file_tracker: &mut FileTrackerGuard, 332 | ) -> Result<()> { 333 | let pyproject_path = project_dir.join("pyproject.toml"); 334 | 335 | if let Ok(content) = std::fs::read_to_string(project_dir.join("Pipfile")) { 336 | if content.contains("[scripts]") { 337 | info!("Migrating Pipfile 
scripts"); 338 | file_tracker.track_file(&pyproject_path)?; 339 | } 340 | } 341 | 342 | Ok(()) 343 | } 344 | 345 | /// Migrates requirements.txt-specific features 346 | pub fn perform_requirements_migration( 347 | project_dir: &Path, 348 | file_tracker: &mut FileTrackerGuard, 349 | ) -> Result<()> { 350 | let pyproject_path = project_dir.join("pyproject.toml"); 351 | let requirements_source = crate::migrators::requirements::RequirementsMigrationSource; 352 | let req_files = requirements_source.find_requirements_files(project_dir); 353 | 354 | for (file_path, _dep_type) in req_files { 355 | if let Some(file_name) = file_path.file_name().and_then(|n| n.to_str()) { 356 | match file_name { 357 | "requirements.txt" | "requirements-dev.txt" => { 358 | continue; 359 | } 360 | _ => { 361 | if let Some(_group_name) = file_name 362 | .strip_prefix("requirements-") 363 | .and_then(|n| n.strip_suffix(".txt")) 364 | { 365 | info!("Configuring group from requirements file: {}", file_name); 366 | file_tracker.track_file(&pyproject_path)?; 367 | } 368 | } 369 | } 370 | } 371 | } 372 | 373 | Ok(()) 374 | } 375 | 376 | /// Migrates Conda environment-specific features 377 | pub fn perform_conda_migration( 378 | project_dir: &Path, 379 | file_tracker: &mut FileTrackerGuard, 380 | ) -> Result<()> { 381 | let pyproject_path = project_dir.join("pyproject.toml"); 382 | 383 | info!("Checking for Conda channels to document"); 384 | 385 | // Read environment file to check for channels 386 | let env_file = project_dir.join("environment.yml"); 387 | if !env_file.exists() { 388 | let env_file = project_dir.join("environment.yaml"); 389 | if !env_file.exists() { 390 | return Ok(()); 391 | } 392 | } 393 | 394 | // Note: We could extract and document Conda channels as comments in pyproject.toml 395 | // but UV doesn't have a direct equivalent to Conda channels. 396 | // The package name mapping in CondaMigrationSource handles most cases. 
397 | 398 | info!("Conda migration completed - package names have been mapped to PyPI equivalents"); 399 | 400 | // Track the pyproject.toml file for any additional changes 401 | file_tracker.track_file(&pyproject_path)?; 402 | 403 | Ok(()) 404 | } 405 | -------------------------------------------------------------------------------- /src/migrators/detect.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Result; 2 | use crate::migrators::conda::CondaMigrationSource; 3 | use crate::migrators::pipenv::PipenvMigrationSource; 4 | use crate::migrators::poetry::PoetryMigrationSource; 5 | use crate::models::project::ProjectType; 6 | use log::info; 7 | use std::path::Path; 8 | 9 | pub fn detect_project_type(project_dir: &Path) -> Result { 10 | // Check for Conda environment first (most specific) 11 | if CondaMigrationSource::detect_project_type(project_dir) { 12 | info!("Detected Conda project"); 13 | return Ok(ProjectType::Conda); 14 | } 15 | 16 | let pyproject_path = project_dir.join("pyproject.toml"); 17 | if pyproject_path.exists() { 18 | // First, check the project section (Poetry 2.0 style) 19 | if let Ok(content) = std::fs::read_to_string(&pyproject_path) { 20 | if let Ok(pyproject) = toml::from_str::(&content) { 21 | // Check for Poetry 2.0 project section 22 | if let Some(project) = pyproject.get("project") { 23 | if project.get("dependencies").is_some() { 24 | info!("Detected Poetry 2.0 project"); 25 | 26 | // Don't automatically assume it's a package; let PoetryMigrationSource determine that 27 | let poetry_type = PoetryMigrationSource::detect_project_type(project_dir)?; 28 | return Ok(ProjectType::Poetry(poetry_type)); 29 | } 30 | } 31 | } 32 | } 33 | 34 | if has_poetry_section(&pyproject_path)? 
{ 35 | info!("Detected Poetry project"); 36 | let poetry_type = PoetryMigrationSource::detect_project_type(project_dir)?; 37 | return Ok(ProjectType::Poetry(poetry_type)); 38 | } 39 | } 40 | 41 | if PipenvMigrationSource::detect_project_type(project_dir) { 42 | info!("Detected Pipenv project"); 43 | return Ok(ProjectType::Pipenv); 44 | } 45 | 46 | let setup_py_path = project_dir.join("setup.py"); 47 | if setup_py_path.exists() { 48 | info!("Detected setuptools project"); 49 | return Ok(ProjectType::SetupPy); 50 | } 51 | 52 | let requirements_files = find_requirements_files(project_dir); 53 | if !requirements_files.is_empty() { 54 | info!("Detected project with requirements files"); 55 | return Ok(ProjectType::Requirements); 56 | } 57 | 58 | Err(crate::error::Error::ProjectDetection("Unable to detect project type. Ensure you have either a pyproject.toml with a [tool.poetry] section or a [project] section, a Pipfile, a setup.py file, requirements.txt file(s), or an environment.yml file for Conda projects.".to_string())) 59 | } 60 | 61 | /// Parses the contents of a TOML file to check for Poetry configuration. 62 | /// 63 | /// # Arguments 64 | /// 65 | /// * `pyproject_path` - The file path of the TOML file being parsed. 
66 | /// 67 | /// # Returns 68 | /// 69 | /// * `bool` - Whether the file contains a Poetry configuration 70 | /// 71 | /// # Errors 72 | /// 73 | /// Returns an error if the file cannot be read or parsed 74 | fn has_poetry_section(pyproject_path: &Path) -> Result { 75 | let content = std::fs::read_to_string(pyproject_path).map_err(|e| { 76 | crate::error::Error::FileOperation { 77 | path: pyproject_path.to_path_buf(), 78 | message: format!("Error reading file: {}", e), 79 | } 80 | })?; 81 | 82 | let pyproject: toml::Value = 83 | toml::from_str(&content).map_err(crate::error::Error::TomlSerde)?; 84 | 85 | // Check for traditional Poetry section 86 | let has_tool_poetry = pyproject 87 | .get("tool") 88 | .and_then(|t| t.get("poetry")) 89 | .is_some(); 90 | 91 | // Check for Poetry 2.0 project section 92 | let has_project_section = pyproject 93 | .get("project") 94 | .and_then(|p| p.get("dependencies")) 95 | .is_some(); 96 | 97 | Ok(has_tool_poetry || has_project_section) 98 | } 99 | 100 | /// Finds all requirements files in a directory. 101 | /// 102 | /// Searches the specified directory for files that start with "requirements" 103 | /// (e.g., requirements.txt, requirements-dev.txt). This includes any file 104 | /// with a "requirements" prefix, regardless of its suffix. 105 | /// 106 | /// # Arguments 107 | /// 108 | /// * `dir` - A reference to a Path pointing to the directory to search 109 | /// 110 | /// # Returns 111 | /// 112 | /// A Vec containing paths to all found requirements files. 
113 | fn find_requirements_files(dir: &Path) -> Vec { 114 | std::fs::read_dir(dir) 115 | .unwrap() 116 | .filter_map(|entry| { 117 | let entry = entry.unwrap(); 118 | let path = entry.path(); 119 | if path.is_file() 120 | && path 121 | .file_name() 122 | .unwrap() 123 | .to_str() 124 | .unwrap() 125 | .starts_with("requirements") 126 | { 127 | Some(path) 128 | } else { 129 | None 130 | } 131 | }) 132 | .collect() 133 | } 134 | -------------------------------------------------------------------------------- /src/migrators/pipenv.rs: -------------------------------------------------------------------------------- 1 | use crate::error::Result; 2 | use crate::migrators::MigrationSource; 3 | use crate::models::dependency::{Dependency, DependencyType}; 4 | use log::{debug, info}; 5 | use serde_json::Value; 6 | use std::{fs, path::Path}; 7 | 8 | pub struct PipenvMigrationSource; 9 | 10 | impl PipenvMigrationSource { 11 | pub fn detect_project_type(project_dir: &Path) -> bool { 12 | // Check for both Pipfile and Pipfile.lock 13 | let pipfile_path = project_dir.join("Pipfile"); 14 | let pipfile_lock_path = project_dir.join("Pipfile.lock"); 15 | 16 | pipfile_path.exists() && pipfile_lock_path.exists() 17 | } 18 | 19 | /// Parse a single dependency based on the Pipfile specification 20 | fn parse_pipfile_dependency( 21 | &self, 22 | name: &str, 23 | spec: &str, 24 | dep_type: DependencyType, 25 | ) -> Option { 26 | // Skip packages like 'python_version' 27 | if name == "python_version" { 28 | return None; 29 | } 30 | 31 | // Basic dependency parsing 32 | let version = match spec { 33 | "*" => None, 34 | spec if spec.starts_with('*') => None, 35 | spec => Some(spec.to_string()), 36 | }; 37 | 38 | Some(Dependency { 39 | name: name.to_string(), 40 | version, 41 | dep_type, 42 | environment_markers: None, 43 | extras: None, 44 | }) 45 | } 46 | 47 | /// Read and parse the Pipfile to determine dependencies 48 | fn read_pipfile(&self, project_dir: &Path) -> Result> { 49 | let 
pipfile_path = project_dir.join("Pipfile"); 50 | let content = 51 | fs::read_to_string(&pipfile_path).map_err(|e| crate::error::Error::FileOperation { 52 | path: pipfile_path.clone(), 53 | message: format!("Error reading Pipfile: {}", e), 54 | })?; 55 | 56 | // Use toml crate to parse Pipfile 57 | let pipfile: toml::Value = toml::from_str(&content).map_err(|e| { 58 | crate::error::Error::DependencyParsing(format!("Error parsing Pipfile: {}", e)) 59 | })?; 60 | 61 | let mut dependencies = Vec::new(); 62 | 63 | // Parse main packages 64 | if let Some(packages) = pipfile.get("packages").and_then(|p| p.as_table()) { 65 | for (name, spec) in packages.iter() { 66 | let spec_str = match spec { 67 | toml::Value::String(s) => s.as_str(), 68 | _ => continue, 69 | }; 70 | 71 | if let Some(dep) = 72 | self.parse_pipfile_dependency(name, spec_str, DependencyType::Main) 73 | { 74 | dependencies.push(dep); 75 | } 76 | } 77 | } 78 | 79 | // Parse dev packages 80 | if let Some(dev_packages) = pipfile.get("dev-packages").and_then(|p| p.as_table()) { 81 | for (name, spec) in dev_packages.iter() { 82 | let spec_str = match spec { 83 | toml::Value::String(s) => s.as_str(), 84 | _ => continue, 85 | }; 86 | 87 | if let Some(dep) = 88 | self.parse_pipfile_dependency(name, spec_str, DependencyType::Dev) 89 | { 90 | dependencies.push(dep); 91 | } 92 | } 93 | } 94 | 95 | Ok(dependencies) 96 | } 97 | } 98 | 99 | impl MigrationSource for PipenvMigrationSource { 100 | fn extract_dependencies(&self, project_dir: &Path) -> Result> { 101 | info!("Extracting dependencies from Pipfile"); 102 | 103 | // First, read dependencies from Pipfile 104 | let pipfile_dependencies = self.read_pipfile(project_dir)?; 105 | 106 | // If no dependencies found in Pipfile, fallback to Pipfile.lock 107 | if pipfile_dependencies.is_empty() { 108 | self.extract_dependencies_from_lock_file(project_dir) 109 | } else { 110 | Ok(pipfile_dependencies) 111 | } 112 | } 113 | } 114 | 115 | /// Implementation for reading from 
/// Implementation for reading from Pipfile.lock (kept mostly the same as before)
impl PipenvMigrationSource {
    /// Fallback extraction path: reads the pinned dependency sets from
    /// Pipfile.lock ("default" -> Main, "develop" -> Dev).
    fn extract_dependencies_from_lock_file(&self, project_dir: &Path) -> Result<Vec<Dependency>> {
        let pipfile_lock_path = project_dir.join("Pipfile.lock");

        if !pipfile_lock_path.exists() {
            return Err(crate::error::Error::FileOperation {
                path: pipfile_lock_path.clone(),
                message: "Pipfile.lock does not exist".to_string(),
            });
        }

        let content = fs::read_to_string(&pipfile_lock_path).map_err(|e| {
            crate::error::Error::FileOperation {
                path: pipfile_lock_path.clone(),
                message: format!("Error reading file: {}", e),
            }
        })?;

        // Pipfile.lock is JSON.
        let lock_data: Value = serde_json::from_str(&content).map_err(|e| {
            crate::error::Error::DependencyParsing(format!("Error parsing Pipfile.lock: {}", e))
        })?;

        let mut dependencies = Vec::new();

        // Process default dependencies
        if let Some(default_deps) = lock_data.get("default").and_then(|v| v.as_object()) {
            debug!("Processing default dependencies");
            for (name, value) in default_deps {
                if let Some(dep) = self.parse_dependency(name, value, DependencyType::Main)? {
                    dependencies.push(dep);
                }
            }
        }

        // Process development dependencies
        if let Some(dev_deps) = lock_data.get("develop").and_then(|v| v.as_object()) {
            debug!("Processing development dependencies");
            for (name, value) in dev_deps {
                if let Some(dep) = self.parse_dependency(name, value, DependencyType::Dev)? {
                    dependencies.push(dep);
                }
            }
        }

        Ok(dependencies)
    }

    // Existing parse_dependency method from the previous implementation
    /// Parses one lock-file entry into a `Dependency`.
    ///
    /// Skips the interpreter pseudo-entries (`python_version`,
    /// `python_full_version`); strips leading '=' characters from pinned
    /// versions ("==1.2" -> "1.2"); and derives environment markers from, in
    /// order of precedence: "markers", "platform_python_implementation",
    /// then "sys_platform".
    fn parse_dependency(
        &self,
        name: &str,
        value: &Value,
        dep_type: DependencyType,
    ) -> Result<Option<Dependency>> {
        // Reuse the existing implementation from the previous code
        // (This method handles parsing from Pipfile.lock with complex dependency formats)
        // ... (keep the existing parse_dependency implementation)
        // Simplified version for this example
        if name == "python_version" || name == "python_full_version" {
            return Ok(None);
        }

        let dep_obj = value.as_object().ok_or_else(|| {
            crate::error::Error::DependencyParsing(format!(
                "Invalid dependency format for '{}': expected object",
                name
            ))
        })?;

        // Handle version specification
        let version = match dep_obj.get("version") {
            Some(version_value) => {
                let version_str = version_value.as_str().ok_or_else(|| {
                    crate::error::Error::DependencyParsing(format!(
                        "Invalid version format for '{}': expected string",
                        name
                    ))
                })?;
                Some(version_str.trim_start_matches('=').to_string())
            }
            None => None,
        };

        // Extract environment markers (optional); only the
        // highest-precedence key present is used.
        let markers = match (
            dep_obj.get("markers"),
            dep_obj.get("platform_python_implementation"),
            dep_obj.get("sys_platform"),
        ) {
            (Some(marker_val), _, _) => marker_val.as_str().map(|s| s.to_string()),
            (_, Some(impl_val), _) => impl_val
                .as_str()
                .map(|v| format!("platform_python_implementation == '{}'", v)),
            (_, _, Some(platform_val)) => platform_val
                .as_str()
                .map(|v| format!("sys_platform == '{}'", v)),
            _ => None,
        };

        Ok(Some(Dependency {
            name: name.to_string(),
            version,
            dep_type,
            environment_markers: markers,
            extras: None,
        }))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::fs;
    use std::path::PathBuf;
    use tempfile::TempDir;

    /// Writes the given Pipfile / Pipfile.lock contents into a fresh temp
    /// directory; the returned TempDir guard must stay alive for the files
    /// to exist.
    fn create_test_pipfile_and_lock(
        pipfile_content: &str,
        lock_content: &str,
    ) -> (TempDir, PathBuf) {
        let temp_dir = TempDir::new().unwrap();
        let project_dir = temp_dir.path().to_path_buf();

        fs::write(project_dir.join("Pipfile"), pipfile_content).unwrap();
        fs::write(project_dir.join("Pipfile.lock"), lock_content).unwrap();

        (temp_dir, project_dir)
    }

    // Pipfile specs should win over the pinned lock-file versions.
    #[test]
    fn test_pipfile_dependencies() {
        let pipfile_content = r#"
[packages]
fastapi = "*"
requests = "^2.31.0"

[dev-packages]
pytest = "^8.0.0"

[requires]
python_version = "3.12"
"#;

        let lock_content = r#"{
    "default": {
        "fastapi": {"version": "==0.111.0"},
        "requests": {"version": "==2.31.0"}
    },
    "develop": {
        "pytest": {"version": "==8.0.0"}
    }
}"#;

        let (_temp_dir, project_dir) = create_test_pipfile_and_lock(pipfile_content, lock_content);

        let source = PipenvMigrationSource;
        let dependencies = source.extract_dependencies(&project_dir).unwrap();

        assert_eq!(dependencies.len(), 3);

        // Check Main dependencies
        let main_deps: Vec<_> = dependencies
            .iter()
            .filter(|d| matches!(d.dep_type, DependencyType::Main))
            .collect();
        assert_eq!(main_deps.len(), 2);

        let fastapi_dep = main_deps.iter().find(|d| d.name == "fastapi").unwrap();
        assert_eq!(
            fastapi_dep.version, None,
            "Fastapi should have no version from Pipfile"
        );

        let requests_dep = main_deps.iter().find(|d| d.name == "requests").unwrap();
        assert_eq!(requests_dep.version, Some("^2.31.0".to_string()));

        // Check Dev dependencies
        let dev_deps: Vec<_> = dependencies
            .iter()
            .filter(|d| matches!(d.dep_type, DependencyType::Dev))
            .collect();
        assert_eq!(dev_deps.len(), 1);

        let pytest_dep = dev_deps.iter().find(|d| d.name == "pytest").unwrap();
        assert_eq!(pytest_dep.version, Some("^8.0.0".to_string()));
    }

    // With an empty lock file, the Pipfile alone must still yield deps.
    #[test]
    fn test_pipfile_with_no_matching_lock_entries() {
        let pipfile_content = r#"
[packages]
custom-package = "*"

[dev-packages]
custom-dev-package = "^1.0.0"
"#;

        let lock_content = r#"{
    "default": {},
    "develop": {}
}"#;

        let (_temp_dir, project_dir) = create_test_pipfile_and_lock(pipfile_content, lock_content);

        let source = PipenvMigrationSource;
        let dependencies = source.extract_dependencies(&project_dir).unwrap();

        assert_eq!(dependencies.len(), 2);

        let main_deps: Vec<_> = dependencies
            .iter()
            .filter(|d| matches!(d.dep_type, DependencyType::Main))
            .collect();
        assert_eq!(main_deps.len(), 1);
        assert_eq!(main_deps[0].name, "custom-package");
        assert_eq!(main_deps[0].version, None);

        let dev_deps: Vec<_> = dependencies
            .iter()
            .filter(|d| matches!(d.dep_type, DependencyType::Dev))
            .collect();
        assert_eq!(dev_deps.len(), 1);
        assert_eq!(dev_deps[0].name, "custom-dev-package");
        assert_eq!(dev_deps[0].version, Some("^1.0.0".to_string()));
    }
}

// -------------------------------------------------------------------------
// src/migrators/requirements.rs
// -------------------------------------------------------------------------

use crate::error::Result;
use crate::migrators::MigrationSource;
use crate::models::dependency::{Dependency, DependencyType};
use log::{debug, info};
use std::fs;
use std::path::{Path, PathBuf};

/// Migration source for projects managed by plain requirements*.txt files.
pub struct RequirementsMigrationSource;
MigrationSource for RequirementsMigrationSource { 11 | fn extract_dependencies(&self, project_dir: &Path) -> Result> { 12 | let requirements_files = self.find_requirements_files(project_dir); 13 | if requirements_files.is_empty() { 14 | return Err(crate::error::Error::ProjectDetection( 15 | "No requirements files found.".to_string(), 16 | )); 17 | } 18 | 19 | let mut dependencies = Vec::new(); 20 | for (file_path, dep_type) in requirements_files { 21 | info!("Processing requirements file: {}", file_path.display()); 22 | let deps = self.process_requirements_file(&file_path, dep_type)?; 23 | debug!("Extracted {} dependencies", deps.len()); 24 | dependencies.extend(deps); 25 | } 26 | 27 | debug!("Total dependencies extracted: {}", dependencies.len()); 28 | Ok(dependencies) 29 | } 30 | } 31 | 32 | impl RequirementsMigrationSource { 33 | pub(crate) fn find_requirements_files(&self, dir: &Path) -> Vec<(PathBuf, DependencyType)> { 34 | let mut requirements_files = Vec::new(); 35 | if let Ok(entries) = fs::read_dir(dir) { 36 | for entry in entries.flatten() { 37 | let path = entry.path(); 38 | if path.is_file() { 39 | if let Some(file_name) = path.file_name().and_then(|n| n.to_str()) { 40 | if file_name == "requirements.txt" { 41 | requirements_files.push((path.clone(), DependencyType::Main)); 42 | info!("Found main requirements file: {}", path.display()); 43 | } else if file_name.starts_with("requirements-") 44 | && file_name.ends_with(".txt") 45 | { 46 | let group_name = file_name 47 | .strip_prefix("requirements-") 48 | .unwrap() 49 | .strip_suffix(".txt") 50 | .unwrap(); 51 | let dep_type = match group_name { 52 | "dev" => DependencyType::Dev, 53 | _ => DependencyType::Group(group_name.to_string()), 54 | }; 55 | requirements_files.push((path.clone(), dep_type)); 56 | info!("Found {} requirements file: {}", group_name, path.display()); 57 | } 58 | } 59 | } 60 | } 61 | } 62 | requirements_files 63 | } 64 | 65 | pub fn has_requirements_files(&self, dir: &Path) -> bool { 
66 | !self.find_requirements_files(dir).is_empty() 67 | } 68 | 69 | fn process_requirements_file( 70 | &self, 71 | file_path: &Path, 72 | dep_type: DependencyType, 73 | ) -> Result> { 74 | let contents = 75 | fs::read_to_string(file_path).map_err(|e| crate::error::Error::FileOperation { 76 | path: file_path.to_path_buf(), 77 | message: format!("Error reading file: {}", e), 78 | })?; 79 | 80 | let mut dependencies = Vec::new(); 81 | 82 | for (line_num, line) in contents.lines().enumerate() { 83 | let line = line.trim(); 84 | 85 | // Skip empty lines and comments 86 | if line.is_empty() || line.starts_with('#') { 87 | continue; 88 | } 89 | 90 | match self.parse_requirement(line) { 91 | Ok(Some(dep)) => { 92 | debug!("Parsed dependency on line {}: {:?}", line_num + 1, dep); 93 | dependencies.push(Dependency { 94 | name: dep.name, 95 | version: dep.version, 96 | dep_type: dep_type.clone(), 97 | environment_markers: dep.environment_markers, 98 | extras: dep.extras, 99 | }); 100 | } 101 | Ok(None) => debug!( 102 | "Skipped line {} (possibly 'python' requirement): {}", 103 | line_num + 1, 104 | line 105 | ), 106 | Err(e) => debug!("Failed to parse line {}: {}", line_num + 1, e), 107 | } 108 | } 109 | 110 | debug!("Processed {} dependencies", dependencies.len()); 111 | Ok(dependencies) 112 | } 113 | 114 | fn process_version_spec(&self, version_spec: &str) -> String { 115 | let version_spec = version_spec.trim(); 116 | 117 | // For version specs with multiple constraints, preserve as-is 118 | if version_spec.contains(',') { 119 | return version_spec.to_string(); 120 | } 121 | 122 | // Handle special cases in order of precedence 123 | if version_spec.starts_with("~=") 124 | || version_spec.starts_with(">=") 125 | || version_spec.starts_with("<=") 126 | || version_spec.starts_with(">") 127 | || version_spec.starts_with("<") 128 | || version_spec.starts_with("!=") 129 | { 130 | // Preserve these operators as-is 131 | version_spec.to_string() 132 | } else if let Some(stripped) 
= version_spec.strip_prefix("==") {
            // Exact pins are stored without the `==` operator.
            stripped.to_string()
        } else if let Some(stripped) = version_spec.strip_prefix('~') {
            // `~1.2` becomes `~=1.2`; an input that is already `~=1.2` must
            // not be turned into `~==1.2`.
            if stripped.starts_with('=') {
                version_spec.to_string()
            } else {
                format!("~={}", stripped)
            }
        } else {
            // No recognized operator: preserve the spec verbatim.
            version_spec.to_string()
        }
    }

    /// Parses one requirements.txt line into a [`Dependency`].
    ///
    /// Returns `Ok(None)` for lines that must be skipped (the `python`
    /// pseudo-requirement), and an error for malformed lines.
    fn parse_requirement(&self, line: &str) -> Result<Option<Dependency>> {
        // `-e <path-or-url>` editable installs: drop the flag, keep the target.
        let line = if line.starts_with("-e") {
            let parts: Vec<&str> = line.splitn(2, ' ').collect();
            if parts.len() != 2 {
                return Err(crate::error::Error::DependencyParsing(
                    "Invalid editable install format".to_string(),
                ));
            }
            parts[1]
        } else {
            line
        };

        // Environment markers follow the first `;`.
        let parts: Vec<&str> = line.split(';').collect();
        let package_spec = parts[0].trim();

        // Reject empty specs; `===` (arbitrary equality) is unsupported.
        if package_spec.is_empty() || package_spec.contains("===") {
            return Err(crate::error::Error::DependencyParsing(
                "Malformed requirement line".to_string(),
            ));
        }

        // URLs and git repositories carry the name in the URL itself.
        let (name, version) =
            if package_spec.starts_with("git+") || package_spec.starts_with("http") {
                self.parse_url_requirement(package_spec)?
            } else {
                self.parse_regular_requirement(package_spec)?
            };

        // The `python` requirement is handled by the Python-version logic.
        if name == "python" {
            return Ok(None);
        }

        let environment_markers = if parts.len() > 1 {
            Some(parts[1..].join(";").trim().to_string())
        } else {
            None
        };

        Ok(Some(Dependency {
            name,
            version,
            dep_type: DependencyType::Main, // This will be overridden by the caller
            environment_markers,
            extras: None,
        }))
    }

    /// Derives a package name from a URL or VCS requirement.
    ///
    /// Fix: the original branched on `split('#').last()`, which is `Some`
    /// even when there is no `#`, so the fragment-less fallback was
    /// unreachable and e.g. `git+https://host/repo.git` (no `#egg=`) was
    /// rejected as "Invalid URL format".
    fn parse_url_requirement(&self, package_spec: &str) -> Result<(String, Option<String>)> {
        // An explicit `#egg=<name>` fragment always wins.
        if let Some((_, fragment)) = package_spec.rsplit_once('#') {
            if let Some(egg) = fragment.strip_prefix("egg=") {
                return Ok((egg.to_string(), None));
            }
        }

        // Wheel URLs: the distribution name is the first dash-field of the
        // wheel filename.
        if package_spec.ends_with(".whl") {
            let name = package_spec
                .rsplit('/')
                .next()
                .and_then(|f| f.split('-').next())
                .ok_or_else(|| {
                    crate::error::Error::DependencyParsing("Invalid wheel filename".to_string())
                })?
                .to_string();
            return Ok((name, None));
        }

        // A non-egg fragment on a non-wheel URL is unsupported (this matches
        // the original's reachable behaviour).
        if package_spec.contains('#') {
            return Err(crate::error::Error::DependencyParsing(
                "Invalid URL format".to_string(),
            ));
        }

        // Fragment-less URL: use the final path segment, minus any extension
        // (e.g. `repo.git` -> `repo`).
        let name = package_spec
            .rsplit('/')
            .next()
            .and_then(|f| f.split('.').next())
            .ok_or_else(|| {
                crate::error::Error::DependencyParsing("Invalid URL format".to_string())
            })?
            .to_string();

        Ok((name, None))
    }

    /// Splits `name<op>version` on the first comparison operator.
    fn parse_regular_requirement(&self, package_spec: &str) -> Result<(String, Option<String>)> {
        let ops = ['>', '<', '=', '~', '!'];
        match package_spec.find(&ops[..]) {
            // No operator at all: bare package name.
            None => Ok((package_spec.to_string(), None)),
            Some(idx) => {
                let name = package_spec[..idx].trim().to_string();
                let version_spec = package_spec[idx..].trim();
                Ok((name, Some(self.process_version_spec(version_spec))))
            }
        }
    }
}

// ---- src/migrators/setup_py.rs ----

use crate::error::Result;
use crate::migrators::{MigrationSource, requirements::RequirementsMigrationSource};
use crate::models::dependency::{Dependency, DependencyType};
use log::{debug, info};
use std::fs;
use std::path::Path;

/// Extracts dependencies from `setup.py`-based projects.
pub struct SetupPyMigrationSource;

impl MigrationSource for SetupPyMigrationSource {
    fn extract_dependencies(&self, project_dir: &Path) -> Result<Vec<Dependency>> {
        info!("Extracting dependencies from setup.py");
        // Prefer requirements files when present: they are easier to parse
        // reliably than setup.py source.
        let requirements_source = RequirementsMigrationSource;
        if requirements_source.has_requirements_files(project_dir) {
            info!("Found requirements files, using requirements parser");
            return requirements_source.extract_dependencies(project_dir);
        }

        info!("No requirements files found, parsing setup.py directly");
        self.parse_setup_py(project_dir)
    }
}

impl SetupPyMigrationSource {
    /// Reads `setup.py` and extracts `install_requires` / `tests_require`.
    fn parse_setup_py(&self, project_dir: &Path) -> Result<Vec<Dependency>> {
        let setup_py_path = project_dir.join("setup.py");
        let content =
            fs::read_to_string(&setup_py_path).map_err(|e| crate::error::Error::FileOperation {
                path: setup_py_path.clone(),
                message: format!("Failed to read setup.py: {}", e),
            })?;

        debug!("Parsing setup.py content");
        let mut dependencies = Vec::new();

        // Main dependencies.
        if let Some(mut deps) = self.extract_install_requires(&content) {
            dependencies.append(&mut deps);
        }
        // Test dependencies map onto the dev group.
        if let Some(mut deps) = self.extract_tests_require(&content) {
            dependencies.append(&mut deps);
        }

        Ok(dependencies)
    }
crate::error::Error::FileOperation { 29 | path: setup_py_path.clone(), 30 | message: format!("Failed to read setup.py: {}", e), 31 | })?; 32 | 33 | debug!("Parsing setup.py content"); 34 | let mut dependencies = Vec::new(); 35 | 36 | // Extract main dependencies 37 | if let Some(mut deps) = self.extract_install_requires(&content) { 38 | dependencies.append(&mut deps); 39 | } 40 | 41 | // Extract test dependencies 42 | if let Some(mut deps) = self.extract_tests_require(&content) { 43 | dependencies.append(&mut deps); 44 | } 45 | 46 | Ok(dependencies) 47 | } 48 | 49 | fn extract_install_requires(&self, content: &str) -> Option> { 50 | let start_idx = content.find("install_requires=[")?; 51 | let bracket_content = 52 | self.extract_bracket_content(content, start_idx + "install_requires=".len())?; 53 | 54 | Some(self.parse_dependencies(&bracket_content, DependencyType::Main)) 55 | } 56 | 57 | fn extract_tests_require(&self, content: &str) -> Option> { 58 | let start_idx = content.find("tests_require=[")?; 59 | let bracket_content = 60 | self.extract_bracket_content(content, start_idx + "tests_require=".len())?; 61 | 62 | Some(self.parse_dependencies(&bracket_content, DependencyType::Dev)) 63 | } 64 | 65 | pub fn extract_bracket_content(&self, content: &str, start_pos: usize) -> Option { 66 | let content = &content[start_pos..]; 67 | let bracket_start = content.find('[')?; 68 | let mut bracket_count = 1; 69 | let mut pos = bracket_start + 1; 70 | 71 | while bracket_count > 0 && pos < content.len() { 72 | match content.chars().nth(pos)? 
{ 73 | '[' => bracket_count += 1, 74 | ']' => bracket_count -= 1, 75 | _ => {} 76 | } 77 | pos += 1; 78 | } 79 | 80 | if bracket_count == 0 { 81 | Some(content[bracket_start + 1..pos - 1].to_string()) 82 | } else { 83 | None 84 | } 85 | } 86 | 87 | fn parse_dependencies(&self, content: &str, dep_type: DependencyType) -> Vec { 88 | let mut dependencies = Vec::new(); 89 | 90 | for line in content.split(',') { 91 | let line = line.trim(); 92 | if line.is_empty() { 93 | continue; 94 | } 95 | 96 | // Remove quotes and extract package name and version 97 | let dep_str = line.trim_matches(|c| c == '\'' || c == '"'); 98 | if let Some((name, version)) = self.parse_dependency_spec(dep_str) { 99 | dependencies.push(Dependency { 100 | name, 101 | version, 102 | dep_type: dep_type.clone(), 103 | environment_markers: None, 104 | extras: None, 105 | }); 106 | } 107 | } 108 | 109 | dependencies 110 | } 111 | 112 | fn parse_dependency_spec(&self, dep_str: &str) -> Option<(String, Option)> { 113 | if dep_str.is_empty() || dep_str == "setuptools" { 114 | return None; 115 | } 116 | 117 | // Handle different package specification formats 118 | if dep_str.contains(">=") { 119 | let parts: Vec<&str> = dep_str.split(">=").collect(); 120 | Some(( 121 | parts[0].trim().to_string(), 122 | Some(format!(">={}", parts[1].trim())), 123 | )) 124 | } else if dep_str.contains("==") { 125 | let parts: Vec<&str> = dep_str.split("==").collect(); 126 | Some(( 127 | parts[0].trim().to_string(), 128 | Some(parts[1].trim().to_string()), 129 | )) 130 | } else if dep_str.contains('>') { 131 | let parts: Vec<&str> = dep_str.split('>').collect(); 132 | Some(( 133 | parts[0].trim().to_string(), 134 | Some(format!(">{}", parts[1].trim())), 135 | )) 136 | } else { 137 | Some((dep_str.trim().to_string(), None)) 138 | } 139 | } 140 | 141 | pub fn extract_description(project_dir: &Path) -> Result> { 142 | let setup_py_path = project_dir.join("setup.py"); 143 | if !setup_py_path.exists() { 144 | return Ok(None); 145 
| } 146 | 147 | let content = fs::read_to_string(&setup_py_path) 148 | .map_err(|e| format!("Failed to read setup.py: {}", e))?; 149 | 150 | // Look for description in setup() call 151 | if let Some(start_idx) = content.find("setup(") { 152 | let bracket_content = Self::extract_setup_content(&content[start_idx..])?; 153 | 154 | // First try to find long_description 155 | if let Some(desc) = Self::extract_parameter(&bracket_content, "long_description") { 156 | debug!("Found long_description in setup.py"); 157 | return Ok(Some(desc)); 158 | } 159 | 160 | // Fall back to regular description 161 | if let Some(desc) = Self::extract_parameter(&bracket_content, "description") { 162 | debug!("Found description in setup.py"); 163 | return Ok(Some(desc)); 164 | } 165 | } 166 | 167 | Ok(None) 168 | } 169 | 170 | pub fn extract_setup_content(content: &str) -> Result { 171 | let lines = content.lines().enumerate().peekable(); 172 | let mut setup_content = String::new(); 173 | let mut in_setup = false; 174 | let mut paren_count = 0; 175 | 176 | for (_, line) in lines { 177 | let trimmed = line.trim(); 178 | 179 | // Skip empty lines and comments 180 | if trimmed.is_empty() || trimmed.starts_with('#') { 181 | continue; 182 | } 183 | 184 | if !in_setup { 185 | if trimmed.starts_with("setup(") { 186 | in_setup = true; 187 | paren_count = 1; 188 | // Extract everything after setup( 189 | if let Some(content) = line.split("setup(").nth(1) { 190 | setup_content.push_str(content); 191 | setup_content.push('\n'); 192 | } 193 | } 194 | } else { 195 | // Count parentheses in the line 196 | for c in line.chars() { 197 | match c { 198 | '(' => paren_count += 1, 199 | ')' => paren_count -= 1, 200 | _ => {} 201 | } 202 | } 203 | 204 | setup_content.push_str(line); 205 | setup_content.push('\n'); 206 | 207 | if paren_count == 0 { 208 | break; 209 | } 210 | } 211 | } 212 | 213 | if !in_setup { 214 | return Err(crate::error::Error::DependencyParsing( 215 | "Could not find setup() 
call".to_string(), 216 | )); 217 | } 218 | if paren_count > 0 { 219 | return Err(crate::error::Error::DependencyParsing( 220 | "Could not find matching closing parenthesis for setup()".to_string(), 221 | )); 222 | } 223 | 224 | Ok(setup_content) 225 | } 226 | 227 | pub fn extract_parameter(content: &str, param_name: &str) -> Option { 228 | let param_pattern = format!("{} = ", param_name); 229 | let param_pattern2 = format!("{}=", param_name); 230 | 231 | let lines = content.lines().peekable(); 232 | for line in lines { 233 | let trimmed = line.trim(); 234 | if trimmed.is_empty() || trimmed.starts_with('#') { 235 | continue; 236 | } 237 | 238 | if trimmed.starts_with(¶m_pattern) || trimmed.starts_with(¶m_pattern2) { 239 | // Direct string assignment 240 | if trimmed.contains('"') || trimmed.contains('\'') { 241 | if let Some(desc) = Self::extract_string_value(trimmed) { 242 | return Some(desc); 243 | } 244 | } 245 | // Single string variable 246 | else { 247 | // For this case, just take the value parameter at face value 248 | if param_name == "version" || param_name == "description" { 249 | if let Some(value) = trimmed.split('=').nth(1) { 250 | return Some(value.trim().to_string()); 251 | } 252 | } 253 | } 254 | } 255 | } 256 | None 257 | } 258 | 259 | pub fn extract_string_value(line: &str) -> Option { 260 | let after_equals = line.split('=').nth(1)?.trim(); 261 | 262 | // Handle different quote types 263 | let (quote_char, content) = match after_equals.chars().next()? 
{ 264 | '\'' => ('\'', &after_equals[1..]), 265 | '"' => ('"', &after_equals[1..]), 266 | _ => return None, 267 | }; 268 | 269 | // Find matching end quote 270 | let end_pos = content.find(quote_char)?; 271 | let value = content[..end_pos].to_string(); 272 | 273 | Some(value) 274 | } 275 | 276 | pub fn extract_url(project_dir: &Path) -> Result> { 277 | let setup_py_path = project_dir.join("setup.py"); 278 | if !setup_py_path.exists() { 279 | return Ok(None); 280 | } 281 | 282 | let content = 283 | fs::read_to_string(&setup_py_path).map_err(|e| crate::error::Error::FileOperation { 284 | path: setup_py_path.clone(), 285 | message: format!("Failed to read setup.py: {}", e), 286 | })?; 287 | 288 | if let Some(start_idx) = content.find("setup(") { 289 | let bracket_content = Self::extract_setup_content(&content[start_idx..])?; 290 | if let Some(url) = Self::extract_parameter(&bracket_content, "url") { 291 | debug!("Found URL in setup.py"); 292 | return Ok(Some(url)); 293 | } 294 | } 295 | 296 | Ok(None) 297 | } 298 | } 299 | -------------------------------------------------------------------------------- /src/models/dependency.rs: -------------------------------------------------------------------------------- 1 | /// Represents a project dependency with its type, version, and other requirements 2 | #[derive(Debug, Clone)] 3 | pub struct Dependency { 4 | /// The name of the dependency package 5 | pub name: String, 6 | 7 | /// Optional version constraint 8 | pub version: Option, 9 | 10 | /// Type of the dependency (main, dev, or specific group) 11 | pub dep_type: DependencyType, 12 | 13 | /// Optional environment markers (e.g. "python_version > '3.7'") 14 | pub environment_markers: Option, 15 | 16 | /// Optional extras (e.g. 
["s3", "test"]) 17 | pub extras: Option>, 18 | } 19 | 20 | /// Represents the type of dependency 21 | #[derive(Debug, Clone, PartialEq, Eq, Hash)] 22 | pub enum DependencyType { 23 | /// Main project dependency 24 | Main, 25 | 26 | /// Development dependency 27 | Dev, 28 | 29 | /// Dependency in a specific group (e.g. "docs", "test") 30 | Group(String), 31 | } 32 | 33 | #[derive(Debug, Clone)] 34 | pub struct GitDependency { 35 | /// The name of the dependency package 36 | pub name: String, 37 | 38 | /// Git repository URL 39 | pub git_url: String, 40 | 41 | /// Optional branch reference 42 | pub branch: Option, 43 | 44 | /// Optional tag reference 45 | pub tag: Option, 46 | 47 | /// Optional revision reference 48 | pub rev: Option, 49 | } 50 | 51 | use std::str::FromStr; 52 | 53 | impl FromStr for DependencyType { 54 | type Err = String; 55 | 56 | /// Converts a string representation to a DependencyType 57 | fn from_str(dep_type: &str) -> Result { 58 | Ok(match dep_type { 59 | "dev" => DependencyType::Dev, 60 | "main" => DependencyType::Main, 61 | group => DependencyType::Group(group.to_string()), 62 | }) 63 | } 64 | } 65 | 66 | impl DependencyType { 67 | /// Converts a string representation to a DependencyType without error handling 68 | #[allow(dead_code)] 69 | pub fn parse_str(dep_type: &str) -> Self { 70 | match dep_type { 71 | "dev" => DependencyType::Dev, 72 | "main" => DependencyType::Main, 73 | group => DependencyType::Group(group.to_string()), 74 | } 75 | } 76 | } 77 | 78 | impl Dependency { 79 | /// Creates a new dependency with the given name and dependency type 80 | #[allow(dead_code)] 81 | pub fn new(name: String, dep_type: DependencyType) -> Self { 82 | Self { 83 | name, 84 | version: None, 85 | dep_type, 86 | environment_markers: None, 87 | extras: None, 88 | } 89 | } 90 | 91 | /// Creates a new dependency with the given name, version, and dependency type 92 | #[allow(dead_code)] 93 | pub fn with_version(name: String, version: String, dep_type: 
DependencyType) -> Self { 94 | Self { 95 | name, 96 | version: Some(version), 97 | dep_type, 98 | environment_markers: None, 99 | extras: None, 100 | } 101 | } 102 | 103 | /// Adds environment markers to the dependency 104 | #[allow(dead_code)] 105 | pub fn with_markers(mut self, markers: String) -> Self { 106 | self.environment_markers = Some(markers); 107 | self 108 | } 109 | 110 | /// Adds extras to the dependency 111 | #[allow(dead_code)] 112 | pub fn with_extras(mut self, extras: Vec) -> Self { 113 | self.extras = Some(extras); 114 | self 115 | } 116 | } 117 | -------------------------------------------------------------------------------- /src/models/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod dependency; 2 | pub mod project; 3 | 4 | // Re-export commonly used types 5 | pub use dependency::{Dependency, DependencyType, GitDependency}; 6 | -------------------------------------------------------------------------------- /src/models/project.rs: -------------------------------------------------------------------------------- 1 | use serde::Deserialize; 2 | use std::collections::HashMap; 3 | 4 | /// Represents a detected project type 5 | #[derive(Debug, Clone, PartialEq)] 6 | pub enum ProjectType { 7 | /// Poetry project (application or package) 8 | Poetry(PoetryProjectType), 9 | /// Pipenv project 10 | Pipenv, 11 | /// Requirements.txt based project 12 | Requirements, 13 | /// Setup.py based project 14 | SetupPy, 15 | /// Conda environment project 16 | Conda, 17 | } 18 | 19 | /// Distinguishes between Poetry application and package projects 20 | #[derive(Debug, Clone, PartialEq)] 21 | pub enum PoetryProjectType { 22 | /// Poetry application project (no packages section) 23 | Application, 24 | /// Poetry package project (has packages section) 25 | Package, 26 | } 27 | 28 | /// Represents the entire pyproject.toml file structure 29 | #[derive(Deserialize, Debug)] 30 | #[allow(dead_code)] // Fields used through 
Serde deserialization 31 | pub struct PyProject { 32 | pub tool: Option, 33 | pub project: Option, 34 | } 35 | 36 | /// Represents the [tool] section of pyproject.toml 37 | #[derive(Deserialize, Debug)] 38 | #[allow(dead_code)] // Fields used through Serde deserialization 39 | pub struct Tool { 40 | pub poetry: Option, 41 | } 42 | 43 | /// Represents the top-level [project] section (Poetry 2.0 style) 44 | #[derive(Deserialize, Debug)] 45 | #[allow(dead_code)] // Fields used through Serde deserialization 46 | pub struct Project { 47 | pub name: Option, 48 | pub version: Option, 49 | pub description: Option, 50 | pub authors: Option>, 51 | pub readme: Option, 52 | pub requires_python: Option, 53 | pub dependencies: Option>, 54 | } 55 | 56 | /// Represents an author configuration 57 | #[derive(Deserialize, Debug)] 58 | #[allow(dead_code)] // Fields used through Serde deserialization 59 | pub struct AuthorConfig { 60 | pub name: String, 61 | pub email: Option, 62 | } 63 | 64 | /// Represents the [tool.poetry] section 65 | #[derive(Deserialize, Debug)] 66 | #[allow(dead_code)] // Fields used through Serde deserialization 67 | pub struct Poetry { 68 | pub dependencies: Option>, 69 | pub group: Option>, 70 | pub packages: Option>, 71 | } 72 | 73 | /// Represents a package configuration in [tool.poetry.packages] 74 | #[derive(Deserialize, Debug)] 75 | #[allow(dead_code)] // Fields used through Serde deserialization 76 | pub struct Package { 77 | pub include: Option, 78 | } 79 | 80 | /// Represents a dependency group in [tool.poetry.group] 81 | #[derive(Deserialize, Debug)] 82 | #[allow(dead_code)] // Fields used through Serde deserialization 83 | pub struct Group { 84 | pub dependencies: HashMap, 85 | } 86 | -------------------------------------------------------------------------------- /src/utils/author.rs: -------------------------------------------------------------------------------- 1 | use crate::error::{Error, Result}; 2 | use 
crate::migrators::setup_py::SetupPyMigrationSource; 3 | use std::path::Path; 4 | use toml_edit::DocumentMut; 5 | 6 | #[derive(Debug)] 7 | pub struct Author { 8 | pub name: String, 9 | pub email: Option, 10 | } 11 | 12 | pub fn extract_authors_from_setup_py(project_dir: &Path) -> Result> { 13 | let setup_py_path = project_dir.join("setup.py"); 14 | if !setup_py_path.exists() { 15 | return Ok(vec![]); 16 | } 17 | 18 | let content = std::fs::read_to_string(&setup_py_path).map_err(|e| Error::FileOperation { 19 | path: setup_py_path.clone(), 20 | message: format!("Failed to read setup.py: {}", e), 21 | })?; 22 | 23 | let mut authors = Vec::new(); 24 | 25 | // Extract author and author_email from setup() 26 | if let Some(start_idx) = content.find("setup(") { 27 | let bracket_content = SetupPyMigrationSource::extract_setup_content(&content[start_idx..])?; 28 | 29 | if let Some(name) = SetupPyMigrationSource::extract_parameter(&bracket_content, "author") { 30 | let email = SetupPyMigrationSource::extract_parameter(&bracket_content, "author_email"); 31 | authors.push(Author { name, email }); 32 | } 33 | } 34 | 35 | Ok(authors) 36 | } 37 | 38 | pub fn extract_authors_from_poetry(project_dir: &Path) -> Result> { 39 | let old_pyproject_path = project_dir.join("old.pyproject.toml"); 40 | if !old_pyproject_path.exists() { 41 | return Ok(vec![]); 42 | } 43 | 44 | let content = 45 | std::fs::read_to_string(&old_pyproject_path).map_err(|e| Error::FileOperation { 46 | path: old_pyproject_path.clone(), 47 | message: format!("Failed to read old.pyproject.toml: {}", e), 48 | })?; 49 | 50 | let doc = content.parse::().map_err(Error::Toml)?; 51 | 52 | // Extract authors from project section (Poetry 2.0 style) 53 | if let Some(project) = doc.get("project") { 54 | if let Some(authors_array) = project.get("authors").and_then(|a| a.as_array()) { 55 | let mut results = Vec::new(); 56 | for author_value in authors_array.iter() { 57 | if let Some(author_str) = author_value.as_str() { 58 | 
results.push(parse_author_string(author_str)); 59 | } else if let Some(author_table) = author_value.as_inline_table() { 60 | // Poetry 2.0 style inline table 61 | let name = author_table 62 | .get("name") 63 | .and_then(|n| n.as_str()) 64 | .unwrap_or("Unknown") 65 | .to_string(); 66 | 67 | let email = author_table 68 | .get("email") 69 | .and_then(|e| e.as_str()) 70 | .map(|s| s.to_string()); 71 | 72 | results.push(Author { name, email }); 73 | } 74 | } 75 | return Ok(results); 76 | } 77 | } 78 | 79 | // Fallback to traditional Poetry section 80 | let authors = match doc 81 | .get("tool") 82 | .and_then(|t| t.get("poetry")) 83 | .and_then(|poetry| poetry.get("authors")) 84 | { 85 | Some(array) => { 86 | let mut result = Vec::new(); 87 | if let Some(arr) = array.as_array() { 88 | for value in arr.iter() { 89 | if let Some(author_str) = value.as_str() { 90 | result.push(parse_author_string(author_str)); 91 | } 92 | } 93 | } 94 | result 95 | } 96 | None => vec![], 97 | }; 98 | 99 | Ok(authors) 100 | } 101 | 102 | fn parse_author_string(author_str: &str) -> Author { 103 | let author_str = author_str.trim(); 104 | 105 | // First, check for Poetry 2.0 style inline table author format 106 | if author_str.starts_with('{') && author_str.ends_with('}') { 107 | // Remove {} and split by commas 108 | let content = &author_str[1..author_str.len() - 1]; 109 | let mut name = String::new(); 110 | let mut email = None; 111 | 112 | for part in content.split(',') { 113 | let part = part.trim(); 114 | if let Some(name_part) = part 115 | .strip_prefix("name = ") 116 | .or_else(|| part.strip_prefix("name=")) 117 | { 118 | name = name_part.trim_matches(&['"', '\''][..]).to_string(); 119 | } 120 | if let Some(email_part) = part 121 | .strip_prefix("email = ") 122 | .or_else(|| part.strip_prefix("email=")) 123 | { 124 | email = Some(email_part.trim_matches(&['"', '\''][..]).to_string()); 125 | } 126 | } 127 | 128 | return Author { name, email }; 129 | } 130 | 131 | // Classic Poetry 
author format with email in angle brackets 132 | let (name, email) = match (author_str.rfind('<'), author_str.rfind('>')) { 133 | (Some(start), Some(end)) if start < end => { 134 | let name = author_str[..start].trim().to_string(); 135 | let email = author_str[start + 1..end].trim().to_string(); 136 | (name, Some(email)) 137 | } 138 | _ => (author_str.to_string(), None), 139 | }; 140 | 141 | Author { name, email } 142 | } 143 | -------------------------------------------------------------------------------- /src/utils/build_system.rs: -------------------------------------------------------------------------------- 1 | use log::debug; 2 | use std::path::Path; 3 | use toml_edit::{DocumentMut, Item, Table, Value}; 4 | 5 | /// Updates the build system configuration in pyproject.toml. 6 | /// This function follows PEP 621 guidelines to determine if a project is a package 7 | /// that needs a build system or an application that can use the default. 8 | /// 9 | /// # Arguments 10 | /// 11 | /// * `doc` - The TOML document to update 12 | /// * `project_dir` - The project directory path 13 | /// 14 | /// # Returns 15 | /// 16 | /// * `bool` - Whether any changes were made to the document 17 | pub fn update_build_system(doc: &mut DocumentMut, project_dir: &Path) -> Result { 18 | debug!("Checking if project needs a build system configuration"); 19 | let old_pyproject_path = project_dir.join("old.pyproject.toml"); 20 | if !old_pyproject_path.exists() { 21 | return Ok(false); 22 | } 23 | 24 | // Read old pyproject.toml to analyze the project structure 25 | let old_content = std::fs::read_to_string(&old_pyproject_path) 26 | .map_err(|e| format!("Failed to read old.pyproject.toml: {}", e))?; 27 | 28 | let old_doc = old_content 29 | .parse::() 30 | .map_err(|e| format!("Failed to parse old.pyproject.toml: {}", e))?; 31 | 32 | // Check if this is a package project according to PEP 621 and Poetry standards 33 | let is_package_project = determine_if_package_project(&old_doc, 
project_dir); 34 | 35 | // If it's not a package project, don't add a build-system section 36 | if !is_package_project { 37 | debug!("Project appears to be an application, not setting build system"); 38 | return Ok(false); 39 | } 40 | 41 | debug!("Project appears to be a package, configuring build system with Hatchling"); 42 | 43 | // Create new build-system table 44 | let mut build_system = Table::new(); 45 | 46 | // Add requires array 47 | let mut requires = toml_edit::Array::new(); 48 | requires.push(Value::String(toml_edit::Formatted::new( 49 | "hatchling".to_string(), 50 | ))); 51 | build_system.insert("requires", Item::Value(Value::Array(requires))); 52 | 53 | // Add build-backend string 54 | build_system.insert( 55 | "build-backend", 56 | Item::Value(Value::String(toml_edit::Formatted::new( 57 | "hatchling.build".to_string(), 58 | ))), 59 | ); 60 | 61 | // Update the document 62 | doc.insert("build-system", Item::Table(build_system)); 63 | 64 | Ok(true) 65 | } 66 | 67 | /// Determines if a project is a package (vs an application) based on various indicators 68 | fn determine_if_package_project(doc: &DocumentMut, project_dir: &Path) -> bool { 69 | // Check for various indicators that this is a package project: 70 | 71 | // 1. 
Check for Poetry packages configuration with src directory 72 | let has_poetry_package_config = doc 73 | .get("tool") 74 | .and_then(|t| t.get("poetry")) 75 | .and_then(|poetry| { 76 | // First try `packages` key directly in poetry 77 | if let Some(packages) = poetry.get("packages").and_then(|p| p.as_array()) { 78 | Some(packages.iter().any(|pkg| { 79 | if let Some(table) = pkg.as_inline_table() { 80 | (table.get("from").and_then(|f| f.as_str()) == Some("src")) 81 | || (table.get("include").and_then(|i| i.as_str()).is_some() 82 | && table.get("from").and_then(|f| f.as_str()) == Some("src")) 83 | } else { 84 | false 85 | } 86 | })) 87 | } 88 | // Try packages array within tool.poetry.packages section 89 | else if let Some(packages_section) = poetry.get("packages") { 90 | if let Some(packages_array) = 91 | packages_section.get("packages").and_then(|p| p.as_array()) 92 | { 93 | Some(packages_array.iter().any(|pkg| { 94 | if let Some(table) = pkg.as_inline_table() { 95 | (table.get("from").and_then(|f| f.as_str()) == Some("src")) 96 | || (table.get("include").and_then(|i| i.as_str()).is_some() 97 | && table.get("from").and_then(|f| f.as_str()) == Some("src")) 98 | } else { 99 | false 100 | } 101 | })) 102 | } else { 103 | // Just having a packages section is a strong indication it's a package 104 | Some(true) 105 | } 106 | } else { 107 | None 108 | } 109 | }) 110 | .unwrap_or(false); 111 | 112 | // OR: Check for Poetry packages section in any configuration 113 | let has_poetry_packages = has_poetry_package_config 114 | || doc 115 | .get("tool") 116 | .and_then(|t| t.get("poetry")) 117 | .and_then(|poetry| poetry.get("packages")) 118 | .is_some(); 119 | 120 | // Also check for packages configuration in Poetry 2.0 format 121 | let has_poetry2_packages = doc 122 | .get("project") 123 | .and_then(|project| project.get("packages")) 124 | .is_some(); 125 | 126 | if has_poetry_packages || has_poetry2_packages { 127 | debug!("Project has Poetry package configuration"); 128 | 
return true; 129 | } 130 | 131 | // 2. Check for setup.py or setup.cfg which would indicate a package 132 | if project_dir.join("setup.py").exists() || project_dir.join("setup.cfg").exists() { 133 | debug!("Project has setup.py or setup.cfg"); 134 | return true; 135 | } 136 | 137 | // 3. Check for typical package structure (src directory with an __init__.py file) 138 | let src_dir = project_dir.join("src"); 139 | if src_dir.exists() && src_dir.is_dir() { 140 | // Check if there are any __init__.py files in the src directory 141 | if std::fs::read_dir(&src_dir).ok().is_some_and(|entries| { 142 | entries 143 | .flatten() 144 | .any(|entry| entry.path().is_dir() && entry.path().join("__init__.py").exists()) 145 | }) { 146 | debug!("Project has src directory with __init__.py files"); 147 | return true; 148 | } 149 | } 150 | 151 | // 4. Check for PEP 621 package indicators in [project] section 152 | let has_pep621_package_indicators = doc 153 | .get("project") 154 | .map(|project| { 155 | // Check for typical package indicators in PEP 621 format 156 | let has_urls = project.get("urls").is_some(); 157 | let has_classifiers = project.get("classifiers").is_some(); 158 | let has_keywords = project.get("keywords").is_some(); 159 | 160 | // If it has multiple of these fields, it's likely a package 161 | (has_urls as u8) + (has_classifiers as u8) + (has_keywords as u8) >= 2 162 | }) 163 | .unwrap_or(false); 164 | 165 | if has_pep621_package_indicators { 166 | debug!("Project has PEP 621 package indicators"); 167 | return true; 168 | } 169 | 170 | // 5. 
Check for existing build-system in old pyproject.toml 171 | let has_build_system = doc.get("build-system").is_some(); 172 | if has_build_system { 173 | debug!("Project already has a build-system section"); 174 | return true; 175 | } 176 | 177 | debug!("No package indicators found, treating as application"); 178 | false 179 | } 180 | 181 | #[cfg(test)] 182 | mod tests { 183 | use super::*; 184 | use std::fs; 185 | use std::path::PathBuf; 186 | use tempfile::TempDir; 187 | 188 | fn setup_test_environment( 189 | old_content: &str, 190 | new_content: &str, 191 | create_setup_py: bool, 192 | create_src_init: bool, 193 | ) -> (TempDir, DocumentMut, PathBuf) { 194 | let temp_dir = TempDir::new().unwrap(); 195 | let project_dir = temp_dir.path().to_path_buf(); 196 | 197 | fs::write(project_dir.join("old.pyproject.toml"), old_content).unwrap(); 198 | fs::write(project_dir.join("pyproject.toml"), new_content).unwrap(); 199 | 200 | if create_setup_py { 201 | fs::write(project_dir.join("setup.py"), "# Test setup.py").unwrap(); 202 | } 203 | 204 | if create_src_init { 205 | let src_dir = project_dir.join("src"); 206 | let pkg_dir = src_dir.join("test_pkg"); 207 | fs::create_dir_all(&pkg_dir).unwrap(); 208 | fs::write(pkg_dir.join("__init__.py"), "# Test init file").unwrap(); 209 | } 210 | 211 | let doc = new_content.parse::().unwrap(); 212 | (temp_dir, doc, project_dir) 213 | } 214 | 215 | #[test] 216 | fn test_poetry_to_hatchling_conversion_with_existing_build_system() { 217 | let old_content = r#" 218 | [tool.poetry] 219 | name = "test-project" 220 | version = "0.1.0" 221 | 222 | [build-system] 223 | requires = ["poetry-core"] 224 | build-backend = "poetry.core.masonry.api" 225 | "#; 226 | 227 | let new_content = r#" 228 | [project] 229 | name = "test-project" 230 | version = "0.1.0" 231 | "#; 232 | 233 | let (_temp_dir, mut doc, project_dir) = 234 | setup_test_environment(old_content, new_content, false, false); 235 | 236 | let result = update_build_system(&mut doc, 
&project_dir).unwrap();
    assert!(result);

    let build_system = doc.get("build-system").unwrap();
    let requires = build_system.get("requires").unwrap().as_array().unwrap();
    assert_eq!(requires.get(0).unwrap().as_str().unwrap(), "hatchling");

    let backend = build_system.get("build-backend").unwrap().as_str().unwrap();
    assert_eq!(backend, "hatchling.build");
}

#[test]
fn test_poetry_to_hatchling_with_poetry_package_config() {
    let old_content = r#"
[tool.poetry]
name = "test-project"
version = "0.1.0"

[tool.poetry.packages]
packages = [
    { from = "src" },
]
"#;

    let new_content = r#"
[project]
name = "test-project"
version = "0.1.0"
"#;

    let (_temp_dir, mut doc, project_dir) =
        setup_test_environment(old_content, new_content, false, false);

    // Poetry package configuration should force a hatchling build-system.
    let result = update_build_system(&mut doc, &project_dir).unwrap();
    assert!(result);

    let build_system = doc.get("build-system").unwrap();
    let requires = build_system.get("requires").unwrap().as_array().unwrap();
    assert_eq!(requires.get(0).unwrap().as_str().unwrap(), "hatchling");
}

#[test]
fn test_poetry_to_hatchling_with_setup_py() {
    let old_content = r#"
[tool.poetry]
name = "test-project"
version = "0.1.0"
"#;

    let new_content = r#"
[project]
name = "test-project"
version = "0.1.0"
"#;

    // `true` here creates a setup.py in the test environment.
    let (_temp_dir, mut doc, project_dir) =
        setup_test_environment(old_content, new_content, true, false);

    let result = update_build_system(&mut doc, &project_dir).unwrap();
    assert!(result);

    let build_system = doc.get("build-system").unwrap();
    assert_eq!(
        build_system.get("build-backend").unwrap().as_str().unwrap(),
        "hatchling.build"
    );
}

#[test]
fn test_poetry_to_hatchling_with_src_init() {
    let old_content = r#"
[tool.poetry]
name = "test-project"
version = "0.1.0"
"#;

    let new_content = r#"
[project]
name = "test-project"
version = "0.1.0"
"#;

    // `true` here creates a src/__init__.py in the test environment.
    let (_temp_dir, mut doc, project_dir) =
        setup_test_environment(old_content, new_content, false, true);

    let result = update_build_system(&mut doc, &project_dir).unwrap();
    assert!(result);

    let build_system = doc.get("build-system").unwrap();
    assert_eq!(
        build_system.get("build-backend").unwrap().as_str().unwrap(),
        "hatchling.build"
    );
}

#[test]
fn test_poetry_to_hatchling_with_pep621_indicators() {
    let old_content = r#"
[project]
name = "test-project"
version = "0.1.0"
description = "A test project"
classifiers = ["Programming Language :: Python"]
keywords = ["test", "project"]
urls = { repository = "https://github.com/user/repo" }
"#;

    let new_content = r#"
[project]
name = "test-project"
version = "0.1.0"
"#;

    let (_temp_dir, mut doc, project_dir) =
        setup_test_environment(old_content, new_content, false, false);

    let result = update_build_system(&mut doc, &project_dir).unwrap();
    assert!(result);

    let build_system = doc.get("build-system").unwrap();
    assert_eq!(
        build_system.get("build-backend").unwrap().as_str().unwrap(),
        "hatchling.build"
    );
}

#[test]
fn test_no_build_system_for_application() {
    let old_content = r#"
[tool.poetry]
name = "test-project"
version = "0.1.0"
description = "A simple application"
"#;

    let new_content = r#"
[project]
name = "test-project"
version = "0.1.0"
"#;

    let (_temp_dir, mut doc, project_dir) =
        setup_test_environment(old_content, new_content, false, false);

    // A plain application gets no [build-system] table at all.
    let result = update_build_system(&mut doc, &project_dir).unwrap();
    assert!(!result);
    assert!(doc.get("build-system").is_none());
}

#[test]
fn test_no_old_pyproject() {
    let new_content = r#"
[project]
name = "test-project"
version = "0.1.0"
"#;

    let temp_dir = TempDir::new().unwrap();
    let mut doc = new_content.parse::<DocumentMut>().unwrap();

    let result = update_build_system(&mut doc, temp_dir.path()).unwrap();
    assert!(!result);
}

#[test]
fn test_determine_if_package_project() {
    // Test with package configuration
    let content = r#"
[tool.poetry]
name = "test-project"
version = "0.1.0"
packages = [
    { include = "src" }
]
"#;
    let doc = content.parse::<DocumentMut>().unwrap();
    let temp_dir = TempDir::new().unwrap();

    let result = determine_if_package_project(&doc, temp_dir.path());
    assert!(
        result,
        "Should detect package project from Poetry packages config"
    );

    // Test with Poetry 2.0 format
    let content2 = r#"
[project]
name = "test-project"
version = "0.1.0"
packages = [
    { include = "src" }
]
"#;
    let doc2 = content2.parse::<DocumentMut>().unwrap();
    let result2 = determine_if_package_project(&doc2, temp_dir.path());
    assert!(
        result2,
        "Should detect package project from Poetry 2.0 packages config"
    );
}

#[test]
fn test_single_package_include() {
    // Test with simple include format
    let content = r#"
[tool.poetry]
name = "test-project"
version = "0.1.0"
packages = [
    { include = "src" }
]
"#;
    let doc = content.parse::<DocumentMut>().unwrap();
    let temp_dir = TempDir::new().unwrap();

    let result = determine_if_package_project(&doc, temp_dir.path());
    assert!(result, "Should detect package from single include format");
}

#[test]
fn test_multiple_package_includes() {
    // Test with multiple includes
    let content = r#"
[tool.poetry]
name = "test-project"
version = "0.1.0"
packages = [
    { include = "src" },
    { include = "lib" }
]
"#;
    let doc = content.parse::<DocumentMut>().unwrap();
    let temp_dir = TempDir::new().unwrap();

    let result = determine_if_package_project(&doc, temp_dir.path());
    assert!(result, "Should detect package from multiple includes");
}
}
--------------------------------------------------------------------------------
/src/utils/file_ops.rs:
--------------------------------------------------------------------------------
use crate::error::{Error, Result};
use log::{debug, info, warn};
use std::collections::HashMap;
use std::fs;
use std::path::{Path, PathBuf};

/// Represents a file change that can be tracked for potential rollback
#[derive(Debug, Clone)]
pub enum FileChange {
    /// File was created (contains its content for potential rollback)
    Created {
        original_existed: bool,
        original_content: Option<Vec<u8>>,
    },
    /// File was renamed (contains source path for potential rollback)
    Renamed { source_path: PathBuf },
}

impl FileChange {
    /// Creates a new FileChange for a created file
    pub fn new_created() -> Self {
        FileChange::Created {
            original_existed: false,
            original_content: None,
        }
    }

    /// Creates a new FileChange for a created file, storing original content for rollback
    pub fn created_with_content(content: Vec<u8>) -> Self {
        FileChange::Created {
            original_existed: true,
            original_content: Some(content),
        }
    }

    /// Creates a new
FileChange for a renamed file
    pub fn renamed(source_path: PathBuf) -> Self {
        FileChange::Renamed { source_path }
    }
}

/// Tracks file changes and provides rollback functionality.
///
/// Changes are recorded in insertion order so a rollback can undo them
/// deterministically in reverse (LIFO) order.
pub struct FileTracker {
    /// Tracked changes, in the order they were recorded
    changes: Vec<(PathBuf, FileChange)>,
    /// Whether automatic restore on drop is enabled
    restore_enabled: bool,
    /// Whether to force rollback regardless of restore_enabled
    force_rollback: bool,
}

impl Default for FileTracker {
    fn default() -> Self {
        Self::new()
    }
}

impl FileTracker {
    /// Creates a new FileTracker with restore on drop enabled
    pub fn new() -> Self {
        Self {
            changes: Vec::new(),
            restore_enabled: true,
            force_rollback: false,
        }
    }

    /// Creates a new FileTracker with restore on drop configurable
    pub fn new_with_restore(restore_enabled: bool) -> Self {
        Self {
            changes: Vec::new(),
            restore_enabled,
            force_rollback: false,
        }
    }

    /// Starts tracking a file.
    ///
    /// If the file already exists its content is captured so rollback can
    /// restore it; otherwise rollback will simply delete the file.
    /// Tracking an already-tracked path is a no-op.
    pub fn track_file(&mut self, path: &Path) -> Result<()> {
        debug!("Tracking file: {}", path.display());

        if self.changes.iter().any(|(p, _)| p == path) {
            debug!("File already tracked: {}", path.display());
            return Ok(());
        }

        let change = if path.exists() {
            let content = fs::read(path).map_err(|e| Error::FileOperation {
                path: path.to_path_buf(),
                message: format!("Failed to read file content: {}", e),
            })?;
            FileChange::created_with_content(content)
        } else {
            FileChange::new_created()
        };
        self.changes.push((path.to_path_buf(), change));

        info!("Started tracking file: {}", path.display());
        Ok(())
    }

    /// Tracks a file rename operation.
    ///
    /// Fails if `source` does not exist.
    pub fn track_rename(&mut self, source: &Path, target: &Path) -> Result<()> {
        debug!(
            "Tracking file rename: {} -> {}",
            source.display(),
            target.display()
        );

        if !source.exists() {
            return Err(Error::FileOperation {
                path: source.to_path_buf(),
                message: "Source file doesn't exist".to_string(),
            });
        }

        // Re-tracking the same target replaces the previous record,
        // matching the old map-based overwrite semantics.
        self.changes.retain(|(p, _)| p != target);
        self.changes.push((
            target.to_path_buf(),
            FileChange::renamed(source.to_path_buf()),
        ));

        info!(
            "Tracked rename operation: {} -> {}",
            source.display(),
            target.display()
        );
        Ok(())
    }

    /// Force rollback of tracked changes
    pub fn force_rollback(&mut self) {
        self.force_rollback = true;
    }

    /// Rollback all tracked changes.
    ///
    /// Fix: changes used to live in a HashMap, whose iteration order is
    /// unspecified, so the "reverse order" rollback was nondeterministic.
    /// Changes are now stored in insertion order and undone strictly
    /// last-recorded-first. On error the remaining entries are kept so a
    /// retry is possible; the list is cleared only after full success.
    pub fn rollback(&mut self) -> Result<()> {
        info!("Rolling back file changes...");

        for (path, change) in self.changes.iter().rev() {
            match change {
                FileChange::Created {
                    original_existed,
                    original_content,
                } => {
                    if *original_existed {
                        if let Some(content) = original_content {
                            fs::write(path, content).map_err(|e| Error::FileOperation {
                                path: path.to_path_buf(),
                                message: format!("Failed to restore file content: {}", e),
                            })?;
                            info!("Restored original content to {}", path.display());
                        }
                    } else if path.exists() {
                        fs::remove_file(path).map_err(|e| Error::FileOperation {
                            path: path.to_path_buf(),
                            message: format!("Failed to remove file: {}", e),
                        })?;
                        info!("Removed created file: {}", path.display());
                    }
                }
                FileChange::Renamed { source_path } => {
                    if path.exists() {
                        if source_path.exists() {
                            // Both files exist, need to move content
                            let content = fs::read(path).map_err(|e| Error::FileOperation {
                                path: path.to_path_buf(),
                                message: format!("Failed to read renamed file: {}", e),
                            })?;
                            fs::write(source_path, content).map_err(|e| {
                                Error::FileOperation {
                                    path: source_path.to_path_buf(),
                                    message: format!("Failed to restore renamed file: {}", e),
                                }
                            })?;
                            fs::remove_file(path).map_err(|e| Error::FileOperation {
                                path: path.to_path_buf(),
                                message: format!("Failed to remove renamed file: {}", e),
                            })?;
                        } else {
                            // Simple rename back
                            fs::rename(path, source_path).map_err(|e| {
                                Error::FileOperation {
                                    path: path.to_path_buf(),
                                    message: format!(
                                        "Failed to rename back to {}: {}",
                                        source_path.display(),
                                        e
                                    ),
                                }
                            })?;
                        }
                        info!(
                            "Renamed file back: {} -> {}",
                            path.display(),
                            source_path.display()
                        );
                    }
                }
            }
        }

        self.changes.clear();
        info!("Rollback completed successfully");
        Ok(())
    }

    /// Clear tracked changes without rollback
    #[allow(dead_code)]
    pub fn clear(&mut self) {
        self.changes.clear();
    }
}

impl Drop for FileTracker {
    fn drop(&mut self) {
        // Only perform rollback if force_rollback is true and restore_enabled is true
        if self.force_rollback && self.restore_enabled && !self.changes.is_empty() {
            if let Err(e) = self.rollback() {
                warn!("Error during automatic rollback: {}", e);
            }
        }
    }
}

/// A guard wrapper around FileTracker that simplifies working with tracked files
pub struct FileTrackerGuard {
    inner: FileTracker,
}

impl Default for FileTrackerGuard {
    fn default() -> Self {
        Self::new()
    }
}

impl FileTrackerGuard {
    /// Creates a new FileTrackerGuard with restore on drop enabled
    pub fn new()
-> Self { 250 | Self { 251 | inner: FileTracker::new(), 252 | } 253 | } 254 | 255 | /// Creates a new FileTrackerGuard with restore on drop configurable 256 | pub fn new_with_restore(restore_enabled: bool) -> Self { 257 | Self { 258 | inner: FileTracker::new_with_restore(restore_enabled), 259 | } 260 | } 261 | 262 | /// Starts tracking a file 263 | pub fn track_file(&mut self, path: &Path) -> Result<()> { 264 | self.inner.track_file(path) 265 | } 266 | 267 | /// Tracks a file rename operation 268 | pub fn track_rename(&mut self, source: &Path, target: &Path) -> Result<()> { 269 | self.inner.track_rename(source, target) 270 | } 271 | 272 | /// Force rollback of tracked changes 273 | pub fn force_rollback(&mut self) { 274 | self.inner.force_rollback(); 275 | } 276 | } 277 | -------------------------------------------------------------------------------- /src/utils/mod.rs: -------------------------------------------------------------------------------- 1 | pub mod file_ops; 2 | pub mod pip; 3 | pub mod pyproject; 4 | pub mod toml; 5 | pub mod uv; 6 | 7 | // Utility modules 8 | pub mod author; 9 | pub mod build_system; 10 | pub mod version; 11 | 12 | // Feature-dependent modules 13 | #[cfg(feature = "self_update")] 14 | mod update; 15 | #[cfg(feature = "self_update")] 16 | pub use update::{check_for_updates, update}; 17 | 18 | // Re-export commonly used items 19 | pub use pip::parse_pip_conf; 20 | -------------------------------------------------------------------------------- /src/utils/pip.rs: -------------------------------------------------------------------------------- 1 | use std::fs::File; 2 | use std::io::{BufRead, BufReader}; 3 | 4 | pub fn parse_pip_conf() -> Result, String> { 5 | let home_dir = 6 | dirs::home_dir().ok_or_else(|| "Unable to determine home directory".to_string())?; 7 | let pip_conf_path = home_dir.join(".pip").join("pip.conf"); 8 | 9 | if !pip_conf_path.exists() { 10 | return Ok(vec![]); 11 | } 12 | 13 | let file = 
File::open(&pip_conf_path).map_err(|e| format!("Failed to open pip.conf: {}", e))?; 14 | let reader = BufReader::new(file); 15 | 16 | let mut extra_urls = vec![]; 17 | for line in reader.lines() { 18 | let line = line.map_err(|e| format!("Failed to read line from pip.conf: {}", e))?; 19 | let trimmed = line.trim(); 20 | if trimmed.starts_with("extra-index-url") { 21 | let parts: Vec<&str> = trimmed.splitn(2, '=').collect(); 22 | if parts.len() == 2 { 23 | extra_urls.push(parts[1].trim().to_string()); 24 | } 25 | } 26 | } 27 | 28 | Ok(extra_urls) 29 | } 30 | -------------------------------------------------------------------------------- /src/utils/toml.rs: -------------------------------------------------------------------------------- 1 | use std::{fs, path::Path}; 2 | 3 | use toml_edit::{DocumentMut, Item, Table}; 4 | 5 | /// Reads a TOML file and returns its content as a DocumentMut. 6 | pub fn read_toml(path: &Path) -> Result { 7 | let content = fs::read_to_string(path) 8 | .map_err(|e| format!("Failed to read TOML file '{}': {}", path.display(), e))?; 9 | 10 | content 11 | .parse::() 12 | .map_err(|e| format!("Failed to parse TOML in '{}': {}", path.display(), e)) 13 | } 14 | 15 | /// Updates or creates a section in a TOML document. 16 | pub fn update_section(doc: &mut DocumentMut, section_path: &[&str], content: Item) { 17 | let mut current = doc.as_table_mut(); 18 | 19 | for §ion in §ion_path[..section_path.len() - 1] { 20 | if !current.contains_key(section) { 21 | let mut new_table = Table::new(); 22 | new_table.set_implicit(true); 23 | current.insert(section, Item::Table(new_table)); 24 | } 25 | current = current[section].as_table_mut().unwrap(); 26 | } 27 | 28 | let last_section = section_path.last().unwrap(); 29 | current.insert(last_section, content); 30 | } 31 | 32 | /// Writes a TOML document to a file, removing any empty sections first. 
33 | pub fn write_toml(path: &Path, doc: &mut DocumentMut) -> Result<(), String> { 34 | cleanup_empty_sections(doc); 35 | fs::write(path, doc.to_string()) 36 | .map_err(|e| format!("Failed to write TOML file '{}': {}", path.display(), e)) 37 | } 38 | 39 | /// Removes empty sections from a TOML document recursively. 40 | pub fn cleanup_empty_sections(doc: &mut DocumentMut) { 41 | let root_table = doc.as_table_mut(); 42 | cleanup_table(root_table); 43 | } 44 | 45 | /// Recursively cleans up empty sections in a TOML table 46 | fn cleanup_table(table: &mut Table) { 47 | // First pass: Collect keys to clean up 48 | let keys_to_check: Vec = table 49 | .iter() 50 | .filter(|(_, value)| value.is_table() || value.is_array()) 51 | .map(|(key, _)| key.to_string()) 52 | .collect(); 53 | 54 | // Second pass: Clean up nested tables 55 | for key in &keys_to_check { 56 | if let Some(value) = table.get_mut(key) { 57 | if let Some(nested_table) = value.as_table_mut() { 58 | cleanup_table(nested_table); 59 | } 60 | } 61 | } 62 | 63 | // Third pass: Remove empty tables and sections 64 | let keys_to_remove: Vec = table 65 | .iter() 66 | .filter(|(_, value)| is_empty_section(value)) 67 | .map(|(key, _)| key.to_string()) 68 | .collect(); 69 | 70 | for key in keys_to_remove { 71 | table.remove(&key); 72 | } 73 | } 74 | 75 | /// Checks if a TOML item is empty 76 | fn is_empty_section(item: &Item) -> bool { 77 | match item { 78 | Item::Table(table) => table.is_empty() || table.iter().all(|(_, v)| is_empty_section(v)), 79 | Item::Value(value) => { 80 | if let Some(array) = value.as_array() { 81 | array.is_empty() 82 | } else { 83 | false 84 | } 85 | } 86 | Item::None => true, 87 | Item::ArrayOfTables(array) => array.is_empty(), 88 | } 89 | } 90 | 91 | /// Defines the expected order of fields within the [project] section 92 | const PROJECT_FIELD_ORDER: &[&str] = &[ 93 | "name", 94 | "version", 95 | "description", 96 | "authors", 97 | "readme", 98 | "requires-python", 99 | "dependencies", 100 
| "classifiers", 101 | "optional-dependencies", 102 | "scripts", 103 | "urls", 104 | ]; 105 | 106 | /// Orders fields within a table according to a predefined order 107 | fn order_table_fields(table: &mut Table, field_order: &[&str]) -> Table { 108 | let mut ordered = Table::new(); 109 | ordered.set_implicit(table.is_implicit()); 110 | 111 | // First add fields in the specified order 112 | for &field in field_order { 113 | if let Some(value) = table.remove(field) { 114 | ordered.insert(field, value); 115 | } 116 | } 117 | 118 | // Then add any remaining fields that weren't in the order list 119 | for (key, value) in table.iter() { 120 | if !field_order.contains(&key.to_string().as_str()) { 121 | ordered.insert(key, value.clone()); 122 | } 123 | } 124 | 125 | ordered 126 | } 127 | 128 | /// Updates the reorder_toml_sections function to include field ordering 129 | pub fn reorder_toml_sections(project_dir: &Path) -> Result<(), String> { 130 | let pyproject_path = project_dir.join("pyproject.toml"); 131 | let content = fs::read_to_string(&pyproject_path) 132 | .map_err(|e| format!("Failed to read pyproject.toml: {}", e))?; 133 | 134 | let mut doc = content 135 | .parse::() 136 | .map_err(|e| format!("Failed to parse TOML: {}", e))?; 137 | 138 | // Order the [project] section fields if it exists 139 | if let Some(Item::Table(project_table)) = doc.get_mut("project") { 140 | let ordered_project = order_table_fields(project_table, PROJECT_FIELD_ORDER); 141 | doc.insert("project", Item::Table(ordered_project)); 142 | } 143 | 144 | // Continue with existing section ordering logic 145 | let mut sections: Vec<(String, Item)> = Vec::new(); 146 | let mut tool_sections: Vec<(String, Item)> = Vec::new(); 147 | 148 | // Collect and categorize sections 149 | for (key, value) in doc.iter() { 150 | let owned_key = key.to_string(); 151 | let owned_value = value.clone(); 152 | 153 | if owned_key.starts_with("tool.") { 154 | tool_sections.push((owned_key, owned_value)); 155 | } else { 
156 | sections.push((owned_key, owned_value)); 157 | } 158 | } 159 | 160 | // Clear the document 161 | let keys_to_remove: Vec = doc.as_table().iter().map(|(k, _)| k.to_string()).collect(); 162 | for key in keys_to_remove { 163 | doc.remove(&key); 164 | } 165 | 166 | // Write back sections in the desired order 167 | let section_order = ["project", "build-system"]; 168 | 169 | // First, add ordered known sections 170 | for §ion_name in section_order.iter() { 171 | if let Some((_, item)) = sections.iter().find(|(key, _)| key == section_name) { 172 | doc.insert(section_name, item.clone()); 173 | } 174 | } 175 | 176 | // Then add any remaining non-tool sections that weren't in the order list 177 | for (key, item) in sections.iter() { 178 | if !section_order.contains(&key.as_str()) { 179 | doc.insert(key, item.clone()); 180 | } 181 | } 182 | 183 | // Finally add tool sections 184 | for (key, item) in tool_sections { 185 | doc.insert(&key, item); 186 | } 187 | 188 | // Write the reordered content back to the file 189 | fs::write(&pyproject_path, doc.to_string()) 190 | .map_err(|e| format!("Failed to write pyproject.toml: {}", e))?; 191 | 192 | Ok(()) 193 | } 194 | 195 | #[cfg(test)] 196 | mod tests { 197 | use super::*; 198 | use std::fs; 199 | use tempfile::TempDir; 200 | 201 | #[test] 202 | fn test_project_field_ordering() { 203 | let temp_dir = TempDir::new().unwrap(); 204 | let input_content = r#"[project] 205 | dependencies = ["package1>=1.0.0"] 206 | name = "test-project" 207 | version = "1.0.0" 208 | authors = [{ name = "Test Author", email = "test@example.com" }] 209 | description = "Test description" 210 | "#; 211 | fs::write(temp_dir.path().join("pyproject.toml"), input_content).unwrap(); 212 | 213 | reorder_toml_sections(temp_dir.path()).unwrap(); 214 | 215 | let result = fs::read_to_string(temp_dir.path().join("pyproject.toml")).unwrap(); 216 | 217 | // Verify field order 218 | let name_pos = result.find("name").unwrap(); 219 | let version_pos = 
result.find("version").unwrap();
        let description_pos = result.find("description").unwrap();
        let authors_pos = result.find("authors").unwrap();
        let dependencies_pos = result.find("dependencies").unwrap();

        assert!(name_pos < version_pos, "name should come before version");
        assert!(
            version_pos < description_pos,
            "version should come before description"
        );
        assert!(
            description_pos < authors_pos,
            "description should come before authors"
        );
        assert!(
            authors_pos < dependencies_pos,
            "authors should come before dependencies"
        );
    }
}
--------------------------------------------------------------------------------
/src/utils/update.rs:
--------------------------------------------------------------------------------
use log::info;
use self_update::cargo_crate_version;

/// Checks if a newer version is available without updating
pub fn check_for_updates() -> Result<bool, String> {
    info!("Checking for updates...");

    let updater = self_update::backends::github::Update::configure()
        .repo_owner("stvnksslr")
        .repo_name("uv-migrator")
        .bin_name("uv-migrator")
        .current_version(cargo_crate_version!())
        .build()
        .map_err(|e| format!("Failed to build updater: {}", e))?;

    let latest_release = updater
        .get_latest_release()
        .map_err(|e| format!("Failed to check for updates: {}", e))?;

    // NOTE(review): any version difference (including a downgrade) counts as
    // "update available" — confirm this is intended before tightening to a
    // semver greater-than comparison.
    let current_version = cargo_crate_version!();
    let update_available = latest_release.version != current_version;

    if update_available {
        info!("New version available: {}", latest_release.version);
    } else {
        info!(
            "No updates available. Already at latest version: {}",
            current_version
        );
    }

    Ok(update_available)
}

/// Downloads and applies the update
pub fn update() -> Result<(), String> {
    info!("Updating to the latest version...");

    let status = self_update::backends::github::Update::configure()
        .repo_owner("stvnksslr")
        .repo_name("uv-migrator")
        .bin_name("uv-migrator")
        .current_version(cargo_crate_version!())
        .build()
        .map_err(|e| format!("Failed to build updater: {}", e))?
        .update()
        .map_err(|e| format!("Failed to update binary: {}", e))?;

    // Plain if/else reads better than match on a bool.
    if status.updated() {
        info!(
            "Updated successfully to version {}! Please restart.",
            status.version()
        );
    } else {
        info!("No updates available. Already at latest version.");
    }
    Ok(())
}
--------------------------------------------------------------------------------
/src/utils/uv.rs:
--------------------------------------------------------------------------------
use log::info;
use semver::Version;
use std::path::{Path, PathBuf};
use std::process::{Command, Output};

/// Minimum required UV version
const MIN_UV_VERSION: &str = "0.5.0";

/// Version that supports the --bare flag
pub const UV_SUPPORT_BARE: &str = "0.6.0";

/// Locates the `uv` executable on PATH.
///
/// NOTE(review): this only finds the binary; the version requirement is
/// enforced separately by `check_uv_requirements`.
pub fn find_uv_path() -> Result<PathBuf, String> {
    // Check if uv is in PATH
    which::which("uv").map_err(|e| format!(
        "The 'uv' command is not available. Please install uv and ensure it's in your PATH. 
Error: {}", 17 | e 18 | )) 19 | } 20 | 21 | /// Gets the current UV version 22 | /// 23 | /// Returns a semver Version that can be compared to determine if 24 | /// we should use certain flags like --bare 25 | pub fn get_uv_version() -> Result { 26 | let uv_path = find_uv_path()?; 27 | 28 | // Get the version by executing uv --version 29 | let output = Command::new(&uv_path) 30 | .arg("--version") 31 | .output() 32 | .map_err(|e| format!("Failed to execute uv --version: {}", e))?; 33 | 34 | if !output.status.success() { 35 | let stderr = String::from_utf8_lossy(&output.stderr); 36 | return Err(format!("Failed to get uv version: {}", stderr)); 37 | } 38 | 39 | let version_output = String::from_utf8_lossy(&output.stdout); 40 | log::debug!("Raw UV version output: {}", version_output); 41 | 42 | // The mock outputs "uv X.Y.Z" directly, so we extract the last part 43 | let version_str = version_output 44 | .split_whitespace() 45 | .nth(1) 46 | .ok_or_else(|| format!("Unexpected uv version format: '{}'", version_output))?; 47 | 48 | log::debug!("Parsed version string: {}", version_str); 49 | 50 | Version::parse(version_str) 51 | .map_err(|e| format!("Failed to parse uv version '{}': {}", version_str, e)) 52 | } 53 | 54 | /// Command builder for UV operations 55 | pub struct UvCommandBuilder { 56 | uv_path: PathBuf, 57 | args: Vec, 58 | working_dir: Option, 59 | } 60 | 61 | impl UvCommandBuilder { 62 | /// Create a new command builder with the UV executable 63 | pub fn new() -> Result { 64 | let uv_path = find_uv_path()?; 65 | Ok(Self { 66 | uv_path, 67 | args: Vec::new(), 68 | working_dir: None, 69 | }) 70 | } 71 | 72 | /// Add an argument to the command 73 | pub fn arg>(mut self, arg: S) -> Self { 74 | self.args.push(arg.into()); 75 | self 76 | } 77 | 78 | /// Add multiple arguments to the command 79 | pub fn args(mut self, args: I) -> Self 80 | where 81 | I: IntoIterator, 82 | S: Into, 83 | { 84 | for arg in args { 85 | self.args.push(arg.into()); 86 | } 87 | self 88 | 
} 89 | 90 | /// Set the working directory for the command 91 | pub fn working_dir>(mut self, dir: P) -> Self { 92 | self.working_dir = Some(dir.as_ref().to_path_buf()); 93 | self 94 | } 95 | 96 | /// Execute the command and return the output 97 | pub fn execute(self) -> Result { 98 | let mut command = Command::new(&self.uv_path); 99 | command.args(&self.args); 100 | 101 | if let Some(dir) = self.working_dir { 102 | command.current_dir(dir); 103 | } 104 | 105 | info!("Executing UV command: {:?}", self.args); 106 | command 107 | .output() 108 | .map_err(|e| format!("Failed to execute UV command: {}", e)) 109 | } 110 | 111 | /// Execute the command and check for success 112 | pub fn execute_success(self) -> Result<(), String> { 113 | let output = self.execute()?; 114 | 115 | if output.status.success() { 116 | Ok(()) 117 | } else { 118 | let stderr = String::from_utf8_lossy(&output.stderr); 119 | Err(format!("UV command failed: {}", stderr)) 120 | } 121 | } 122 | } 123 | 124 | pub fn check_uv_requirements() -> Result<(), String> { 125 | let _uv_path = find_uv_path()?; 126 | 127 | // If uv is found, check its version 128 | let current_version = get_uv_version()?; 129 | 130 | let min_version = Version::parse(MIN_UV_VERSION) 131 | .map_err(|e| format!("Failed to parse minimum version: {}", e))?; 132 | 133 | if current_version < min_version { 134 | return Err(format!( 135 | "uv version {} or higher is required. 
Found version {}", 136 | MIN_UV_VERSION, current_version 137 | )); 138 | } 139 | 140 | Ok(()) 141 | } 142 | -------------------------------------------------------------------------------- /src/utils/version.rs: -------------------------------------------------------------------------------- 1 | use log::debug; 2 | use std::fs; 3 | use std::path::Path; 4 | 5 | /// Clean and validate a version string 6 | fn clean_version(version: &str) -> Option { 7 | let mut cleaned = version.trim().to_string(); 8 | let mut prev_len; 9 | 10 | // Keep cleaning until no more changes occur 11 | loop { 12 | prev_len = cleaned.len(); 13 | cleaned = cleaned 14 | .trim() 15 | .trim_matches('"') 16 | .trim_matches('\'') 17 | .trim_matches(',') 18 | .trim() 19 | .to_string(); 20 | 21 | if cleaned.len() == prev_len { 22 | break; 23 | } 24 | } 25 | 26 | // Basic version validation - should contain at least one number 27 | if cleaned.chars().any(|c| c.is_ascii_digit()) { 28 | Some(cleaned) 29 | } else { 30 | None 31 | } 32 | } 33 | 34 | /// Extracts the version from setup.py, __init__.py, or **version** file 35 | /// 36 | /// # Arguments 37 | /// 38 | /// * `project_dir` - The project directory to search for version information 39 | /// 40 | /// # Returns 41 | /// 42 | /// * `Result, String>` - The version if found, None if not found, or an error 43 | pub fn extract_version(project_dir: &Path) -> Result, String> { 44 | // First try to get version from setup.py 45 | if let Some(version) = extract_version_from_setup_py(project_dir).map_err(|e| e.to_string())? { 46 | debug!("Found version in setup.py: {}", version); 47 | return Ok(Some(version)); 48 | } 49 | 50 | // Then try __init__.py files 51 | if let Some(version) = extract_version_from_init_py(project_dir)? { 52 | debug!("Found version in __init__.py: {}", version); 53 | return Ok(Some(version)); 54 | } 55 | 56 | // Finally, try **version** file 57 | if let Some(version) = extract_version_from_version_file(project_dir)? 
{ 58 | debug!("Found version in **version** file: {}", version); 59 | return Ok(Some(version)); 60 | } 61 | 62 | Ok(None) 63 | } 64 | 65 | /// Extracts version from setup.py file 66 | fn extract_version_from_setup_py(project_dir: &Path) -> crate::error::Result> { 67 | let setup_py_path = project_dir.join("setup.py"); 68 | if !setup_py_path.exists() { 69 | return Ok(None); 70 | } 71 | 72 | let content = 73 | fs::read_to_string(&setup_py_path).map_err(|e| crate::error::Error::FileOperation { 74 | path: setup_py_path.clone(), 75 | message: format!("Failed to read setup.py: {}", e), 76 | })?; 77 | 78 | // Look for version in setup() call 79 | if let Some(start_idx) = content.find("setup(") { 80 | let bracket_content = 81 | crate::migrators::setup_py::SetupPyMigrationSource::extract_setup_content( 82 | &content[start_idx..], 83 | )?; 84 | 85 | if let Some(version) = crate::migrators::setup_py::SetupPyMigrationSource::extract_parameter( 86 | &bracket_content, 87 | "version", 88 | ) { 89 | if let Some(cleaned_version) = clean_version(&version) { 90 | return Ok(Some(cleaned_version)); 91 | } 92 | } 93 | } 94 | 95 | Ok(None) 96 | } 97 | 98 | /// Extracts version from __init__.py file(s) 99 | fn extract_version_from_init_py(project_dir: &Path) -> Result, String> { 100 | // First, try the direct __init__.py in the project directory 101 | let init_path = project_dir.join("__init__.py"); 102 | if let Some(version) = extract_version_from_init_file(&init_path)? { 103 | return Ok(Some(version)); 104 | } 105 | 106 | // Then, look for package directories 107 | for entry in 108 | fs::read_dir(project_dir).map_err(|e| format!("Failed to read project directory: {}", e))? 
109 | { 110 | let entry = entry.map_err(|e| format!("Failed to read directory entry: {}", e))?; 111 | let path = entry.path(); 112 | if path.is_dir() 113 | && !path 114 | .file_name() 115 | .is_none_or(|n| n.to_string_lossy().starts_with('.')) 116 | { 117 | let init_path = path.join("__init__.py"); 118 | if let Some(version) = extract_version_from_init_file(&init_path)? { 119 | return Ok(Some(version)); 120 | } 121 | } 122 | } 123 | 124 | Ok(None) 125 | } 126 | 127 | /// Extracts version from a specific __init__.py file 128 | fn extract_version_from_init_file(init_path: &Path) -> Result, String> { 129 | if !init_path.exists() { 130 | return Ok(None); 131 | } 132 | 133 | let content = fs::read_to_string(init_path) 134 | .map_err(|e| format!("Failed to read {}: {}", init_path.display(), e))?; 135 | 136 | // Look for __version__ = "X.Y.Z" pattern 137 | for line in content.lines() { 138 | let line = line.trim(); 139 | if line.starts_with("__version__") { 140 | // Split by comment character and take first part 141 | let parts: Vec<&str> = line.splitn(2, '#').collect(); 142 | let version_part = parts[0].splitn(2, '=').collect::>(); 143 | if version_part.len() == 2 { 144 | if let Some(cleaned_version) = clean_version(version_part[1]) { 145 | return Ok(Some(cleaned_version)); 146 | } 147 | } 148 | } 149 | } 150 | 151 | Ok(None) 152 | } 153 | 154 | /// Extracts version from **version** file 155 | fn extract_version_from_version_file(project_dir: &Path) -> Result, String> { 156 | let version_path = project_dir.join("**version**"); 157 | if !version_path.exists() { 158 | return Ok(None); 159 | } 160 | 161 | let content = fs::read_to_string(&version_path) 162 | .map_err(|e| format!("Failed to read **version** file: {}", e))?; 163 | 164 | if let Some(cleaned_version) = clean_version(&content) { 165 | Ok(Some(cleaned_version)) 166 | } else { 167 | Ok(None) 168 | } 169 | } 170 | 171 | #[cfg(test)] 172 | mod tests { 173 | use super::*; 174 | use std::fs; 175 | use 
tempfile::TempDir; 176 | 177 | fn create_test_dir() -> TempDir { 178 | TempDir::new().unwrap() 179 | } 180 | 181 | #[test] 182 | fn test_clean_version() { 183 | let test_cases = vec![ 184 | ("1.2.3", Some("1.2.3")), 185 | ("\"1.2.3\"", Some("1.2.3")), 186 | ("'1.2.3'", Some("1.2.3")), 187 | ("1.2.3,", Some("1.2.3")), 188 | (" 1.2.3 ", Some("1.2.3")), 189 | ("\"1.2.3\",", Some("1.2.3")), 190 | ("'1.2.3',", Some("1.2.3")), 191 | (" \"1.2.3\", ", Some("1.2.3")), 192 | (" '1.2.3', ", Some("1.2.3")), 193 | ("__version__,", None), 194 | ("", None), 195 | ("\"\"", None), 196 | ("\",\"", None), 197 | ("version", None), 198 | ]; 199 | 200 | for (input, expected) in test_cases { 201 | assert_eq!( 202 | clean_version(input), 203 | expected.map(String::from), 204 | "Failed for input: {:?}", 205 | input 206 | ); 207 | } 208 | } 209 | 210 | #[test] 211 | fn test_extract_version_from_init_py() { 212 | let temp_dir = create_test_dir(); 213 | let pkg_dir = temp_dir.path().join("my_package"); 214 | fs::create_dir(&pkg_dir).unwrap(); 215 | 216 | let init_content = r#" 217 | from .core import something 218 | 219 | __version__ = "1.2.0" 220 | 221 | def setup(): 222 | pass 223 | "#; 224 | fs::write(pkg_dir.join("__init__.py"), init_content).unwrap(); 225 | 226 | let version = extract_version(temp_dir.path()).unwrap(); 227 | assert_eq!(version, Some("1.2.0".to_string())); 228 | } 229 | 230 | #[test] 231 | fn test_extract_version_from_init_py_single_quotes() { 232 | let temp_dir = create_test_dir(); 233 | let pkg_dir = temp_dir.path().join("my_package"); 234 | fs::create_dir(&pkg_dir).unwrap(); 235 | 236 | let init_content = "__version__ = '1.2.0'"; 237 | fs::write(pkg_dir.join("__init__.py"), init_content).unwrap(); 238 | 239 | let version = extract_version(temp_dir.path()).unwrap(); 240 | assert_eq!(version, Some("1.2.0".to_string())); 241 | } 242 | 243 | #[test] 244 | fn test_extract_version_with_multiple_sources() { 245 | let temp_dir = create_test_dir(); 246 | 247 | // Create 
setup.py with version 248 | let setup_py_content = r#" 249 | from setuptools import setup 250 | 251 | setup( 252 | name="test", 253 | version="2.0.0", 254 | description="Test project" 255 | ) 256 | "#; 257 | fs::write(temp_dir.path().join("setup.py"), setup_py_content).unwrap(); 258 | 259 | // Create package with __init__.py 260 | let pkg_dir = temp_dir.path().join("my_package"); 261 | fs::create_dir(&pkg_dir).unwrap(); 262 | fs::write(pkg_dir.join("__init__.py"), r#"__version__ = "1.2.0""#).unwrap(); 263 | 264 | // Create **version** file 265 | fs::write(temp_dir.path().join("**version**"), "3.0.0\n").unwrap(); 266 | 267 | // Should prefer setup.py version 268 | let version = extract_version(temp_dir.path()).unwrap(); 269 | assert_eq!(version, Some("2.0.0".to_string())); 270 | } 271 | 272 | #[test] 273 | fn test_extract_version_precedence() { 274 | let temp_dir = create_test_dir(); 275 | let pkg_dir = temp_dir.path().join("my_package"); 276 | fs::create_dir(&pkg_dir).unwrap(); 277 | 278 | // Create only __init__.py and **version** 279 | fs::write(pkg_dir.join("__init__.py"), r#"__version__ = "1.2.0""#).unwrap(); 280 | fs::write(temp_dir.path().join("**version**"), "3.0.0\n").unwrap(); 281 | 282 | // Should prefer __init__.py version when setup.py is absent 283 | let version = extract_version(temp_dir.path()).unwrap(); 284 | assert_eq!(version, Some("1.2.0".to_string())); 285 | } 286 | 287 | #[test] 288 | fn test_extract_version_with_invalid_values() { 289 | let temp_dir = create_test_dir(); 290 | let pkg_dir = temp_dir.path().join("my_package"); 291 | fs::create_dir(&pkg_dir).unwrap(); 292 | 293 | // Test with invalid version string 294 | fs::write( 295 | pkg_dir.join("__init__.py"), 296 | r#"__version__ = "__version__,""#, 297 | ) 298 | .unwrap(); 299 | 300 | let version = extract_version(temp_dir.path()).unwrap(); 301 | assert_eq!(version, None); 302 | } 303 | 304 | #[test] 305 | fn test_extract_version_with_comma() { 306 | let temp_dir = create_test_dir(); 307 
| let pkg_dir = temp_dir.path().join("my_package"); 308 | fs::create_dir(&pkg_dir).unwrap(); 309 | 310 | // Test various combinations of quotes, commas, and comments 311 | let test_cases = vec![ 312 | r#"__version__ = "1.2.0","#, 313 | r#"__version__ = '1.2.0',"#, 314 | r#"__version__ = "1.2.0", "#, 315 | r#"__version__ = "1.2.0", # Comment"#, 316 | r#"__version__ = "1.2.0" # Comment"#, 317 | r#"__version__ = '1.2.0' # With spaces and comment"#, 318 | r#"__version__ = "1.2.0",# No space before comment"#, 319 | ]; 320 | 321 | for test_case in test_cases { 322 | fs::write(pkg_dir.join("__init__.py"), test_case).unwrap(); 323 | let version = extract_version(temp_dir.path()).unwrap(); 324 | assert_eq!( 325 | version, 326 | Some("1.2.0".to_string()), 327 | "Failed for case: {}", 328 | test_case 329 | ); 330 | fs::remove_file(pkg_dir.join("__init__.py")).unwrap(); 331 | } 332 | } 333 | } 334 | -------------------------------------------------------------------------------- /tests/conda_test.rs: -------------------------------------------------------------------------------- 1 | use std::fs; 2 | use std::path::PathBuf; 3 | use tempfile::TempDir; 4 | use uv_migrator::DependencyType; 5 | use uv_migrator::migrators::MigrationSource; 6 | use uv_migrator::migrators::conda::CondaMigrationSource; 7 | 8 | /// Helper function to create a temporary test project with an environment.yml file. 
9 | /// 10 | /// # Arguments 11 | /// 12 | /// * `content` - The content to write to the environment.yml file 13 | /// 14 | /// # Returns 15 | /// 16 | /// A tuple containing the temporary directory and its path 17 | fn create_test_environment(content: &str) -> (TempDir, PathBuf) { 18 | let temp_dir = TempDir::new().unwrap(); 19 | let project_dir = temp_dir.path().to_path_buf(); 20 | 21 | fs::write(project_dir.join("environment.yml"), content).unwrap(); 22 | 23 | (temp_dir, project_dir) 24 | } 25 | 26 | /// Test detection of Conda projects 27 | #[test] 28 | fn test_detect_conda_project() { 29 | let temp_dir = TempDir::new().unwrap(); 30 | let project_dir = temp_dir.path().to_path_buf(); 31 | 32 | // Test with environment.yml 33 | fs::write(project_dir.join("environment.yml"), "").unwrap(); 34 | assert!(CondaMigrationSource::detect_project_type(&project_dir)); 35 | 36 | // Clean up and test with environment.yaml 37 | fs::remove_file(project_dir.join("environment.yml")).unwrap(); 38 | fs::write(project_dir.join("environment.yaml"), "").unwrap(); 39 | assert!(CondaMigrationSource::detect_project_type(&project_dir)); 40 | 41 | // Test without environment file 42 | fs::remove_file(project_dir.join("environment.yaml")).unwrap(); 43 | assert!(!CondaMigrationSource::detect_project_type(&project_dir)); 44 | } 45 | 46 | /// Test extraction of basic Conda dependencies 47 | #[test] 48 | fn test_extract_basic_conda_dependencies() { 49 | let content = r#" 50 | name: test-env 51 | channels: 52 | - conda-forge 53 | - defaults 54 | dependencies: 55 | - python=3.9 56 | - numpy=1.21.5 57 | - pandas>=1.3.0 58 | - scikit-learn 59 | - matplotlib 60 | "#; 61 | 62 | let (_temp_dir, project_dir) = create_test_environment(content); 63 | let source = CondaMigrationSource; 64 | let dependencies = source.extract_dependencies(&project_dir).unwrap(); 65 | 66 | // Should skip python 67 | assert_eq!(dependencies.len(), 4); 68 | 69 | // Check numpy 70 | let numpy_dep = dependencies.iter().find(|d| 
d.name == "numpy").unwrap(); 71 | assert_eq!(numpy_dep.version, Some("1.21.5".to_string())); 72 | assert_eq!(numpy_dep.dep_type, DependencyType::Main); 73 | 74 | // Check pandas 75 | let pandas_dep = dependencies.iter().find(|d| d.name == "pandas").unwrap(); 76 | assert_eq!(pandas_dep.version, Some(">=1.3.0".to_string())); 77 | 78 | // Check scikit-learn (no version) 79 | let sklearn_dep = dependencies 80 | .iter() 81 | .find(|d| d.name == "scikit-learn") 82 | .unwrap(); 83 | assert_eq!(sklearn_dep.version, None); 84 | } 85 | 86 | /// Test extraction of dependencies with wildcards 87 | #[test] 88 | fn test_extract_wildcard_dependencies() { 89 | let content = r#" 90 | name: test-env 91 | dependencies: 92 | - numpy=1.21.* 93 | - pandas=1.* 94 | - scipy=* 95 | "#; 96 | 97 | let (_temp_dir, project_dir) = create_test_environment(content); 98 | let source = CondaMigrationSource; 99 | let dependencies = source.extract_dependencies(&project_dir).unwrap(); 100 | 101 | assert_eq!(dependencies.len(), 3); 102 | 103 | // Check numpy with minor wildcard 104 | let numpy_dep = dependencies.iter().find(|d| d.name == "numpy").unwrap(); 105 | assert_eq!(numpy_dep.version, Some(">=1.21.0,<1.22.0".to_string())); 106 | 107 | // Check pandas with major wildcard 108 | let pandas_dep = dependencies.iter().find(|d| d.name == "pandas").unwrap(); 109 | assert_eq!(pandas_dep.version, Some(">=1.0.0,<2.0.0".to_string())); 110 | 111 | // Check scipy with any version 112 | let scipy_dep = dependencies.iter().find(|d| d.name == "scipy").unwrap(); 113 | assert_eq!(scipy_dep.version, None); 114 | } 115 | 116 | /// Test extraction of pip dependencies within Conda environment 117 | #[test] 118 | fn test_extract_pip_dependencies() { 119 | let content = r#" 120 | name: test-env 121 | channels: 122 | - conda-forge 123 | dependencies: 124 | - python=3.9 125 | - numpy 126 | - pip 127 | - pip: 128 | - requests==2.28.0 129 | - flask>=2.0.0 130 | - django[rest]>=4.0.0 131 | - beautifulsoup4 132 | "#; 133 | 
134 | let (_temp_dir, project_dir) = create_test_environment(content); 135 | let source = CondaMigrationSource; 136 | let dependencies = source.extract_dependencies(&project_dir).unwrap(); 137 | 138 | // Should have 1 conda dep (numpy) + 4 pip deps 139 | assert_eq!(dependencies.len(), 5); 140 | 141 | // Check requests 142 | let requests_dep = dependencies.iter().find(|d| d.name == "requests").unwrap(); 143 | assert_eq!(requests_dep.version, Some("==2.28.0".to_string())); 144 | 145 | // Check flask 146 | let flask_dep = dependencies.iter().find(|d| d.name == "flask").unwrap(); 147 | assert_eq!(flask_dep.version, Some(">=2.0.0".to_string())); 148 | 149 | // Check django with extras 150 | let django_dep = dependencies.iter().find(|d| d.name == "django").unwrap(); 151 | assert_eq!(django_dep.version, Some(">=4.0.0".to_string())); 152 | assert_eq!(django_dep.extras, Some(vec!["rest".to_string()])); 153 | } 154 | 155 | /// Test package name mapping from Conda to PyPI 156 | #[test] 157 | fn test_conda_to_pypi_mapping() { 158 | let content = r#" 159 | name: ml-env 160 | dependencies: 161 | - pytorch 162 | - tensorflow-gpu 163 | - py-opencv 164 | - pillow-simd 165 | "#; 166 | 167 | let (_temp_dir, project_dir) = create_test_environment(content); 168 | let source = CondaMigrationSource; 169 | let dependencies = source.extract_dependencies(&project_dir).unwrap(); 170 | 171 | // Check pytorch -> torch 172 | assert!(dependencies.iter().any(|d| d.name == "torch")); 173 | assert!(!dependencies.iter().any(|d| d.name == "pytorch")); 174 | 175 | // Check tensorflow-gpu -> tensorflow 176 | assert!(dependencies.iter().any(|d| d.name == "tensorflow")); 177 | assert!(!dependencies.iter().any(|d| d.name == "tensorflow-gpu")); 178 | 179 | // Check py-opencv -> opencv-python 180 | assert!(dependencies.iter().any(|d| d.name == "opencv-python")); 181 | assert!(!dependencies.iter().any(|d| d.name == "py-opencv")); 182 | 183 | // Check pillow-simd -> pillow 184 | 
assert!(dependencies.iter().any(|d| d.name == "pillow")); 185 | assert!(!dependencies.iter().any(|d| d.name == "pillow-simd")); 186 | } 187 | 188 | /// Test that system packages are skipped 189 | #[test] 190 | fn test_skip_system_packages() { 191 | let content = r#" 192 | name: test-env 193 | dependencies: 194 | - python=3.9 195 | - numpy 196 | - libgcc-ng 197 | - openssl 198 | - mkl 199 | - cudatoolkit 200 | - gcc 201 | - make 202 | "#; 203 | 204 | let (_temp_dir, project_dir) = create_test_environment(content); 205 | let source = CondaMigrationSource; 206 | let dependencies = source.extract_dependencies(&project_dir).unwrap(); 207 | 208 | // Should only have numpy 209 | assert_eq!(dependencies.len(), 1); 210 | assert_eq!(dependencies[0].name, "numpy"); 211 | } 212 | 213 | /// Test extraction of Python version from environment 214 | #[test] 215 | fn test_extract_python_version() { 216 | let content = r#" 217 | name: test-env 218 | dependencies: 219 | - python=3.9.7 220 | - numpy 221 | "#; 222 | 223 | let (_temp_dir, project_dir) = create_test_environment(content); 224 | let python_version = 225 | CondaMigrationSource::extract_python_version_from_environment(&project_dir).unwrap(); 226 | 227 | assert_eq!(python_version, Some("3.9".to_string())); 228 | } 229 | 230 | /// Test complex pip dependencies with extras and markers 231 | #[test] 232 | fn test_complex_pip_dependencies() { 233 | let content = r#" 234 | name: test-env 235 | dependencies: 236 | - pip: 237 | - "apache-airflow[postgres,google]==2.7.0" 238 | - "pytest[testing]>=7.0.0" 239 | - "torch[cpu]>=2.0.0,<3.0.0" 240 | "#; 241 | 242 | let (_temp_dir, project_dir) = create_test_environment(content); 243 | let source = CondaMigrationSource; 244 | let dependencies = source.extract_dependencies(&project_dir).unwrap(); 245 | 246 | // Check apache-airflow with multiple extras 247 | let airflow_dep = dependencies 248 | .iter() 249 | .find(|d| d.name == "apache-airflow") 250 | .unwrap(); 251 | 
assert_eq!(airflow_dep.version, Some("==2.7.0".to_string())); 252 | assert_eq!( 253 | airflow_dep.extras, 254 | Some(vec!["postgres".to_string(), "google".to_string()]) 255 | ); 256 | 257 | // Check torch with version range 258 | let torch_dep = dependencies.iter().find(|d| d.name == "torch").unwrap(); 259 | assert_eq!(torch_dep.version, Some(">=2.0.0,<3.0.0".to_string())); 260 | assert_eq!(torch_dep.extras, Some(vec!["cpu".to_string()])); 261 | } 262 | 263 | /// Test handling of missing or empty environment file 264 | #[test] 265 | fn test_empty_environment() { 266 | let content = r#" 267 | name: empty-env 268 | "#; 269 | 270 | let (_temp_dir, project_dir) = create_test_environment(content); 271 | let source = CondaMigrationSource; 272 | let dependencies = source.extract_dependencies(&project_dir).unwrap(); 273 | 274 | assert!(dependencies.is_empty()); 275 | } 276 | 277 | /// Test handling of environment file without dependencies section 278 | #[test] 279 | fn test_no_dependencies_section() { 280 | let content = r#" 281 | name: test-env 282 | channels: 283 | - conda-forge 284 | - defaults 285 | "#; 286 | 287 | let (_temp_dir, project_dir) = create_test_environment(content); 288 | let source = CondaMigrationSource; 289 | let dependencies = source.extract_dependencies(&project_dir).unwrap(); 290 | 291 | assert!(dependencies.is_empty()); 292 | } 293 | -------------------------------------------------------------------------------- /tests/dependency_format_test.rs: -------------------------------------------------------------------------------- 1 | use uv_migrator::migrators; 2 | use uv_migrator::models::{Dependency, DependencyType}; 3 | 4 | /// Test formatting of dependencies for UV CLI use. 5 | /// 6 | /// This test verifies that the format_dependency function correctly formats: 7 | /// 1. Simple dependencies with version 8 | /// 2. Dependencies with extras 9 | /// 3. Dependencies with multiple extras 10 | /// 4. 
Dependencies with environment markers 11 | #[test] 12 | fn test_format_dependency() { 13 | // Use the public format_dependency function directly 14 | let format_dependency = migrators::format_dependency; 15 | 16 | // Test simple dependency with version 17 | let dep1 = Dependency { 18 | name: "requests".to_string(), 19 | version: Some("2.28.1".to_string()), 20 | dep_type: DependencyType::Main, 21 | environment_markers: None, 22 | extras: None, 23 | }; 24 | assert_eq!(format_dependency(&dep1), "requests==2.28.1"); 25 | 26 | // Test dependency with extras 27 | let dep2 = Dependency { 28 | name: "uvicorn".to_string(), 29 | version: Some("^0.30.1".to_string()), 30 | dep_type: DependencyType::Main, 31 | environment_markers: None, 32 | extras: Some(vec!["standard".to_string()]), 33 | }; 34 | assert_eq!(format_dependency(&dep2), "uvicorn[standard]>=0.30.1"); 35 | 36 | // Test dependency with multiple extras 37 | let dep3 = Dependency { 38 | name: "ibis-framework".to_string(), 39 | version: Some("^10.0.0".to_string()), 40 | dep_type: DependencyType::Main, 41 | environment_markers: None, 42 | extras: Some(vec![ 43 | "bigquery".to_string(), 44 | "duckdb".to_string(), 45 | "polars".to_string(), 46 | ]), 47 | }; 48 | assert_eq!( 49 | format_dependency(&dep3), 50 | "ibis-framework[bigquery,duckdb,polars]>=10.0.0" 51 | ); 52 | 53 | // Test dependency with environment markers 54 | let dep4 = Dependency { 55 | name: "dataclasses".to_string(), 56 | version: Some("1.0.0".to_string()), 57 | dep_type: DependencyType::Main, 58 | environment_markers: Some("python_version < '3.7'".to_string()), 59 | extras: None, 60 | }; 61 | assert_eq!( 62 | format_dependency(&dep4), 63 | "dataclasses==1.0.0; python_version < '3.7'" 64 | ); 65 | 66 | // Test dependency with both extras and environment markers 67 | let dep5 = Dependency { 68 | name: "django".to_string(), 69 | version: Some("~=4.2.0".to_string()), 70 | dep_type: DependencyType::Main, 71 | environment_markers: Some("platform_system != 
'Windows'".to_string()), 72 | extras: Some(vec!["rest".to_string(), "admin".to_string()]), 73 | }; 74 | assert_eq!( 75 | format_dependency(&dep5), 76 | "django[rest,admin]~=4.2.0; platform_system != 'Windows'" 77 | ); 78 | } 79 | -------------------------------------------------------------------------------- /tests/file_tracker_test.rs: -------------------------------------------------------------------------------- 1 | use std::fs; 2 | use std::path::PathBuf; 3 | use tempfile::TempDir; 4 | use uv_migrator::utils::file_ops::FileTrackerGuard; 5 | 6 | #[cfg(test)] 7 | mod tests { 8 | use super::*; 9 | 10 | /// Creates a temporary test environment with directory and file. 11 | /// 12 | /// Returns: 13 | /// - TempDir: The temporary directory handle (automatically cleaned up when dropped) 14 | /// - PathBuf: Path to the project directory 15 | /// - PathBuf: Path to a test file within the project directory 16 | fn setup_test_environment() -> (TempDir, PathBuf, PathBuf) { 17 | let temp_dir = TempDir::new().unwrap(); 18 | let project_dir = temp_dir.path().to_path_buf(); 19 | let test_file = project_dir.join("test.txt"); 20 | fs::write(&test_file, "test content").unwrap(); 21 | (temp_dir, project_dir, test_file) 22 | } 23 | 24 | /// Tests that a new file can be tracked successfully. 25 | /// 26 | /// This test verifies that: 27 | /// 1. A file can be added to tracking 28 | /// 2. The tracking operation completes without errors 29 | #[test] 30 | fn test_track_new_file() { 31 | let (_temp_dir, _project_dir, test_file) = setup_test_environment(); 32 | let mut guard = FileTrackerGuard::new(); 33 | let result = guard.track_file(&test_file); 34 | assert!(result.is_ok()); 35 | } 36 | 37 | /// Tests that tracking the same file twice is idempotent. 38 | /// 39 | /// This test verifies that: 40 | /// 1. A file can be tracked multiple times 41 | /// 2. 
Subsequent tracking of the same file doesn't cause errors 42 | #[test] 43 | fn test_track_same_file_twice() { 44 | let (_temp_dir, _project_dir, test_file) = setup_test_environment(); 45 | let mut guard = FileTrackerGuard::new(); 46 | 47 | assert!(guard.track_file(&test_file).is_ok()); 48 | assert!(guard.track_file(&test_file).is_ok()); 49 | } 50 | 51 | /// Tests file rename tracking functionality. 52 | /// 53 | /// This test verifies that: 54 | /// 1. A file rename operation can be tracked 55 | /// 2. The tracking completes successfully 56 | #[test] 57 | fn test_track_rename() { 58 | let (_temp_dir, project_dir, test_file) = setup_test_environment(); 59 | let new_path = project_dir.join("renamed.txt"); 60 | let mut guard = FileTrackerGuard::new(); 61 | 62 | let result = guard.track_rename(&test_file, &new_path); 63 | assert!(result.is_ok()); 64 | } 65 | 66 | /// Tests handling of rename operations with nonexistent source files. 67 | /// 68 | /// This test verifies that: 69 | /// 1. Attempting to track a rename of a nonexistent file results in an error 70 | /// 2. The error message correctly indicates the file doesn't exist 71 | #[test] 72 | fn test_track_rename_nonexistent_file() { 73 | let (_temp_dir, project_dir, _test_file) = setup_test_environment(); 74 | let nonexistent = project_dir.join("nonexistent.txt"); 75 | let new_path = project_dir.join("renamed.txt"); 76 | let mut guard = FileTrackerGuard::new(); 77 | 78 | let result = guard.track_rename(&nonexistent, &new_path); 79 | assert!(result.is_err()); 80 | assert!(result.unwrap_err().contains("Source file doesn't exist")); 81 | } 82 | 83 | /// Tests automatic rollback functionality of FileTrackerGuard. 84 | /// 85 | /// This test verifies that: 86 | /// 1. When force_rollback is called, the guard restores files 87 | /// 2. Files are restored to their original state 88 | /// 3. 
The rollback occurs when the guard is dropped 89 | #[test] 90 | fn test_file_tracker_guard_auto_rollback() { 91 | let (_temp_dir, project_dir, _) = setup_test_environment(); 92 | let pyproject_path = project_dir.join("pyproject.toml"); 93 | let backup_path = project_dir.join("old.pyproject.toml"); 94 | 95 | // Create initial pyproject.toml 96 | fs::write(&pyproject_path, "original content").unwrap(); 97 | 98 | { 99 | let mut guard = FileTrackerGuard::new(); 100 | guard.track_rename(&pyproject_path, &backup_path).unwrap(); 101 | fs::rename(&pyproject_path, &backup_path).unwrap(); 102 | 103 | // Force rollback 104 | guard.force_rollback(); 105 | } // Guard is dropped here 106 | 107 | // Verify original file is restored 108 | assert!(pyproject_path.exists()); 109 | let content = fs::read_to_string(&pyproject_path).unwrap(); 110 | assert_eq!(content, "original content"); 111 | } 112 | 113 | /// Tests that rollback properly restores files to their original state. 114 | /// 115 | /// This test verifies that: 116 | /// 1. Files are properly backed up during rename operations 117 | /// 2. Original content is preserved 118 | /// 3. 
Rollback restores both the file and its content 119 | #[test] 120 | fn test_rollback_restores_files() { 121 | let (_temp_dir, project_dir, _) = setup_test_environment(); 122 | let pyproject_path = project_dir.join("pyproject.toml"); 123 | let backup_path = project_dir.join("old.pyproject.toml"); 124 | 125 | // Create and track initial pyproject.toml 126 | fs::write(&pyproject_path, "original content").unwrap(); 127 | 128 | { 129 | let mut guard = FileTrackerGuard::new(); 130 | guard.track_rename(&pyproject_path, &backup_path).unwrap(); 131 | fs::rename(&pyproject_path, &backup_path).unwrap(); 132 | fs::write(&pyproject_path, "new content").unwrap(); 133 | guard.force_rollback(); 134 | } // Guard is dropped here 135 | 136 | assert!(pyproject_path.exists()); 137 | let content = fs::read_to_string(&pyproject_path).unwrap(); 138 | assert_eq!(content, "original content"); 139 | } 140 | 141 | /// Tests handling of files in nested directories. 142 | /// 143 | /// This test verifies that: 144 | /// 1. Files in nested directories can be tracked 145 | /// 2. Parent directories are properly handled 146 | /// 3. Tracking works with deep directory structures 147 | #[test] 148 | fn test_nested_directory_creation() { 149 | let temp_dir = TempDir::new().unwrap(); 150 | let nested_path = temp_dir.path().join("nested").join("dir").join("file.txt"); 151 | 152 | // Create parent directories first 153 | if let Some(parent) = nested_path.parent() { 154 | fs::create_dir_all(parent).unwrap(); 155 | } 156 | fs::write(&nested_path, "test content").unwrap(); 157 | 158 | let mut guard = FileTrackerGuard::new(); 159 | assert!(guard.track_file(&nested_path).is_ok()); 160 | } 161 | 162 | /// Tests tracking of multiple file operations. 163 | /// 164 | /// This test verifies that: 165 | /// 1. Multiple files can be tracked simultaneously 166 | /// 2. Different operations (track and rename) can be mixed 167 | /// 3. 
All operations complete successfully 168 | #[test] 169 | fn test_multiple_operations() { 170 | let (_temp_dir, project_dir, _) = setup_test_environment(); 171 | let mut guard = FileTrackerGuard::new(); 172 | 173 | let file1 = project_dir.join("file1.txt"); 174 | let file2 = project_dir.join("file2.txt"); 175 | let file3 = project_dir.join("file3.txt"); 176 | 177 | fs::write(&file1, "content1").unwrap(); 178 | fs::write(&file2, "content2").unwrap(); 179 | 180 | // Track multiple files 181 | assert!(guard.track_file(&file1).is_ok()); 182 | assert!(guard.track_file(&file2).is_ok()); 183 | 184 | // Perform a rename 185 | assert!(guard.track_rename(&file1, &file3).is_ok()); 186 | } 187 | } 188 | -------------------------------------------------------------------------------- /tests/pipenv_test.rs: -------------------------------------------------------------------------------- 1 | use std::fs; 2 | use tempfile::TempDir; 3 | use uv_migrator::DependencyType; 4 | use uv_migrator::migrators::MigrationSource; 5 | use uv_migrator::migrators::pipenv::PipenvMigrationSource; 6 | 7 | /// Test extracting dependencies from a simple Pipenv project 8 | /// 9 | /// This test verifies that a basic Pipenv configuration 10 | /// has its dependencies correctly extracted without needing to run the full migration. 11 | /// We manually create a basic Pipfile with minimal content to test the parser. 
12 | #[test] 13 | fn test_extract_pipenv_dependencies() { 14 | // Create a temporary directory for our test project 15 | let temp_dir = TempDir::new().unwrap(); 16 | let project_dir = temp_dir.path().to_path_buf(); 17 | 18 | // Create a simple Pipfile 19 | let pipfile_content = r#"[[source]] 20 | url = "https://pypi.org/simple" 21 | verify_ssl = true 22 | name = "pypi" 23 | 24 | [packages] 25 | fastapi = "*" 26 | 27 | [dev-packages] 28 | 29 | [requires] 30 | python_version = "3.12" 31 | "#; 32 | 33 | // Create Pipfile.lock to ensure detection works 34 | let pipfile_lock_content = r#"{ 35 | "default": { 36 | "fastapi": { 37 | "version": "==0.115.8" 38 | } 39 | }, 40 | "develop": {} 41 | }"#; 42 | 43 | // Write the Pipfile to the temporary directory 44 | fs::write(project_dir.join("Pipfile"), pipfile_content).unwrap(); 45 | fs::write(project_dir.join("Pipfile.lock"), pipfile_lock_content).unwrap(); 46 | 47 | // Use PipenvMigrationSource to extract dependencies 48 | let source = PipenvMigrationSource; 49 | let dependencies = source.extract_dependencies(&project_dir).unwrap(); 50 | 51 | // Verify the dependencies were extracted correctly 52 | assert!(!dependencies.is_empty(), "No dependencies were extracted"); 53 | 54 | // Check if fastapi is in the dependencies 55 | let fastapi_dep = dependencies.iter().find(|d| d.name == "fastapi"); 56 | assert!(fastapi_dep.is_some(), "FastAPI dependency not found"); 57 | 58 | // Check if it has the correct type 59 | let fastapi_dep = fastapi_dep.unwrap(); 60 | assert_eq!(fastapi_dep.dep_type, DependencyType::Main); 61 | 62 | // Check if python_version is correctly ignored 63 | let python_dep = dependencies.iter().find(|d| d.name == "python_version"); 64 | assert!(python_dep.is_none(), "Python version should be ignored"); 65 | 66 | // Verify there are no dev dependencies 67 | let dev_deps: Vec<_> = dependencies 68 | .iter() 69 | .filter(|d| matches!(d.dep_type, DependencyType::Dev)) 70 | .collect(); 71 | assert!(dev_deps.is_empty(), 
"There should be no dev dependencies"); 72 | } 73 | 74 | /// Test Pipenv project structure detection 75 | /// 76 | /// This test verifies that a directory with both Pipfile and Pipfile.lock 77 | /// is correctly detected as a Pipenv project. 78 | #[test] 79 | fn test_detect_pipenv_project() { 80 | // Create a temporary directory for our test project 81 | let temp_dir = TempDir::new().unwrap(); 82 | let project_dir = temp_dir.path().to_path_buf(); 83 | 84 | // Create both Pipfile and Pipfile.lock 85 | fs::write(project_dir.join("Pipfile"), "").unwrap(); 86 | fs::write(project_dir.join("Pipfile.lock"), "{}").unwrap(); 87 | 88 | // Use PipenvMigrationSource to detect project type 89 | let is_pipenv = PipenvMigrationSource::detect_project_type(&project_dir); 90 | 91 | // Verify it's detected as a Pipenv project 92 | assert!( 93 | is_pipenv, 94 | "Directory with Pipfile and Pipfile.lock should be detected as Pipenv project" 95 | ); 96 | 97 | // Create another temporary directory with only one file 98 | let temp_dir2 = TempDir::new().unwrap(); 99 | let project_dir2 = temp_dir2.path().to_path_buf(); 100 | 101 | // Add only Pipfile 102 | fs::write(project_dir2.join("Pipfile"), "").unwrap(); 103 | 104 | // Verify it's not detected as a Pipenv project 105 | let is_pipenv2 = PipenvMigrationSource::detect_project_type(&project_dir2); 106 | assert!( 107 | !is_pipenv2, 108 | "Directory with only Pipfile should not be detected as Pipenv project" 109 | ); 110 | 111 | // Test with only Pipfile.lock 112 | let temp_dir3 = TempDir::new().unwrap(); 113 | let project_dir3 = temp_dir3.path().to_path_buf(); 114 | 115 | // Add only Pipfile.lock 116 | fs::write(project_dir3.join("Pipfile.lock"), "{}").unwrap(); 117 | 118 | // Verify it's not detected as a Pipenv project 119 | let is_pipenv3 = PipenvMigrationSource::detect_project_type(&project_dir3); 120 | assert!( 121 | !is_pipenv3, 122 | "Directory with only Pipfile.lock should not be detected as Pipenv project" 123 | ); 124 | } 125 | 
-------------------------------------------------------------------------------- /tests/poetry_git_deps_test.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod git_dependency_tests { 3 | use std::fs; 4 | use std::path::PathBuf; 5 | use tempfile::TempDir; 6 | use uv_migrator::migrators::common::perform_poetry_migration; 7 | use uv_migrator::utils::file_ops::FileTrackerGuard; 8 | 9 | fn create_test_poetry_project_with_git_deps() -> (TempDir, PathBuf) { 10 | let temp_dir = TempDir::new().unwrap(); 11 | let project_dir = temp_dir.path().to_path_buf(); 12 | 13 | // Create pyproject.toml with git dependency 14 | let content = r#" 15 | [tool.poetry] 16 | name = "test-project" 17 | version = "0.1.0" 18 | description = "A test project with git dependencies" 19 | authors = ["Test Author "] 20 | 21 | [tool.poetry.dependencies] 22 | python = "^3.9" 23 | requests = "^2.28.0" 24 | dependency = { git = "https://github.com/user/library.git", branch = "my-branch" } 25 | another-dep = { git = "https://github.com/user/another-lib.git", tag = "v1.0.0" } 26 | revision-dep = { git = "https://github.com/user/rev-lib.git", rev = "123abc" } 27 | 28 | [build-system] 29 | requires = ["poetry-core>=1.0.0"] 30 | build-backend = "poetry.core.masonry.api" 31 | "#; 32 | 33 | fs::write(project_dir.join("pyproject.toml"), content).unwrap(); 34 | 35 | // Rename to old.pyproject.toml to simulate migration 36 | fs::rename( 37 | project_dir.join("pyproject.toml"), 38 | project_dir.join("old.pyproject.toml"), 39 | ) 40 | .unwrap(); 41 | 42 | // Create new pyproject.toml like uv-migrator would do 43 | let new_content = r#" 44 | [project] 45 | name = "test-project" 46 | version = "0.1.0" 47 | description = "A test project with git dependencies" 48 | authors = [{ name = "Test Author", email = "test@example.com" }] 49 | requires-python = ">=3.9" 50 | dependencies = [ 51 | "requests>=2.28.0", 52 | "dependency>=1.0.0", 53 | "another-dep>=1.0.0", 54 | 
"revision-dep>=1.0.0", 55 | ] 56 | 57 | [build-system] 58 | requires = ["hatchling"] 59 | build-backend = "hatchling.build" 60 | "#; 61 | 62 | fs::write(project_dir.join("pyproject.toml"), new_content).unwrap(); 63 | 64 | (temp_dir, project_dir) 65 | } 66 | 67 | #[test] 68 | fn test_git_dependency_migration() { 69 | let (_temp_dir, project_dir) = create_test_poetry_project_with_git_deps(); 70 | let mut file_tracker = FileTrackerGuard::new(); 71 | 72 | // Perform the migration 73 | let result = perform_poetry_migration(&project_dir, &mut file_tracker); 74 | assert!(result.is_ok(), "Poetry migration failed: {:?}", result); 75 | 76 | // Read the resulting pyproject.toml 77 | let content = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 78 | 79 | // Verify the git dependencies were migrated correctly 80 | assert!( 81 | content.contains("[tool.uv.sources]") 82 | || content.contains("[tool.uv.sources.dependency]"), 83 | "Missing [tool.uv.sources] section" 84 | ); 85 | 86 | // Check for first git dependency with branch 87 | assert!( 88 | content.contains(r#"[tool.uv.sources.dependency]"#), 89 | "Missing dependency in sources" 90 | ); 91 | assert!( 92 | content.contains(r#"git = "https://github.com/user/library.git""#), 93 | "Missing git URL for dependency" 94 | ); 95 | assert!( 96 | content.contains(r#"branch = "my-branch""#), 97 | "Missing branch for dependency" 98 | ); 99 | 100 | // Check for second git dependency with tag 101 | assert!( 102 | content.contains(r#"[tool.uv.sources.another-dep]"#), 103 | "Missing another-dep in sources" 104 | ); 105 | assert!( 106 | content.contains(r#"git = "https://github.com/user/another-lib.git""#), 107 | "Missing git URL for another-dep" 108 | ); 109 | assert!( 110 | content.contains(r#"tag = "v1.0.0""#), 111 | "Missing tag for another-dep" 112 | ); 113 | 114 | // Check for third git dependency with revision 115 | assert!( 116 | content.contains(r#"[tool.uv.sources.revision-dep]"#), 117 | "Missing revision-dep in 
sources" 118 | ); 119 | assert!( 120 | content.contains(r#"git = "https://github.com/user/rev-lib.git""#), 121 | "Missing git URL for revision-dep" 122 | ); 123 | assert!( 124 | content.contains(r#"rev = "123abc""#), 125 | "Missing revision for revision-dep" 126 | ); 127 | } 128 | } 129 | -------------------------------------------------------------------------------- /tests/poetry_package_test.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod tests { 3 | use std::fs; 4 | use std::path::PathBuf; 5 | use tempfile::TempDir; 6 | use uv_migrator::migrators::common::perform_poetry_migration; 7 | use uv_migrator::utils::file_ops::FileTrackerGuard; 8 | 9 | fn create_test_poetry_package() -> (TempDir, PathBuf) { 10 | let temp_dir = TempDir::new().unwrap(); 11 | let project_dir = temp_dir.path().to_path_buf(); 12 | 13 | // Create pyproject.toml with package configuration 14 | let content = r#" 15 | [tool.poetry] 16 | name = "test-poetry-package" 17 | version = "0.1.0" 18 | description = "A test poetry package" 19 | authors = ["Test Author "] 20 | packages = [ 21 | { include = "src" } 22 | ] 23 | 24 | [tool.poetry.dependencies] 25 | python = "^3.9" 26 | fastapi = "^0.111.0" 27 | 28 | [build-system] 29 | requires = ["poetry-core>=1.0.0"] 30 | build-backend = "poetry.core.masonry.api" 31 | "#; 32 | 33 | fs::write(project_dir.join("pyproject.toml"), content).unwrap(); 34 | 35 | // Rename to old.pyproject.toml to simulate migration 36 | fs::rename( 37 | project_dir.join("pyproject.toml"), 38 | project_dir.join("old.pyproject.toml"), 39 | ) 40 | .unwrap(); 41 | 42 | // Create new pyproject.toml like uv-migrator would do 43 | let new_content = r#" 44 | [project] 45 | name = "test-poetry-package" 46 | version = "0.1.0" 47 | description = "A test poetry package" 48 | authors = [{ name = "Test Author", email = "test@example.com" }] 49 | requires-python = ">=3.9" 50 | dependencies = [ 51 | "fastapi>=0.111.0", 52 | ] 53 | 54 | 
[build-system] 55 | requires = ["hatchling"] 56 | build-backend = "hatchling.build" 57 | "#; 58 | 59 | fs::write(project_dir.join("pyproject.toml"), new_content).unwrap(); 60 | 61 | (temp_dir, project_dir) 62 | } 63 | 64 | fn create_test_poetry2_package() -> (TempDir, PathBuf) { 65 | let temp_dir = TempDir::new().unwrap(); 66 | let project_dir = temp_dir.path().to_path_buf(); 67 | 68 | // Create pyproject.toml with Poetry 2.0 package configuration 69 | let content = r#" 70 | [project] 71 | name = "test-poetry-v2-package" 72 | version = "0.1.0" 73 | description = "A test Poetry 2.0 package" 74 | authors = [ 75 | {name = "Test Author", email = "test@example.com"} 76 | ] 77 | readme = "README.md" 78 | requires-python = ">=3.10" 79 | dependencies = [ 80 | "fastapi (>=0.115.6,<0.116.0)", 81 | ] 82 | 83 | [build-system] 84 | requires = ["poetry-core>=2.0.0,<3.0.0"] 85 | build-backend = "poetry.core.masonry.api" 86 | 87 | [tool.poetry] 88 | packages = [ 89 | { include = "src" }, 90 | ] 91 | "#; 92 | 93 | fs::write(project_dir.join("pyproject.toml"), content).unwrap(); 94 | 95 | // Rename to old.pyproject.toml to simulate migration 96 | fs::rename( 97 | project_dir.join("pyproject.toml"), 98 | project_dir.join("old.pyproject.toml"), 99 | ) 100 | .unwrap(); 101 | 102 | // Create new pyproject.toml like uv-migrator would do 103 | let new_content = r#" 104 | [project] 105 | name = "test-poetry-v2-package" 106 | version = "0.1.0" 107 | description = "A test Poetry 2.0 package" 108 | authors = [{ name = "Test Author", email = "test@example.com" }] 109 | requires-python = ">=3.10" 110 | dependencies = [ 111 | "fastapi>=0.115.6,<0.116.0", 112 | ] 113 | 114 | [build-system] 115 | requires = ["hatchling"] 116 | build-backend = "hatchling.build" 117 | "#; 118 | 119 | fs::write(project_dir.join("pyproject.toml"), new_content).unwrap(); 120 | 121 | (temp_dir, project_dir) 122 | } 123 | 124 | #[test] 125 | fn test_poetry_package_migration() { 126 | let (_temp_dir, project_dir) = 
create_test_poetry_package(); 127 | let mut file_tracker = FileTrackerGuard::new(); 128 | 129 | // Perform the migration 130 | let result = perform_poetry_migration(&project_dir, &mut file_tracker); 131 | assert!(result.is_ok(), "Poetry migration failed: {:?}", result); 132 | 133 | // Read the resulting pyproject.toml 134 | let content = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 135 | 136 | // Verify the migration was successful without requiring specific packages config 137 | assert!( 138 | content.contains("[build-system]"), 139 | "Missing build system section" 140 | ); 141 | assert!( 142 | content.contains("build-backend = \"hatchling.build\""), 143 | "Missing or incorrect build backend" 144 | ); 145 | 146 | // Check that the authors were migrated correctly 147 | assert!(content.contains("authors = ["), "Missing authors section"); 148 | assert!( 149 | content.contains("{ name = \"Test Author\", email = \"test@example.com\" }"), 150 | "Missing or incorrect author information" 151 | ); 152 | } 153 | 154 | #[test] 155 | fn test_poetry2_package_migration() { 156 | let (_temp_dir, project_dir) = create_test_poetry2_package(); 157 | let mut file_tracker = FileTrackerGuard::new(); 158 | 159 | // Perform the migration 160 | let result = perform_poetry_migration(&project_dir, &mut file_tracker); 161 | assert!(result.is_ok(), "Poetry 2.0 migration failed: {:?}", result); 162 | 163 | // Read the resulting pyproject.toml 164 | let content = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 165 | 166 | // Verify the migration was successful without requiring specific packages config 167 | assert!( 168 | content.contains("[build-system]"), 169 | "Missing build system section" 170 | ); 171 | assert!( 172 | content.contains("build-backend = \"hatchling.build\""), 173 | "Missing or incorrect build backend" 174 | ); 175 | 176 | // Check that the project metadata was preserved 177 | assert!( 178 | content.contains("name = 
\"test-poetry-v2-package\""), 179 | "Missing or incorrect project name" 180 | ); 181 | assert!( 182 | content.contains("requires-python = \">=3.10\""), 183 | "Missing or incorrect Python version requirement" 184 | ); 185 | } 186 | } 187 | -------------------------------------------------------------------------------- /tests/pyproject_test.rs: -------------------------------------------------------------------------------- 1 | use std::fs; 2 | use tempfile::TempDir; 3 | use uv_migrator::utils::pyproject::append_tool_sections; 4 | 5 | /// Helper function to create a temporary test directory with pyproject files. 6 | /// 7 | /// # Arguments 8 | /// 9 | /// * `old_content` - Content for old.pyproject.toml 10 | /// * `new_content` - Content for pyproject.toml 11 | /// 12 | /// # Returns 13 | /// 14 | /// A tuple containing the temporary directory and its path 15 | fn setup_test_files(old_content: &str, new_content: &str) -> (TempDir, std::path::PathBuf) { 16 | let temp_dir = TempDir::new().unwrap(); 17 | let project_dir = temp_dir.path().to_path_buf(); 18 | 19 | fs::write(project_dir.join("old.pyproject.toml"), old_content).unwrap(); 20 | fs::write(project_dir.join("pyproject.toml"), new_content).unwrap(); 21 | 22 | (temp_dir, project_dir) 23 | } 24 | 25 | /// Test basic tool section appending functionality. 26 | /// 27 | /// This test verifies that: 28 | /// 1. Tool sections are correctly copied from old to new pyproject.toml 29 | /// 2. Poetry sections are properly excluded 30 | /// 3. Tool section values are preserved accurately 31 | /// 4. No duplicate or empty tool sections are created 32 | /// 33 | /// # Test Setup 34 | /// Creates two TOML files: 35 | /// - old.pyproject.toml with poetry, black, and isort sections 36 | /// - pyproject.toml with basic project configuration 37 | /// 38 | /// # Verification Steps 39 | /// 1. Verifies black and isort sections are copied 40 | /// 2. Confirms poetry section is not copied 41 | /// 3. 
Validates section content preservation 42 | /// 4. Checks for proper tool section structure 43 | #[test] 44 | fn test_append_tool_sections() { 45 | let old_content = r#" 46 | [tool.poetry] 47 | name = "test" 48 | version = "0.1.0" 49 | 50 | [tool.black] 51 | line-length = 100 52 | target-version = ["py37"] 53 | 54 | [tool.isort] 55 | profile = "black" 56 | "#; 57 | 58 | let new_content = r#" 59 | [project] 60 | name = "test" 61 | version = "0.1.0" 62 | description = "A test project" 63 | "#; 64 | 65 | let (_temp_dir, project_dir) = setup_test_files(old_content, new_content); 66 | append_tool_sections(&project_dir).unwrap(); 67 | 68 | let result = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 69 | 70 | // Verify tool sections were copied correctly 71 | assert!( 72 | result.contains("[tool.black]"), 73 | "black section should be present" 74 | ); 75 | assert!( 76 | result.contains("[tool.isort]"), 77 | "isort section should be present" 78 | ); 79 | assert!( 80 | result.contains("line-length = 100"), 81 | "black settings should be preserved" 82 | ); 83 | assert!( 84 | result.contains("profile = \"black\""), 85 | "isort settings should be preserved" 86 | ); 87 | 88 | // Verify poetry section was not copied 89 | assert!( 90 | !result.contains("[tool.poetry]"), 91 | "poetry section should not be present" 92 | ); 93 | 94 | // Verify [tool] section behavior 95 | let tool_count = result.matches("[tool]").count(); 96 | assert!(tool_count <= 1, "Should not have multiple [tool] sections"); 97 | if tool_count == 1 { 98 | let tool_index = result.find("[tool]").unwrap(); 99 | let next_section = result[tool_index..] 
100 | .find("\n[") 101 | .unwrap_or(result.len() - tool_index); 102 | let tool_content = &result[tool_index..tool_index + next_section]; 103 | assert!( 104 | tool_content.contains("[tool.black]") || tool_content.contains("[tool.isort]"), 105 | "Empty [tool] section should not be present" 106 | ); 107 | } 108 | } 109 | 110 | /// Test handling of existing tool sections during append operation. 111 | /// 112 | /// This test verifies that: 113 | /// 1. Existing tool sections in the target file are preserved 114 | /// 2. Non-conflicting sections from old file are appended 115 | /// 3. Existing section values are not overwritten 116 | /// 4. Section order is maintained appropriately 117 | /// 118 | /// # Test Setup 119 | /// Creates two TOML files: 120 | /// - old.pyproject.toml with poetry, black, and isort sections 121 | /// - pyproject.toml with existing black section 122 | /// 123 | /// # Verification Steps 124 | /// 1. Confirms existing black configuration is preserved 125 | /// 2. Verifies isort section is properly copied 126 | /// 3. Checks no empty sections are created 127 | /// 4. Validates overall section structure 128 | #[test] 129 | fn test_append_tool_sections_with_existing() { 130 | let old_content = r#" 131 | [tool.poetry] 132 | name = "test" 133 | version = "0.1.0" 134 | 135 | [tool.black] 136 | line-length = 100 137 | 138 | [tool.isort] 139 | profile = "black" 140 | "#; 141 | 142 | let new_content = r#" 143 | [project] 144 | name = "test" 145 | version = "0.1.0" 146 | 147 | [tool.black] 148 | line-length = 88 149 | "#; 150 | 151 | let (_temp_dir, project_dir) = setup_test_files(old_content, new_content); 152 | append_tool_sections(&project_dir).unwrap(); 153 | 154 | let result = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 155 | 156 | let black_section = result.find("[tool.black]").unwrap(); 157 | let next_section = result[black_section..] 
158 | .find("\n[") 159 | .unwrap_or(result.len() - black_section); 160 | let black_content = &result[black_section..black_section + next_section]; 161 | assert!( 162 | black_content.contains("line-length = 88"), 163 | "Existing black configuration should be preserved" 164 | ); 165 | 166 | assert!( 167 | result.contains("[tool.isort]"), 168 | "isort section should be present" 169 | ); 170 | assert!( 171 | result.contains("profile = \"black\""), 172 | "isort settings should be preserved" 173 | ); 174 | 175 | assert!( 176 | !result.contains("[tool]"), 177 | "Should not have empty [tool] section" 178 | ); 179 | } 180 | 181 | /// Test preservation of TOML formatting and comments. 182 | /// 183 | /// This test verifies that: 184 | /// 1. Inline comments are preserved 185 | /// 2. Multi-line formatting is maintained 186 | /// 3. Array formatting and indentation is kept 187 | /// 4. Section-level comments are retained 188 | /// 189 | /// # Test Setup 190 | /// Creates TOML files with: 191 | /// - Inline comments 192 | /// - Multi-line arrays 193 | /// - Section comments 194 | /// - Various formatting styles 195 | /// 196 | /// # Verification Steps 197 | /// 1. Checks inline comment preservation 198 | /// 2. Verifies multi-line array formatting 199 | /// 3. Validates section comment retention 200 | /// 4. 
Confirms overall formatting structure 201 | #[test] 202 | fn test_preserve_formatting() { 203 | let old_content = r#" 204 | [tool.black] 205 | line-length = 100 # Custom line length 206 | target-version = [ 207 | "py37", 208 | "py38", 209 | ] # Supported versions 210 | 211 | [tool.isort] 212 | profile = "black" # Match black 213 | "#; 214 | 215 | let new_content = "[project]\nname = \"test\"\n"; 216 | 217 | let (_temp_dir, project_dir) = setup_test_files(old_content, new_content); 218 | append_tool_sections(&project_dir).unwrap(); 219 | 220 | let result = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 221 | 222 | assert!(result.contains("line-length = 100 # Custom line length")); 223 | assert!(result.contains("profile = \"black\" # Match black")); 224 | assert!(result.contains( 225 | r#"target-version = [ 226 | "py37", 227 | "py38", 228 | ] # Supported versions"# 229 | )); 230 | } 231 | 232 | /// Test handling of missing old pyproject.toml file. 233 | /// 234 | /// This test verifies that: 235 | /// 1. Missing old.pyproject.toml is handled gracefully 236 | /// 2. Existing pyproject.toml remains unchanged 237 | /// 3. No empty sections are created 238 | /// 4. Operation completes successfully 239 | /// 240 | /// # Test Setup 241 | /// Creates only a new pyproject.toml file without old.pyproject.toml 242 | /// 243 | /// # Verification Steps 244 | /// 1. Confirms operation succeeds without error 245 | /// 2. Verifies content remains unchanged 246 | /// 3. Checks no empty sections are added 247 | /// 4. 
Validates file integrity 248 | #[test] 249 | fn test_no_old_pyproject() { 250 | let new_content = r#" 251 | [project] 252 | name = "test" 253 | version = "0.1.0" 254 | description = "A test project" 255 | "#; 256 | 257 | let temp_dir = TempDir::new().unwrap(); 258 | let project_dir = temp_dir.path().to_path_buf(); 259 | fs::write(project_dir.join("pyproject.toml"), new_content).unwrap(); 260 | 261 | let result = append_tool_sections(&project_dir); 262 | assert!( 263 | result.is_ok(), 264 | "Should handle missing old.pyproject.toml gracefully" 265 | ); 266 | 267 | let final_content = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 268 | assert_eq!( 269 | final_content, new_content, 270 | "Content should remain unchanged" 271 | ); 272 | assert!( 273 | !final_content.contains("[tool]"), 274 | "Should not have empty [tool] section" 275 | ); 276 | } 277 | 278 | /// Test handling of nested tool sections. 279 | /// 280 | /// This test verifies that: 281 | /// 1. Nested tool sections are correctly copied 282 | /// 2. Section hierarchy is preserved 283 | /// 3. Nested values are maintained accurately 284 | /// 4. Array values in nested sections are preserved 285 | /// 286 | /// # Test Setup 287 | /// Creates TOML files with: 288 | /// - Multiple levels of nested tool sections 289 | /// - Various value types in nested sections 290 | /// - Poetry section for exclusion 291 | /// 292 | /// # Verification Steps 293 | /// 1. Verifies nested section presence 294 | /// 2. Confirms nested values are preserved 295 | /// 3. Validates array value preservation 296 | /// 4. 
Checks section hierarchy integrity 297 | #[test] 298 | fn test_nested_tool_sections() { 299 | let old_content = r#" 300 | [tool.poetry] 301 | name = "test" 302 | version = "0.1.0" 303 | 304 | [tool.black] 305 | line-length = 100 306 | 307 | [tool.pytest.ini_options] 308 | minversion = "6.0" 309 | addopts = "-ra -q" 310 | testpaths = ["tests"] 311 | "#; 312 | 313 | let new_content = r#" 314 | [project] 315 | name = "test" 316 | version = "0.1.0" 317 | "#; 318 | 319 | let (_temp_dir, project_dir) = setup_test_files(old_content, new_content); 320 | append_tool_sections(&project_dir).unwrap(); 321 | 322 | let result = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 323 | 324 | assert!( 325 | result.contains("[tool.pytest.ini_options]"), 326 | "Nested pytest section should be present" 327 | ); 328 | assert!( 329 | result.contains("minversion = \"6.0\""), 330 | "Nested section content should be preserved" 331 | ); 332 | assert!( 333 | result.contains("testpaths = [\"tests\"]"), 334 | "Array values should be preserved" 335 | ); 336 | 337 | assert!( 338 | !result.contains("[tool]"), 339 | "Should not have empty [tool] section" 340 | ); 341 | let tool_sections = result.matches("[tool.").count(); 342 | assert!(tool_sections > 0, "Should have non-empty tool sections"); 343 | } 344 | 345 | /// Test handling of empty nested sections. 346 | /// 347 | /// This test verifies that: 348 | /// 1. Empty sections are properly cleaned up 349 | /// 2. Non-empty sections are preserved 350 | /// 3. Empty nested sections are removed 351 | /// 4. Section hierarchy remains intact for non-empty sections 352 | /// 353 | /// # Test Setup 354 | /// Creates TOML files with: 355 | /// - Mix of empty and non-empty sections 356 | /// - Empty nested sections 357 | /// - Valid sections with content 358 | /// 359 | /// # Verification Steps 360 | /// 1. Confirms empty sections are removed 361 | /// 2. Verifies non-empty sections remain 362 | /// 3. 
Checks nested section cleanup 363 | /// 4. Validates overall structure integrity 364 | #[test] 365 | fn test_empty_nested_sections() { 366 | let old_content = r#" 367 | [tool.poetry] 368 | name = "test" 369 | 370 | [tool.black] 371 | line-length = 100 372 | 373 | [tool.pytest] 374 | 375 | [tool.pytest.ini_options] 376 | "#; 377 | 378 | let new_content = r#" 379 | [project] 380 | name = "test" 381 | version = "0.1.0" 382 | "#; 383 | 384 | let (_temp_dir, project_dir) = setup_test_files(old_content, new_content); 385 | append_tool_sections(&project_dir).unwrap(); 386 | 387 | let result = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 388 | 389 | assert!( 390 | result.contains("[tool.black]"), 391 | "Non-empty black section should be present" 392 | ); 393 | assert!( 394 | !result.contains("[tool.pytest]"), 395 | "Empty pytest section should not be present" 396 | ); 397 | assert!( 398 | !result.contains("[tool.pytest.ini_options]"), 399 | "Empty nested section should not be present" 400 | ); 401 | 402 | assert!( 403 | !result.contains("[tool]"), 404 | "Should not have empty [tool] section" 405 | ); 406 | } 407 | 408 | /// Test handling of empty tool sections. 409 | /// 410 | /// This test verifies that: 411 | /// 1. Empty tool sections are not created 412 | /// 2. Project content remains unchanged 413 | /// 3. No unnecessary sections are added 414 | /// 415 | /// # Test Setup 416 | /// Creates TOML files with: 417 | /// - Only poetry section in old file 418 | /// - Basic project configuration in new file 419 | /// 420 | /// # Verification Steps 421 | /// 1. Verifies no empty tool section is created 422 | /// 2. Confirms project content is preserved 423 | /// 3. 
Validates overall file structure 424 | #[test] 425 | fn test_no_empty_tool_section() { 426 | let old_content = r#" 427 | [tool.poetry] 428 | name = "test" 429 | version = "0.1.0" 430 | "#; 431 | 432 | let new_content = r#" 433 | [project] 434 | name = "test" 435 | version = "0.1.0" 436 | description = "A test project" 437 | "#; 438 | 439 | let (_temp_dir, project_dir) = setup_test_files(old_content, new_content); 440 | append_tool_sections(&project_dir).unwrap(); 441 | 442 | let result = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 443 | assert!( 444 | !result.contains("[tool]"), 445 | "Empty [tool] section should not be present" 446 | ); 447 | } 448 | 449 | /// Test cleanup of empty tool sections after processing. 450 | /// 451 | /// This test verifies that: 452 | /// 1. Empty tool sections are removed during cleanup 453 | /// 2. Empty tool subsections are removed 454 | /// 3. Project content remains intact 455 | /// 4. Overall file structure is maintained 456 | /// 457 | /// # Test Setup 458 | /// Creates TOML files with: 459 | /// - Empty tool sections 460 | /// - Poetry configuration 461 | /// - Empty black configuration 462 | /// 463 | /// # Verification Steps 464 | /// 1. Confirms empty tool sections are removed 465 | /// 2. Verifies empty black section is cleaned up 466 | /// 3. Validates project content preservation 467 | /// 4. 
Checks final file structure 468 | #[test] 469 | fn test_no_empty_tool_section_after_cleanup() { 470 | let old_content = r#" 471 | [tool.poetry] 472 | name = "test" 473 | version = "0.1.0" 474 | 475 | [tool.black] 476 | "#; 477 | 478 | let new_content = r#" 479 | [project] 480 | name = "test" 481 | version = "0.1.0" 482 | "#; 483 | 484 | let (_temp_dir, project_dir) = setup_test_files(old_content, new_content); 485 | append_tool_sections(&project_dir).unwrap(); 486 | 487 | let result = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 488 | assert!( 489 | !result.contains("[tool]"), 490 | "Empty [tool] section should not be present after cleanup" 491 | ); 492 | assert!( 493 | !result.contains("[tool.black]"), 494 | "Empty black section should be cleaned up" 495 | ); 496 | } 497 | 498 | /// Test update_uv_indices_with_custom_names functionality. 499 | /// 500 | /// This test verifies that: 501 | /// 1. Named indexes are correctly parsed from [name@]url format 502 | /// 2. Unnamed indexes receive auto-generated names (extra-N) 503 | /// 3. Invalid formats are handled gracefully 504 | /// 4. 
The resulting TOML structure is correct 505 | #[test] 506 | fn test_update_uv_indices_with_custom_names() { 507 | use uv_migrator::utils::pyproject::update_uv_indices_from_urls; 508 | 509 | let temp_dir = TempDir::new().unwrap(); 510 | let project_dir = temp_dir.path().to_path_buf(); 511 | 512 | // Create initial pyproject.toml 513 | let content = r#"[project] 514 | name = "test-project" 515 | version = "0.1.0" 516 | "#; 517 | fs::write(project_dir.join("pyproject.toml"), content).unwrap(); 518 | 519 | // Test with mixed named and unnamed indexes 520 | let urls = vec![ 521 | "mycompany@https://pypi.mycompany.com/simple/".to_string(), 522 | "https://pypi.org/simple/".to_string(), 523 | "torch@https://download.pytorch.org/whl/cu118".to_string(), 524 | "@https://invalid.example.com/".to_string(), // Invalid format, should be treated as URL 525 | "name-with-dashes@https://example.com/pypi/".to_string(), 526 | ]; 527 | 528 | update_uv_indices_from_urls(&project_dir, &urls).unwrap(); 529 | 530 | let result = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 531 | 532 | // Verify named indexes 533 | assert!(result.contains(r#"name = "mycompany""#)); 534 | assert!(result.contains(r#"url = "https://pypi.mycompany.com/simple/""#)); 535 | 536 | assert!(result.contains(r#"name = "torch""#)); 537 | assert!(result.contains(r#"url = "https://download.pytorch.org/whl/cu118""#)); 538 | 539 | assert!(result.contains(r#"name = "name-with-dashes""#)); 540 | assert!(result.contains(r#"url = "https://example.com/pypi/""#)); 541 | 542 | // Verify auto-generated names 543 | assert!(result.contains(r#"name = "extra-2""#)); // For the unnamed URL 544 | assert!(result.contains(r#"url = "https://pypi.org/simple/""#)); 545 | 546 | assert!(result.contains(r#"name = "extra-4""#)); // For the invalid format 547 | assert!(result.contains(r#"url = "@https://invalid.example.com/""#)); 548 | } 549 | -------------------------------------------------------------------------------- 
/tests/setup_py_test.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | use std::fs; 3 | use std::path::PathBuf; 4 | use tempfile::TempDir; 5 | use uv_migrator::DependencyType; 6 | use uv_migrator::migrators::MigrationSource; 7 | use uv_migrator::migrators::setup_py::SetupPyMigrationSource; 8 | use uv_migrator::utils::author::extract_authors_from_setup_py; 9 | use uv_migrator::utils::toml::{read_toml, update_section, write_toml}; 10 | 11 | /// Helper function to create a temporary test project with setup.py and optional requirements.txt. 12 | /// 13 | /// # Arguments 14 | /// 15 | /// * `setup_content` - Content for the setup.py file 16 | /// * `requirements_content` - Optional content for requirements.txt 17 | /// 18 | /// # Returns 19 | /// 20 | /// A tuple containing the temporary directory and its path 21 | fn create_test_project( 22 | setup_content: &str, 23 | requirements_content: Option<&str>, 24 | ) -> (TempDir, PathBuf) { 25 | let temp_dir = TempDir::new().unwrap(); 26 | let project_dir = temp_dir.path().to_path_buf(); 27 | 28 | // Write setup.py 29 | fs::write(project_dir.join("setup.py"), setup_content).unwrap(); 30 | 31 | // Write requirements.txt if provided 32 | if let Some(content) = requirements_content { 33 | fs::write(project_dir.join("requirements.txt"), content).unwrap(); 34 | } 35 | 36 | (temp_dir, project_dir) 37 | } 38 | 39 | #[test] 40 | fn test_setup_py_with_requirements_file() { 41 | let setup_content = r#" 42 | from setuptools import setup 43 | 44 | setup( 45 | name="pb_logging", 46 | version="1.0.0", 47 | description="Logging-related utilities", 48 | ) 49 | "#; 50 | 51 | let requirements_content = r#" 52 | flask==2.0.0 53 | requests==2.31.0 54 | sqlalchemy>=1.4.0,<2.0.0 55 | "#; 56 | 57 | let (_temp_dir, project_dir) = create_test_project(setup_content, Some(requirements_content)); 58 | let source = SetupPyMigrationSource; 59 | let dependencies = 
source.extract_dependencies(&project_dir).unwrap(); 60 | 61 | assert_eq!( 62 | dependencies.len(), 63 | 3, 64 | "Should extract all dependencies from requirements.txt" 65 | ); 66 | 67 | let flask_dep = dependencies.iter().find(|d| d.name == "flask").unwrap(); 68 | assert_eq!(flask_dep.version, Some("2.0.0".to_string())); 69 | assert_eq!(flask_dep.dep_type, DependencyType::Main); 70 | 71 | let requests_dep = dependencies.iter().find(|d| d.name == "requests").unwrap(); 72 | assert_eq!(requests_dep.version, Some("2.31.0".to_string())); 73 | assert_eq!(requests_dep.dep_type, DependencyType::Main); 74 | 75 | let sqlalchemy_dep = dependencies 76 | .iter() 77 | .find(|d| d.name == "sqlalchemy") 78 | .unwrap(); 79 | assert_eq!(sqlalchemy_dep.version, Some(">=1.4.0,<2.0.0".to_string())); 80 | assert_eq!(sqlalchemy_dep.dep_type, DependencyType::Main); 81 | } 82 | 83 | #[test] 84 | fn test_setup_py_with_direct_dependencies() { 85 | let setup_content = r#" 86 | from setuptools import setup 87 | 88 | setup( 89 | name="pb_logging", 90 | version="1.0.0", 91 | description="Logging-related utilities", 92 | install_requires=[ 93 | 'flask>=2.0.0', 94 | 'requests==2.31.0', 95 | 'sqlalchemy>=1.4.0' 96 | ], 97 | tests_require=[ 98 | 'pytest>=7.0.0', 99 | 'pytest-cov>=4.0.0' 100 | ] 101 | ) 102 | "#; 103 | 104 | let (_temp_dir, project_dir) = create_test_project(setup_content, None); 105 | let source = SetupPyMigrationSource; 106 | let dependencies = source.extract_dependencies(&project_dir).unwrap(); 107 | 108 | let main_deps: Vec<_> = dependencies 109 | .iter() 110 | .filter(|d| matches!(d.dep_type, DependencyType::Main)) 111 | .collect(); 112 | 113 | let dev_deps: Vec<_> = dependencies 114 | .iter() 115 | .filter(|d| matches!(d.dep_type, DependencyType::Dev)) 116 | .collect(); 117 | 118 | assert_eq!(main_deps.len(), 3, "Should have 3 main dependencies"); 119 | assert_eq!(dev_deps.len(), 2, "Should have 2 dev dependencies"); 120 | 121 | let flask_dep = main_deps.iter().find(|d| d.name 
== "flask").unwrap(); 122 | assert_eq!(flask_dep.version, Some(">=2.0.0".to_string())); 123 | 124 | let pytest_dep = dev_deps.iter().find(|d| d.name == "pytest").unwrap(); 125 | assert_eq!(pytest_dep.version, Some(">=7.0.0".to_string())); 126 | } 127 | 128 | #[test] 129 | fn test_setup_py_no_requirements() { 130 | let setup_content = r#" 131 | from setuptools import setup 132 | 133 | setup( 134 | name="pb_logging", 135 | version="1.0.0", 136 | description="Logging-related utilities", 137 | ) 138 | "#; 139 | 140 | let (_temp_dir, project_dir) = create_test_project(setup_content, None); 141 | let source = SetupPyMigrationSource; 142 | let dependencies = source.extract_dependencies(&project_dir).unwrap(); 143 | 144 | assert!(dependencies.is_empty(), "Should have no dependencies"); 145 | } 146 | 147 | #[test] 148 | fn test_setup_py_malformed() { 149 | let setup_content = r#" 150 | from setuptools import setup 151 | 152 | setup( 153 | name="pb_logging", 154 | version="1.0.0", 155 | description="Logging-related utilities", 156 | install_requires="not a list", # This is invalid 157 | ) 158 | "#; 159 | 160 | let (_temp_dir, project_dir) = create_test_project(setup_content, None); 161 | let source = SetupPyMigrationSource; 162 | let result = source.extract_dependencies(&project_dir); 163 | 164 | assert!( 165 | result.is_ok(), 166 | "Should handle malformed setup.py without crashing" 167 | ); 168 | let dependencies = result.unwrap(); 169 | assert!( 170 | dependencies.is_empty(), 171 | "Should have no dependencies for malformed setup.py" 172 | ); 173 | } 174 | 175 | fn setup_test_environment(setup_content: &str, pyproject_content: &str) -> (TempDir, PathBuf) { 176 | let temp_dir = TempDir::new().unwrap(); 177 | let project_dir = temp_dir.path().to_path_buf(); 178 | 179 | fs::write(project_dir.join("setup.py"), setup_content).unwrap(); 180 | fs::write(project_dir.join("pyproject.toml"), pyproject_content).unwrap(); 181 | 182 | (temp_dir, project_dir) 183 | } 184 | 185 | 
#[test] 186 | fn test_extract_authors() { 187 | let setup_content = r#" 188 | from setuptools import setup 189 | 190 | setup( 191 | name="test-project", 192 | version="1.0.0", 193 | author="Demo Name", 194 | author_email="demo.name@corp.com", 195 | description="Test project" 196 | ) 197 | "#; 198 | let (_temp_dir, project_dir) = setup_test_environment(setup_content, ""); 199 | 200 | let authors = extract_authors_from_setup_py(&project_dir).unwrap(); 201 | assert_eq!(authors.len(), 1); 202 | assert_eq!(authors[0].name, "Demo Name"); 203 | assert_eq!(authors[0].email, Some("demo.name@corp.com".to_string())); 204 | } 205 | 206 | #[test] 207 | fn test_update_authors_in_pyproject() { 208 | let setup_content = r#" 209 | from setuptools import setup 210 | 211 | setup( 212 | name="test-project", 213 | version="1.0.0", 214 | author="Demo Name", 215 | author_email="demo.name@corp.com", 216 | description="Test project" 217 | ) 218 | "#; 219 | let pyproject_content = r#"[project] 220 | name = "test-project" 221 | version = "1.0.0" 222 | description = "Test project" 223 | "#; 224 | 225 | let (_temp_dir, project_dir) = setup_test_environment(setup_content, pyproject_content); 226 | 227 | // First extract the authors 228 | let authors = extract_authors_from_setup_py(&project_dir).unwrap(); 229 | 230 | // Then update the pyproject.toml with the extracted authors 231 | let mut doc = read_toml(&project_dir.join("pyproject.toml")).unwrap(); 232 | let mut authors_array = toml_edit::Array::new(); 233 | for author in &authors { 234 | let mut table = toml_edit::InlineTable::new(); 235 | table.insert( 236 | "name", 237 | toml_edit::Value::String(toml_edit::Formatted::new(author.name.clone())), 238 | ); 239 | if let Some(ref email) = author.email { 240 | table.insert( 241 | "email", 242 | toml_edit::Value::String(toml_edit::Formatted::new(email.clone())), 243 | ); 244 | } 245 | authors_array.push(toml_edit::Value::InlineTable(table)); 246 | } 247 | update_section( 248 | &mut doc, 249 | 
&["project", "authors"], 250 | toml_edit::Item::Value(toml_edit::Value::Array(authors_array)), 251 | ); 252 | write_toml(&project_dir.join("pyproject.toml"), &mut doc).unwrap(); 253 | 254 | let updated_content = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 255 | assert!(updated_content.contains("authors = [")); 256 | assert!(updated_content.contains(r#"{ name = "Demo Name", email = "demo.name@corp.com" }"#)); 257 | } 258 | 259 | #[test] 260 | fn test_update_authors_with_existing_authors() { 261 | let setup_content = r#" 262 | from setuptools import setup 263 | 264 | setup( 265 | name="test-project", 266 | version="1.0.0", 267 | author="Demo Name", 268 | author_email="demo.name@corp.com", 269 | description="Test project" 270 | ) 271 | "#; 272 | let pyproject_content = r#"[project] 273 | name = "test-project" 274 | version = "1.0.0" 275 | description = "Test project" 276 | authors = [ 277 | { name = "Old Author", email = "old@example.com" } 278 | ] 279 | "#; 280 | 281 | let (_temp_dir, project_dir) = setup_test_environment(setup_content, pyproject_content); 282 | 283 | let authors = extract_authors_from_setup_py(&project_dir).unwrap(); 284 | 285 | let mut doc = read_toml(&project_dir.join("pyproject.toml")).unwrap(); 286 | let mut authors_array = toml_edit::Array::new(); 287 | for author in &authors { 288 | let mut table = toml_edit::InlineTable::new(); 289 | table.insert( 290 | "name", 291 | toml_edit::Value::String(toml_edit::Formatted::new(author.name.clone())), 292 | ); 293 | if let Some(ref email) = author.email { 294 | table.insert( 295 | "email", 296 | toml_edit::Value::String(toml_edit::Formatted::new(email.clone())), 297 | ); 298 | } 299 | authors_array.push(toml_edit::Value::InlineTable(table)); 300 | } 301 | update_section( 302 | &mut doc, 303 | &["project", "authors"], 304 | toml_edit::Item::Value(toml_edit::Value::Array(authors_array)), 305 | ); 306 | write_toml(&project_dir.join("pyproject.toml"), &mut doc).unwrap(); 307 | 308 | let 
updated_content = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 309 | assert!(updated_content.contains(r#"{ name = "Demo Name", email = "demo.name@corp.com" }"#)); 310 | assert!(!updated_content.contains(r#"{ name = "Old Author", email = "old@example.com" }"#)); 311 | } 312 | 313 | #[test] 314 | fn test_missing_author_email() { 315 | let setup_content = r#" 316 | from setuptools import setup 317 | 318 | setup( 319 | name="test-project", 320 | version="1.0.0", 321 | author="Demo Name", 322 | description="Test project" 323 | ) 324 | "#; 325 | let pyproject_content = r#"[project] 326 | name = "test-project" 327 | version = "1.0.0" 328 | description = "Test project" 329 | "#; 330 | 331 | let (_temp_dir, project_dir) = setup_test_environment(setup_content, pyproject_content); 332 | 333 | let authors = extract_authors_from_setup_py(&project_dir).unwrap(); 334 | 335 | let mut doc = read_toml(&project_dir.join("pyproject.toml")).unwrap(); 336 | let mut authors_array = toml_edit::Array::new(); 337 | for author in &authors { 338 | let mut table = toml_edit::InlineTable::new(); 339 | table.insert( 340 | "name", 341 | toml_edit::Value::String(toml_edit::Formatted::new(author.name.clone())), 342 | ); 343 | if let Some(ref email) = author.email { 344 | table.insert( 345 | "email", 346 | toml_edit::Value::String(toml_edit::Formatted::new(email.clone())), 347 | ); 348 | } 349 | authors_array.push(toml_edit::Value::InlineTable(table)); 350 | } 351 | update_section( 352 | &mut doc, 353 | &["project", "authors"], 354 | toml_edit::Item::Value(toml_edit::Value::Array(authors_array)), 355 | ); 356 | write_toml(&project_dir.join("pyproject.toml"), &mut doc).unwrap(); 357 | 358 | let updated_content = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 359 | assert!(updated_content.contains(r#"{ name = "Demo Name" }"#)); 360 | assert!(!updated_content.contains("email")); 361 | } 362 | 363 | #[test] 364 | fn test_update_authors_with_url() { 365 | let 
setup_content = r#" 366 | from setuptools import setup 367 | 368 | setup( 369 | name="test-project", 370 | version="1.0.0", 371 | author="Demo Name", 372 | author_email="demo.name@corp.com", 373 | url="https://gitlab.com/example/project", 374 | description="Test project" 375 | ) 376 | "#; 377 | let pyproject_content = r#"[project] 378 | name = "test-project" 379 | version = "1.0.0" 380 | description = "Test project" 381 | "#; 382 | 383 | let (_temp_dir, project_dir) = setup_test_environment(setup_content, pyproject_content); 384 | 385 | // Update URL 386 | if let Some(url) = SetupPyMigrationSource::extract_url(&project_dir).unwrap() { 387 | uv_migrator::utils::pyproject::update_url(&project_dir, &url).unwrap(); 388 | } 389 | 390 | // Update authors 391 | let authors = extract_authors_from_setup_py(&project_dir).unwrap(); 392 | 393 | let mut doc = read_toml(&project_dir.join("pyproject.toml")).unwrap(); 394 | let mut authors_array = toml_edit::Array::new(); 395 | for author in &authors { 396 | let mut table = toml_edit::InlineTable::new(); 397 | table.insert( 398 | "name", 399 | toml_edit::Value::String(toml_edit::Formatted::new(author.name.clone())), 400 | ); 401 | if let Some(ref email) = author.email { 402 | table.insert( 403 | "email", 404 | toml_edit::Value::String(toml_edit::Formatted::new(email.clone())), 405 | ); 406 | } 407 | authors_array.push(toml_edit::Value::InlineTable(table)); 408 | } 409 | update_section( 410 | &mut doc, 411 | &["project", "authors"], 412 | toml_edit::Item::Value(toml_edit::Value::Array(authors_array)), 413 | ); 414 | write_toml(&project_dir.join("pyproject.toml"), &mut doc).unwrap(); 415 | 416 | let updated_content = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 417 | assert!(updated_content.contains(r#"{ name = "Demo Name", email = "demo.name@corp.com" }"#)); 418 | assert!( 419 | updated_content.contains(r#"urls = { repository = "https://gitlab.com/example/project" }"#) 420 | ); 421 | } 422 | 423 | #[test] 424 | 
fn test_update_urls_existing_content() { 425 | let setup_content = r#" 426 | from setuptools import setup 427 | 428 | setup( 429 | name="test-project", 430 | version="1.0.0", 431 | author="Demo Name", 432 | author_email="demo.name@corp.com", 433 | url="https://gitlab.com/updated/project", 434 | description="Test project" 435 | ) 436 | "#; 437 | let pyproject_content = r#"[project] 438 | name = "test-project" 439 | version = "1.0.0" 440 | description = "Test project" 441 | authors = [ 442 | { name = "Old Author", email = "old@example.com" } 443 | ] 444 | urls = { repository = "https://oldproject.example.com" } 445 | requires-python = ">=3.8" 446 | "#; 447 | 448 | let (_temp_dir, project_dir) = setup_test_environment(setup_content, pyproject_content); 449 | 450 | // Update URL 451 | if let Some(url) = SetupPyMigrationSource::extract_url(&project_dir).unwrap() { 452 | uv_migrator::utils::pyproject::update_url(&project_dir, &url).unwrap(); 453 | } 454 | 455 | // Update authors 456 | let authors = extract_authors_from_setup_py(&project_dir).unwrap(); 457 | 458 | let mut doc = read_toml(&project_dir.join("pyproject.toml")).unwrap(); 459 | let mut authors_array = toml_edit::Array::new(); 460 | for author in &authors { 461 | let mut table = toml_edit::InlineTable::new(); 462 | table.insert( 463 | "name", 464 | toml_edit::Value::String(toml_edit::Formatted::new(author.name.clone())), 465 | ); 466 | if let Some(ref email) = author.email { 467 | table.insert( 468 | "email", 469 | toml_edit::Value::String(toml_edit::Formatted::new(email.clone())), 470 | ); 471 | } 472 | authors_array.push(toml_edit::Value::InlineTable(table)); 473 | } 474 | update_section( 475 | &mut doc, 476 | &["project", "authors"], 477 | toml_edit::Item::Value(toml_edit::Value::Array(authors_array)), 478 | ); 479 | write_toml(&project_dir.join("pyproject.toml"), &mut doc).unwrap(); 480 | 481 | let updated_content = fs::read_to_string(project_dir.join("pyproject.toml")).unwrap(); 482 | 
assert!(updated_content.contains(r#"{ name = "Demo Name", email = "demo.name@corp.com" }"#)); 483 | assert!( 484 | updated_content.contains(r#"urls = { repository = "https://gitlab.com/updated/project" }"#) 485 | ); 486 | } 487 | -------------------------------------------------------------------------------- /tests/uv_versions_test.rs: -------------------------------------------------------------------------------- 1 | #[cfg(test)] 2 | mod tests { 3 | use semver::Version; 4 | 5 | use uv_migrator::utils::uv::UV_SUPPORT_BARE; 6 | 7 | #[test] 8 | fn test_version_comparison() { 9 | // Hardcode the bare version for testing to ensure consistency 10 | let bare_version = Version::parse(UV_SUPPORT_BARE).unwrap(); 11 | 12 | // Version below support threshold 13 | let below = Version::new(0, 5, 0); 14 | assert!(below < bare_version); 15 | 16 | // Version at support threshold 17 | let at = Version::new(0, 6, 0); 18 | assert!(at >= bare_version); 19 | 20 | // Version above support threshold 21 | let above = Version::new(0, 7, 0); 22 | assert!(above > bare_version); 23 | } 24 | 25 | // Test the should_use_bare_flag function directly instead of mocking UV 26 | #[test] 27 | fn test_should_use_bare_flag_with_version() { 28 | // Create a test function that isolates the version comparison logic 29 | fn should_use_bare_flag(uv_version: &str) -> bool { 30 | let uv_version = Version::parse(uv_version).unwrap(); 31 | let min_version = Version::parse(UV_SUPPORT_BARE).unwrap(); 32 | uv_version >= min_version 33 | } 34 | 35 | // Test with version below 0.6.0 36 | assert!( 37 | !should_use_bare_flag("0.5.9"), 38 | "Version 0.5.9 should NOT use --bare flag" 39 | ); 40 | 41 | // Test with version equal to 0.6.0 42 | assert!( 43 | should_use_bare_flag("0.6.0"), 44 | "Version 0.6.0 should use --bare flag" 45 | ); 46 | 47 | // Test with version above 0.6.0 48 | assert!( 49 | should_use_bare_flag("0.7.0"), 50 | "Version 0.7.0 should use --bare flag" 51 | ); 52 | } 53 | 54 | // Test that we 
correctly construct the UvCommandBuilder with the bare flag 55 | #[test] 56 | fn test_command_construction_with_bare_flag() { 57 | use std::env; 58 | 59 | // Save the current environment 60 | let had_test_var = env::var("UV_TEST_SUPPORT_BARE").is_ok(); 61 | let original_value = env::var("UV_TEST_SUPPORT_BARE").unwrap_or_default(); 62 | 63 | // Set test environment 64 | unsafe { 65 | env::set_var("UV_TEST_SUPPORT_BARE", "0.6.0"); 66 | } 67 | 68 | // Test version below threshold - shouldn't use --bare 69 | { 70 | let uv_version = Version::new(0, 5, 9); 71 | let version_supports_bare = Version::parse("0.6.0").unwrap(); 72 | let using_bare_flag = uv_version >= version_supports_bare; 73 | assert!(!using_bare_flag, "Should not use --bare with version 0.5.9"); 74 | } 75 | 76 | // Test version at threshold - should use --bare 77 | { 78 | let uv_version = Version::new(0, 6, 0); 79 | let version_supports_bare = Version::parse("0.6.0").unwrap(); 80 | let using_bare_flag = uv_version >= version_supports_bare; 81 | assert!(using_bare_flag, "Should use --bare with version 0.6.0"); 82 | } 83 | 84 | // Test version above threshold - should use --bare 85 | { 86 | let uv_version = Version::new(0, 7, 0); 87 | let version_supports_bare = Version::parse("0.6.0").unwrap(); 88 | let using_bare_flag = uv_version >= version_supports_bare; 89 | assert!(using_bare_flag, "Should use --bare with version 0.7.0"); 90 | } 91 | 92 | // Restore environment 93 | if had_test_var { 94 | unsafe { 95 | env::set_var("UV_TEST_SUPPORT_BARE", original_value); 96 | } 97 | } else { 98 | unsafe { 99 | env::remove_var("UV_TEST_SUPPORT_BARE"); 100 | } 101 | } 102 | } 103 | } 104 | --------------------------------------------------------------------------------