├── .editorconfig ├── .github └── workflows │ └── ci.yml ├── .gitignore ├── LICENSE ├── README.md ├── bootstrap-nonimble.sh ├── bootstrap.ps1 ├── choosenim ├── ci-bootstrap.cfg ├── ci-docs.cfg ├── nimph.json ├── nimph.nimble ├── src ├── nimph.nim ├── nimph.nim.cfg ├── nimph │ ├── asjson.nim │ ├── config.nim │ ├── dependency.nim │ ├── doctor.nim │ ├── group.nim │ ├── locker.nim │ ├── nimble.nim │ ├── package.nim │ ├── project.nim │ ├── requirement.nim │ ├── runner.nim │ ├── skullduggery.nim │ ├── spec.nim │ ├── thehub.nim │ ├── version.nim │ └── versiontags.nim └── rest.nim └── tests ├── nim.cfg ├── sample.cfg ├── test.nim └── tspec.nim /.editorconfig: -------------------------------------------------------------------------------- 1 | [*] 2 | indent_style = space 3 | insert_final_newline = true 4 | indent_size = 2 5 | trim_trailing_whitespace = true 6 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | schedule: 4 | - cron: '30 5 * * *' 5 | 6 | push: 7 | branches: 8 | - master 9 | paths: 10 | - '**.cfg' 11 | - '**.nims' 12 | - '**.nim' 13 | - '**.nimble' 14 | - '**.sh' 15 | - 'tests/**' 16 | - '.github/workflows/ci.yml' 17 | 18 | pull_request: 19 | branches: 20 | - '*' 21 | paths: 22 | - '**.cfg' 23 | - '**.nims' 24 | - '**.nim' 25 | - '**.nimble' 26 | - '**.sh' 27 | - 'tests/**' 28 | - '.github/workflows/ci.yml' 29 | 30 | jobs: 31 | build: 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | #os: ['windows-latest', 'macos-latest', 'ubuntu-latest'] 36 | #os: ['macos-latest', 'ubuntu-latest'] 37 | os: ['ubuntu-latest'] 38 | compiler: 39 | - name: nim 40 | version: devel 41 | #- name: nim 42 | # version: version-2-0 43 | - name: nim 44 | version: version-1-6 45 | - name: nim 46 | version: version-1-4 47 | - name: nim 48 | version: version-1-2 49 | - name: nimskull 50 | version: '^0.1.0-dev.21080' 51 | 
name: '${{ matrix.os }} (${{ matrix.compiler.name }} ${{ matrix.compiler.version }})' 52 | runs-on: ${{ matrix.os }} 53 | steps: 54 | - name: Checkout 55 | uses: actions/checkout@v4 56 | with: 57 | path: ci 58 | fetch-depth: 0 59 | # nimph will scream when this is used 60 | #filter: tree:0 61 | 62 | - if: matrix.compiler.name == 'nim' 63 | name: Setup Nim 64 | uses: alaviss/setup-nim@0.1.1 65 | with: 66 | path: nim 67 | version: ${{ matrix.compiler.version }} 68 | 69 | - if: matrix.compiler.name == 'nimskull' 70 | id: nimskull 71 | name: Setup Nimskull 72 | uses: nim-works/setup-nimskull@0.1.0 73 | with: 74 | nimskull-version: ${{ matrix.compiler.version }} 75 | 76 | - if: matrix.compiler.name == 'nimskull' 77 | name: Fetch nimble's fork for nimskull 78 | uses: actions/checkout@v4 79 | with: 80 | path: nimble 81 | repository: alaviss/nimble 82 | ref: nimskull 83 | 84 | - if: matrix.compiler.name == 'nimskull' 85 | name: Build nimble and add to PATH 86 | shell: bash 87 | run: | 88 | cd nimble 89 | nim c -d:release -o:nimble src/nimble.nim 90 | cp nimble "$NIMSKULL_BIN/nimble" 91 | env: 92 | NIMSKULL_BIN: ${{ steps.nimskull.outputs.bin-path }} 93 | 94 | - name: Fetch libgit2 95 | uses: actions/checkout@v4 96 | with: 97 | path: ci/libgit2 98 | repository: libgit2/libgit2 99 | ref: v1.7.1 100 | 101 | - name: Setup libgit2 102 | shell: bash 103 | run: | 104 | cd ci/libgit2 105 | mkdir build 106 | cd build 107 | cmake .. 108 | cmake --build . -- --quiet 109 | 110 | - name: Run tests 111 | shell: bash 112 | run: | 113 | mkdir $HOME/.nimble 114 | cd ci 115 | cp ci-bootstrap.cfg nim.cfg 116 | ./bootstrap-nonimble.sh 117 | ./nimph refresh 118 | ./nimph 119 | ./nimph doctor || true 120 | cat nim.cfg 121 | ./nimph 122 | ./nimph doctor || true 123 | cat nim.cfg 124 | cd `./nimph path balls` 125 | nim c --out:$HOME/balls balls.nim 126 | cd - 127 | echo "remove nim's config.nims...?" 
128 | ls -l `dirname \`which nim\``/../config/ 129 | rm `dirname \`which nim\``/../config/config.nims || true 130 | 131 | soExt= 132 | case "$RUNNER_OS" in 133 | macOS) 134 | soExt=dylib 135 | ;; 136 | Linux) 137 | soExt=so 138 | esac 139 | libgit2Lib=$(pwd)/libgit2/build/libgit2.$soExt 140 | 141 | nim c --define:libgit2Lib="$libgit2Lib" --passC:"-I$(pwd)/libgit2/include" --define:ssl -r tests/test.nim 142 | nim c --define:libgit2Lib="$libgit2Lib" --passC:"-I$(pwd)/libgit2/include" --define:ssl --define:release -r tests/test.nim 143 | 144 | - name: Docs 145 | if: matrix.compiler.version == 'version-1-6' 146 | shell: bash 147 | run: | 148 | cd ci 149 | branch=${{ github.ref }} 150 | branch=${branch##*/} 151 | mv ci-docs.cfg nim.cfg 152 | rm -rf deps 153 | mkdir deps 154 | ./nimph doctor || true 155 | cat nim.cfg 156 | nim doc --project --outdir:docs \ 157 | '--git.url:https://github.com/${{ github.repository }}' \ 158 | '--git.commit:${{ github.sha }}' \ 159 | "--git.devel:$branch" \ 160 | src/nimph.nim 161 | # Ignore failures for older Nim 162 | cp docs/{the,}index.html || true 163 | 164 | - name: Pages 165 | if: > 166 | github.event_name == 'push' && github.ref == 'refs/heads/master' && 167 | matrix.os == 'ubuntu-latest' && matrix.compiler.version == 'version-1-6' 168 | uses: crazy-max/ghaction-github-pages@v1 169 | with: 170 | build_dir: ci/docs 171 | env: 172 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 173 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | nimblemeta.json 2 | nimbledeps 3 | deps 4 | bin 5 | tests/tconfig 6 | tests/tnimble 7 | tests/tpackage 8 | tests/tspec 9 | tests/tgit 10 | tests/ttags 11 | nim.cfg 12 | nimph.exe 13 | libcurl.so* 14 | libgit2.so* 15 | libmbedcrypto.so* 16 | libmbedtls.so* 17 | libmbedx509.so* 18 | libnghttp2.so* 19 | libssh2.so* 20 | libz.so* 21 | 
-------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Andy Davidoff 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # nimph 2 | 3 | [![Test Matrix](https://github.com/disruptek/nimph/workflows/CI/badge.svg)](https://github.com/disruptek/nimph/actions?query=workflow%3ACI) 4 | [![GitHub release (latest by date)](https://img.shields.io/github/v/release/disruptek/nimph?style=flat)](https://github.com/disruptek/nimph/releases/latest) 5 | ![Minimum supported Nim version](https://img.shields.io/badge/nim-1.2.14%2B-informational?style=flat&logo=nim) 6 | ![Maximum supported Nim version](https://img.shields.io/badge/nim-1.6.7%2B-informational?style=flat&logo=nim) 7 | [![License](https://img.shields.io/github/license/disruptek/nimph?style=flat)](#license) 8 | 9 | nim package hierarchy manager from the future 10 | 11 | or: _How I Learned to Stop Worrying and Love the Search Path_ 12 | 13 | ## Features 14 | 15 | - truly path-agnostic dependencies 16 | - native git integration for speed 17 | - github api integration for comfort 18 | - reproducible builds via lockfiles 19 | - immutable cloud-based distributions 20 | - wildcard, tilde, and caret semver 21 | - absolutely zero configuration 22 | - total interoperability with Nimble 23 | - full-featured choosenim replacement 24 | 25 | ## Usage 26 | 27 | You can run `nimph` from anywhere in your project tree; it will simply search 28 | upwards until it finds a `.nimble` file and act as if you ran it there. 29 | 30 | Most operations do require that you be within a project, but `nimph` is 31 | flexible enough to operate on local dependencies, global packages, and anything 32 | in-between. You can run it on any package, anywhere, and it will provide useful 33 | output (and optional repair) of the environment it finds itself in. 
34 | 35 | - [Searching for New Nim Packages](https://github.com/disruptek/nimph#search) 36 | - [Adding Packages to the Environment](https://github.com/disruptek/nimph#clone) 37 | - [Checking the Environment for Errors](https://github.com/disruptek/nimph#doctor) 38 | - [Quickly Forking an Installed Package](https://github.com/disruptek/nimph#fork) 39 | - [Finding a Path via Nim Import Name](https://github.com/disruptek/nimph#path) 40 | - [Locking the Dependency Tree by Name](https://github.com/disruptek/nimph#lock) 41 | - [Specifying Arbitrary Package Versions](https://github.com/disruptek/nimph#roll) 42 | - [Upgrading Dependencies Automatically](https://github.com/disruptek/nimph#upgrade) 43 | - [Downgrading Dependencies Automatically](https://github.com/disruptek/nimph#downgrade) 44 | - [Cutting New Release Versions+Tags](https://github.com/disruptek/nimph#bump) 45 | - [Adding Any Missing Tags Automatically](https://github.com/disruptek/nimph#tag) 46 | - [Running Commands on All Dependencies](https://github.com/disruptek/nimph#run) 47 | - [Outputting the Dependency Graph](https://github.com/disruptek/nimph#graph) 48 | - [Git Subcommand Auto-Integration](https://github.com/disruptek/nimph#git-subcommands) 49 | - [Nimble Subcommand Auto-Integration](https://github.com/disruptek/nimph#nimble-subcommands) 50 | - [Tweaking Nimph Behavior Constants](https://github.com/disruptek/nimph#hacking) 51 | - [Using `choosenim` to Select Nim Toolchains](https://github.com/disruptek/nimph#choose-nimph-choose-nim) 52 | - [Nimph Module Documentation](https://github.com/disruptek/nimph#documentation) 53 | 54 | ## Demonstration 55 | 56 | This is a demo screencast of using Nimph to setup a project for development. 57 | Starting with nothing more than the project's repository, we'll... 58 | 59 | 1. show the `bot.nimble` that specifies varied dependencies 60 | 1. show the `nim.cfg` that specifies compilation options 61 | 1. 
edit the `nim.cfg` to configure a directory to hold local dependencies 62 | 1. create a `deps` directory to hold those packages 63 | 1. run `nimph` to evaluate the state of the environment -- verdict: 😦 64 | 1. run `nimph doctor` to converge the environment to our specifications 65 | 1. run `nimph` to confirm the environment state -- verdict: 😊 66 | 1. show the `nim.cfg` to reveal any changes made by `nimph doctor` 67 | 68 | [![asciicast](https://asciinema.org/a/aoDAm39yjoKenepl15L3AyfzN.svg)](https://asciinema.org/a/aoDAm39yjoKenepl15L3AyfzN) 69 | 70 | ## Installation 71 | 72 | A `bootstrap-nonimble.sh` script is provided which retrieves the dependencies 73 | and builds Nimph without requiring `nimble`. 74 | 75 | ### Windows 76 | 77 | I no longer test Windows via the CI because I have no way to debug it. 78 | That said, Windows builds may work just fine for you using the older 79 | `bootstrap.ps1` which relies upon `nimble` to install dependencies. 80 | 81 | ### OS X 82 | 83 | I no longer test OS X via the CI because I cannot be bothered to debug 84 | libgit2/libssh behavior there. The tests for nim-1.2 do pass, however. 85 | 86 | ### GitHub Integration 87 | 88 | You may want to [create a new GitHub personal access token 89 | here](https://github.com/settings/tokens) and then add it to your environment 90 | as `NIMPH_TOKEN` or `GITHUB_TOKEN`. 91 | 92 | If you skip this step, Nimph will try to use a Nimble token for **search**es, 93 | and it will also try to read any `hub` or `ghi` credentials. Notably, the 94 | **fork** subcommand will not work without adequate scope authorization. 95 | 96 | ## Subcommand Usage 97 | 98 | ### Search 99 | 100 | The `search` subcommand is used to query GitHub for 101 | packages. Arguments should match [GitHub search syntax for 102 | repositories](https://help.github.com/en/github/searching-for-information-on-github/searching-for-repositories) and for convenience, a `language:nim` 103 | qualifier will be included. 
104 | 105 | Results are output in **increasing order of relevance** to reduce scrolling; 106 | _the last result is the best_. 107 | 108 | ``` 109 | $ nimph search pegs 110 | 111 | https://github.com/GlenHertz/peg pushed 2017-11-19 112 | 645 kb 0 issues 0 stars 0 forks created 2017-11-18 113 | PEG version of grep 114 | 115 | https://github.com/lguzzon-NIM/simplePEG pushed 2019-09-05 116 | 82 kb 0 issues 0 stars 0 forks created 2017-09-05 117 | Simple Peg 118 | 119 | https://github.com/zevv/npeg pushed 2019-11-27 120 | 9125 kb 2 issues 66 stars 2 forks created 2019-03-08 121 | PEGs for Nim, another take 122 | ``` 123 | 124 | ### Clone 125 | 126 | The `clone` subcommand performs git clones to add packages to your environment. 127 | Pass this subcommand some GitHub search syntax and it will download the best 128 | matching package, or you can supply a URL directly. Local URLs are fine, too. 129 | 130 | Where the package ends up is a function of your existing compiler settings 131 | as recorded in relevant `nim.cfg` files; we'll search all `--nimblePath` 132 | statements, but according to a convention also adopted by Nimble... 133 | 134 | _The last specified --nimblePath, as processed by the `nim.cfg` files, is the 135 | "default" for the purposes of new package additions._ 136 | 137 | ``` 138 | $ nimph clone npeg 139 | 👭cloning git://github.com/zevv/npeg.git... 140 | 👌cloned git://github.com/zevv/npeg.git 141 | ``` 142 | 143 | ### Doctor 144 | 145 | The interesting action happens in the `doctor` subcommand. When run without any 146 | arguments, `nimph` effectively runs the `doctor` with a `--dry-run` option, to 147 | perform non-destructive evaluation of your environment and report any issues. 148 | In this mode, logging is elevated to report package versions and a summary of 149 | their last commit or tag. 
150 | 151 | ``` 152 | $ nimph 153 | ✔️ 8a7114 bot cleanups 154 | ✔️ 775047 swayipc we can remove this notice now 155 | ✔️ v0.4.5 nesm Version 0.4.5 156 | ✔️ 5186f4 cligen Add a test program and update release notes as per last commit to fix https://github.com/c-blake/cligen/issues/120 157 | ✔️ c7ba0f dbus Merge pull request #3 from SolitudeSF/case 158 | ✔️ 57f244 c2nim new option: annotate procs with `{.noconv.}` 159 | ✔️ 54ed41 npeg Added section about non-consuming operators and captures to the README. Fixes #17 160 | ✔️ 183eaa unittest2 remove redundant import 161 | ✔️ v0.3.0 irc v0.3.0 162 | ✔️ fe276f rest add generated docs 163 | ✔️ 5d72a4 foreach clarify example 164 | ✔️ 5493b2 xs add some docs about google 165 | ✔️ 1.0.1 cutelog ladybug easier to see 166 | ✔️ 9d75fe bump update docs 167 | ✔️ 1.0.2 github fix nimble again 168 | ✔️ 6830ae nimph add asciinema demo 169 | ✔️ b6b8d5 compiler [backport] always set `fileInfoIdx.isKnownFile` (#12773) 170 | ✔️ v0.3.3 nimterop v0.3.3 171 | ✔️ v0.13.0 regex bump 0.13.0 (#52) 172 | ✔️ 2afc38 unicodedb improve decomposition performance (#11) 173 | ✔️ v0.5.1 unicodeplus Fix ascii range (#2) 174 | ✔️ v0.1.1 nimgit2 v0.1.1 175 | ✔️ v0.5.0 parsetoml Update to version 0.5.0 176 | 👌bot version 0.0.11 lookin' good 177 | ``` 178 | When run as `nimph doctor`, any problems discovered will be fixed, if possible. 179 | This includes cloning missing packages for which we can determine a URL, 180 | adjusting path settings in the project's `nim.cfg`, and similar housekeeping. 181 | 182 | ``` 183 | $ nimph doctor 184 | 👌bot version 0.0.11 lookin' good 185 | ``` 186 | 187 | ### Fork 188 | 189 | The `fork` subcommand is used to fork an installed dependency in your GitHub 190 | account and add a new git `origin` remote pointing at your new fork. The 191 | original `origin` remote is renamed to `upstream` by default. These constants 192 | may be easily changed; see **Hacking** below. 
193 | 194 | This allows you to quickly move from merely testing a package to improving it 195 | and sharing your work upstream. 196 | 197 | ``` 198 | $ nimph fork npeg 199 | 🍴forking npeg-#54ed418e80f1e1b14133ed383b9c585b320a66cf 200 | 🔱https://github.com/disruptek/npeg 201 | ``` 202 | 203 | ### Path 204 | 205 | The `path` subcommand is used to retrieve the filesystem path to a package 206 | given the Nim symbol you might use to import it. For consistency, the package 207 | must be installed. 208 | 209 | In contrast to Nimble, you can specify multiple symbols to search for, and the 210 | symbols are matched without regard to underscores or capitalization. 211 | ``` 212 | $ nimph path nimterop irc 213 | /home/adavidoff/git/bot/deps/pkgs/nimterop-#v0.3.3 214 | /home/adavidoff/git/bot/deps/pkgs/irc-#v0.3.0 215 | ``` 216 | 217 | If you want to limit your search to packages that are part of your project's 218 | dependency tree, add the `--strict` switch: 219 | 220 | ``` 221 | $ nimph path coco 222 | /home/adavidoff/git/nimph/deps/pkgs/coco-#head 223 | 224 | $ nimph path --strict coco 225 | couldn't find a dependency importable as `coco` 226 | ``` 227 | 228 | It's useful to create a shell function to jump into dependency directories so 229 | you can quickly hack at them. 230 | 231 | ```bash 232 | #!/bin/bash 233 | function goto { pushd `nimph path $1`; } 234 | ``` 235 | 236 | or 237 | 238 | ```fish 239 | #!/bin/fish 240 | function goto; pushd (nimph path $argv); end 241 | ``` 242 | 243 | ### Lock 244 | 245 | The `lock` subcommand writes the current dependency tree to a JSON file; see 246 | **Hacking** below to customize its name. You pass arguments to give this record 247 | a name that you can use to retrieve the dependency tree later. Multiple such 248 | _lockfiles_ may be cached in a single file. 
249 | 250 | ``` 251 | $ nimph lock works with latest npeg 252 | 👌locked nimph-#0.0.26 as `works with latest npeg` 253 | ``` 254 | 255 | ### Unlock 256 | 257 | The `unlock` subcommand reads a dependency tree previously saved with `lock` 258 | and adjusts the environment to match, installing any missing dependencies and 259 | rolling repositories to the versions that were recorded previously. 260 | 261 | ``` 262 | $ nimph unlock goats 263 | unsafe lock of `regex` for regex>=0.10.0 as #ff6ab8297c72f30e4da34daa9e8a60075ce8df7b 264 | 👭cloning https://github.com/zevv/npeg... 265 | rolled to #e3243f6ff2d05290f9c6f1e3d3f1c725091d60ab to meet git://github.com/disruptek/cutelog.git##1.1.1 266 | ``` 267 | 268 | ### Roll 269 | 270 | The `roll` subcommand lets you supply arbitrary requirements which are 271 | evaluated exactly as if they appeared in your package specification file. For 272 | shell escaping reasons, each such requirement should be a quoted string. 273 | 274 | ``` 275 | $ nimph roll "nimterop == 0.3.4" 276 | rolled to #v0.3.4 to meet nimterop>=0.3.3 277 | 👌nimph is lookin' good 278 | ``` 279 | 280 | Nimph will ensure that the new requirement doesn't break any existing 281 | requirements of the project or any of its dependencies. 282 | 283 | ``` 284 | $ nimph roll "nimterop > 6" 285 | nimterop*6 unmet by nimterop-#v0.3.4 286 | failed to fix all dependencies 287 | 👎nimph is not where you want it 288 | ``` 289 | 290 | As Nimble does not yet support caret (`^`), tilde (`~`), or wildcard (`*`), 291 | `roll` is the only way to experiment with these operators in requirements. 292 | 293 | ``` 294 | $ nimph roll "nimterop 0.3.*" 295 | rolled to #v0.3.6 to meet nimterop>=0.3.3 296 | 👌nimph is lookin' good 297 | ``` 298 | 299 | You can also use `roll` to resolve packages that are named in Nimble's official 300 | package directory but aren't hosted on GitHub. 301 | 302 | ``` 303 | $ nimph roll nesm 304 | 👭cloning https://gitlab.com/xomachine/NESM.git... 
305 | rolled to #v0.4.5 to meet nesm** 306 | 👌xs is lookin' good 307 | ``` 308 | 309 | ### Upgrade 310 | 311 | The `upgrade` subcommand resolves the project's dependencies and attempts to 312 | upgrade any git clones to the latest release tag that matches the project's 313 | requirements. 314 | 315 | The `outdated` subcommand is an alias equivalent to `upgrade --dry-run`: 316 | 317 | ``` 318 | $ nimph outdated 319 | would upgrade bump from 1.8.16 to 1.8.17 320 | would upgrade nimph from 0.3.2 to 0.4.1 321 | would upgrade nimterop from 0.3.3 to v0.3.5 322 | 👎bot is not where you want it 323 | ``` 324 | 325 | Upgrade individual packages by specifying the _import name_. 326 | 327 | ``` 328 | $ nimph upgrade swayipc 329 | rolled swayipc from 3.1.0 to 3.1.3 330 | the latest swayipc release of 3.1.4 is masked 331 | 👌bot is up-to-date 332 | ``` 333 | 334 | Upgrade all dependencies at once by omitting any module names. 335 | 336 | ``` 337 | $ nimph upgrade 338 | the latest swayipc release of 3.1.4 is masked 339 | rolled foreach from 1.0.0 to 1.0.2 340 | rolled cutelog from 1.0.1 to 1.1.1 341 | rolled bump from 1.8.11 to 1.8.16 342 | rolled github from 1.0.1 to 1.0.2 343 | rolled nimph from 0.1.0 to 0.2.1 344 | rolled regex from 0.10.0 to v0.13.0 345 | rolled unicodedb from 0.6.0 to v0.7.2 346 | 👌bot is up-to-date 347 | ``` 348 | 349 | ### Downgrade 350 | 351 | The `downgrade` subcommand performs the opposite action to the upgrade 352 | subcommand. 
353 | 354 | ``` 355 | $ nimph downgrade 356 | rolled swayipc from 3.1.4 to 3.1.0 357 | rolled cligen from 0.9.41 to v0.9.40 358 | rolled foreach from 1.0.2 to 1.0.0 359 | rolled cutelog from 1.1.1 to 1.0.1 360 | rolled bump from 1.8.16 to 1.8.11 361 | rolled github from 1.0.2 to 1.0.1 362 | rolled nimph from 0.3.2 to 0.3.0 363 | rolled regex from 0.13.0 to v0.10.0 364 | rolled unicodeplus from 0.5.1 to v0.5.0 365 | 👌bot is lookin' good 366 | ``` 367 | 368 | ### Bump 369 | 370 | The `bump` tool is included as a dependency; it provides easy version and tag incrementing. 371 | 372 | ``` 373 | $ bump fixed a bug 374 | 🎉1.0.3: fixed a bug 375 | 🍻bumped 376 | ``` 377 | 378 | For complete `bump` documentation, see https://github.com/disruptek/bump 379 | 380 | ### Tag 381 | 382 | The `tag` subcommand operates on a clean project and will roll the repository 383 | as necessary to examine any changes to your package configuration, noting any 384 | commits that: 385 | 386 | - introduced a new version of the package but aren't pointed to by a tag, _and_ 387 | - introduced a new version for which there exists no tag parsable as that version 388 | 389 | ``` 390 | $ nimph tag --dry-run --log-level=lvlInfo 391 | bump is missing a tag for version 1.1.0 392 | version 1.1.0 arrived in commit-009d45a977a688d22a9f1b14a21b6bd1a064760e 393 | use the `tag` subcommand to add missing tags 394 | run without --dry-run to fix these 395 | ``` 396 | 397 | The above conditions suggest that if you don't want to use this particular 398 | commit for your tag, you can simply point the tag at a different commit; Nimph 399 | won't change it on you. 400 | 401 | ``` 402 | $ git tag -a "re-release_of_1.1.0_just_in_time_for_the_holidays" 0abe7a9f0b5a05f2dd709f2b120805cc0cdd9668 403 | ``` 404 | 405 | Alternatively, if you don't want a version tag to be used by package managers, 406 | you can give the tag a name that won't parse as a version. 
Having found a tag 407 | for the commit, Nimph won't warn you that the commit needs tagging. 408 | 409 | ``` 410 | $ git tag -a "oops_this_was_compromised" 0abe7a9f0b5a05f2dd709f2b120805cc0cdd9668 411 | ``` 412 | 413 | When run without `--dry-run`, any missing tags are added automatically. 414 | 415 | ``` 416 | $ nimph tag --log-level=lvlInfo 417 | created new tag 1.1.0 for 009d45a977a688d22a9f1b14a21b6bd1a064760e 418 | 👌bump tags are lookin' good 419 | ``` 420 | 421 | Incidentally, these command-line examples demonstrate adjusting the log-level 422 | to increase verbosity. 423 | 424 | ### Run 425 | 426 | The `run` subcommand lets you invoke arbitrary programs in the root of each 427 | dependency of your project. 428 | 429 | ``` 430 | $ nimph run pwd 431 | /home/adavidoff/git/Nim 432 | /home/adavidoff/git/nimph/deps/pkgs/github-1.0.2 433 | /home/adavidoff/git/nimph/deps/pkgs/npeg-0.20.0 434 | /home/adavidoff/git/nimph/deps/pkgs/rest-#head 435 | /home/adavidoff/git/nimph/deps/pkgs/foreach-#head 436 | /home/adavidoff/git/nimph/deps/pkgs/cligen-#head 437 | /home/adavidoff/git/nimph/deps/pkgs/bump-1.8.15 438 | /home/adavidoff/git/nimph/deps/pkgs/cutelog-1.1.1 439 | /home/adavidoff/git/nimph/deps/pkgs/nimgit2-0.1.1 440 | /home/adavidoff/git/nimph/deps/pkgs/nimterop-0.3.3 441 | /home/adavidoff/git/nimph/deps/pkgs/regex-#v0.13.0 442 | /home/adavidoff/git/nimph/deps/pkgs/unicodedb-0.7.2 443 | /home/adavidoff/git/nimph/deps/pkgs/unicodeplus-0.5.0 444 | /home/adavidoff/git/nimph/deps/pkgs/unittest2-#head 445 | ``` 446 | 447 | To pass switches to commands `run` in your dependencies, use the `--` as a stopword. 448 | 449 | ``` 450 | $ nimph run -- head -1 LICENSE 451 | /bin/head: cannot open 'LICENSE' for reading: No such file or directory 452 | head -1 LICENSE 453 | head didn't like that in /home/adavidoff/git/Nim 454 | MIT License 455 | Copyright 2019 Ico Doornekamp 456 | MIT License 457 | MIT License 458 | Copyright (c) 2015,2016,2017,2018,2019 Charles L. Blake. 
459 | MIT License 460 | MIT License 461 | MIT License 462 | MIT License 463 | MIT License 464 | MIT License 465 | MIT License 466 | /bin/head: cannot open 'LICENSE' for reading: No such file or directory 467 | head -1 LICENSE 468 | head didn't like that in /home/adavidoff/git/nimph/deps/pkgs/unittest2-#head 469 | ``` 470 | 471 | Finally, you can use the `--git` switch to limit `run` to dependencies with 472 | Git repositories; see [Git Subcommands](https://github.com/disruptek/nimph#git-subcommands) for examples. 473 | 474 | ### Graph 475 | 476 | The `graph` subcommand dumps some _very basic_ details about discovered 477 | dependencies and their associated packages and projects. 478 | 479 | ``` 480 | $ nimph graph 481 | 482 | requirement: swayipc>=3.1.4 from xs 483 | package: https://github.com/disruptek/swayipc 484 | 485 | requirement: cligen>=0.9.41 from xs 486 | requirement: cligen>=0.9.40 from bump 487 | package: https://github.com/c-blake/cligen.git 488 | directory: /home/adavidoff/.nimble/pkgs/cligen-0.9.41 489 | project: cligen-#b144d5b3392bac63ed49df3e1f176becbbf04e24 490 | 491 | requirement: dbus** from xs 492 | package: https://github.com/zielmicha/nim-dbus 493 | 494 | requirement: irc>=0.2.1 from xs 495 | package: https://github.com/nim-lang/irc 496 | 497 | requirement: https://github.com/disruptek/cutelog.git>=1.0.1 from xs 498 | requirement: git://github.com/disruptek/cutelog.git>=1.1.0 from bump 499 | package: git://github.com/disruptek/cutelog.git 500 | 501 | requirement: bump>=1.8.11 from xs 502 | package: file:///home/adavidoff/.nimble/pkgs/bump-1.8.13 503 | directory: /home/adavidoff/.nimble/pkgs/bump-1.8.13 504 | project: bump-1.8.13 505 | ``` 506 | 507 | Like other subcommands, you can provide _import names_ to retrieve the detail 508 | for only those dependencies, or omit any additional arguments to display all 509 | dependencies. 
510 | 511 | ``` 512 | $ nimph graph cligen 513 | 514 | requirement: cligen>=0.9.41 from xs 515 | requirement: cligen>=0.9.40 from bump 516 | package: https://github.com/c-blake/cligen.git 517 | directory: /home/adavidoff/.nimble/pkgs/cligen-0.9.41 518 | project: cligen-#b144d5b3392bac63ed49df3e1f176becbbf04e24 519 | ``` 520 | 521 | Raising the log level of the `graph` command will cause retrieval and display 522 | releases and any _other_ commits at which the package changed versions. 523 | 524 | ``` 525 | $ nimph graph --log=lvlInfo nimterop 526 | 527 | requirement: nimterop>=0.3.3 from nimgit2 528 | package: https://github.com/genotrance/nimterop.git 529 | directory: /home/adavidoff/git/nimph/deps/pkgs/nimterop-0.4.0 530 | project: nimterop-#v0.4.0 531 | tagged release commits: 532 | tag: v0.1.0 commit-c3734587a174ea2fc7e19943e6d11d024f06e091 533 | tag: v0.2.0 commit-3e9dc2fb0fd6257fd86897c1b13f10ed2a5279b4 534 | tag: v0.2.1 commit-e9120eee7840851bda8113afbc71062b29fff872 535 | tag: v0.3.0 commit-37f5faa43d446a415e8934cc1a713bb7f5c5564f 536 | tag: v0.3.1 commit-1bca308ac472796329c212410ae198c0e31d3acb 537 | tag: v0.3.2 commit-12cc08900d1bfd39579164567acad75ca021a86b 538 | tag: v0.3.3 commit-751128e75859de66e07be9888c8341fe3b553816 539 | tag: v0.3.4 commit-c878a4be05cadd512db2182181b187de2a566ce8 540 | tag: v0.3.5 commit-c4b6a01878f0f72d428a24c26153723c60f6695f 541 | tag: v0.3.6 commit-d032a2c107d7f342df79980e01a3cf35194764de 542 | tag: v0.4.0 commit-f71cf837d297192f8cddfa136e8c3cd84bbc81eb 543 | untagged version commits: 544 | ver: 0.2.0 commit-3a2395360712d2c6f27221e0887b7e3cad0be7a1 545 | ver: 0.1.0 commit-9787797d15d281ce1dd792d247fac043c72dc769 546 | ``` 547 | 548 | ### Git Subcommands 549 | 550 | There are a couple shortcuts for running common git commands inside your 551 | dependencies: 552 | 553 | - `nimph fetch` is an alias for `nimph run -- git fetch`; ie. it runs `git fetch` in each dependency package directory. 
554 | - `nimph pull` is an alias for `nimph run -- git pull`; ie. it runs `git pull` in each dependency package directory. 555 | 556 | ### Nimble Subcommands 557 | 558 | Any commands not mentioned above are passed directly to an instance of `nimble` 559 | which is run with the appropriate `nimbleDir` environment to ensure that it will 560 | operate upon the project it should. 561 | 562 | You can use this to, for example, **refresh** the official packages list, run **test**s, or build **doc**umentation for a project. 563 | 564 | ``` 565 | $ nimph refresh 566 | Downloading Official package list 567 | Success Package list downloaded. 568 | ``` 569 | 570 | ## Hacking 571 | 572 | Virtually all constants in Nimph are recorded in a single `spec` file where 573 | you can perform quick behavioral tweaks. Additionally, these constants may be 574 | overridden via `--define:key=value` statements during compilation. 575 | 576 | Notably, compiling `nimph` outside `release` or `danger` modes will increase 577 | the default log-level baked into the executable. Use a `debug` define for even 578 | more spam. 579 | 580 | Interesting procedures are exported so that you can exploit them in your own 581 | projects. 582 | 583 | Compilation flags to adjust output colors/styling/emojis are found in the 584 | project's `nimph.nim.cfg`. 585 | 586 | ## Choose Nimph, Choose Nim! 587 | 588 | The `choosenim` tool included in Nimph allows you to easily switch a symbolic 589 | link between adjacent Nim distributions, wherever you may have installed them. 590 | 591 | ### Installing `choosenim` 592 | 1. Install [jq](https://stedolan.github.io/jq/) from GitHub or wherever. 593 | 1. Add the `chosen` toolchain to your `$PATH`. 594 | 1. Run `choosenim` against any of your toolchains. 595 | ``` 596 | # after installing jq however you please... 
597 | $ set --export PATH=/directory/for/all-my-nim-installations/chosen:$PATH 598 | $ ./choosenim 1.0 599 | Nim Compiler Version 1.0.7 [Linux: amd64] 600 | Compiled at 2020-04-05 601 | Copyright (c) 2006-2019 by Andreas Rumpf 602 | 603 | git hash: b6924383df63c91f0ad6baf63d0b1aa84f9329b7 604 | active boot switches: -d:release 605 | ``` 606 | 607 | ### Using `choosenim` 608 | To list available toolchains, run `choosenim`. 609 | ``` 610 | $ choosenim 611 | . 612 | ├── 1.0 613 | ├── 1.2 614 | ├── chosen -> 1.2 615 | ├── devel 616 | └── stable -> 1.0 617 | ``` 618 | Switch toolchains by supplying a name or alias. 619 | ``` 620 | $ choosenim 1.2 621 | Nim Compiler Version 1.2.0 [Linux: amd64] 622 | Compiled at 2020-04-05 623 | Copyright (c) 2006-2020 by Andreas Rumpf 624 | 625 | git hash: 7e83adff84be5d0c401a213eccb61e321a3fb1ff 626 | active boot switches: -d:release 627 | ``` 628 | ``` 629 | $ choosenim devel 630 | Nim Compiler Version 1.3.1 [Linux: amd64] 631 | Compiled at 2020-04-05 632 | Copyright (c) 2006-2020 by Andreas Rumpf 633 | 634 | git hash: b6814be65349d22fd12944c7c3d19fd8eb44683d 635 | active boot switches: -d:release 636 | ``` 637 | ``` 638 | $ choosenim stable 639 | Nim Compiler Version 1.0.7 [Linux: amd64] 640 | Compiled at 2020-04-05 641 | Copyright (c) 2006-2019 by Andreas Rumpf 642 | 643 | git hash: b6924383df63c91f0ad6baf63d0b1aa84f9329b7 644 | ``` 645 | 646 | ### Hacking `choosenim` 647 | It's a 20-line shell script, buddy; go nuts. 648 | 649 | ## Documentation 650 | 651 | See [the documentation for the nimph module](https://disruptek.github.io/nimph/nimph.html) as generated directly from the source. 
652 | 653 | ## License 654 | MIT 655 | -------------------------------------------------------------------------------- /bootstrap-nonimble.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | PASSES="" 4 | if [ "$GITHUB_ACTIONS" = "true" ]; then 5 | if [ $(uname -s) = "Linux" ]; then 6 | LGEXT="so" 7 | else 8 | LGEXT="dylib" 9 | fi 10 | SO="$(pwd)/libgit2/build/libgit2.$LGEXT" 11 | ls -ld "$SO" 12 | PASSES="--define:libgit2Lib=$SO --passC:\"-I$(pwd)/libgit2/include\" --define:nimDebugDlOpen" 13 | fi 14 | 15 | mkdir -p temporary 16 | cd temporary 17 | 18 | git clone --depth 1 --branch 1.8.31 https://github.com/disruptek/bump.git 19 | git clone --depth 1 --branch 2.0.1 https://github.com/disruptek/cutelog.git 20 | git clone --depth 1 --branch 3.2.2 https://github.com/disruptek/gittyup.git 21 | git clone --depth 1 --branch 2.0.3 https://github.com/disruptek/cligen.git 22 | git clone --depth 1 --branch 0.26.0 https://github.com/zevv/npeg.git 23 | git clone --depth 1 --branch 1.0.2 https://github.com/disruptek/jsonconvert.git 24 | git clone --depth 1 --branch 2.1.3 https://github.com/disruptek/badresults.git 25 | git clone --depth 1 --branch 3.0.0 https://github.com/disruptek/github.git 26 | git clone --depth 1 --branch 0.0.7 https://github.com/disruptek/ups.git 27 | git clone --depth 1 --branch 0.1.7 https://github.com/alaviss/hlibgit2.git 28 | git clone --depth 1 --branch 0.1.5 https://github.com/haxscramper/hlibssh2.git 29 | 30 | nim c --outdir:.. --define:release --path:../src --path:hlibgit2/src --path:hlibssh2/src --path:ups --path:cligen --path:github --path:npeg/src --path:jsonconvert --path:badresults --path:bump --path:cutelog --path:gittyup $PASSES ../src/nimph.nim 31 | cd .. 
32 | 
33 | if test -x nimph; then
34 |   echo "nimph built successfully"
35 |   echo "you can safely remove the 'temporary' subdirectory"
36 | else
37 |   echo "unable to build nimph"
38 |   exit 1
39 | fi
40 | 
--------------------------------------------------------------------------------
/bootstrap.ps1:
--------------------------------------------------------------------------------
1 | if ( !(Join-Path 'src' 'nimph.nim' | Test-Path) ) {
2 |   git clone git://github.com/disruptek/nimph.git
3 |   Set-Location nimph
4 | }
5 | 
6 | $env:NIMBLE_DIR = Join-Path $PWD 'deps'
7 | New-Item -Type Directory $env:NIMBLE_DIR -Force | Out-Null
8 | 
9 | nimble --accept refresh
10 | nimble install "--passNim:--path:$(Resolve-Path 'src') --outDir:$PWD"
11 | 
--------------------------------------------------------------------------------
/choosenim:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | CHOOSE=`realpath $0`            # make note of our origin
3 | set -o pipefail; NIM=`nim --hint[Conf]:off --dump.format:json dump config | jq -r .prefixdir`/..; NIMSTATUS=$?   # pipefail: surface nim's failure, not jq's
4 | if [ "$NIM" = "null/.." ]; then # true when the prefixdir is missing
5 | NIM=`dirname \`which nim\``/../..; NIMSTATUS=$?  # fallback for 1.0 support; see #127
6 | fi
7 | if [ "$NIMSTATUS" -ne 0 ]; then # if nim threw an error due to a bad arg,
8 | exit 1                          # fail so the user can deal with it
9 | fi
10 | cd "$NIM"
11 | if [ -n "$*" ]; then           # a toolchain was requested
12 | if [ -d "$*" ]; then           # the toolchain is available
13 | rm -f chosen                   # ffs my ln -sf should remove it
14 | ln -sf "$*" chosen             # select the chosen toolchain
15 | if !
[ -f "chosen/bin/$CHOOSE" ]; then 16 | cp -p "$CHOOSE" chosen/bin # install choosenim if necessary 17 | fi 18 | nim --version # emit current toolchain version 19 | exit 0 # successful selection of toolchain 20 | fi 21 | fi 22 | tree -v -d -L 1 --noreport # report on available toolchains 23 | exit 1 # signify failure to switch 24 | -------------------------------------------------------------------------------- /ci-bootstrap.cfg: -------------------------------------------------------------------------------- 1 | --hint[Link]=off 2 | --hint[Processing]=off 3 | --hint[Cc]=off 4 | --path="$nim" 5 | --path="$config" 6 | -------------------------------------------------------------------------------- /ci-docs.cfg: -------------------------------------------------------------------------------- 1 | --hint[Link]=off 2 | --hint[Processing]=off 3 | --hint[Cc]=off 4 | --clearNimblePath 5 | --nimblePath="$config/deps/pkgs" 6 | --path="$nim" 7 | --path="$config" 8 | -------------------------------------------------------------------------------- /nimph.json: -------------------------------------------------------------------------------- 1 | { 2 | "lockfiles": { 3 | "demo of lockfiles": { 4 | "github": { 5 | "name": "github", 6 | "url": "https://github.com/disruptek/github", 7 | "release": { 8 | "operator": "#", 9 | "reference": "1.0.2" 10 | }, 11 | "requirement": { 12 | "identity": "github", 13 | "operator": "#", 14 | "release": { 15 | "operator": "#", 16 | "reference": "1.0.2" 17 | } 18 | }, 19 | "dist": "git" 20 | }, 21 | "npeg": { 22 | "name": "npeg", 23 | "url": "git://github.com/zevv/npeg.git", 24 | "release": { 25 | "operator": "#", 26 | "reference": "0.21.3" 27 | }, 28 | "requirement": { 29 | "identity": "npeg", 30 | "operator": "#", 31 | "release": { 32 | "operator": "#", 33 | "reference": "0.21.3" 34 | } 35 | }, 36 | "dist": "git" 37 | }, 38 | "rest": { 39 | "name": "rest", 40 | "url": "git://github.com/disruptek/rest.git", 41 | "release": { 42 | "operator": "#", 43 
| "reference": "1.0.0" 44 | }, 45 | "requirement": { 46 | "identity": "https://github.com/disruptek/rest.git", 47 | "operator": "#", 48 | "release": { 49 | "operator": "#", 50 | "reference": "1.0.0" 51 | } 52 | }, 53 | "dist": "git" 54 | }, 55 | "foreach": { 56 | "name": "foreach", 57 | "url": "git@github.com:disruptek/foreach.git", 58 | "release": { 59 | "operator": "#", 60 | "reference": "1.0.2" 61 | }, 62 | "requirement": { 63 | "identity": "foreach", 64 | "operator": "#", 65 | "release": { 66 | "operator": "#", 67 | "reference": "1.0.2" 68 | } 69 | }, 70 | "dist": "git" 71 | }, 72 | "cligen": { 73 | "name": "cligen", 74 | "url": "https://github.com/c-blake/cligen.git", 75 | "release": { 76 | "operator": "#", 77 | "reference": "v0.9.41" 78 | }, 79 | "requirement": { 80 | "identity": "cligen", 81 | "operator": "#", 82 | "release": { 83 | "operator": "#", 84 | "reference": "v0.9.41" 85 | } 86 | }, 87 | "dist": "git" 88 | }, 89 | "bump": { 90 | "name": "bump", 91 | "url": "https://github.com/disruptek/bump", 92 | "release": { 93 | "operator": "#", 94 | "reference": "1.8.18" 95 | }, 96 | "requirement": { 97 | "identity": "bump", 98 | "operator": "#", 99 | "release": { 100 | "operator": "#", 101 | "reference": "1.8.18" 102 | } 103 | }, 104 | "dist": "git" 105 | }, 106 | "cutelog": { 107 | "name": "cutelog", 108 | "url": "git@github.com:disruptek/cutelog.git", 109 | "release": { 110 | "operator": "#", 111 | "reference": "1.1.1" 112 | }, 113 | "requirement": { 114 | "identity": "https://github.com/disruptek/cutelog", 115 | "operator": "#", 116 | "release": { 117 | "operator": "#", 118 | "reference": "1.1.1" 119 | } 120 | }, 121 | "dist": "git" 122 | }, 123 | "nimgit2": { 124 | "name": "nimgit2", 125 | "url": "https://github.com/genotrance/nimgit2.git", 126 | "release": { 127 | "operator": "#", 128 | "reference": "v0.1.1" 129 | }, 130 | "requirement": { 131 | "identity": "nimgit2", 132 | "operator": "#", 133 | "release": { 134 | "operator": "#", 135 | "reference": 
"v0.1.1" 136 | } 137 | }, 138 | "dist": "git" 139 | }, 140 | "nimterop": { 141 | "name": "nimterop", 142 | "url": "https://github.com/genotrance/nimterop.git", 143 | "release": { 144 | "operator": "#", 145 | "reference": "v0.3.6" 146 | }, 147 | "requirement": { 148 | "identity": "nimterop", 149 | "operator": "#", 150 | "release": { 151 | "operator": "#", 152 | "reference": "v0.3.6" 153 | } 154 | }, 155 | "dist": "git" 156 | }, 157 | "regex": { 158 | "name": "regex", 159 | "url": "https://github.com/nitely/nim-regex", 160 | "release": { 161 | "operator": "#", 162 | "reference": "v0.13.0" 163 | }, 164 | "requirement": { 165 | "identity": "regex", 166 | "operator": "#", 167 | "release": { 168 | "operator": "#", 169 | "reference": "v0.13.0" 170 | } 171 | }, 172 | "dist": "git" 173 | }, 174 | "unicodedb": { 175 | "name": "unicodedb", 176 | "url": "https://github.com/nitely/nim-unicodedb", 177 | "release": { 178 | "operator": "#", 179 | "reference": "v0.7.2" 180 | }, 181 | "requirement": { 182 | "identity": "unicodedb", 183 | "operator": "#", 184 | "release": { 185 | "operator": "#", 186 | "reference": "v0.7.2" 187 | } 188 | }, 189 | "dist": "git" 190 | }, 191 | "unicodeplus": { 192 | "name": "unicodeplus", 193 | "url": "https://github.com/nitely/nim-unicodeplus", 194 | "release": { 195 | "operator": "#", 196 | "reference": "v0.5.1" 197 | }, 198 | "requirement": { 199 | "identity": "unicodeplus", 200 | "operator": "#", 201 | "release": { 202 | "operator": "#", 203 | "reference": "v0.5.1" 204 | } 205 | }, 206 | "dist": "git" 207 | }, 208 | "unittest2": { 209 | "name": "unittest2", 210 | "url": "https://github.com/stefantalpalaru/nim-unittest2", 211 | "release": { 212 | "operator": "#", 213 | "reference": "30c7d332d8ebab28d3240018f48f145ff20af239" 214 | }, 215 | "requirement": { 216 | "identity": "https://github.com/stefantalpalaru/nim-unittest2", 217 | "operator": "#", 218 | "release": { 219 | "operator": "#", 220 | "reference": "30c7d332d8ebab28d3240018f48f145ff20af239" 
221 | } 222 | }, 223 | "dist": "git" 224 | }, 225 | "": { 226 | "name": "", 227 | "url": "git@github.com:disruptek/nimph.git", 228 | "release": { 229 | "operator": "#", 230 | "reference": "ce2d0a8dbf129b05f681438e2d21f932142eb5e6" 231 | }, 232 | "requirement": { 233 | "identity": "nimph", 234 | "operator": "#", 235 | "release": { 236 | "operator": "#", 237 | "reference": "ce2d0a8dbf129b05f681438e2d21f932142eb5e6" 238 | } 239 | }, 240 | "dist": "git" 241 | } 242 | } 243 | } 244 | } -------------------------------------------------------------------------------- /nimph.nimble: -------------------------------------------------------------------------------- 1 | version = "1.1.7" 2 | author = "disruptek" 3 | description = "nim package handler from the future" 4 | license = "MIT" 5 | 6 | bin = @["nimph"] 7 | srcDir = "src" 8 | 9 | # this breaks tests 10 | #installDirs = @["docs", "tests", "src"] 11 | 12 | requires "https://github.com/disruptek/cligen >= 2.0.2 & < 3.0.0" 13 | requires "https://github.com/zevv/npeg >= 0.26.0 & < 2.0.0" 14 | requires "https://github.com/disruptek/bump >= 1.8.31 & < 2.0.0" 15 | requires "https://github.com/disruptek/github >= 3.0.0 & < 4.0.0" 16 | requires "https://github.com/disruptek/jsonconvert < 2.0.0" 17 | requires "https://github.com/disruptek/badresults >= 2.1.2 & < 3.0.0" 18 | requires "https://github.com/disruptek/cutelog >= 2.0.0 & < 3.0.0" 19 | requires "https://github.com/disruptek/gittyup >= 3.2.2 & < 4.0.0" 20 | requires "https://github.com/disruptek/ups >= 0.0.7 & < 1.0.0" 21 | 22 | when not defined(release): 23 | requires "https://github.com/disruptek/balls >= 3.9.10 & < 4.0.0" 24 | 25 | task test, "run unit tests": 26 | when defined(windows): 27 | exec """balls.cmd --define:ssl""" 28 | else: 29 | exec """balls --define:ssl""" 30 | -------------------------------------------------------------------------------- /src/nimph.nim: -------------------------------------------------------------------------------- 1 | import std/uri 
except Url 2 | import std/tables 3 | import std/os 4 | import std/strutils 5 | import std/options 6 | import std/strformat 7 | import std/sequtils 8 | 9 | import bump 10 | import gittyup 11 | import badresults 12 | 13 | import nimph/spec 14 | import nimph/runner 15 | import nimph/project 16 | import nimph/doctor 17 | import nimph/thehub 18 | import nimph/config 19 | import nimph/package 20 | import nimph/dependency 21 | import nimph/locker 22 | import nimph/group 23 | import nimph/requirement 24 | 25 | template crash(why: string) = 26 | ## a good way to exit nimph 27 | error why 28 | return 1 29 | 30 | method pretty(ex: ref Exception): string {.base.} = 31 | let 32 | prefix = $typeof(ex) 33 | result = prefix.split(" ")[^1] & ": " & ex.msg 34 | 35 | template warnException() = 36 | warn getCurrentException().pretty 37 | 38 | const 39 | logLevel = 40 | when defined(debug): 41 | lvlDebug 42 | elif defined(release): 43 | lvlNotice 44 | elif defined(danger): 45 | lvlNotice 46 | else: 47 | lvlInfo 48 | 49 | template prepareForTheWorst(body: untyped) = 50 | when defined(release) or defined(danger): 51 | try: 52 | body 53 | except: 54 | warnException 55 | error "crashing because something bad happened" 56 | quit 1 57 | else: 58 | body 59 | 60 | template setupLocalProject(project: var Project; body: untyped) = 61 | if not findProject(project, getCurrentDir()): 62 | body 63 | else: 64 | try: 65 | debug "load all configs" 66 | project.cfg = loadAllCfgs(project.repo) 67 | debug "done loading configs" 68 | except Exception as e: 69 | crash "unable to parse nim configuration: " & e.msg 70 | 71 | template setupLocalProject(project: var Project) = 72 | setupLocalProject(project): 73 | crash &"unable to find a project; try `nimble init`?" 
74 | 75 | template toggle(flags: set[Flag]; flag: Flag; switch: untyped) = 76 | when switch is bool: 77 | if switch: 78 | flags.incl flag 79 | else: 80 | flags.excl flag 81 | 82 | template composeFlags(defaults): set[Flag] = 83 | ## setup flags using cli inputs 84 | block: 85 | var 86 | flags: set[Flag] = defaults 87 | toggle(flags, Quiet, quiet) 88 | toggle(flags, Safe, safe_mode) 89 | toggle(flags, Dry, dry_run) 90 | toggle(flags, Strict, strict) 91 | toggle(flags, Force, force) 92 | toggle(flags, Network, network) 93 | flags 94 | 95 | proc findChildProjectUsing(group: DependencyGroup; name: string; 96 | flags: set[Flag]): Result[Project, string] = 97 | ## search the group for a named project using options specified in flags 98 | let 99 | name = name.destylize 100 | found = group.projectForName(name) 101 | 102 | block complete: 103 | var 104 | nature = "dependency" 105 | if found.isSome: 106 | result.ok found.get 107 | break complete 108 | elif Strict notin flags: 109 | for child in group.projects.values: 110 | if child.importName.destylize == name: 111 | result.ok child 112 | break complete 113 | nature = "project" 114 | let emsg = &"couldn't find a {nature} importable as `{name}`" # noqa 115 | result.err emsg 116 | 117 | proc searcher*(args: seq[string]; strict = false; 118 | log_level = logLevel; safe_mode = false; quiet = true; 119 | network = true; force = false; dry_run = false): int = 120 | ## cli entry to search github for nim packages 121 | 122 | # user's choice, our default 123 | setLogFilter(log_level) 124 | 125 | if args.len == 0: 126 | crash &"a search was requested but no query parameters were provided" 127 | let 128 | group = searchHub(args) 129 | if group.isNone: 130 | crash &"unable to retrieve search results from github" 131 | for repo in group.get.reversed: 132 | fatal "\n" & repo.renderShortly 133 | if group.get.len == 0: 134 | fatal &"😢no results" 135 | 136 | proc fixer*(strict = false; 137 | log_level = logLevel; safe_mode = false; quiet = 
false; 138 | network = true; force = false; dry_run = false): int = 139 | ## cli entry to evaluate and/or repair the environment 140 | 141 | # user's choice, our default 142 | setLogFilter(log_level) 143 | 144 | var 145 | project: Project 146 | setupLocalProject(project) 147 | 148 | if project.doctor(dry = dry_run): 149 | fatal &"👌{project.name} version {project.version} lookin' good" 150 | elif not dry_run: 151 | crash &"the doctor wasn't able to fix everything" 152 | else: 153 | warn "run `nimph doctor` to fix this stuff" 154 | 155 | proc nimbler*(args: seq[string]; strict = false; 156 | log_level = logLevel; safe_mode = false; quiet = true; 157 | network = true; force = false; dry_run = false): int = 158 | ## cli entry to pass-through nimble commands with a sane nimbleDir 159 | 160 | # user's choice, our default 161 | setLogFilter(log_level) 162 | 163 | var 164 | project: Project 165 | setupLocalProject(project) 166 | 167 | let 168 | nimble = project.runSomething("nimble", args) 169 | if not nimble.ok: 170 | crash &"nimble didn't like that" 171 | 172 | proc pather*(names: seq[string]; strict = false; 173 | log_level = logLevel; safe_mode = false; quiet = true; 174 | network = true; force = false; dry_run = false): int = 175 | ## cli entry to echo the path(s) of any dependencies 176 | 177 | # user's choice, our default 178 | setLogFilter(log_level) 179 | 180 | # setup flags for the operation 181 | let flags = composeFlags(defaultFlags) 182 | 183 | var 184 | project: Project 185 | setupLocalProject(project) 186 | 187 | if names.len == 0: 188 | crash &"give me an import name to retrieve its filesystem path" 189 | 190 | # setup our dependency group 191 | var group = project.newDependencyGroup(flags = flags) 192 | if not project.resolve(group): 193 | notice &"unable to resolve all dependencies for {project}" 194 | 195 | # for convenience, add the project itself if possible 196 | if not group.hasKey(project.importName): 197 | let dependency = newDependency(project) 
198 | group.add dependency.requirement, dependency 199 | 200 | for name in names.items: 201 | var 202 | child = group.findChildProjectUsing(name, flags = flags) 203 | if child.isOk: 204 | echo child.get.repo 205 | else: 206 | error child.error 207 | result = 1 208 | 209 | proc runner*(args: seq[string]; git = false; strict = false; 210 | log_level = logLevel; safe_mode = false; quiet = true; 211 | network = true; force = false; dry_run = false): int = 212 | ## this is another pather, basically, that invokes the arguments in the path 213 | let 214 | exe = args[0] 215 | args = args[1..^1] 216 | 217 | # user's choice, our default 218 | setLogFilter(log_level) 219 | 220 | # setup flags for the operation 221 | let flags = composeFlags(defaultFlags) 222 | 223 | var 224 | project: Project 225 | setupLocalProject(project) 226 | 227 | # setup our dependency group 228 | var group = project.newDependencyGroup(flags = flags) 229 | if not project.resolve(group): 230 | notice &"unable to resolve all dependencies for {project}" 231 | 232 | # make sure we visit every project that fits the requirements 233 | for req, dependency in group.pairs: 234 | for child in dependency.projects.values: 235 | if child.dist == Git or not git: 236 | withinDirectory(child.repo): 237 | info &"running {exe} in {child.repo}" 238 | let 239 | got = project.runSomething(exe, args) 240 | if not got.ok: 241 | error &"{exe} didn't like that in {child.repo}" 242 | result = 1 243 | 244 | proc rollChild(child: var Project; requirement: Requirement; goal: RollGoal; 245 | safe_mode = false; dry_run = false): bool = 246 | ## try to roll a project to meet the goal inside a given requirement 247 | 248 | # early termination means there's nowhere else to go from here 249 | result = true 250 | 251 | block: 252 | if child.dist != Git: 253 | break 254 | if child.name.toLowerAscii in ["nim", "compiler"]: 255 | debug &"ignoring the compiler" 256 | break 257 | 258 | # if there's no suitable release available, we're done 
259 | case goal: 260 | of Upgrade, Downgrade: 261 | if not child.betterReleaseExists(goal): 262 | debug &"no {goal} available for {child.name}" 263 | break 264 | of Specific: 265 | discard 266 | 267 | # if we're successful in rolling the project, we're done 268 | result = child.roll(requirement, goal = goal, dry_run = dry_run) 269 | if result: 270 | break 271 | 272 | # else let's see if we can offer useful output 273 | let 274 | best = child.tags.bestRelease(goal) 275 | case goal: 276 | of Upgrade: 277 | if child.version < best: 278 | notice &"the latest {child.name} release of {best} is masked" 279 | break 280 | of Downgrade: 281 | if child.version > best: 282 | notice &"the earliest {child.name} release of {best} is masked" 283 | break 284 | of Specific: 285 | discard 286 | 287 | # the user expected a change and got none 288 | if not dry_run: 289 | warn &"unable to {goal} {child.name}" 290 | 291 | proc updowner*(names: seq[string]; goal: RollGoal; strict = false; 292 | log_level = logLevel; safe_mode = false; quiet = true; 293 | network = true; force = false; dry_run = false): int = 294 | ## perform upgrades or downgrades of dependencies 295 | ## within project requirement specifications 296 | 297 | # user's choice, our default 298 | setLogFilter(log_level) 299 | 300 | # setup flags for the operation 301 | let flags = composeFlags(defaultFlags) 302 | 303 | var 304 | project: Project 305 | setupLocalProject(project) 306 | 307 | # setup our dependency group 308 | var group = project.newDependencyGroup(flags = flags) 309 | if not project.resolve(group): 310 | notice &"unable to resolve all dependencies for {project}" 311 | 312 | # we receive import names (or not) and upgrade or downgrade them to 313 | # opposite ends of the list of allowable versions, per our requirements 314 | if names.len == 0: 315 | for requirement, dependency in group.pairs: 316 | for child in dependency.projects.mvalues: 317 | if not child.rollChild(requirement, goal = goal, dry_run = dry_run): 
318 | result = 1 319 | else: 320 | for name in names.items: 321 | let found = group.projectForName(name) 322 | if found.isSome: 323 | var child = found.get 324 | let require = group.reqForProject(child) 325 | if require.isNone: 326 | let emsg = &"found `{name}` but not its requirement" # noqa 327 | raise newException(ValueError, emsg) 328 | if not child.rollChild(require.get, goal = goal, dry_run = dry_run): 329 | result = 1 330 | else: 331 | error &"couldn't find `{name}` among our installed dependencies" 332 | 333 | if result == 0: 334 | fatal &"👌{project.name} is lookin' good" 335 | else: 336 | fatal &"👎{project.name} is not where you want it" 337 | 338 | proc roller*(names: seq[string]; strict = false; 339 | log_level = logLevel; safe_mode = false; quiet = false; 340 | network = true; force = false; dry_run = false): int = 341 | ## roll a project's dependencies to specific requirements 342 | 343 | # user's choice, our default 344 | setLogFilter(log_level) 345 | 346 | # setup flags for the operation 347 | let flags = composeFlags(defaultFlags) 348 | 349 | var 350 | project: Project 351 | setupLocalProject(project) 352 | 353 | # setup our dependency group 354 | var group = project.newDependencyGroup(flags = flags) 355 | if not project.resolve(group): 356 | notice &"unable to resolve all dependencies for {project}" 357 | 358 | # we receive requirements and add them to the group, then 359 | # we run fixDependencies to resolve them as best as can 360 | if names.len == 0: 361 | notice &"give me requirements as string arguments; eg. 
'foo > 2.*'" 362 | result = 1 363 | return 364 | 365 | block doctor: 366 | # parse the requirements as if we pulled them right outta a .nimble 367 | let 368 | requires = parseRequires(names.join(", ")) 369 | if requires.isNone: 370 | notice &"unable to parse requirements statement(s)" 371 | result = 1 372 | break doctor 373 | 374 | # perform our usual dependency fixups using the doctor 375 | var 376 | state = DrState(kind: DrRetry) 377 | while state.kind == DrRetry: 378 | # everything seems groovy at the beginning 379 | result = 0 380 | # add each requirement to the dependency tree 381 | for requirement in requires.get.values: 382 | var 383 | dependency = newDependency(requirement) 384 | # we really don't care if requirements are added here 385 | discard group.addedRequirements(dependency) 386 | # make sure we can resolve the requirement 387 | if not project.resolve(group, requirement): 388 | notice &"unable to resolve dependencies for `{requirement}`" 389 | result = 1 390 | state.kind = DrError 391 | # this is game over 392 | break doctor 393 | if not project.fixDependencies(group, state): 394 | notice "failed to fix all dependencies" 395 | result = 1 396 | if state.kind notin {DrRetry}: 397 | break 398 | # reset the tree 399 | group.reset(project) 400 | 401 | if result == 0: 402 | fatal &"👌{project.name} is lookin' good" 403 | else: 404 | fatal &"👎{project.name} is not where you want it" 405 | 406 | proc graphProject(project: var Project; path: string; log_level = logLevel) = 407 | fatal " directory: " & path 408 | fatal " project: " & $project 409 | if project.dist == Git: 410 | # show tags for info or less 411 | if log_level <= lvlInfo: 412 | project.fetchTagTable 413 | if project.tags != nil and project.tags.len > 0: 414 | info "tagged release commits:" 415 | for tag, thing in project.tags.pairs: 416 | info &" tag: {tag:<20} {thing}" 417 | # show versions for info or less 418 | if log_level <= lvlInfo: 419 | let versions = project.versionChangingCommits 420 | 
if versions != nil and versions.len > 0: 421 | info "untagged version commits:" 422 | for ver, thing in versions.pairs: 423 | if not project.tags.hasThing(thing): 424 | info &" ver: {ver:<20} {thing}" 425 | 426 | proc graphDep(dependency: var Dependency; log_level = logLevel) = 427 | ## dump something vaguely useful to describe a dependency 428 | for pack in dependency.packages.keys: 429 | fatal " package: " & $pack 430 | for directory, project in dependency.projects.mpairs: 431 | graphProject(project, directory, log_level = log_level) 432 | 433 | proc graphDep(dependency: var Dependency; requirement: Requirement; 434 | log_level = logLevel) = 435 | ## dump something vaguely useful to describe a dependency 436 | for req in requirement.orphans: 437 | fatal "requirement: " & req.describe 438 | dependency.graphDep(log_level = log_level) 439 | 440 | proc grapher*(names: seq[string]; strict = false; 441 | log_level = logLevel; safe_mode = false; quiet = true; 442 | network = true; force = false; dry_run = false): int = 443 | ## graph requirements for the project or any of its dependencies 444 | 445 | # user's choice, our default 446 | setLogFilter(log_level) 447 | 448 | # setup flags for the operation 449 | let flags = composeFlags(defaultFlags) 450 | 451 | var 452 | project: Project 453 | setupLocalProject(project) 454 | 455 | # setup our dependency group 456 | var group = project.newDependencyGroup(flags = flags) 457 | if not project.resolve(group): 458 | notice &"unable to resolve all dependencies for {project}" 459 | 460 | # for convenience, add the project itself if possible 461 | if not group.hasKey(project.importName): 462 | let dependency = newDependency(project) 463 | group.add dependency.requirement, dependency 464 | 465 | if names.len == 0: 466 | for requirement, dependency in group.mpairs: 467 | fatal "" 468 | dependency.graphDep(requirement, log_level = log_level) 469 | else: 470 | for name in names.items: 471 | var 472 | child = 
group.findChildProjectUsing(name, flags = flags) 473 | if child.isErr: 474 | error child.error 475 | result = 1 476 | else: 477 | fatal "" 478 | let require = group.reqForProject(child.get) 479 | if require.isNone: 480 | notice &"found `{name}` but not its requirement" # noqa 481 | child.get.graphProject(child.get.repo, log_level = log_level) 482 | else: 483 | {.warning: "nim bug #12818".} 484 | for requirement, dependency in group.mpairs: 485 | if requirement == require.get: 486 | dependency.graphDep(requirement, log_level = log_level) 487 | 488 | proc dumpLockList(project: Project) = 489 | for room in project.allLockerRooms: 490 | once: 491 | fatal &"here's a list of available locks:" 492 | fatal &"\t{room.name}" 493 | 494 | proc lockfiler*(names: seq[string]; strict = false; 495 | log_level = logLevel; safe_mode = false; quiet = false; 496 | network = true; force = false; dry_run = false): int = 497 | ## cli entry to write a lockfile 498 | 499 | # user's choice, our default 500 | setLogFilter(log_level) 501 | 502 | var 503 | project: Project 504 | setupLocalProject(project) 505 | 506 | block: 507 | let name = names.join(" ") 508 | if name == "": 509 | project.dumpLockList 510 | fatal &"give me some arguments so i can name the lock" 511 | else: 512 | if project.lock(name): 513 | fatal &"👌locked {project} as `{name}`" 514 | break 515 | fatal &"👎unable to lock {project} as `{name}`" 516 | result = 1 517 | 518 | proc unlockfiler*(names: seq[string]; strict = false; 519 | log_level = logLevel; safe_mode = false; quiet = false; 520 | network = true; force = false; dry_run = false): int = 521 | ## cli entry to read a lockfile 522 | 523 | # user's choice, our default 524 | setLogFilter(log_level) 525 | 526 | var 527 | project: Project 528 | setupLocalProject(project) 529 | 530 | block: 531 | let name = names.join(" ") 532 | if name == "": 533 | project.dumpLockList 534 | fatal &"give me some arguments so i can fetch the lock by name" 535 | else: 536 | if 
project.unlock(name): 537 | fatal &"👌unlocked {project} via `{name}`" 538 | break 539 | fatal &"👎unable to unlock {project} via `{name}`" 540 | result = 1 541 | 542 | proc tagger*(strict = false; 543 | log_level = logLevel; safe_mode = false; quiet = false; 544 | network = true; force = false; dry_run = false): int = 545 | ## cli entry to add missing tags 546 | 547 | # user's choice, our default 548 | setLogFilter(log_level) 549 | 550 | var 551 | project: Project 552 | setupLocalProject(project) 553 | 554 | if project.fixTags(dry_run = dry_run, force = force): 555 | if dry_run: 556 | warn "run without --dry-run to fix these" 557 | else: 558 | crash &"the doctor wasn't able to fix everything" 559 | else: 560 | fatal &"👌{project.name} tags are lookin' good" 561 | 562 | proc forker*(names: seq[string]; strict = false; 563 | log_level = logLevel; safe_mode = false; quiet = false; 564 | network = true; force = false; dry_run = false): int = 565 | ## cli entry to remotely fork installed packages 566 | 567 | # user's choice, our default 568 | setLogFilter(log_level) 569 | 570 | # setup flags for the operation 571 | let flags = composeFlags(defaultFlags) 572 | 573 | var 574 | project: Project 575 | setupLocalProject(project) 576 | 577 | # setup our dependency group 578 | var group = project.newDependencyGroup(flags = flags) 579 | if not project.resolve(group): 580 | notice &"unable to resolve all dependencies for {project}" 581 | 582 | # for convenience, add the project itself if possible 583 | if not group.hasKey(project.importName): 584 | let dependency = newDependency(project) 585 | group.add dependency.requirement, dependency 586 | 587 | for name in names.items: 588 | var 589 | child = group.findChildProjectUsing(name, flags = flags) 590 | if child.isErr: 591 | error child.error 592 | result = 1 593 | continue 594 | let 595 | fork = child.get.forkTarget 596 | if not fork.ok: 597 | error fork.why 598 | result = 1 599 | continue 600 | info &"🍴forking {child.get}" 601 | 
let forked = forkHub(fork.owner, fork.repo) 602 | if forked.isNone: 603 | result = 1 604 | continue 605 | fatal &"🔱{forked.get.web}" 606 | case child.get.dist: 607 | of Git: 608 | let name = defaultRemote 609 | if not child.get.promoteRemoteLike(forked.get.git, name = name): 610 | notice &"unable to promote new fork to {name}" 611 | else: 612 | {.warning: "optionally upgrade a gitless install to clone".} 613 | 614 | proc cloner*(args: seq[string]; strict = false; 615 | log_level = logLevel; safe_mode = false; quiet = true; 616 | network = true; force = false; dry_run = false): int = 617 | ## cli entry to clone a package into the environment 618 | 619 | # user's choice, our default 620 | setLogFilter(log_level) 621 | 622 | # setup flags for the operation 623 | let flags = composeFlags(defaultFlags) 624 | 625 | var 626 | url: Uri 627 | name: string 628 | 629 | if args.len == 0: 630 | crash &"provide a single url, or a github search query" 631 | 632 | # if only one argument was supplied, see if we can parse it as a url 633 | if args.len == 1: 634 | try: 635 | let 636 | uri = parseUri(args[0]) 637 | if uri.isValid: 638 | url = uri 639 | name = url.importName 640 | except: 641 | discard 642 | 643 | var project: Project 644 | setupLocalProject(project) 645 | 646 | # if the input wasn't parsed to a url, 647 | if not url.isValid: 648 | # search github using the input as a query 649 | let 650 | query {.used.} = args.join(" ") 651 | hubs = searchHub(args) 652 | if hubs.isNone: 653 | crash &"unable to retrieve search results from github" 654 | 655 | # and pluck the first result, presumed to be the best 656 | block found: 657 | for repo in hubs.get.values: 658 | url = repo.git 659 | name = repo.name 660 | break found 661 | crash &"unable to find a package matching `{query}`" 662 | 663 | # if we STILL don't have a url, we're done 664 | if not url.isValid: 665 | crash &"unable to determine a valid url to clone" 666 | 667 | # perform the clone 668 | var 669 | cloned: Project 670 
| if not project.clone(url, name, cloned): 671 | crash &"problem cloning {url}" 672 | 673 | # reset our paths to, hopefully, grab the new project 674 | project.cfg = loadAllCfgs(project.repo) 675 | 676 | # setup our dependency group 677 | var group = project.newDependencyGroup(flags = flags) 678 | if not project.resolve(group): 679 | notice &"unable to resolve all dependencies for {project}" 680 | 681 | # see if we can find this project in the dependencies 682 | let needed = group.projectForPath(cloned.repo) 683 | 684 | # if it's in there, let's get its requirement and roll to meet it 685 | block relocated: 686 | if needed.isSome: 687 | let requirement = group.reqForProject(cloned) 688 | if requirement.isNone: 689 | warn &"unable to retrieve requirement for {cloned.name}" 690 | else: 691 | # rollTowards will relocate us, too 692 | if cloned.rollTowards(requirement.get): 693 | notice &"rolled {cloned.name} to {cloned.version}" 694 | # so skip the tail of this block (and a 2nd relocate) 695 | break relocated 696 | notice &"unable to meet {requirement.get} with {cloned}" 697 | # rename the directory to match head release 698 | project.relocateDependency(cloned) 699 | 700 | # try to point it at github if it looks like it's our repo 701 | if not cloned.promote: 702 | debug &"did not promote remote to ssh" 703 | 704 | template dumpHelp(fun: typed; use: string) = 705 | try: 706 | discard fun(cmdline = @["--help"], prefix = " ", 707 | usage = use, noHdr = true) 708 | except HelpOnly: 709 | discard 710 | 711 | when isMainModule: 712 | import cligen 713 | type 714 | SubCommand = enum 715 | scHelp = "--help" 716 | scDoctor = "doctor" 717 | scSearch = "search" 718 | scClone = "clone" 719 | scNimble = "nimble" 720 | scPath = "path" 721 | scFork = "fork" 722 | scLock = "lock" 723 | scUnlock = "unlock" 724 | scTag = "tag" 725 | scRun = "run" 726 | scRoll = "roll" 727 | scUpDown = "outdated" 728 | scGraph = "graph" 729 | scVersion = "--version" 730 | 731 | AliasTable = 
Table[string, seq[string]] 732 | 733 | let 734 | logger = newCuteConsoleLogger() 735 | addHandler(logger) 736 | 737 | const 738 | release = projectVersion() 739 | if release.isSome: 740 | clCfg.version = $release.get 741 | else: 742 | clCfg.version = "(unknown version)" 743 | 744 | # setup some dispatchers for various subcommands 745 | dispatchGen(searcher, cmdName = $scSearch, dispatchName = "run" & $scSearch, 746 | doc="search github for packages") 747 | dispatchGen(fixer, cmdName = $scDoctor, dispatchName = "run" & $scDoctor, 748 | doc="repair (or report) env issues") 749 | dispatchGen(cloner, cmdName = $scClone, dispatchName = "run" & $scClone, 750 | doc="add a package to the env") 751 | dispatchGen(pather, cmdName = $scPath, dispatchName = "run" & $scPath, 752 | doc="fetch package path(s) by import name(s)") 753 | dispatchGen(forker, cmdName = $scFork, dispatchName = "run" & $scFork, 754 | doc="fork a package to your GitHub profile") 755 | dispatchGen(lockfiler, cmdName = $scLock, dispatchName = "run" & $scLock, 756 | doc="lock dependencies") 757 | dispatchGen(unlockfiler, cmdName = $scUnlock, dispatchName = "run" & $scUnlock, 758 | doc="unlock dependencies") 759 | dispatchGen(tagger, cmdName = $scTag, dispatchName = "run" & $scTag, 760 | doc="tag versions") 761 | dispatchGen(roller, cmdName = $scRoll, dispatchName = "run" & $scRoll, 762 | doc="roll project dependency versions") 763 | dispatchGen(updowner, cmdName = $scUpDown, dispatchName = "run" & $scUpDown, 764 | doc="upgrade or downgrade project dependencies") 765 | dispatchGen(grapher, cmdName = $scGraph, dispatchName = "run" & $scGraph, 766 | doc="graph project dependencies") 767 | dispatchGen(nimbler, cmdName = $scNimble, dispatchName = "run" & $scNimble, 768 | doc="Nimble handles other subcommands (with a proper nimbleDir)") 769 | dispatchGen(runner, cmdName = $scRun, dispatchName = "run" & $scRun, 770 | stopWords = @["--"], 771 | doc="execute the program & arguments in every dependency directory") 772 
| const 773 | # these commands exist only as aliases to other commands 774 | trueAliases = { 775 | # the nurse is aka `nimph` without arguments... 776 | "nurse": @[$scDoctor, "--dry-run"], 777 | "fix": @[$scDoctor], 778 | "fetch": @[$scRun, "--git", "--", "git", "fetch"], 779 | "pull": @[$scRun, "--git", "--", "git", "pull"], 780 | "roll": @[$scRoll], 781 | "downgrade": @[$scUpDown, "--goal=downgrade"], 782 | "upgrade": @[$scUpDown, "--goal=upgrade"], 783 | "outdated": @[$scUpDown, "--goal=upgrade", "--dry-run"], 784 | }.toTable 785 | 786 | proc makeAliases(passthrough: openArray[string]): AliasTable {.compileTime.} = 787 | # command aliases can go here 788 | result = trueAliases 789 | 790 | # add in the default subcommands 791 | for sub in SubCommand.low .. SubCommand.high: 792 | if $sub notin result: 793 | result[$sub] = @[$sub] 794 | 795 | # associate known nimble subcommands 796 | for sub in passthrough.items: 797 | if sub notin result: 798 | result[sub] = @[$scNimble, sub] 799 | 800 | const 801 | # these are our subcommands that we want to include in help 802 | dispatchees = [scDoctor, scSearch, scClone, scPath, scFork, scLock, scUnlock, 803 | scTag, scUpDown, scRoll, scGraph, scRun] 804 | 805 | # these are nimble subcommands that we don't need to warn about 806 | passthrough = ["install", "uninstall", "build", "test", "doc", "dump", 807 | "refresh", "list", "tasks"] 808 | 809 | # associate commands to dispatchers created by cligen 810 | dispatchers = { 811 | scSearch: runsearch, 812 | scDoctor: rundoctor, 813 | scClone: runclone, 814 | scPath: runpath, 815 | scFork: runfork, 816 | scLock: runlock, 817 | scUnlock: rununlock, 818 | scTag: runtag, 819 | scRun: runrun, 820 | scRoll: runroll, 821 | scGraph: rungraph, 822 | scUpDown: runoutdated, 823 | }.toTable 824 | 825 | # setup the mapping between subcommand and expanded parameters 826 | aliases = makeAliases(passthrough) 827 | 828 | var 829 | # get the command line 830 | params = commandLineParams() 831 | 832 
| # get the subcommand one way or another 833 | if params.len == 0: 834 | params = @["nurse"] 835 | let first = params[0].strip.toLowerAscii 836 | 837 | # try to parse the subcommand 838 | var sub: SubCommand 839 | if first in aliases: 840 | # expand the alias 841 | params = aliases[first].concat params[1..^1] 842 | # and then parse the subcommand 843 | sub = parseEnum[SubCommand](params[0]) 844 | else: 845 | # if we couldn't parse it, try passing it to nimble 846 | warn &"unrecognized subcommand `{first}`; passing it to Nimble..." 847 | sub = scNimble 848 | 849 | # take action according to the subcommand 850 | try: 851 | case sub: 852 | of scNimble: 853 | # remove any gratuitous `nimble` specified by user or alias 854 | if params[0] == "nimble": 855 | params = @["--"] & params[1..^1] 856 | # invoke nimble with the remaining parameters 857 | prepareForTheWorst: 858 | quit runnimble(cmdline = params) 859 | of scVersion: 860 | # report the version 861 | echo clCfg.version 862 | of scHelp: 863 | # yield some help 864 | echo "run `nimph` for a non-destructive report, or use a subcommand;" 865 | for command in dispatchees.items: 866 | let fun = dispatchers[command] 867 | once: 868 | fun.dumpHelp("all subcommands accept (at least) the following options:\n$options") 869 | case command: 870 | of scRun: 871 | fun.dumpHelp("\n$command --git $args\n$doc") 872 | of scUpDown: 873 | fun.dumpHelp("\n$command --goal=upgrade|downgrade $args\n$doc") 874 | else: 875 | fun.dumpHelp("\n$command $args\n$doc") 876 | echo "" 877 | echo " " & passthrough.join(", ") 878 | let nimbleUse = " $args\n$doc" 879 | # produce help for nimble subcommands 880 | runnimble.dumpHelp(nimbleUse) 881 | 882 | echo "\n Some additional subcommands are implemented as aliases:" 883 | for alias, arguments in trueAliases.pairs: 884 | # don't report aliases that are (trivial) aliases of themselves 😜 885 | if alias == arguments[0] and arguments.len == 1: 886 | continue 887 | let alias = "nimph " & alias 888 | echo 
&""" {alias:>16} -> nimph {arguments.join(" ")}""" 889 | else: 890 | # we'll enhance logging for these subcommands 891 | if first in ["outdated", "nurse"]: 892 | let newLog = max(0, logLevel.ord - 1).Level 893 | params = params.concat @["--log-level=" & $newLog] 894 | # invoke the appropriate dispatcher 895 | prepareForTheWorst: 896 | quit dispatchers[sub](cmdline = params[1..^1]) 897 | except HelpOnly: 898 | discard 899 | quit 0 900 | -------------------------------------------------------------------------------- /src/nimph.nim.cfg: -------------------------------------------------------------------------------- 1 | # toggle these if you like 2 | #--define:cutelogEmojis 3 | #--define:cutelogMonochrome 4 | #--define:cutelogBland 5 | 6 | # not recommended 7 | #--define:gitErrorsAreFatal 8 | 9 | # try it out; it's horrible 10 | #--define:writeNimbleDirPaths=true 11 | 12 | # github won't work without ssl enabled 13 | --define:ssl 14 | 15 | --hint[Processing]=off 16 | --hint[Link]=off 17 | #--define:npegTrace 18 | 19 | # for gratuitous search path debugging 20 | #--define:debugPath 21 | 22 | # fix nimble? 23 | --path="$config" 24 | --path="$nim" 25 | -------------------------------------------------------------------------------- /src/nimph/asjson.nim: -------------------------------------------------------------------------------- 1 | import std/uri 2 | import std/strutils 3 | import std/strformat 4 | import std/options 5 | import std/json 6 | 7 | import bump 8 | 9 | import nimph/spec 10 | import nimph/version 11 | import nimph/package 12 | import nimph/requirement 13 | 14 | proc toJson*(operator: Operator): JsonNode = 15 | result = newJString($operator) 16 | 17 | proc toOperator*(js: JsonNode): Operator = 18 | result = parseEnum[Operator](js.getStr) 19 | 20 | proc toJson*(version: Version): JsonNode = 21 | result = newJArray() 22 | for index in VersionIndex.low .. 
proc toVersion*(js: JsonNode): Version =
  ## reconstitute a Version from a json array of [major, minor, patch]
  let elems = js.getElems
  if elems.len != VersionIndex.high + 1:
    let emsg = &"dunno what to do with a version of len {elems.len}"
    raise newException(ValueError, emsg)
  result = (major: elems[0].getInt.uint,
            minor: elems[1].getInt.uint,
            patch: elems[2].getInt.uint)

proc toJson*(mask: VersionMask): JsonNode =
  ## render a VersionMask as json; a fully-open mask becomes the string "*"
  if mask.at(0).isNone:
    # it's a *.*.*
    result = newJString("*")
  else:
    # emit only the fields that are actually set
    result = newJArray()
    for index in VersionIndex.low .. VersionIndex.high:
      let field = mask.at(index)
      if field.isSome:
        result.add newJInt(field.get.int)

proc toVersionMask*(js: JsonNode): VersionMask =
  ## parse a VersionMask from json; a JString (ie. "*") yields the
  ## default (fully-open) mask
  if js.kind == JString:
    # it's a *.*.*
    return
  # otherwise, it's an array with items in it
  let elems = js.getElems
  if elems.high > VersionIndex.high:
    let emsg = &"dunno what to do with a version mask of len {elems.len}"
    raise newException(ValueError, emsg)
  for index in VersionIndex.low .. VersionIndex.high:
    if index > elems.high:
      break
    result[index] = elems[index].getInt.uint.some

proc toJson*(release: Release): JsonNode =
  ## render a Release as a json object keyed by its operator kind
  result = newJObject()
  result["operator"] = release.kind.toJson
  case release.kind:
  of Tag:
    result["reference"] = newJString(release.reference)
  of Wild, Caret, Tilde:
    result["accepts"] = release.accepts.toJson
  of Equal, AtLeast, Over, Under, NotMore:
    result["version"] = release.version.toJson

proc toRelease*(js: JsonNode): Release =
  ## reconstitute a Release from json produced by toJson above
  result = Release(kind: js["operator"].toOperator)
  case result.kind:
  of Tag:
    result.reference = js["reference"].getStr
  of Wild, Caret, Tilde:
    result.accepts = js["accepts"].toVersionMask
  of Equal, AtLeast, Over, Under, NotMore:
    result.version = js["version"].toVersion

proc toJson*(requirement: Requirement): JsonNode =
  ## render a Requirement (identity, operator, release) as a json object
  result = newJObject()
  result["identity"] = newJString(requirement.identity)
  result["operator"] = requirement.operator.toJson
  result["release"] = requirement.release.toJson

proc toRequirement*(js: JsonNode): Requirement =
  ## reconstitute a Requirement from json produced by toJson above
  result = newRequirement(js["identity"].getStr,
                          operator = js["operator"].toOperator,
                          release = js["release"].toRelease)

proc toJson*(dist: DistMethod): JsonNode =
  ## render a distribution method as its json string form
  result = newJString($dist)

proc toDistMethod*(js: JsonNode): DistMethod =
  ## parse a distribution method from its json string form
  result = parseEnum[DistMethod](js.getStr)

proc toJson*(uri: Uri): JsonNode =
  ## render a url as a json string, normalizing it first;
  ## ssh (and scheme-less) urls get the ssh treatment
  let url =
    case uri.scheme
    of "ssh", "":
      uri.convertToSsh
    else:
      uri.normalizeUrl
  result = newJString($url)

proc toUri*(js: JsonNode): Uri =
  ## parse a url from its json string form
  result = parseUri(js.getStr)
std/nre 4 | import std/strtabs 5 | import std/strformat 6 | import std/tables 7 | import std/os 8 | import std/options 9 | import std/strutils 10 | import std/algorithm 11 | 12 | import npeg 13 | import bump 14 | 15 | import nimph/spec 16 | import nimph/runner 17 | 18 | include nimph/skullduggery 19 | export compileropts 20 | export nimconf 21 | 22 | when defined(debugPath): 23 | from std/sequtils import count 24 | 25 | type 26 | ProjectCfgParsed* = object 27 | table*: TableRef[string, seq[string]] 28 | why*: string 29 | ok*: bool 30 | 31 | ConfigSection = enum 32 | LockerRooms = "lockfiles" 33 | 34 | NimphConfig* = ref object 35 | path: string 36 | js: JsonNode 37 | 38 | template excludeAllNotes(config: ConfigRef; n: typed) = 39 | # nimskull doesn't support `excl`/`incl` on notes; this should work for both 40 | # compilers, however 41 | when isNimSkull: 42 | config.notes = config.notes - {n} 43 | config.mainPackageNotes = config.mainPackageNotes - {n} 44 | config.foreignPackageNotes = config.foreignPackageNotes - {n} 45 | else: 46 | config.notes.excl n 47 | when compiles(config.mainPackageNotes): 48 | config.mainPackageNotes.excl n 49 | when compiles(config.foreignPackageNotes): 50 | config.foreignPackageNotes.excl n 51 | 52 | template setDefaultsForConfig(result: ConfigRef) = 53 | # maybe we should turn off configuration hints for these reads 54 | when defined(debugPath): 55 | result.notes.incl hintPath 56 | elif not defined(debug): 57 | excludeAllNotes(result, hintConf) 58 | when compiles(hintLineTooLong): 59 | excludeAllNotes(result, hintLineTooLong) 60 | 61 | when defined(isNimSkull): 62 | proc readConfigEventWriter(config: ConfigRef, evt: ConfigFileEvent, 63 | writeFrom: InstantiationInfo) = 64 | ## Used to print config read events. Noop for now. 
65 | discard 66 | 67 | proc parseConfigFile*(path: string): Option[ConfigRef] = 68 | ## use the compiler to parse a nim.cfg without changing to its directory 69 | var 70 | cache = newIdentCache() 71 | filename = path.absolutePath 72 | config = newConfigRef() 73 | 74 | # define symbols such as, say, nimbabel; 75 | # this allows us to correctly parse conditions in nim.cfg(s) 76 | initDefines(config.symbols) 77 | 78 | setDefaultsForConfig(config) 79 | 80 | let success = when defined(isNimSkull): 81 | readConfigFile(filename.AbsoluteFile, cache, config, readConfigEventWriter) 82 | else: 83 | readConfigFile(filename.AbsoluteFile, cache, config) 84 | 85 | if success: 86 | result = some(config) 87 | 88 | when false: 89 | proc overlayConfig(config: var ConfigRef; 90 | directory: string): bool {.deprecated.} = 91 | ## true if new config data was added to the env 92 | withinDirectory(directory): 93 | var 94 | priorProjectPath = config.projectPath 95 | let 96 | nextProjectPath = AbsoluteDir getCurrentDir() 97 | filename = nextProjectPath.string / NimCfg 98 | 99 | block complete: 100 | # do not overlay above the current config 101 | if nextProjectPath == priorProjectPath: 102 | break complete 103 | 104 | # if there's no config file, we're done 105 | result = filename.fileExists 106 | if not result: 107 | break complete 108 | 109 | try: 110 | # set the new project path for substitution purposes 111 | config.projectPath = nextProjectPath 112 | 113 | var cache = newIdentCache() 114 | result = readConfigFile(filename.AbsoluteFile, cache, config) 115 | 116 | if result: 117 | # this config is now authoritative, so force the project path 118 | priorProjectPath = nextProjectPath 119 | else: 120 | let emsg = &"unable to read config in {nextProjectPath}" # noqa 121 | warn emsg 122 | finally: 123 | # remember to reset the config's project path 124 | config.projectPath = priorProjectPath 125 | 126 | # a global that we set just once per invocation 127 | var 128 | compilerPrefixDir: 
AbsoluteDir 129 | 130 | proc findPrefixDir(): AbsoluteDir = 131 | ## determine the prefix directory for the current compiler 132 | if compilerPrefixDir.isEmpty: 133 | debug "find prefix" 134 | let 135 | compiler = runSomething("nim", 136 | @["--hints:off", 137 | "--dump.format:json", "dump", "dummy"], {poDaemon}) 138 | if not compiler.ok: 139 | warn "couldn't run the compiler to determine its location" 140 | raise newException(OSError, "cannot find a nim compiler") 141 | try: 142 | let 143 | js = parseJson(compiler.output) 144 | compilerPrefixDir = AbsoluteDir js["prefixdir"].getStr 145 | except JsonParsingError as e: 146 | warn "`nim dump` json parse error: " & e.msg 147 | raise 148 | except KeyError: 149 | warn "couldn't parse the prefix directory from `nim dump` output" 150 | compilerPrefixDir = AbsoluteDir parentDir(findExe"nim") 151 | debug "found prefix" 152 | result = compilerPrefixDir 153 | 154 | proc loadAllCfgs*(directory: string): ConfigRef = 155 | ## use the compiler to parse all the usual nim.cfgs; 156 | ## optionally change to the given (project?) 
directory first 157 | 158 | result = newConfigRef() 159 | 160 | # define symbols such as, say, nimbabel; 161 | # this allows us to correctly parse conditions in nim.cfg(s) 162 | initDefines(result.symbols) 163 | 164 | setDefaultsForConfig(result) 165 | 166 | # stuff the prefixDir so we load the compiler's config/nim.cfg 167 | # just like the compiler would if we were to invoke it directly 168 | result.prefixDir = findPrefixDir() 169 | 170 | withinDirectory(directory): 171 | # stuff the current directory as the project path 172 | result.projectPath = AbsoluteDir getCurrentDir() 173 | 174 | # now follow the compiler process of loading the configs 175 | var cache = newIdentCache() 176 | 177 | when isNimSkull: 178 | # XXX: nimskull returns whether reading was successful, but unused atm 179 | discard loadConfigs(NimCfg.RelativeFile, cache, result, readConfigEventWriter) 180 | # thanks, araq 181 | elif (NimMajor, NimMinor) >= (1, 5): 182 | var idgen = IdGenerator() 183 | loadConfigs(NimCfg.RelativeFile, cache, result, idgen) 184 | else: 185 | loadConfigs(NimCfg.RelativeFile, cache, result) 186 | 187 | when defined(debugPath): 188 | debug "loaded", result.searchPaths.len, "search paths" 189 | debug "loaded", result.lazyPaths.len, "lazy paths" 190 | for path in result.lazyPaths.items: 191 | debug "\t", path 192 | for path in result.lazyPaths.items: 193 | if result.lazyPaths.count(path) > 1: 194 | raise newException(Defect, "duplicate lazy path: " & path.string) 195 | 196 | proc appendConfig*(path: Target; config: string): bool = 197 | # make a temp file in an appropriate spot, with a significant name 198 | let 199 | temp = createTemporaryFile(path.package, dotNimble) 200 | debug &"writing {temp}" 201 | # but remember to remove the temp file later 202 | defer: 203 | debug &"removing {temp}" 204 | if not tryRemoveFile(temp): 205 | warn &"unable to remove temporary file `{temp}`" 206 | 207 | block complete: 208 | try: 209 | # if there's already a config, we'll start there 210 
proc parseProjectCfg*(input: Target): ProjectCfgParsed =
  ## parse a .cfg for any lines we are entitled to mess with;
  ## populates a table of key -> values for path/define-style settings
  result = ProjectCfgParsed(ok: false, table: newTable[string, seq[string]]())
  var
    table = result.table

  block success:
    if not fileExists($input):
      result.why = &"config file {input} doesn't exist"
      break success

    var
      content = readFile($input)
    # the grammar needs a trailing newline to terminate the final line
    if not content.endsWith("\n"):
      content &= "\n"
    let
      peggy = peg "document":
        nl <- ?'\r' * '\n'
        white <- {'\t', ' '}
        equals <- *white * {'=', ':'} * *white
        assignment <- +(1 - equals)
        comment <- '#' * *(1 - nl)
        strvalue <- '"' * *(1 - '"') * '"'
        endofval <- white | comment | nl
        anyvalue <- +(1 - endofval)
        hyphens <- '-'[0..2]
        ending <- *white * ?comment * nl
        nimblekeys <- i"nimblePath" | i"clearNimblePath" | i"noNimblePath"
        otherkeys <- i"path" | i"p" | i"define" | i"d"
        keys <- nimblekeys | otherkeys
        strsetting <- hyphens * >keys * equals * >strvalue * ending:
          table.mgetOrPut($1, @[]).add unescape($2)
        anysetting <- hyphens * >keys * equals * >anyvalue * ending:
          table.mgetOrPut($1, @[]).add $2
        toggle <- hyphens * >keys * ending:
          table.mgetOrPut($1, @[]).add "it's enabled, okay?"
        line <- strsetting | anysetting | toggle | (*(1 - nl) * nl)
        document <- *line * !1
      parsed = peggy.match(content)
    try:
      result.ok = parsed.ok
      if result.ok:
        break success
      result.why = parsed.repr
    except Exception as e:
      result.why = &"parse error in {input}: {e.msg}"

proc isEmpty*(config: NimphConfig): bool =
  ## true if the configuration holds no data (ie. its json is JNull)
  result = config.js.kind == JNull

proc newNimphConfig*(path: string): NimphConfig =
  ## instantiate a new nimph config using the given path
  result = NimphConfig(path: path.absolutePath)
  if not result.path.fileExists:
    result.js = newJNull()
  else:
    try:
      result.js = parseFile(path)
    except Exception as e:
      error &"unable to parse {path}:"
      error e.msg
      # a parse failure previously left `js` nil, which crashed later
      # accessors such as isEmpty; fall back to an empty configuration
      result.js = newJNull()

template isStdLib*(config: ConfigRef; path: string): bool =
  ## true if the path lives inside the compiler's standard library
  path.startsWith(///config.libpath)

template isStdlib*(config: ConfigRef; path: AbsoluteDir): bool =
  ## true if the directory lives inside the compiler's standard library
  path.string.isStdLib

iterator likelySearch*(config: ConfigRef; libsToo: bool): string =
  ## yield /-terminated directory paths likely added via --path;
  ## stdlib paths are elided unless `libsToo` is set
  for search in config.searchPaths.items:
    let
      search = ///search
    # we don't care about library paths
    if not libsToo and config.isStdLib(search):
      continue
    yield search
config.likelySearch(libsToo = libsToo): 328 | # limit ourselves to the repo? 329 | when WhatHappensInVegas: 330 | if search.startsWith(repo): 331 | yield search 332 | else: 333 | yield search 334 | 335 | iterator likelyLazy*(config: ConfigRef; least = 0): string = 336 | ## yield /-terminated directory paths likely added via --nimblePath 337 | # build a table of sightings of directories 338 | var popular = newCountTable[string]() 339 | for search in config.lazyPaths.items: 340 | let 341 | search = ///search 342 | parent = ///parentDir(search) 343 | when defined(debugPath): 344 | if search in popular: 345 | raise newException(Defect, "duplicate lazy path: " & search) 346 | if search notin popular: 347 | popular.inc search 348 | if search != parent: # silly: elide / 349 | if parent in popular: # the parent has to have been added 350 | popular.inc parent 351 | 352 | # sort the table in descending order 353 | popular.sort 354 | 355 | # yield the directories that exist 356 | for search, count in popular.pairs: 357 | # maybe we can ignore unpopular paths 358 | if least > count: 359 | continue 360 | yield search 361 | 362 | iterator likelyLazy*(config: ConfigRef; repo: string; least = 0): string = 363 | ## yield /-terminated directory paths likely added via --nimblePath 364 | when defined(debug): 365 | if repo != repo.absolutePath: 366 | error &"repo {repo} wasn't normalized" 367 | 368 | for search in config.likelyLazy(least = least): 369 | # limit ourselves to the repo? 370 | when WhatHappensInVegas: 371 | if search.startsWith(repo): 372 | yield search 373 | else: 374 | yield search 375 | 376 | iterator packagePaths*(config: ConfigRef; exists = true): string = 377 | ## yield package paths from the configuration as /-terminated strings; 378 | ## if the exists flag is passed, then the path must also exist. 
379 | ## this should closely mimic the compiler's search 380 | 381 | # the method by which we de-dupe paths 382 | const mode = 383 | when FilesystemCaseSensitive: 384 | modeCaseSensitive 385 | else: 386 | modeCaseInsensitive 387 | var 388 | paths: seq[string] 389 | dedupe = newStringTable(mode) 390 | 391 | template addOne(p: AbsoluteDir) = 392 | let 393 | path = ///path 394 | if path in dedupe: 395 | continue 396 | dedupe[path] = "" 397 | paths.add path 398 | 399 | if config == nil: 400 | raise newException(Defect, "attempt to load search paths from nil config") 401 | 402 | for path in config.searchPaths: 403 | addOne(path) 404 | for path in config.lazyPaths: 405 | addOne(path) 406 | when defined(debugPath): 407 | debug &"package directory count: {paths.len}" 408 | 409 | # finally, emit paths as appropriate 410 | for path in paths: 411 | if exists and not path.dirExists: 412 | continue 413 | yield path 414 | 415 | proc suggestNimbleDir*(config: ConfigRef; local = ""; global = ""): string = 416 | ## come up with a useful nimbleDir based upon what we find in the 417 | ## current configuration, the location of the project, and the provided 418 | ## suggestions for local or global package directories 419 | var 420 | local = local 421 | global = global 422 | 423 | block either: 424 | # if a local directory is suggested, see if we can confirm its use 425 | if local != "" and local.dirExists: 426 | local = ///local 427 | assert local.endsWith(DirSep) 428 | for search in config.likelySearch(libsToo = false): 429 | if search.startsWith(local): 430 | # we've got a path statement pointing to a local path, 431 | # so let's assume that the suggested local path is legit 432 | result = local 433 | break either 434 | 435 | # nim 1.1.1 supports nimblePath storage in the config; 436 | # we follow a "standard" that we expect Nimble to use, 437 | # too, wherein the last-added --nimblePath wins 438 | when NimMajor >= 1 and NimMinor >= 1: 439 | if config.nimblePaths.len > 0: 440 | 
result = config.nimblePaths[0].string 441 | break either 442 | 443 | # otherwise, try to pick a global .nimble directory based upon lazy paths 444 | for search in config.likelyLazy: 445 | if search.endsWith(PkgDir & DirSep): 446 | result = search.parentDir # ie. the parent of pkgs 447 | else: 448 | result = search # doesn't look like pkgs... just use it 449 | break either 450 | 451 | # otherwise, try to make one up using the suggestion 452 | if global == "": 453 | raise newException(IOError, "can't guess global {dotNimble} directory") 454 | global = ///global 455 | assert global.endsWith(DirSep) 456 | result = global 457 | break either 458 | 459 | iterator pathSubsFor(config: ConfigRef; sub: string; conf: string): string = 460 | ## a convenience to work around the compiler's broken pathSubs; the `conf` 461 | ## string represents the path to the "current" configuration file 462 | block: 463 | if sub.toLowerAscii notin ["nimbledir", "nimblepath"]: 464 | yield ///config.pathSubs(&"${sub}", conf) 465 | break 466 | 467 | when declaredInScope nimbleSubs: 468 | for path in config.nimbleSubs(&"${sub}"): 469 | yield ///path 470 | else: 471 | # we have to pick the first lazy path because that's what Nimble does 472 | for search in config.lazyPaths: 473 | let 474 | search = ///search 475 | if search.endsWith(PkgDir & DirSep): 476 | yield ///parentDir(search) 477 | else: 478 | yield search 479 | break 480 | 481 | iterator pathSubstitutions(config: ConfigRef; path: string; 482 | conf: string; write: bool): string = 483 | ## compute the possible path substitions, including the original path 484 | const 485 | readSubs = @["nimcache", "config", "nimbledir", "nimblepath", 486 | "projectdir", "projectpath", "lib", "nim", "home"] 487 | writeSubs = 488 | when writeNimbleDirPaths: 489 | readSubs 490 | else: 491 | @["nimcache", "config", "projectdir", "lib", "nim", "home"] 492 | var 493 | matchedPath = false 494 | when defined(debug): 495 | if not conf.dirExists: 496 | raise 
proc bestPathSubstitution(config: ConfigRef; path: string; conf: string): string =
  ## compute the best path substitution, if any; the unmodified path
  ## serves as the fallback when no substitution applies
  block found:
    for substitution in config.pathSubstitutions(path, conf, write = true):
      result = substitution
      break found
    result = path

proc removeSearchPath*(config: ConfigRef; nimcfg: Target; path: string): bool =
  ## try to remove a path from a nim.cfg; true if it was
  ## successful and false if any error prevented success
  let fn = $nimcfg

  block complete:
    # no config file at all means there's nothing to remove
    if not fn.fileExists:
      break complete

    # make sure we can parse the configuration with the compiler
    if parseConfigFile(fn).isNone:
      error &"the compiler couldn't parse {nimcfg}"
      break complete

    # make sure we can parse the configuration using our "naive" npeg parser
    let parsed = nimcfg.parseProjectCfg
    if not parsed.ok:
      error &"could not parse {nimcfg} naïvely:"
      error parsed.why
      break complete

    # sanity
    when defined(debug):
      if path.absolutePath != path:
        raise newException(Defect, &"path `{path}` is not absolute")

    var text = fn.readFile
    # iterate over the entries we parsed naively,
    for key, values in parsed.table.pairs:
      for value in values.items:
        # skipping anything that isn't a path,
        if key.toLowerAscii notin ["p", "path", "nimblepath"]:
          continue
        # and perform substitutions to see if one might match the value
        # we are trying to remove; the write flag is false so that we'll
        # use any $nimbleDir substitutions available to us, if possible
        for sub in config.pathSubstitutions(path, nimcfg.repo, write = false):
          if sub notin [value, ///value]:
            continue
          # perform a regexp substitution to remove the entry from the text
          let
            pattern = re("(*ANYCRLF)(?i)(?s)(-{0,2}" & key.escapeRe &
                         "[:=]\"?" & value.escapeRe & "/?\"?)\\s*")
            edited = text.replace(pattern, "")
          # if that didn't work, cry a bit and move on
          if edited == text:
            notice &"failed regex edit to remove path `{value}`"
            continue
          # make sure we search the new text next time through the loop
          text = edited
          result = true
          # keep performing more substitutions

    # finally, write the edited content
    fn.writeFile(text)

proc addSearchPath*(config: ConfigRef; nimcfg: Target; path: string): bool =
  ## add the given path to the given config file, using the compiler's
  ## configuration as input to determine the best path substitution
  let best = config.bestPathSubstitution(path, $nimcfg.repo)
  result = appendConfig(nimcfg, &"""--path="{best}"""")

proc excludeSearchPath*(config: ConfigRef; nimcfg: Target; path: string): bool =
  ## add an exclusion for the given path to the given config file, using the
  ## compiler's configuration as input to determine the best path substitution
  let best = config.bestPathSubstitution(path, $nimcfg.repo)
  result = appendConfig(nimcfg, &"""--excludePath="{best}"""")

iterator extantSearchPaths*(config: ConfigRef; least = 0): string =
  ## yield existing search paths from the configuration as /-terminated strings;
  ## this will yield library paths and nimblePaths with at least `least` uses
  if config == nil:
    raise newException(Defect, "attempt to load search paths from nil config")
  # path statements
  for path in config.likelySearch(libsToo = true):
    if dirExists(path):
      yield path
  # nimblePath statements
  for path in config.likelyLazy(least = least):
    if dirExists(path):
      yield path

proc addLockerRoom*(config: var NimphConfig; name: string; room: JsonNode) =
  ## add the named lockfile (in json form) to the configuration file
  # lazily create the containers on the way down
  if config.isEmpty:
    config.js = newJObject()
  if $LockerRooms notin config.js:
    config.js[$LockerRooms] = newJObject()
  config.js[$LockerRooms][name] = room
  # persist the updated configuration immediately
  writeFile(config.path, config.js.pretty)

proc getAllLockerRooms*(config: NimphConfig): JsonNode =
  ## retrieve a JObject holding all lockfiles in the configuration file;
  ## an empty JObject when none are recorded
  block found:
    if not config.isEmpty:
      if $LockerRooms in config.js:
        result = config.js[$LockerRooms]
        break found
    result = newJObject()

proc getLockerRoom*(config: NimphConfig; name: string): JsonNode =
  ## retrieve the named lockfile (or JNull) from the configuration
  let rooms = config.getAllLockerRooms
  result =
    if name in rooms:
      rooms[name]
    else:
      newJNull()
| import nimph/thehub 16 | import nimph/package 17 | import nimph/dependency 18 | import nimph/group 19 | 20 | import nimph/requirement 21 | 22 | type 23 | StateKind* = enum 24 | DrOkay = "okay" 25 | DrRetry = "retry" 26 | DrError = "error" 27 | 28 | DrState* = object 29 | kind*: StateKind 30 | why*: string 31 | 32 | proc fixTags*(project: var Project; dry_run = true; force = false): bool = 33 | block: 34 | if project.dist != Git or not project.repoLockReady: 35 | info "not looking for missing tags because the repository is unready" 36 | break 37 | 38 | # you gotta spend money to make money 39 | project.fetchTagTable 40 | if project.tags == nil: 41 | notice "not looking for missing tags because i couldn't fetch any" 42 | break 43 | 44 | # we're gonna fetch the dump to make sure our version is sane 45 | if not project.fetchDump: 46 | notice "not looking for missing tags because my dump failed" 47 | break 48 | if "version" notin project.dump or project.dump["version"].count(".") > 2: 49 | notice &"refusing to tag {project.name} because its version is bizarre" 50 | break 51 | 52 | # open the repo so we can keep it in memory for tagging purposes 53 | repository := openRepository(project.gitDir): 54 | error &"unable to open repo at `{project.repo}`: {code.dumpError}" 55 | break 56 | 57 | # match up tags to versions to commits; we should probably 58 | # copy these structures and remove matches, for efficiency... 
59 | var tagsNeeded = 0 60 | for version, commit in project.versionChangingCommits.pairs: 61 | block found: 62 | if $version in project.tags: 63 | let exists = project.tags[$version] 64 | debug &"found tag `{exists}` for {version}" 65 | break found 66 | for text, tag in project.tags.pairs: 67 | if commit.oid == tag.oid: 68 | debug &"found tag `{text}` for {version}" 69 | break found 70 | if dry_run: 71 | notice &"{project.name} is missing a tag for version {version}" 72 | info &"version {version} arrived in {commit}" 73 | result = true 74 | tagsNeeded.inc 75 | else: 76 | thing := repository.lookupThing($commit.oid): 77 | notice &"unable to lookup {commit}" 78 | continue 79 | # try to create a tag for this version and commit 80 | var 81 | nextTag = project.tags.nextTagFor(version) 82 | tagged = thing.tagCreate(nextTag, force = force) 83 | # first, try using the committer's signature 84 | if tagged.isErr: 85 | notice &"unable to create signed tag for {version}" 86 | # fallback to a lightweight (unsigned) tag 87 | tagged = thing.tagCreateLightweight(nextTag, force = force) 88 | if tagged.isErr: 89 | notice &"unable to create new tag for {version}" 90 | break found 91 | let 92 | oid = tagged.get 93 | # if that worked, let them know we did something 94 | info &"created new tag {version} as tag-{oid}" 95 | # the oid created for the tag must be freed 96 | dealloc oid 97 | 98 | # save our advice 'til the end 99 | if tagsNeeded > 0: 100 | notice "use the `tag` subcommand to add missing tags" 101 | 102 | proc fixDependencies*(project: var Project; group: var DependencyGroup; 103 | state: var DrState): bool = 104 | ## try to fix any outstanding issues with a set of dependencies 105 | 106 | # by default, everything is fine 107 | result = true 108 | # but don't come back here 109 | state.kind = DrError 110 | for requirement, dependency in group.mpairs: 111 | # if the dependency is being met, 112 | if dependency.isHappy: 113 | # but the version is not suitable, 114 | if not 
dependency.isHappyWithVersion: 115 | # try to roll any supporting project to a version that'll work 116 | for child in dependency.projects.mvalues: 117 | # if we're allowed to, i mean 118 | if Dry notin group.flags: 119 | # and if it was successful, 120 | if child.rollTowards(requirement): 121 | # report success 122 | notice &"rolled to {child.release} to meet {requirement}" 123 | break 124 | # else report the problem and set failure 125 | for req in requirement.orphans: 126 | if not req.isSatisfiedBy(child, child.release): 127 | notice &"{req.describe} unmet by {child}" 128 | result = false 129 | 130 | # the dependency is fine, but maybe we don't have it in our paths? 131 | for child in dependency.projects.mvalues: 132 | for path in project.missingSearchPaths(child): 133 | # report or update the paths 134 | if Dry in group.flags: 135 | notice &"missing path `{path}` in `{project.nimcfg}`" 136 | result = false 137 | elif project.addSearchPath(path): 138 | info &"added path `{path}` to `{project.nimcfg}`" 139 | # yay, we get to reload again 140 | project.cfg = loadAllCfgs(project.repo) 141 | else: 142 | warn &"couldn't add path `{path}` to `{project.nimcfg}`" 143 | result = false 144 | # dependency is happy and (probably) in a search path now 145 | continue 146 | 147 | # so i just came back from lunch and i was in the drive-thru and 148 | # reading reddit and managed to bump into the truck in front of me. 🙄 149 | # 150 | # this tiny guy pops out the door of the truck and practically tumbles 151 | # down the running board before arriving at the door to my car. he's 152 | # so short that all i can see is his little balled-up fist raised over 153 | # his head. 154 | # 155 | # i roll the window down, and he immediately yells, "I'M NOT HAPPY!" 156 | # to which my only possible reply was, "Well, which one ARE you, then?" 157 | # 158 | # anyway, if we made it this far, we're not happy... 
159 | if Dry in group.flags: 160 | notice &"{dependency.name} ({requirement}) missing" 161 | result = false 162 | # for now, we'll force trying again even though it's a security risk, 163 | # because it will make users happy sooner, and we love happy users 164 | else: 165 | block cloneokay: 166 | for package in dependency.packages.mvalues: 167 | var cloned: Project 168 | if project.clone(package.url, package.name, cloned): 169 | if cloned.rollTowards(requirement): 170 | notice &"rolled to {cloned.release} to meet {requirement}" 171 | else: 172 | # we didn't roll, so we may need to relocate 173 | project.relocateDependency(cloned) 174 | state.kind = DrRetry 175 | break cloneokay 176 | else: 177 | error &"error cloning {package}" 178 | # a subsequent iteration could clone successfully 179 | # no package was successfully cloned 180 | notice &"unable to satisfy {requirement.describe}" 181 | result = false 182 | 183 | # okay, we did some stuff... let's see where we are now 184 | if state.kind == DrRetry: 185 | discard 186 | elif result: 187 | state.kind = DrOkay 188 | else: 189 | state.kind = DrError 190 | 191 | proc doctor*(project: var Project; dry = true; strict = true): bool = 192 | ## perform some sanity tests against the project and 193 | ## try to fix any issues we find unless `dry` is true 194 | var 195 | flags: set[Flag] = {} 196 | 197 | template toggle(x: typed; flag: Flag; test: bool) = 198 | if test: x.incl flag else: x.excl flag 199 | 200 | flags.toggle Dry, dry 201 | flags.toggle Strict, strict 202 | 203 | block configuration: 204 | debug "checking compiler configuration" 205 | let 206 | nimcfg = project.nimCfg 207 | # try a compiler parse of nim.cfg 208 | if not fileExists($nimcfg): 209 | # at the moment, we support any combination of local/user/global deps 210 | if false: 211 | # strictly speaking, this isn't a problem 212 | warn &"there wasn't a {NimCfg} in {project.nimble.repo}" 213 | if nimcfg.appendConfig("--clearNimblePath"): 214 | info "i created a 
new one" 215 | else: 216 | error "and i wasn't able to make a new one" 217 | else: 218 | let 219 | parsed = parseConfigFile($nimcfg) 220 | if parsed.isNone: 221 | error &"i had some issues trying to parse {nimcfg}" 222 | result = false 223 | 224 | # try a naive parse of nim.cfg 225 | if fileExists($project.nimCfg): 226 | let 227 | nimcfg = project.nimCfg 228 | parsed = parseProjectCfg(project.nimCfg) 229 | if not parsed.ok: 230 | error &"i had some issues trying to parse {nimcfg}:" 231 | error parsed.why 232 | result = false 233 | 234 | # try to parse all nim configuration files 235 | block globalconfig: 236 | when defined(debugPath): 237 | for path in project.cfg.likelySearch(libsToo = true): 238 | debug &"\tsearch: {path}" 239 | for path in project.cfg.likelyLazy: 240 | debug &"\t lazy: {path}" 241 | else: 242 | ## this space intentionally left blank 243 | 244 | block whoami: 245 | debug "checking project version" 246 | # check our project version 247 | let 248 | version = project.knowVersion 249 | # contextual errors are output by knowVersion 250 | result = version.isValid 251 | if result: 252 | debug &"{project.name} version {version}" 253 | 254 | block dependencies: 255 | debug "checking dependencies" 256 | # check our deps dir 257 | let 258 | depsDir = project.nimbleDir 259 | #absolutePath(project.nimble.repo / DepDir).normalizedPath 260 | envDir = getEnv("NIMBLE_DIR", "") 261 | if not dirExists(depsDir): 262 | info &"if you create {depsDir}, i'll use it for local dependencies" 263 | 264 | # $NIMBLE_DIR could screw with our head 265 | if envDir != "": 266 | if absolutePath(envDir) != depsDir: 267 | notice "i'm not sure what to do with an alternate $NIMBLE_DIR set" 268 | result = false 269 | else: 270 | info "your $NIMBLE_DIR is set, but it's set correctly" 271 | 272 | block checknimble: 273 | debug "checking nimble" 274 | # make sure nimble is a thing 275 | if findExe("nimble") == "": 276 | error "i can't find nimble in the path" 277 | result = false 278 | 
279 | debug "checking nimble dump of our project" 280 | # make sure we can dump our project 281 | let 282 | damp = fetchNimbleDump(project.nimble.repo) 283 | if not damp.ok: 284 | error damp.why 285 | result = false 286 | else: 287 | project.dump = damp.table 288 | 289 | # see if we can find a github token 290 | block github: 291 | debug "checking for github token" 292 | let 293 | token = findGithubToken() 294 | if token.isNone: 295 | notice &"i wasn't able to discover a github token" 296 | warn &"please add a GitHub OAUTH token to your $NIMPH_TOKEN" 297 | result = false 298 | 299 | # see if git works 300 | block nimgit: 301 | if not gittyup.init(): 302 | error "i'm not able to initialize nimgit2 for git operations" 303 | result = false 304 | elif not gittyup.shutdown(): 305 | error "i'm not able to shutdown nimgit2 after initialization" 306 | result = false 307 | else: 308 | debug "git init/shut seems to be working" 309 | 310 | # see if we can get the packages list; try to refresh it if necessary 311 | block packages: 312 | while true: 313 | let 314 | packs = getOfficialPackages(project.nimbleDir) 315 | once: 316 | block skiprefresh: 317 | if not packs.ok: 318 | if packs.why != "": 319 | error packs.why 320 | notice &"couldn't get nimble's package list from {project.nimbleDir}" 321 | elif packs.ageInDays > stalePackages: 322 | notice &"the nimble package list in {project.nimbleDir} is stale" 323 | elif packs.ageInDays > 1: 324 | info "the nimble package list is " & 325 | &"{packs.ageInDays} days old" 326 | break skiprefresh 327 | else: 328 | break skiprefresh 329 | if not dry: 330 | let refresh = project.runSomething("nimble", @["refresh", "--accept"]) 331 | if refresh.ok: 332 | info "nimble refreshed the package list" 333 | continue 334 | result = false 335 | if packs.ok: 336 | let packages {.used.} = packs.packages 337 | debug &"loaded {packages.len} packages from nimble" 338 | break 339 | 340 | # check dependencies and maybe install some 341 | block 
dependencies: 342 | var 343 | group = project.newDependencyGroup(flags) 344 | state = DrState(kind: DrRetry) 345 | 346 | while state.kind == DrRetry: 347 | # we need to reload the config each repeat through this loop so that we 348 | # can correctly identify new search paths after adding new packages 349 | if not project.resolve(group): 350 | notice &"unable to resolve all dependencies for {project}" 351 | result = false 352 | state.kind = DrError 353 | elif not project.fixDependencies(group, state): 354 | result = false 355 | # maybe we're done here 356 | if state.kind notin {DrRetry}: 357 | break 358 | # we need to try again, but first we'll reset the environment 359 | fatal "👍environment changed; re-examining dependencies..." 360 | group.reset(project) 361 | 362 | # if dependencies are available via --nimblePath, then warn of any 363 | # dependencies that aren't recorded as part of the dependency graph; 364 | # this might be usefully toggled in spec. this should only issue a 365 | # warning if local deps exist or multiple nimblePaths are found 366 | block extradeps: 367 | if project.hasLocalDeps or project.numberOfNimblePaths > 1: 368 | let imports = project.cfg.allImportTargets(project.repo) 369 | for target, linked in imports.pairs: 370 | if group.isUsing(target): 371 | continue 372 | # ignore standard library targets 373 | if project.cfg.isStdLib(target.repo): 374 | continue 375 | let name = linked.importName 376 | warn &"no `{name}` requirement for {target.repo}" 377 | 378 | # identify packages that aren't named according to their versions; rename 379 | # local dependencies and merely warn about others 380 | {.warning: "mislabeled project directories unimplemented".} 381 | 382 | # remove missing paths from nim.cfg if possible 383 | block missingpaths: 384 | when defined(debugPath): 385 | for path in project.cfg.searchPaths.items: 386 | debug &"\tsearch: {path.string}" 387 | for path in project.cfg.lazyPaths.items: 388 | debug &"\t lazy: {path}" 389 | 390 | 
template cleanUpPathIn(form: string; iter: untyped): untyped = 391 | block complete: 392 | while true: 393 | block resume: 394 | for path in likelySearch(project.cfg, libsToo = false): 395 | if not dirExists(path): 396 | if dry: 397 | warn "$# path $# does not exist" % [ form, path ] 398 | result = false 399 | elif project.removeSearchPath(path): 400 | info "removed missing $# path $#" % [ form, path ] 401 | break resume 402 | elif excludeMissingSearchPaths and project.excludeSearchPath(path): 403 | info "excluded missing $# path $#" % [ form, path ] 404 | break resume 405 | else: 406 | warn "unable to remove $# path $#" % [ form, path ] 407 | result = false 408 | break complete 409 | 410 | # search paths that are missing should be removed/excluded 411 | cleanUpPathIn "search", likelySearch(project.cfg, libsToo = false) 412 | # lazy paths that are missing can be explicitly removed/ignored 413 | cleanUpPathIn "nimblePath", likelyLazy(project.cfg, least = 0) 414 | 415 | # if a dependency (local or otherwise) is shadowed by another dependency 416 | # in one of the nimblePaths, then we should warn that a removal of one 417 | # dep will default to the other 418 | # 419 | # if a dependency is shadowed with a manual path specification, we should 420 | # call that a proper error and offer to remove the weakest member 421 | # 422 | # we should calculate shadowing by name and version according to the way 423 | # the compiler compares versions 424 | block shadoweddeps: 425 | {.warning: "shadowed deps needs implementing".} 426 | 427 | # if a package exists and is local to the project and picked up by the 428 | # config (search paths or lazy paths) and it isn't listed in the 429 | # requirements, then we should warn about it 430 | block unspecifiedrequirement: 431 | {.warning: "unspecified requirements needs implementing".} 432 | 433 | # if a required packaged has a srcDir defined in the .nimble, then it needs to 434 | # be specified in the search paths 435 | block 
unspecifiedsearchpath: 436 | {.warning: "unspecified search path needs implementing".} 437 | 438 | # warn of tags missing for a particular version/commit pair 439 | block identifymissingtags: 440 | if project.fixTags(dry_run = true): 441 | result = false 442 | 443 | # warn if the user appears to have multiple --nimblePaths in use 444 | block nimblepaths: 445 | let 446 | found = project.countNimblePaths 447 | # don't distinguish between local or user lazy paths (yet) 448 | if found.local + found.global > 1: 449 | fatal "❔it looks like you have multiple --nimblePaths defined:" 450 | for index, path in found.paths.pairs: 451 | fatal &"❔\t{index + 1}\t{path}" 452 | fatal "❔nim and nimph support this, but some humans find it confusing 😏" 453 | -------------------------------------------------------------------------------- /src/nimph/group.nim: -------------------------------------------------------------------------------- 1 | import std/os 2 | import std/strtabs 3 | import std/tables 4 | from std/sequtils import toSeq 5 | import std/uri except Url 6 | 7 | export strtabs.StringTableMode 8 | 9 | import nimph/spec 10 | 11 | type 12 | Group*[K; V: ref object] = ref object of RootObj 13 | table*: OrderedTableRef[K, V] 14 | imports*: StringTableRef 15 | flags*: set[Flag] 16 | mode: StringTableMode 17 | 18 | proc init*[K, V](group: Group[K, V]; flags: set[Flag]; mode = modeStyleInsensitive) = 19 | ## initialize the table and name cache 20 | group.table = newOrderedTable[K, V]() 21 | when K is Uri: 22 | group.mode = modeCaseSensitive 23 | else: 24 | group.mode = mode 25 | group.imports = newStringTable(group.mode) 26 | group.flags = flags 27 | 28 | proc addName[K: string, V](group: Group[K, V]; name: K; value: string) = 29 | ## add a name to the group, which points to value 30 | assert group.table.hasKey(value) 31 | group.imports[name] = value 32 | 33 | proc addName[K: Uri, V](group: Group[K, V]; url: K) = 34 | ## add a url to the group, which points to value 35 | assert 
group.table.hasKey(url) 36 | group.imports[$url] = $url 37 | when defined(debug): 38 | assert $url.bare notin group.imports 39 | group.imports[$url.bare] = $url 40 | 41 | proc delName*(group: Group; key: string) = 42 | ## remove a name from the group 43 | var 44 | remove: seq[string] 45 | # don't trust anyone; if the value matches, pull the name 46 | for name, value in group.imports.pairs: 47 | if value == key: 48 | remove.add name 49 | for name in remove: 50 | group.imports.del name 51 | 52 | proc del*[K: string, V](group: Group[K, V]; name: K) = 53 | ## remove from the group the named key and its associated value 54 | group.table.del name 55 | group.delName name 56 | 57 | proc del*[K: Uri, V](group: Group[K, V]; url: K) = 58 | ## remove from the group the url key and its associated value 59 | group.table.del url 60 | group.delName $url 61 | 62 | {.warning: "nim bug #12818".} 63 | proc len*[K, V](group: Group[K, V]): int = 64 | ## number of elements in the group 65 | result = group.table.len 66 | 67 | proc len*(group: Group): int = 68 | ## number of elements in the group 69 | result = group.table.len 70 | 71 | proc get*[K: string, V](group: Group[K, V]; key: K): V = 72 | ## fetch a value from the group using style-insensitive lookup 73 | if group.table.hasKey(key): 74 | result = group.table[key] 75 | elif group.imports.hasKey(key.importName): 76 | result = group.table[group.imports[key.importName]] 77 | else: 78 | let emsg = &"{key.importName} not found" 79 | raise newException(KeyError, emsg) 80 | 81 | proc mget*[K: string, V](group: var Group[K, V]; key: K): var V = 82 | ## fetch a value from the group using style-insensitive lookup 83 | if group.table.hasKey(key): 84 | result = group.table[key] 85 | elif group.imports.hasKey(key.importName): 86 | result = group.table[group.imports[key.importName]] 87 | else: 88 | let emsg = &"{key.importName} not found" 89 | raise newException(KeyError, emsg) 90 | 91 | proc `[]`*[K, V](group: var Group[K, V]; key: K): var V = 
92 | ## fetch a value from the group using style-insensitive lookup 93 | result = group.mget(key) 94 | 95 | proc `[]`*[K, V](group: Group[K, V]; key: K): V = 96 | ## fetch a value from the group using style-insensitive lookup 97 | result = group.get(key) 98 | 99 | proc add*[K: string, V](group: Group[K, V]; key: K; value: V) = 100 | ## add a key and value to the group 101 | group.table[key] = value 102 | group.addName(key.importName, key) 103 | 104 | proc add*[K: string, V](group: Group[K, V]; url: Uri; value: V) = 105 | ## add a (bare) url as a key 106 | let 107 | naked = url.bare 108 | key = $naked 109 | group.table[key] = value 110 | # this gets picked up during instant-instantiation of a package from 111 | # a project's url, a la asPackage(project: Project): Package ... 112 | group.addName naked.importName, key 113 | 114 | proc `[]=`*[K, V](group: Group[K, V]; key: K; value: V) = 115 | ## set a key to a single value 116 | if group.hasKey(key): 117 | group.del key 118 | group.add key, value 119 | 120 | {.warning: "nim bug #12818".} 121 | proc add*[K: Uri, V](group: Group[K, V]; url: Uri; value: V) = 122 | ## add a (full) url as a key 123 | group.table[url] = value 124 | group.addName url 125 | 126 | iterator pairs*[K, V](group: Group[K, V]): tuple[key: K; val: V] = 127 | ## standard key/value pairs iterator 128 | for key, value in group.table.pairs: 129 | yield (key: key, val: value) 130 | 131 | {.warning: "nim bug #13510".} 132 | #iterator mpairs*[K, V](group: var Group[K, V]): tuple[key: K; val: var V] = 133 | iterator mpairs*[K, V](group: Group[K, V]): tuple[key: K; val: var V] = 134 | for key, value in group.table.mpairs: 135 | #yield (key: key, val: value) 136 | yield (key, value) 137 | 138 | iterator values*[K, V](group: Group[K, V]): V = 139 | ## standard value iterator 140 | for value in group.table.values: 141 | yield value 142 | 143 | iterator keys*[K, V](group: Group[K, V]): K = 144 | ## standard key iterator 145 | for key in group.table.keys: 146 | 
yield key 147 | 148 | iterator mvalues*[K, V](group: var Group[K, V]): var V = 149 | ## standard mutable value iterator 150 | for value in group.table.mvalues: 151 | yield value 152 | 153 | proc hasKey*[K, V](group: Group[K, V]; key: K): bool = 154 | ## true if the group contains the given key 155 | result = group.table.hasKey(key) 156 | 157 | proc contains*[K, V](group: Group[K, V]; key: K): bool = 158 | ## true if the group contains the given key or its importName 159 | result = group.table.contains(key) or group.imports.contains(key.importName) 160 | 161 | proc contains*[K, V](group: Group[K, V]; url: Uri): bool = 162 | ## true if a member of the group has the same (bare) url 163 | for value in group.values: 164 | if bareUrlsAreEqual(value.url, url): 165 | result = true 166 | break 167 | 168 | proc contains*[K, V](group: Group[K, V]; value: V): bool = 169 | ## true if the group contains the given value 170 | for v in group.values: 171 | if v == value: 172 | result = true 173 | break 174 | 175 | iterator reversed*[K, V](group: Group[K, V]): V = 176 | ## yield values in reverse order of entry 177 | let 178 | elems = toSeq group.values 179 | 180 | for index in countDown(elems.high, elems.low): 181 | yield elems[index] 182 | 183 | proc clear*[K, V](group: Group[K, V]) = 184 | ## clear the group without any other disruption 185 | group.table.clear 186 | group.imports.clear(group.mode) 187 | -------------------------------------------------------------------------------- /src/nimph/locker.nim: -------------------------------------------------------------------------------- 1 | import std/json 2 | import std/hashes 3 | import std/strformat 4 | import std/strtabs 5 | import std/tables 6 | import std/uri 7 | 8 | import nimph/spec 9 | import nimph/version 10 | import nimph/group 11 | import nimph/config 12 | import nimph/project 13 | import nimph/dependency 14 | import nimph/package 15 | import nimph/asjson 16 | import nimph/doctor 17 | import nimph/requirement 18 | 19 | 
type
  # a Locker pins one dependency: the requirement that demanded it and
  # the exact release/url/dist that satisfied it
  Locker* = ref object
    name*: string
    url*: Uri
    requirement*: Requirement
    dist*: DistMethod
    release*: Release
  # a LockerRoom is a named collection of Lockers, ie. one lockfile entry
  LockerRoom* = ref object of Group[string, Locker]
    name*: string
    root*: Locker

const
  # we use "" as a sigil to indicate the root of the project because
  # it's not a valid import name and won't be accepted by Group
  rootName = ""

proc hash*(locker: Locker): Hash =
  # this is how we'll test equivalence: name plus release only
  var h: Hash = 0
  h = h !& locker.name.hash
  h = h !& locker.release.hash
  result = !$h

proc hash*(room: LockerRoom): Hash =
  ## the hash of a lockerroom is the hash of its root and all lockers
  var h: Hash = 0
  for locker in room.values:
    h = h !& locker.hash
  h = h !& room.root.hash
  result = !$h

proc `==`(a, b: Locker): bool =
  # equivalence is hash-based; see hash(Locker) above
  result = a.hash == b.hash

proc `==`(a, b: LockerRoom): bool =
  # two rooms match when their roots and all members hash the same
  result = a.hash == b.hash

proc newLockerRoom*(name = ""; flags = defaultFlags): LockerRoom =
  ## create a new, empty lockerroom with the given name
  result = LockerRoom(name: name, flags: flags)
  result.init(flags, mode = modeStyleInsensitive)

proc newLocker(requirement: Requirement): Locker =
  result = Locker(requirement: requirement)

proc newLocker(req: Requirement; name: string; project: Project): Locker =
  ## we use the req's identity and the project's release; this might need
  ## to change to simply use the project name, depending on an option...
  result = newRequirement(req.identity, Equal, project.release).newLocker
  result.url = project.url
  result.name = name
  result.dist = project.dist
  result.release = project.release

proc newLockerRoom*(project: Project; flags = defaultFlags): LockerRoom =
  ## a new lockerroom using the project release as the root
  let
    requirement = newRequirement(project.name, Equal, project.release)
  result = newLockerRoom(flags = flags)
  result.root = newLocker(requirement, rootName, project)

proc add*(room: var LockerRoom; req: Requirement; name: string;
          project: Project) =
  ## create a new locker for the requirement from the project and
  ## safely add it to the lockerroom; an equivalent existing lock is
  ## reported at error level and the add is skipped
  var locker = newLocker(req, name, project)
  block found:
    for existing in room.values:
      if existing == locker:
        error &"unable to add equivalent lock for `{name}`"
        break found
    room.add name, locker

proc fillRoom(room: var LockerRoom; dependencies: DependencyGroup): bool =
  ## fill a lockerroom with lockers constructed from the dependency tree;
  ## returns true if there were no missing/unready/shadowed dependencies
  result = true
  for requirement, dependency in dependencies.pairs:
    var shadowed = false
    if dependency.projects.len == 0:
      warn &"missing requirement {requirement}"
      result = false
      continue
    for project in dependency.projects.values:
      # only the first project for a requirement gets locked;
      # any further projects are "shadowed" and merely warned about
      if not shadowed:
        shadowed = true
        if dependency.names.len > 1:
          warn &"multiple import names for {requirement}"
        for name in dependency.names.items:
          if project.dist != Git:
            warn &"{project} isn't in git; it's {project.dist} {project.repo}"
          elif not project.repoLockReady:
            result = false
          if room.hasKey(name):
            warn &"clashing import {name}"
            result = false
            continue
          room.add requirement, name, project
        continue
      warn &"shadowed project {project}"
      result = false

proc fillDeps(dependencies: var DependencyGroup;
              room: LockerRoom; project: Project): bool =
  ## fill a dependency tree with lockers and run dependency resolution
  ## using the project; returns true if there were no resolution failures
  result = true
  for locker in room.values:
    # each locker becomes an Equal requirement against its locked release
    var
      req = newRequirement(locker.requirement.identity, Equal, locker.release)
      dependency = req.newDependency
    discard dependencies.addedRequirements(dependency)
    result = result and project.resolve(dependencies, req)

proc toJson*(locker: Locker): JsonNode =
  ## convert a Locker to a JObject
  result = newJObject()
  result["name"] = newJString(locker.name)
  result["url"] = locker.url.toJson
  result["release"] = locker.release.toJson
  result["requirement"] = locker.requirement.toJson
  result["dist"] = locker.dist.toJson

proc toLocker*(js: JsonNode): Locker =
  ## convert a JObject to a Locker
  let
    req = js["requirement"].toRequirement
  result = req.newLocker
  result.name = js["name"].getStr
  result.url = js["url"].toUri
  result.release = js["release"].toRelease
  result.dist = js["dist"].toDistMethod

proc toJson*(room: LockerRoom): JsonNode =
  ## convert a LockerRoom to a JObject; the root is stored under
  ## its (empty) rootName key alongside the other lockers
  result = newJObject()
  for name, locker in room.pairs:
    result[locker.name] = locker.toJson
  result[room.root.name] = room.root.toJson

proc toLockerRoom*(js: JsonNode; name = ""): LockerRoom =
  ## convert a JObject to a LockerRoom
  result = newLockerRoom(name)
  for name, locker in js.pairs:
    if name == rootName:
      result.root = locker.toLocker
    elif result.hasKey(name):
      error &"ignoring duplicate locker `{name}`"
    else:
      result.add name, locker.toLocker

proc getLockerRoom*(project: Project; name: string; room: var LockerRoom): bool =
  ## true if we pulled the named lockerroom out of the project's configuration
  let
    js = project.config.getLockerRoom(name)
  if js != nil and js.kind == JObject:
    room = js.toLockerRoom(name)
    result = true

iterator allLockerRooms*(project: Project): LockerRoom =
  ## emit each lockerroom in the project's configuration
  for name, js in project.config.getAllLockerRooms.pairs:
    yield js.toLockerRoom(name)

proc unlock*(project: var Project; name: string; flags = defaultFlags): bool =
  ## unlock a project using the named lockfile
  var
    dependencies = project.newDependencyGroup(flags = {Flag.Quiet} + flags)
    room = newLockerRoom(name, flags)

  block unlocked:
    if not project.getLockerRoom(name, room):
      notice &"unable to find a lock named `{name}`"
      break unlocked

    # warn about any locks performed against non-Git distributions
    for name, locker in room.pairs:
      if locker.dist != Git:
        let emsg = &"unsafe lock of `{name}` for " &
                   &"{locker.requirement} as {locker.release}" # noqa
        warn emsg

    # perform doctor resolution of dependencies, etc.
    var
      state = DrState(kind: DrRetry)
    while state.kind == DrRetry:
      # it's our game to lose
      result = true
      # resolve dependencies for the lock
      if not dependencies.fillDeps(room, project):
        notice &"unable to resolve all dependencies for `{name}`"
        result = false
        state.kind = DrError
      # see if we can converge the environment to the lock
      elif not project.fixDependencies(dependencies, state):
        notice "failed to fix all dependencies"
        result = false
      # if the doctor doesn't want us to try again, we're done
      if state.kind notin {DrRetry}:
        break
      # empty the dependencies and rescan for projects
      dependencies.reset(project)

proc lock*(project: var Project; name: string; flags = defaultFlags): bool =
  ## store a project's dependencies into the named lockfile
  var
    dependencies = project.newDependencyGroup(flags = {Flag.Quiet} + flags)
    room = newLockerRoom(project, flags)

  block locked:
    if project.getLockerRoom(name, room):
      notice &"lock `{name}` already exists; choose a new name"
      break locked

    # if we cannot resolve our dependencies, we can't lock the project
    result = project.resolve(dependencies)
    if not result:
      notice &"unable to resolve all dependencies for {project}"
      break locked

    # if the lockerroom isn't confident, we can't lock the project
    result = room.fillRoom(dependencies)
    if not result:
      notice &"not confident enough to lock {project}"
      break locked

    # compare this lockerroom to pre-existing lockerrooms and don't dupe it
    for exists in project.allLockerRooms:
      if exists == room:
        notice &"already locked these dependencies as `{exists.name}`"
        result = false
        break locked

    # write the lockerroom to the project's configuration
    project.config.addLockerRoom name, room.toJson
--------------------------------------------------------------------------------
/src/nimph/nimble.nim:
--------------------------------------------------------------------------------
import std/uri
import std/json
import std/options
import std/strtabs
import std/strutils
import std/os
import std/osproc
import std/strformat

import npeg

import nimph/spec
import nimph/runner

type
  DumpResult* = object
    table*: StringTableRef
    why*: string
    ok*: bool

  NimbleMeta* = ref object
    js: JsonNode
    link: seq[string]

proc parseNimbleDump*(input: string): Option[StringTableRef] =
  ## parse the `key: "value"` lines emitted by `nimble dump`;
  ## non-matching indented lines are surfaced as warnings
  var
    table = newStringTable(modeStyleInsensitive)
  let
    parser = peg "document":
      nl <- ?'\r' * '\n'
      white <- {'\t', ' '}
      key <- +(1 - ':')
      value <- '"' * *(1 - '"') * '"'
      errline <- white * >*(1 - nl) * +nl:
        warn $1
      line <- >key * ':' * +white * >value * +nl:
        table[$1] = unescape($2)
      anyline <- line | errline
      document <- +anyline * !1
    parsed = parser.match(input)
  if parsed.ok:
    result = table.some

proc fetchNimbleDump*(path: string; nimbleDir = ""): DumpResult =
  ## run `nimble dump` against the path and parse its output into
  ## a string table; `why` explains any failure
  result = DumpResult(ok: false)
  block fetched:
    withinDirectory(path):
      let
        nimble = runSomething("nimble",
                              @["dump", path], {poDaemon}, nimbleDir = nimbleDir)
      if not nimble.ok:
        result.why = "nimble execution failed"
        if nimble.output.len > 0:
          error nimble.output
        break fetched

      let
        parsed = parseNimbleDump(nimble.output)
      if parsed.isNone:
        result.why = &"unable to parse `nimble dump` output"
        break fetched
      result.table = parsed.get
      result.ok = true

proc hasUrl*(meta: NimbleMeta): bool =
  ## true if the metadata includes a non-empty string url
  result = "url" in meta.js and
           meta.js["url"].kind == JString and
           meta.js["url"].getStr != ""

proc url*(meta: NimbleMeta): Uri =
  ## return the url associated with the package; when the url lacks an
  ## anchor, fall back to any recorded vcsRevision
  if not meta.hasUrl:
    raise newException(ValueError, "url not available")
  result = parseUri(meta.js["url"].getStr)
  if result.anchor == "":
    if "vcsRevision" in meta.js:
      result.anchor = meta.js["vcsRevision"].getStr
      removePrefix(result.anchor, {'#'})

proc writeNimbleMeta*(path: string; url: Uri; revision: string): bool =
  ## try to write a new nimblemeta.json into the given directory;
  ## true on success
  block complete:
    if not dirExists(path):
      warn &"{path} is not a directory; cannot write {nimbleMeta}"
      break complete
    var
      revision = revision
    removePrefix(revision, {'#'})
    var
      js = %* {
        "url": $url,
        "vcsRevision": revision,
        "files": @[],
        "binaries": @[],
        "isLink": false,
      }
      writer = open(path / nimbleMeta, fmWrite)
    defer:
      writer.close
    writer.write($js)
    result = true

proc isLink*(meta: NimbleMeta): bool =
  ## true if the metadata says it's a link
  if meta.js.kind == JObject:
    result = meta.js.getOrDefault("isLink").getBool

proc isValid*(meta: NimbleMeta): bool =
  ## true if the metadata appears to hold some data
  result = meta.js != nil and meta.js.len > 0

proc fetchNimbleMeta*(path: string): NimbleMeta =
  ## parse the nimblemeta.json file if it exists; a missing or broken
  ## file yields an empty (invalid) metadata object
  result = NimbleMeta(js: newJObject())
  let
    metafn = path / nimbleMeta
  try:
    if metafn.fileExists:
      let
        content = readFile(metafn)
      result.js = parseJson(content)
  except Exception as e:
    discard e # noqa
    warn &"error while trying to parse {nimbleMeta}: {e.msg}"
--------------------------------------------------------------------------------
/src/nimph/package.nim:
--------------------------------------------------------------------------------
import std/strtabs
import std/tables
import std/times
import std/os
import std/hashes
import std/strformat
import std/sequtils
import std/strutils
import std/uri
import std/json
import std/options

import npeg

import nimph/spec
import nimph/requirement

import nimph/group
export group

type
  DistMethod* = enum
    Local = "local"
    Git = "git"
    Nest = "nest"
    Merc = "hg"

  Package* = ref object
    name*: string
    url*: Uri
    dist*: DistMethod
    tags*: seq[string]
    description*: string
    license*: string
    web*: Uri
    naive*: bool
    local*: bool
    path*: string
    author*: string

  PackageGroup* = Group[string, Package]

  PackagesResult* = object
    ok*: bool
    why*: string
    packages*: PackageGroup
    info: FileInfo

proc importName*(package: Package): string =
  ## the lowercase import name for a package
  # fix: an unconditional error-level log used to live here
  # (`error &"import name {result} from {package.name}"`); it was
  # debugging residue that spammed the log on every call
  result = package.name.importName.toLowerAscii

proc newPackage*(name: string; path: string; dist: DistMethod;
                 url: Uri): Package =
  ## create a new package that probably points to a local repo
  result = Package(name: name, dist: dist, url: url,
                   path: path, local: path.dirExists)

proc newPackage*(name: string; dist: DistMethod; url: Uri): Package =
  ## create a new package
  result = Package(name: name, dist: dist, url: url)

proc newPackage*(url: Uri): Package =
  ## create a new package with only a url
  result = newPackage(name = url.packageName, dist = Git,
                      url = url.convertToGit)
  # flag this package as not necessarily named correctly;
  # we had to guess at what the final name might be...
  result.naive = true

proc newPackage(name: string; license: string; description: string): Package =
  ## create a new package for nimble's package list consumer
  result = Package(name: name, license: license, description: description)

proc `$`*(package: Package): string =
  ## render a package; naive packages are marked with (???)
  result = package.name
  if package.naive:
    result &= " (???)"

proc newPackageGroup*(flags: set[Flag] = defaultFlags): PackageGroup =
  ## instantiate a new package group for collecting a list of packages
  result = PackageGroup(flags: flags)
  result.init(flags, mode = modeStyleInsensitive)

proc aimAt*(package: Package; req: Requirement): Package =
  ## produce a refined package which might meet the requirement
  var
    aim = package.url
  if aim.anchor == "":
    aim.anchor = req.release.asUrlAnchor

  result = newPackage(name = package.name, dist = package.dist, url = aim)
  result.license = package.license
  result.description = package.description
  result.tags = package.tags
  result.naive = false
  result.web = package.web

proc add(group: PackageGroup; js: JsonNode) =
  ## how packages get added to a group from the json list
  var
    name = js["name"].getStr
    package = newPackage(name = name,
                         license = js.getOrDefault("license").getStr,
                         description = js.getOrDefault("description").getStr)

  if "alias" in js:
    raise newException(ValueError, "don't add aliases thusly")

  if "url" in js:
    package.url = js["url"].getStr.parseUri
    if "web" in js:
      package.web = js["web"].getStr.parseUri
    else:
      package.web = package.url
    if "method" in js:
      package.dist = parseEnum[DistMethod](js["method"].getStr)
    if "author" in js:
      package.author = js["author"].getStr
  else:
    package.dist = Git # let's be explicit here
  if "tags" in js:
    package.tags = mapIt(js["tags"], it.getStr.toLowerAscii)

  group.add name, package

proc getOfficialPackages*(nimbleDir: string): PackagesResult {.raises: [].} =
  ## parse the official packages list from nimbledir
  var
    filename = ///nimbleDir
  if filename.endsWith(//////PkgDir):
    filename = nimbledir.parentDir / officialPackages
  else:
    filename = nimbledir / officialPackages

  # make sure we have a sane return value
  result = PackagesResult(ok: false, why: "", packages: newPackageGroup())

  var group = result.packages
  block parsing:
    try:
      # we might not even have to open the file; wouldn't that be wonderful?
      if not nimbledir.dirExists or not filename.fileExists:
        result.why = &"(unknown) not found"
        break

      # grab the file info for aging purposes
      result.info = getFileInfo(filename)

      # okay, i guess we have to read and parse this silly thing
      let
        content = readFile(filename)
        js = parseJson(content)

      # consume the json array
      var
        aliases: seq[tuple[name: string; alias: string]]
      for node in js.items:
        # if it's an alias, stash it for later
        if "alias" in node:
          aliases.add (node.getOrDefault("name").getStr,
                       node["alias"].getStr)
          continue

        # else try to add it to the group
        try:
          group.add node
        except Exception as e:
          notice node
          warn &"error parsing package: {e.msg}"

      # now add in the aliases we collected
      for name, alias in aliases.items:
        if alias in group:
          group.add name, group.get(alias)
        else:
          warn &"alias `{name}` refers to a missing package `{alias}`"

      # add a style-insensitive alias for the opposite case package-name
      let
        keys = toSeq group.keys
      for key in keys.items:
        # key -> "Goats_And_Pigs"
        {.warning: "work-around for arc bug".}
        let package = group[key]
        group[key.toLowerAscii] = package
        group[key.toUpperAscii] = package

      result.ok = true
    except Exception as e:
      result.why = e.msg

proc ageInDays*(found: PackagesResult): int64 =
  ## days since the packages file was last refreshed
  result = (getTime() - found.info.lastWriteTime).inDays

proc toUrl*(requirement: Requirement; group: PackageGroup): Option[Uri] =
  ## try to determine the distribution url for a requirement
  var url: Uri

  # if it could be a url, try to parse it as such
  result = requirement.toUrl
  if result.isNone:
    # otherwise, see if we can find it in the package group
    if requirement.identity in group:
      let
        package = group.get(requirement.identity)
      if package.dist notin {Local, Git}:
        warn &"the `{package.dist}` distribution method is unsupported"
      else:
        url = package.url
        result = url.some
        debug "parsed in packages", requirement

  # maybe stuff the reference into the anchor
  if result.isSome:
    url = result.get
    url.anchor = requirement.release.asUrlAnchor
    result = url.some

proc hasUrl*(group: PackageGroup; url: Uri): bool =
  ## true if the url seems to match a package in the group
  for value in group.values:
    result = bareUrlsAreEqual(value.url.convertToGit,
                              url.convertToGit)
    if result:
      break

proc matching*(group: PackageGroup; req: Requirement): PackageGroup =
  ## select a subgroup of packages that appear to match the requirement
  result = newPackageGroup()
  if req.isUrl:
    let
      findurl = req.toUrl(group)
    if findurl.isNone:
      let emsg = &"couldn't parse url for requirement {req}" # noqa
      raise newException(ValueError, emsg)
    for name, package in group.pairs:
      if bareUrlsAreEqual(package.url.convertToGit,
                          findurl.get.convertToGit):
        result.add name, package.aimAt(req)
        when defined(debug):
          debug "matched the url in packages", $package.url
  else:
    for name, package in group.pairs:
      if name == req.identity:
        result.add name, package.aimAt(req)
        when defined(debug):
          debug "matched the package by name"

iterator urls*(group: PackageGroup): Uri =
  ## yield (an ideally git) url for each package in the group
  for package in group.values:
    yield if package.dist == Git:
      package.url.convertToGit
    else:
      package.url
--------------------------------------------------------------------------------
/src/nimph/requirement.nim:
--------------------------------------------------------------------------------
import std/options
import std/strutils
import std/strformat
import std/tables
import std/uri except Url
import std/hashes

import bump
import npeg

import nimph/spec
import nimph/version

type
  # the specification of a package requirement
  Requirement* = ref object
    identity*: string
    operator*: Operator
    release*: Release
    child*: Requirement
    notes*: string

  Requires* = OrderedTableRef[Requirement, Requirement]

proc `$`*(req: Requirement): string =
  result = &"{req.identity}{req.operator}{req.release}"

proc isValid*(req: Requirement): bool =
  ## true if the requirement seems sensible
  result = req.release.isValid
  if result:
    case req.operator:
    # if the operator is Tag, it's essentially a #== test
    of Tag:
      result = req.release.kind in {Tag}
    # if the operator supports a mask, then so might the release
    of Caret, Tilde, Wild:
      result = req.release.kind in {Wild, Equal}
    # if the operator supports only equality, apply it to tags, versions
    of Equal:
      result = req.release.kind in {Tag, Equal}
    # else it can only be a relative comparison to a complete version
# spec (continuation of the preceding comment line)
    else:
      result = req.release.kind in {Equal}

proc isSatisfiedBy(requirement: Requirement; version: Version): bool =
  ## true if the version satisfies the requirement
  let
    op = requirement.operator
  case op:
  of Tag:
    # try to parse a version from the tag and see if it matches exactly
    result = version == requirement.release.effectively
  of Caret:
    # the caret logic is designed to match that of cargo
    block caret:
      let accepts = requirement.release.accepts
      for index, field in accepts.pairs:
        if field.isNone:
          break
        # NOTE(review): the dump flattened the original indentation; this
        # nesting (result = true inside the non-zero branch) is the reading
        # implied by the subsequent elif — confirm against upstream
        if result == false:
          if field.get != 0:
            if field.get != version.at(index):
              result = false
              break caret
            result = true
        elif field.get > version.at(index):
          result = false
          break caret
  of Tilde:
    # the tilde logic is designed to match that of cargo
    block tilde:
      let accepts = requirement.release.accepts
      for index, field in accepts.pairs:
        if field.isNone or index == VersionIndex.high:
          break
        if field.get != version.at(index):
          result = false
          break tilde
      result = true
  of Wild:
    # wildcards match 3.1.* or simple strings like "3" (3.*.*)
    let accepts = requirement.release.accepts
    # all the fields must be acceptable
    if acceptable(accepts.major, op, version.major):
      if acceptable(accepts.minor, op, version.minor):
        if acceptable(accepts.patch, op, version.patch):
          result = true
  of Equal:
    result = version == requirement.release.version
  of AtLeast:
    result = version >= requirement.release.version
  of NotMore:
    result = version <= requirement.release.version
  of Under:
    result = version < requirement.release.version
  of Over:
    result = version > requirement.release.version

proc isSatisfiedBy*(req: Requirement; spec: Release): bool =
  ## true if the requirement is satisfied by the specification
  case req.operator:
  of Tag:
    result = spec.reference == req.release.reference
  of Equal:
    result = spec == req.release
  of AtLeast:
    result = spec >= req.release
  of NotMore:
    result = spec <= req.release
  of Under:
    result = spec < req.release
  of Over:
    result = spec > req.release
  of Tilde, Caret, Wild:
    # check if the wildcard matches everything (only Wild, in theory)
    if req.release.accepts.major.isNone:
      result = true
    # otherwise, we might be able to treat it as a version
    elif spec.isSpecific:
      result = req.isSatisfiedBy spec.specifically
    # else we're gonna have to abstract "3" to "3.0.0"
    else:
      result = req.isSatisfiedBy spec.effectively

proc hash*(req: Requirement): Hash =
  ## uniquely identify a requirement, including any chained children
  var h: Hash = 0
  h = h !& req.identity.hash
  h = h !& req.operator.hash
  h = h !& req.release.hash
  if req.child != nil:
    h = h !& req.child.hash
  result = !$h

proc adopt*(parent: var Requirement; child: Requirement) =
  ## combine two requirements by appending the child to the parent's chain
  if parent != child:
    if parent.child == nil:
      parent.child = child
    else:
      parent.child.adopt child

iterator children*(parent: Requirement; andParent = false): Requirement =
  ## yield the children of a parent requirement
  var req = parent
  if andParent:
    yield req
  while req.child != nil:
    req = req.child
    yield req

proc newRequirement*(id: string; operator: Operator;
                     release: Release, notes = ""): Requirement =
  ## create a requirement from a release, eg. that of a project
  when defined(debug):
    if id != id.strip:
      warn &"whitespace around requirement identity: `{id}`"
  if id == "":
    raise newException(ValueError, "requirements must have length, if not girth")
  result = Requirement(identity: id.strip, release: release, notes: notes)
  # if it parsed as Caret, Tilde, or Wild, then paint the requirement as such
  if result.release.kind in Wildlings:
    result.operator = result.release.kind
  elif result.release.kind in {Tag}:
    # eventually, we'll support tag comparisons...
    {.warning: "tag comparisons unsupported".}
    result.operator = result.release.kind
  else:
    result.operator = operator

proc newRequirement*(id: string; operator: Operator; spec: string): Requirement =
  ## parse a requirement from a string
  result = newRequirement(id, operator, newRelease(spec, operator = operator))

proc newRequirement(id: string; operator: string; spec: string): Requirement =
  ## parse a requirement with the given operator from a string
  # nimble uses "" to mean "=="; we keep our Equal enum and simply
  # translate nimble's spellings here
  let op =
    case operator
    of "": Equal # nimble wuz here
    of "~=": Tilde # nimble wuz here
    of "^=": Caret # nimble wuz here
    else: parseEnum[Operator](operator)
  result = newRequirement(id, op, spec)

iterator orphans*(parent: Requirement): Requirement =
  ## yield each requirement without their kids
  for child in parent.children(andParent = true):
    yield newRequirement(id = child.identity, operator = child.operator,
                         release = child.release, notes = child.notes)

proc parseRequires*(input: string): Option[Requires] =
  ## parse a `requires` string output from `nimble dump`
  ## also supports `~` and `^` and `*` operators a la cargo
  var
    requires = Requires()
    lastname: string

  let
    peggy = peg "document":
      white <- {'\t', ' '}
      url <- +Alnum * "://" * +(1 - white - ending - '#')
      name <- url | +(Alnum | '_')
      ops <- ">=" | "<=" | ">" | "<" | "==" | "~=" | "~" | "^=" | "^" | 0
      dstar <- +Digit | '*'
      ver <- (dstar * ('.' * dstar)[0..2]) | "any version"
      ending <- (*white * "," * *white) | (*white * "&" * *white) | !1
      tag <- '#' * +(1 - ending)
      spec <- tag | ver
      anyrecord <- >name:
        lastname = $1
        let req = newRequirement(id = $1, operator = Wild, spec = "*")
        if req notin requires:
          requires[req] = req
      andrecord <- *white * >ops * *white * >spec:
        let req = newRequirement(id = lastname, operator = $1, spec = $2)
        if req notin requires:
          requires[req] = req
      inrecord <- >name * *white * >ops * *white * >spec:
        lastname = $1
        let req = newRequirement(id = $1, operator = $2, spec = $3)
        if req notin requires:
          requires[req] = req
      record <- (inrecord | andrecord | anyrecord) * ending
      document <- *record
    parsed = peggy.match(input)
  if parsed.ok:
    result = requires.some

proc isVirtual*(requirement: Requirement): bool =
  ## is the requirement something we should overlook?
  result = requirement.identity.toLowerAscii in ["nim"]

proc isUrl*(requirement: Requirement): bool =
  ## a terrible way to determine if the requirement is a url
  result = ':' in requirement.identity

proc asUrlAnchor*(release: Release): string =
  ## produce a suitable url anchor referencing a release
  case release.kind:
  of Tag:
    result = release.reference
  of Equal:
    result = $release.version
  else:
    debug &"no url-as-anchor for {release.kind}"
  removePrefix(result, {'#'})

proc toUrl*(requirement: Requirement): Option[Uri] =
  ## try to determine the distribution url for a requirement
  # if it could be a url, try to parse it as such
  if requirement.isUrl:
    try:
      var url = parseUri(requirement.identity)
      if requirement.release.kind in {Equal, Tag}:
        url.anchor = requirement.release.asUrlAnchor
      result = url.some
    except:
      warn &"unable to parse requirement `{requirement.identity}`"

proc importName*(requirement: Requirement): string =
  ## guess the import name given only a requirement
  block:
    if requirement.isUrl:
      let url = requirement.toUrl
      if url.isSome:
        result = url.get.importName
        break
    result = requirement.identity.importName

proc describe*(requirement: Requirement): string =
  ## describe a requirement and where it may have come from, if possible
  result = $requirement
  if requirement.notes != "":
    result &= " from " & requirement.notes
--------------------------------------------------------------------------------
/src/nimph/runner.nim:
--------------------------------------------------------------------------------
import std/strutils
import std/strformat
import std/logging
import std/os
import std/sequtils
import std/osproc

import nimph/spec

type
  RunOutput* = object
arguments*: seq[string] 13 | output*: string 14 | ok*: bool 15 | 16 | proc stripPkgs*(nimbleDir: string): string = 17 | ## omit and trailing /PkgDir from a path 18 | result = ///nimbleDir 19 | # the only way this is a problem is if the user stores deps in pkgs/pkgs, 20 | # but we can remove this hack once we have nimblePaths in nim-1.0 ... 21 | if result.endsWith(//////PkgDir): 22 | result = ///parentDir(result) 23 | 24 | proc runSomething*(exe: string; args: seq[string]; options: set[ProcessOption]; 25 | nimbleDir = ""): RunOutput = 26 | ## run a program with arguments, perhaps with a particular nimbleDir 27 | var 28 | command = findExe(exe) 29 | arguments = args 30 | opts = options 31 | block ran: 32 | if command == "": 33 | result = RunOutput(output: &"unable to find {exe} in path") 34 | warn result.output 35 | break ran 36 | 37 | if exe == "nimble": 38 | when defined(debug): 39 | arguments = @["--verbose"].concat arguments 40 | when defined(debugNimble): 41 | arguments = @["--debug"].concat arguments 42 | 43 | if nimbleDir != "": 44 | # we want to strip any trailing PkgDir arriving from elsewhere... 
45 | var nimbleDir = nimbleDir.stripPkgs 46 | if not nimbleDir.dirExists: 47 | let emsg = &"{nimbleDir} is missing; can't run {exe}" # noqa 48 | raise newException(IOError, emsg) 49 | # the ol' belt-and-suspenders approach to specifying nimbleDir 50 | if exe == "nimble": 51 | arguments = @["--nimbleDir=" & nimbleDir].concat arguments 52 | putEnv("NIMBLE_DIR", nimbleDir) 53 | 54 | if poParentStreams in opts or poInteractive in opts: 55 | # sorry; i just find this easier to read than union() 56 | opts.incl poInteractive 57 | opts.incl poParentStreams 58 | # the user wants interactivity 59 | when defined(debug): 60 | debug command, arguments.join(" ") 61 | let 62 | process = startProcess(command, args = arguments, options = opts) 63 | result = RunOutput(ok: process.waitForExit == 0) 64 | else: 65 | # the user wants to capture output 66 | command &= " " & quoteShellCommand(arguments) 67 | when defined(debug): 68 | debug command 69 | let 70 | (output, code) = execCmdEx(command, opts) 71 | result = RunOutput(output: output, ok: code == 0) 72 | 73 | # for utility, also return the arguments we used 74 | result.arguments = arguments 75 | 76 | # a failure is worth noticing 77 | if not result.ok: 78 | notice exe & " " & arguments.join(" ") 79 | when defined(debug): 80 | debug "done running" 81 | -------------------------------------------------------------------------------- /src/nimph/skullduggery.nim: -------------------------------------------------------------------------------- 1 | #[ 2 | 3 | yeah, it's a hack and pulls gratuitous symbols into everything. 4 | while we support mainline nim, it's a price i'm willing to pay 5 | for not having to read this crap at the top of every file... 
6 | 7 | ]# 8 | 9 | when defined(isNimSkull): 10 | # nimskull 11 | import compiler/ast/ast 12 | import compiler/ast/idents 13 | import compiler/ast/lineinfos 14 | import compiler/front/options as compileropts 15 | import compiler/front/nimconf 16 | import compiler/front/condsyms 17 | import compiler/utils/pathutils 18 | from compiler/front/cli_reporter import reportHook 19 | from compiler/ast/report_enums import ReportKind 20 | 21 | const 22 | isNimskull = true 23 | hintConf = report_enums.ReportKind.rextConf 24 | hintLineTooLong = report_enums.ReportKind.rlexLineTooLong 25 | 26 | proc newConfigRef(): ConfigRef = 27 | compileropts.newConfigRef(cli_reporter.reportHook) 28 | else: 29 | # mainline nim 30 | import compiler/ast 31 | import compiler/idents 32 | import compiler/nimconf 33 | import compiler/options as compileropts 34 | import compiler/pathutils 35 | import compiler/condsyms 36 | import compiler/lineinfos 37 | 38 | const isNimskull = false -------------------------------------------------------------------------------- /src/nimph/spec.nim: -------------------------------------------------------------------------------- 1 | import std/strformat 2 | import std/options 3 | import std/strutils 4 | import std/hashes 5 | import std/uri 6 | import std/os 7 | import std/times 8 | 9 | import cutelog 10 | export cutelog 11 | 12 | import ups/sanitize 13 | 14 | include nimph/skullduggery 15 | 16 | # slash attack /////////////////////////////////////////////////// 17 | when NimMajor >= 1 and NimMinor >= 1: 18 | template `///`*(a: string): string = 19 | # ensure a trailing DirSep 20 | joinPath(a, $DirSep, "") 21 | template `///`*(a: AbsoluteFile | AbsoluteDir): string = 22 | # ensure a trailing DirSep 23 | `///`(a.string) 24 | template `//////`*(a: string | AbsoluteFile | AbsoluteDir): string = 25 | # ensure a trailing DirSep and a leading DirSep 26 | joinPath($DirSep, "", `///`(a), $DirSep, "") 27 | else: 28 | template `///`*(a: string): string = 29 | # ensure a trailing 
DirSep 30 | joinPath(a, "") 31 | template `///`*(a: AbsoluteFile | AbsoluteDir): string = 32 | # ensure a trailing DirSep 33 | `///`(a.string) 34 | template `//////`*(a: string | AbsoluteFile | AbsoluteDir): string = 35 | # ensure a trailing DirSep and a leading DirSep 36 | "" / "" / `///`(a) / "" 37 | 38 | type 39 | Flag* {.pure.} = enum 40 | Quiet 41 | Strict 42 | Force 43 | Dry 44 | Safe 45 | Network 46 | 47 | RollGoal* = enum 48 | Upgrade = "upgrade" 49 | Downgrade = "downgrade" 50 | Specific = "roll" 51 | 52 | ForkTargetResult* = object 53 | ok*: bool 54 | why*: string 55 | owner*: string 56 | repo*: string 57 | url*: Uri 58 | 59 | const 60 | dotNimble* {.strdefine.} = "".addFileExt("nimble") 61 | dotNimbleLink* {.strdefine.} = "".addFileExt("nimble-link") 62 | dotGit* {.strdefine.} = "".addFileExt("git") 63 | dotHg* {.strdefine.} = "".addFileExt("hg") 64 | DepDir* {.strdefine.} = //////"deps" 65 | PkgDir* {.strdefine.} = //////"pkgs" 66 | NimCfg* {.strdefine.} = "nim".addFileExt("cfg") 67 | ghTokenFn* {.strdefine.} = "github_api_token" 68 | ghTokenEnv* {.strdefine.} = "NIMPH_TOKEN" 69 | hubTokenFn* {.strdefine.} = "".addFileExt("config") / "hub" 70 | stalePackages* {.intdefine.} = 14 71 | configFile* {.strdefine.} = "nimph".addFileExt("json") 72 | nimbleMeta* {.strdefine.} = "nimblemeta".addFileExt("json") 73 | officialPackages* {.strdefine.} = "packages_official".addFileExt("json") 74 | emptyRelease* {.strdefine.} = "#head" 75 | defaultRemote* {.strdefine.} = "origin" 76 | upstreamRemote* {.strdefine.} = "upstream" 77 | excludeMissingSearchPaths* {.booldefine.} = false 78 | excludeMissingLazyPaths* {.booldefine.} = true 79 | writeNimbleDirPaths* {.booldefine.} = false 80 | shortDate* = initTimeFormat "yyyy-MM-dd" 81 | # add Safe to defaultFlags to, uh, default to Safe mode 82 | defaultFlags*: set[Flag] = {Quiet, Strict} 83 | 84 | # when true, try to clamp analysis to project-local directories 85 | WhatHappensInVegas* = false 86 | 87 | template 
withinDirectory*(path: string; body: untyped): untyped = 88 | if not path.dirExists: 89 | raise newException(ValueError, path & " is not a directory") 90 | let cwd = getCurrentDir() 91 | setCurrentDir(path) 92 | defer: 93 | setCurrentDir(cwd) 94 | body 95 | 96 | template isValid*(url: Uri): bool = url.scheme.len != 0 97 | 98 | proc hash*(url: Uri): Hash = 99 | ## help hash URLs 100 | var h: Hash = 0 101 | for field in url.fields: 102 | when field is string: 103 | h = h !& field.hash 104 | elif field is bool: 105 | h = h !& field.hash 106 | result = !$h 107 | 108 | proc bare*(url: Uri): Uri = 109 | result = url 110 | result.anchor = "" 111 | 112 | proc bareUrlsAreEqual*(a, b: Uri): bool = 113 | ## compare two urls without regard to their anchors 114 | if a.isValid and b.isValid: 115 | var 116 | x = a.bare 117 | y = b.bare 118 | result = $x == $y 119 | 120 | proc pathToImport*(path: string): string = 121 | ## calculate how a path will be imported by the compiler 122 | assert path.len > 0 123 | result = path.lastPathPart.split("-")[0] 124 | assert result.len > 0 125 | 126 | proc normalizeUrl*(uri: Uri): Uri = 127 | result = uri 128 | if result.scheme == "" and result.path.contains("@"): 129 | let 130 | usersep = result.path.find("@") 131 | pathsep = result.path.find(":") 132 | result.path = uri.path[pathsep+1 .. ^1] 133 | result.username = uri.path[0 ..< usersep] 134 | result.hostname = uri.path[usersep+1 ..< pathsep] 135 | result.scheme = "ssh" 136 | 137 | # we used to do some ->git conversions here but they make increasingly 138 | # little sense since we really cannot be sure the user will be able to 139 | # use them, and with this doubt, we should err on the side of trusting 140 | # our input since it was, y'know, provided by a programmer. 
141 | 142 | # https://github.com/disruptek/nimph/issues/145 143 | # we need to remove case-sensitivity of github paths 144 | if result.hostname.toLowerAscii == "github.com": 145 | result.path = result.path.toLowerAscii 146 | 147 | proc convertToGit*(uri: Uri): Uri = 148 | ## convert a url from any format (we will normalize it) 149 | ## into something like git://github.com/disruptek/nimph.git 150 | result = uri.normalizeUrl 151 | if result.scheme in ["", "http", "ssh"]: 152 | result.scheme = "git" 153 | if not result.path.endsWith(".git"): 154 | result.path &= ".git" 155 | result.username = "" 156 | 157 | proc convertToSsh*(uri: Uri): Uri = 158 | ## convert a url from any format (we will normalize it) 159 | ## into something like git@github.com:disruptek/nimph.git 160 | result = uri.convertToGit 161 | result.username = uri.username 162 | if not result.path[0].isAlphaNumeric: 163 | result.path = result.path[1..^1] 164 | if uri.username == "": 165 | result.username = "git" 166 | result.path = result.username & "@" & result.hostname & ":" & result.path 167 | result.username = "" 168 | result.hostname = "" 169 | result.scheme = "" 170 | 171 | proc prepareForClone*(uri: Uri): Uri = 172 | ## rewrite a url for the purposes of conducting a clone; 173 | ## this currently only has bearing on github urls, which 174 | ## must be rewritten to https if possible, since we cannot 175 | ## rely on the user's keys being correct 176 | result = normalizeUrl uri 177 | if result.hostname.toLowerAscii == "github.com": 178 | if result.scheme in ["ssh", "git", "http"]: 179 | result.scheme = "https" 180 | # add .git for consistency 181 | if not result.path.endsWith(".git"): 182 | result.path &= ".git" 183 | if result.username == "git": 184 | result.username = "" 185 | 186 | proc packageName*(name: string): string = 187 | ## return a string that is plausible as a package name 188 | when true: 189 | result = name 190 | else: 191 | const capsOkay = 192 | when FilesystemCaseSensitive: 193 | true 
194 | else: 195 | false 196 | let 197 | sane = name.sanitizeIdentifier(capsOkay = capsOkay) 198 | if sane.isSome: 199 | result = sane.get 200 | else: 201 | raise newException(ValueError, "unable to sanitize `" & name & "`") 202 | 203 | proc packageName*(url: Uri): string = 204 | ## guess the name of a package from a url 205 | when defined(debug) or defined(debugPath): 206 | assert url.isValid 207 | var 208 | # ensure the path doesn't end in a slash 209 | path = url.path 210 | removeSuffix(path, {'/'}) 211 | result = packageName(path.extractFilename.changeFileExt("")) 212 | 213 | proc importName*(path: string): string = 214 | ## a uniform name usable in code for imports 215 | assert path.len > 0 216 | # strip any leading directories and extensions 217 | result = splitFile(path).name 218 | const capsOkay = 219 | when FilesystemCaseSensitive: 220 | true 221 | else: 222 | false 223 | let 224 | sane = path.sanitizeIdentifier(capsOkay = capsOkay) 225 | # if it's a sane identifier, use it 226 | if sane.isSome: 227 | result = $(get sane) 228 | elif not capsOkay: 229 | # emit a lowercase name on case-insensitive filesystems 230 | result = path.toLowerAscii 231 | # else, we're just emitting the existing file's basename 232 | 233 | proc importName*(url: Uri): string = 234 | let url = url.normalizeUrl 235 | if not url.isValid: 236 | raise newException(ValueError, "invalid url: " & $url) 237 | elif url.scheme == "file": 238 | result = url.path.importName 239 | else: 240 | result = url.packageName.importName 241 | 242 | proc forkTarget*(url: Uri): ForkTargetResult = 243 | result.url = url.normalizeUrl 244 | block success: 245 | if not result.url.isValid: 246 | result.why = &"url is invalid" 247 | break 248 | if result.url.hostname.toLowerAscii != "github.com": 249 | result.why = &"url {result.url} does not point to github" 250 | break 251 | if result.url.path.len < 1: 252 | result.why = &"unable to parse url {result.url}" 253 | break 254 | # split /foo/bar into (bar, foo) 255 | 
let start = if result.url.path.startsWith("/"): 1 else: 0 256 | (result.owner, result.repo) = result.url.path[start..^1].splitPath 257 | # strip .git 258 | if result.repo.endsWith(".git"): 259 | result.repo = result.repo[0..^len("git+2")] 260 | result.ok = result.owner.len > 0 and result.repo.len > 0 261 | if not result.ok: 262 | result.why = &"unable to parse url {result.url}" 263 | 264 | {.warning: "replace this with compiler code".} 265 | proc destylize*(s: string): string = 266 | ## this is how we create a uniformly comparable token 267 | result = s.toLowerAscii.replace("_") 268 | -------------------------------------------------------------------------------- /src/nimph/thehub.nim: -------------------------------------------------------------------------------- 1 | import std/tables 2 | import std/sequtils 3 | import std/httpclient 4 | import std/httpcore 5 | import std/json 6 | import std/os 7 | import std/options 8 | import std/strutils 9 | import std/strformat 10 | import std/uri 11 | import std/times 12 | 13 | import rest # actually adjacent to nimph.nim whatfer vendor/shadow reasons 14 | import github 15 | import jsonconvert 16 | 17 | import nimph/spec 18 | import nimph/group 19 | 20 | const 21 | hubTime* = initTimeFormat "yyyy-MM-dd\'T\'HH:mm:ss\'Z\'" 22 | 23 | type 24 | HubKind* = enum 25 | HubRelease 26 | HubTag 27 | HubCommit 28 | HubRepo 29 | HubIssue 30 | HubPull 31 | HubUser 32 | HubCode 33 | 34 | HubTree* = object 35 | sha*: string 36 | url*: Uri 37 | `type`*: string 38 | 39 | HubContact* = object 40 | name*: string 41 | email*: string 42 | date*: DateTime 43 | 44 | HubVerification* = object 45 | verified*: bool 46 | reason*: string 47 | signature*: string 48 | payload*: string 49 | 50 | HubCommitMeta* = object 51 | url*: Uri 52 | author*: HubContact 53 | committer*: HubContact 54 | message*: string 55 | commentCount*: int 56 | tree*: HubTree 57 | 58 | HubResult* = ref object 59 | htmlUrl*: Uri 60 | id*: int 61 | number*: int 62 | title*: string 
63 | body*: string 64 | state*: string 65 | name*: string 66 | user*: HubResult 67 | tagName*: string 68 | targetCommitish*: string 69 | sha*: string 70 | created*: DateTime 71 | updated*: DateTime 72 | case kind*: HubKind: 73 | of HubCommit: 74 | tree*: HubTree 75 | author*: HubResult 76 | committer*: HubResult 77 | parents*: seq[HubTree] 78 | commit*: HubCommitMeta 79 | of HubTag: 80 | tagger*: HubContact 81 | `object`*: HubTree 82 | of HubRelease: 83 | draft*: bool 84 | prerelease*: bool 85 | of HubUser: 86 | login*: string 87 | of HubIssue: 88 | closedBy*: HubResult 89 | of HubPull: 90 | mergedBy*: HubResult 91 | merged*: bool 92 | of HubCode: 93 | path*: string 94 | repository*: HubResult 95 | of HubRepo: 96 | fullname*: string 97 | description*: string 98 | watchers*: int 99 | stars*: int 100 | forks*: int 101 | owner*: string 102 | size*: int 103 | pushed*: DateTime 104 | issues*: int 105 | clone*: Uri 106 | git*: Uri 107 | ssh*: Uri 108 | web*: Uri 109 | license*: string 110 | branch*: string 111 | original*: bool 112 | score*: float 113 | 114 | HubGroup* = ref object of Group[Uri, HubResult] 115 | 116 | HubSort* {.pure.} = enum 117 | Ascending = "asc" 118 | Descending = "desc" 119 | 120 | HubSortBy* {.pure.} = enum 121 | Best = "" 122 | Stars = "stars" 123 | Forks = "forks" 124 | Updated = "updated" 125 | 126 | proc shortly(stamp: DateTime): string = 127 | ## render a date shortly 128 | result = stamp.format(shortDate) 129 | 130 | proc renderShortly*(r: HubResult): string = 131 | result = &""" 132 | {r.web:<65} pushed {r.pushed.shortly} 133 | {r.size:>5} {"kb":<10} {r.issues:>4} {"issues":<10} {r.stars:>4} {"stars":<10} {r.forks:>4} {"forks":<10} created {r.created.shortly} 134 | {r.description} 135 | """ 136 | result = result.strip 137 | 138 | proc findGithubToken*(): Option[string] = 139 | ## find a github token in one of several places 140 | var 141 | token: string 142 | let 143 | hub = getHomeDir() / hubTokenFn 144 | file = getHomeDir() / dotNimble / 
ghTokenFn 145 | env = getEnv(ghTokenEnv, getEnv("GITHUB_TOKEN", getEnv("GHI_TOKEN", ""))) 146 | if env != "": 147 | token = env 148 | debug "using a github token from environment" 149 | elif fileExists(file): 150 | token = readFile(file) 151 | debug "using a github token from nimble" 152 | elif fileExists(hub): 153 | for line in lines(hub): 154 | if "oauth_token:" in line: 155 | token = line.strip.split(" ")[^1] 156 | debug "using a github token from hub" 157 | token = token.strip 158 | if token != "": 159 | result = token.some 160 | 161 | proc newHubContact*(js: JsonNode): HubContact = 162 | ## parse some json into a simple contact record 163 | let 164 | tz = utc() 165 | # 🐼 result = js.to(HubContact) 166 | if js == nil or "date" notin js or js.kind != JString: 167 | result = HubContact(date: now()) 168 | else: 169 | result = HubContact( 170 | date: js["date"].getStr.parse(hubTime, zone = tz) 171 | ) 172 | if js != nil: 173 | result.name = js.get("name", "") 174 | result.email = js.get("email", "") 175 | 176 | proc newHubTree*(js: JsonNode): HubTree = 177 | ## parse something like a commit tree 178 | result = HubTree() 179 | if js != nil: 180 | result.url = js.get("url", "").parseUri 181 | result.sha = js.get("sha", "") 182 | result.`type` = js.get("type", "") 183 | 184 | proc newHubCommitMeta*(js: JsonNode): HubCommitMeta = 185 | ## collect some ingredients found in a typical commit 186 | result = HubCommitMeta( 187 | committer: newHubContact js.getOrDefault("committer"), 188 | author: newHubContact js.getOrDefault("author") 189 | ) 190 | result.tree = newHubTree js.getOrDefault("tree") 191 | result.commentCount = js.get("comment_count", 0) 192 | 193 | proc newHubResult*(kind: HubKind; js: JsonNode): HubResult 194 | 195 | proc init*(result: var HubResult; js: JsonNode) = 196 | ## instantiate a new hub object using a jsonnode 197 | 198 | # impart a bit of sanity 199 | if js == nil or js.kind != JObject: 200 | raise newException(Defect, "nonsensical input: " & 
js.pretty) 201 | 202 | case result.kind: 203 | of HubRelease: discard 204 | of HubTag: discard 205 | of HubCommit: 206 | result.committer = HubUser.newHubResult(js["committer"]) 207 | result.author = HubUser.newHubResult(js["author"]) 208 | result.sha = js["sha"].getStr 209 | of HubIssue: 210 | if "closed_by" in js and js["closed_by"].kind == JObject: 211 | result.closedBy = HubUser.newHubResult(js["closed_by"]) 212 | of HubPull: 213 | result.merged = js.getOrDefault("merged").getBool 214 | if "merged_by" in js and js["merged_by"].kind == JObject: 215 | result.mergedBy = HubUser.newHubResult(js["merged_by"]) 216 | of HubCode: 217 | result.path = js.get("path", "") 218 | result.sha = js.get("sha", "") 219 | result.name = js.get("name", "") 220 | if "repository" in js: 221 | result.repository = HubRepo.newHubResult(js["repository"]) 222 | of HubRepo: 223 | result.fullname = js.get("full_name", "") 224 | result.owner = js.get("owner", "") 225 | result.name = js.get("name", "") 226 | result.description = js.get("description", "") 227 | result.stars = js.get("stargazers_count", 0) 228 | result.watchers = js.get("subscriber_count", 0) 229 | result.forks = js.get("forks_count", 0) 230 | result.issues = js.get("open_issues_count", 0) 231 | if "clone_url" in js: 232 | result.clone = js["clone_url"].getStr.parseUri 233 | if "git_url" in js: 234 | result.git = js["git_url"].getStr.parseUri 235 | if "ssh_url" in js: 236 | result.ssh = js["ssh_url"].getStr.parseUri 237 | if "homepage" in js and $js["homepage"] notin ["null", ""]: 238 | result.web = js["homepage"].getStr.parseUri 239 | if not result.web.isValid: 240 | result.web = result.htmlUrl 241 | if "license" in js: 242 | result.license = js["license"].getOrDefault("name").getStr 243 | result.branch = js.get("default_branch", "") 244 | result.original = not js.get("fork", false) 245 | result.score = js.get("score", 0.0) 246 | of HubUser: 247 | result.login = js.get("login", "") 248 | result.id = js.get("id", 0) 249 | if 
"title" in js: 250 | result.body = js.get("body", "") 251 | result.title = js.get("title", "") 252 | result.state = js.get("state", "") 253 | result.number = js.get("number", 0) 254 | if "user" in js and js["user"].kind == JObject: 255 | result.user = HubUser.newHubResult(js["user"]) 256 | 257 | proc newHubResult*(kind: HubKind; js: JsonNode): HubResult = 258 | # impart a bit of sanity 259 | if js == nil or js.kind != JObject: 260 | raise newException(Defect, "nonsensical input: " & js.pretty) 261 | 262 | template thenOrNow(label: string): DateTime = 263 | if js != nil and label in js and js[label].kind == JString: 264 | js[label].getStr.parse(hubTime, zone = tz) 265 | else: 266 | now() 267 | 268 | let 269 | tz = utc() 270 | kind = block: 271 | if "head" in js: 272 | HubPull 273 | elif kind == HubPull: 274 | HubIssue 275 | else: 276 | kind 277 | 278 | case kind 279 | of HubRelease: 280 | result = HubResult(kind: HubRelease, 281 | created: thenOrNow "created_at", 282 | updated: thenOrNow "updated_at") 283 | of HubTag: 284 | result = HubResult(kind: HubTag, 285 | tagger: newHubContact(js.getOrDefault("tagger")), 286 | `object`: newHubTree(js.getOrDefault("object")), 287 | created: thenOrNow "created_at", 288 | updated: thenOrNow "updated_at") 289 | of HubPull: 290 | result = HubResult(kind: HubPull, 291 | created: thenOrNow "created_at", 292 | updated: thenOrNow "updated_at") 293 | of HubCode: 294 | result = HubResult(kind: HubCode, 295 | created: thenOrNow "created_at", 296 | updated: thenOrNow "updated_at") 297 | of HubIssue: 298 | result = HubResult(kind: HubIssue, 299 | created: thenOrNow "created_at", 300 | updated: thenOrNow "updated_at") 301 | of HubRepo: 302 | result = HubResult(kind: HubRepo, 303 | pushed: thenOrNow "pushed_at", 304 | created: thenOrNow "created_at", 305 | updated: thenOrNow "updated_at") 306 | of HubCommit: 307 | result = HubResult(kind: HubCommit, 308 | commit: newHubCommitMeta(js.getOrDefault("commit")), 309 | created: thenOrNow 
"created_at", 310 | updated: thenOrNow "updated_at") 311 | of HubUser: 312 | result = HubResult(kind: HubUser, 313 | created: thenOrNow "created_at", 314 | updated: thenOrNow "updated_at") 315 | 316 | result.htmlUrl = js.get("html_url", "").parseUri 317 | result.init(js) 318 | 319 | proc newHubGroup*(flags: set[Flag] = defaultFlags): HubGroup = 320 | result = HubGroup(flags: flags) 321 | result.init(flags, mode = modeCaseSensitive) 322 | 323 | proc add*(group: var HubGroup; hub: HubResult) = 324 | {.warning: "nim bug #12818".} 325 | add[Uri, HubResult](group, hub.htmlUrl, hub) 326 | 327 | proc authorize*(request: Recallable): bool = 328 | ## find and inject credentials into a github request 329 | let token = findGithubToken() 330 | result = token.isSome 331 | if result: 332 | request.headers.del "Authorization" 333 | request.headers.add "Authorization", "token " & token.get 334 | else: 335 | error "unable to find a github authorization token" 336 | 337 | proc queryOne(recallable: Recallable; kind: HubKind): Option[HubResult] = 338 | ## issue a recallable query and parse the response as a single item 339 | block success: 340 | # start with installing our credentials into the request 341 | if not recallable.authorize: 342 | break success 343 | 344 | # send the request to github and see if they like it 345 | let response = recallable.issueRequest() 346 | if not response.code.is2xx: 347 | notice &"got response code {response.code} from github" 348 | break success 349 | 350 | # read the response and parse it to json 351 | let js = parseJson(response.body) 352 | 353 | # turn the json into a hub result object 354 | result = newHubResult(kind, js).some 355 | 356 | proc queryMany(recallable: Recallable; kind: HubKind): Option[HubGroup] = 357 | ## issue a recallable query and parse the response as a group of items 358 | block success: 359 | # start with installing our credentials into the request 360 | if not recallable.authorize: 361 | break success 362 | 363 | # send the 
request to github and see if they like it 364 | let response = recallable.issueRequest() 365 | if not response.code.is2xx: 366 | notice &"got response code {response.code} from github" 367 | break success 368 | 369 | # read the response and parse it to json 370 | let js = parseJson(response.body) 371 | 372 | # we know now that we'll be returning a group of some size 373 | var 374 | group = newHubGroup() 375 | result = group.some 376 | 377 | # add any parseable results to the group 378 | for node in js["items"].items: 379 | try: 380 | let item = newHubResult(kind, node) 381 | # if these are repositories, ignore forks 382 | if kind == HubRepo and not item.original: 383 | continue 384 | group.add item 385 | except Exception as e: 386 | warn "error parsing repo: " & e.msg 387 | 388 | proc getGitHubUser*(): Option[HubResult] = 389 | ## attempt to retrieve the authorized user 390 | var 391 | req = getUser.call(content = "") 392 | debug &"fetching github user" 393 | result = req.queryOne(HubUser) 394 | 395 | proc forkHub*(owner: string; repo: string): Option[HubResult] = 396 | ## attempt to fork an existing repository 397 | var 398 | req = postReposOwnerRepoForks.call(repo = repo, owner = owner, body = newJObject()) 399 | debug &"forking owner `{owner}` repo `{repo}`" 400 | result = req.queryOne(HubRepo) 401 | 402 | proc searchHub*(keywords: seq[string]; sort = Best; 403 | order = Descending): Option[HubGroup] = 404 | ## search github for packages 405 | var 406 | query = @["language:nim"].concat(keywords) 407 | req = getSearchRepositories.call(q = query.join(" "), 408 | sort = $sort, 409 | order = $order) 410 | debug &"searching github for {query}" 411 | result = req.queryMany(HubRepo) 412 | 413 | when not defined(ssl): 414 | {.error: "this won't work without defining `ssl`".} 415 | -------------------------------------------------------------------------------- /src/nimph/version.nim: -------------------------------------------------------------------------------- 1 | 
import std/strformat 2 | import std/hashes 3 | import std/strutils 4 | import std/tables 5 | import std/options 6 | 7 | import bump 8 | import npeg 9 | 10 | import nimph/spec 11 | 12 | type 13 | VersionField* = typeof(Version.major) 14 | VersionIndex* = range[0 .. 2] 15 | VersionMaskField* = Option[VersionField] 16 | VersionMask* = object 17 | major*: VersionMaskField 18 | minor*: VersionMaskField 19 | patch*: VersionMaskField 20 | 21 | Operator* = enum 22 | Tag = "#" 23 | Wild = "*" 24 | Tilde = "~" 25 | Caret = "^" 26 | Equal = "==" 27 | AtLeast = ">=" 28 | Over = ">" 29 | Under = "<" 30 | NotMore = "<=" 31 | 32 | # the specification of a version, release, or mask 33 | Release* = object 34 | case kind*: Operator 35 | of Tag: 36 | reference*: string 37 | of Wild, Caret, Tilde: 38 | accepts*: VersionMask 39 | of Equal, AtLeast, Over, Under, NotMore: 40 | version*: Version 41 | 42 | const 43 | Wildlings* = {Wild, Caret, Tilde} 44 | 45 | template starOrDigits(s: string): VersionMaskField = 46 | ## parse a star or digit as in a version mask 47 | if s == "*": 48 | # VersionMaskField is Option[VersionField] 49 | none(VersionField) 50 | else: 51 | some(s.parseUInt) 52 | 53 | proc parseDottedVersion*(input: string): Version = 54 | ## try to parse `1.2.3` into a `Version` 55 | let 56 | dotted = input.split('.') 57 | block: 58 | if dotted.len < 3: 59 | break 60 | try: 61 | result = (major: dotted[0].parseUInt, 62 | minor: dotted[1].parseUInt, 63 | patch: dotted[2].parseUInt) 64 | except ValueError: 65 | discard 66 | 67 | proc newVersionMask(input: string): VersionMask = 68 | ## try to parse `1.2` or `1.2.*` into a `VersionMask` 69 | let 70 | dotted = input.split('.') 71 | if dotted.len > 0: 72 | result.major = dotted[0].starOrDigits 73 | if dotted.len > 1: 74 | result.minor = dotted[1].starOrDigits 75 | if dotted.len > 2: 76 | result.patch = dotted[2].starOrDigits 77 | 78 | proc isValid*(release: Release): bool = 79 | ## true if the release seems plausible 80 | const 
sensible = @[ 81 | [ true, false, false], 82 | [ true, true, false], 83 | [ true, true, true ], 84 | ] 85 | case release.kind: 86 | of Tag: 87 | result = release.reference != "" 88 | of Wild, Caret, Tilde: 89 | let 90 | pattern = [release.accepts.major.isSome, 91 | release.accepts.minor.isSome, 92 | release.accepts.patch.isSome] 93 | result = pattern in sensible 94 | # let's say that *.*.* is valid; it could be useful 95 | if release.kind == Wild: 96 | result = result or pattern == [false, false, false] 97 | else: 98 | result = release.version.isValid 99 | 100 | proc newRelease*(version: Version): Release = 101 | ## create a new release using a version 102 | if not version.isValid: 103 | raise newException(ValueError, &"invalid version `{version}`") 104 | result = Release(kind: Equal, version: version) 105 | 106 | proc newRelease*(reference: string; operator = Equal): Release 107 | 108 | proc parseVersionLoosely*(content: string): Option[Release] = 109 | ## a very relaxed parser for versions found in tags, etc. 110 | ## only valid releases are emitted, however 111 | var 112 | release: Release 113 | let 114 | peggy = peg "document": 115 | ver <- +Digit * ('.' 
* +Digit)[0..2]
    record <- >ver * (!Digit | !1):
      if not release.isValid:
        release = newRelease($1, operator = Equal)
    document <- +(record | 1) * !1
  try:
    let parsed = peggy.match(content)
    if parsed.ok and release.isValid:
      result = release.some
  except Exception as e:
    # a parse failure is non-fatal; warn and yield none
    let emsg = &"parse error in `{content}`: {e.msg}" # noqa
    warn emsg

proc newRelease*(reference: string; operator = Equal): Release =
  ## parse a version, mask, or tag with an operator hint from the requirement
  if reference.startsWith("#") or operator == Tag:
    # a git reference; strip any leading `#`
    result = Release(kind: Tag, reference: reference)
    removePrefix(result.reference, {'#'})
  elif reference in ["", "any version"]:
    # an empty requirement matches any version at all
    result = Release(kind: Wild, accepts: newVersionMask("*"))
  elif "*" in reference:
    result = Release(kind: Wild, accepts: newVersionMask(reference))
  elif operator in Wildlings:
    # thanks, jasper
    case operator:
    of Wildlings:
      result = Release(kind: operator, accepts: newVersionMask(reference))
    else:
      raise newException(Defect, "inconceivable!")
  elif count(reference, '.') < 2:
    # fewer than three dotted fields; treat it as a wildcard mask
    result = Release(kind: Wild, accepts: newVersionMask(reference))
  else:
    result = newRelease(parseDottedVersion(reference))

proc `$`*(field: VersionMaskField): string =
  ## render a mask field; a missing field is the `*` wildcard
  if field.isNone:
    result = "*"
  else:
    result = $field.get

proc `$`*(mask: VersionMask): string =
  ## render a mask in full major.minor.patch form
  result = $mask.major
  result &= "." & $mask.minor
  result &= "." & $mask.patch

proc omitStars*(mask: VersionMask): string =
  ## render a mask, omitting trailing wildcard fields
  # NOTE(review): if minor is wild but patch is set, this yields "major.patch";
  # presumably such masks cannot be constructed — TODO confirm
  result = $mask.major
  if mask.minor.isSome:
    result &= "." & $mask.minor
  if mask.patch.isSome:
    result &= "." & $mask.patch

proc `$`*(spec: Release): string =
  case spec.kind
  of Tag:
    result = $spec.kind & $spec.reference
  of Equal, AtLeast, Over, Under, NotMore:
    result = $spec.version
  of Wild, Caret, Tilde:
    result = spec.accepts.omitStars

proc `==`*(a, b: VersionMaskField): bool =
  ## fields are equal when both are wild or both hold the same value
  result = a.isNone == b.isNone
  if result and a.isSome:
    result = a.get == b.get

proc `<`*(a, b: VersionMaskField): bool =
  ## comparable only when both fields are concrete
  result = a.isNone == b.isNone
  if result and a.isSome:
    result = a.get < b.get

proc `==`*(a, b: VersionMask): bool =
  result = a.major == b.major
  result = result and a.minor == b.minor
  result = result and a.patch == b.patch

proc `==`*(a, b: Release): bool =
  ## releases compare equal only when both are valid and of the same kind
  if a.kind == b.kind and a.isValid and b.isValid:
    case a.kind:
    of Tag:
      result = a.reference == b.reference
    of Wild, Caret, Tilde:
      result = a.accepts == b.accepts
    else:
      result = a.version == b.version

proc `<`*(a, b: Release): bool =
  if a.kind == b.kind and a.isValid and b.isValid:
    case a.kind
    of Tag:
      result = a.reference < b.reference
    of Equal:
      result = a.version < b.version
    else:
      raise newException(ValueError, "inconceivable!")

proc `<=`*(a, b: Release): bool =
  result = a == b or a < b

proc `==`*(a: VersionMask; b: Version): bool =
  ## true only when the mask is fully-specified and equal to the version
  if a.major.isSome and a.major.get == b.major:
    if a.minor.isSome and a.minor.get == b.minor:
      if a.patch.isSome and a.patch.get == b.patch:
        result = true

proc acceptable*(mask: VersionMaskField; op: Operator;
                 value: VersionField): bool =
  ## true if the versionfield value passes the mask
  case op:
  of Wild:
    result = mask.isNone or value == mask.get
  of Caret:
    # `or` short-circuits, so mask.get is never unpacked on a wild field
    result = mask.isNone
    result = result or (value >= mask.get and mask.get > 0'u)
    result = result or (value == 0 and mask.get == 0)
  of Tilde:
    result = mask.isNone or value >= mask.get
  else:
    raise newException(Defect, "inconceivable!")

proc at*[T: Version | VersionMask](version: T; index: VersionIndex): auto =
  ## like [int] but clashless
  case index:
  of 0: result = version.major
  of 1: result = version.minor
  of 2: result = version.patch

proc `[]=`*(mask: var VersionMask;
            index: VersionIndex; value: VersionMaskField) =
  ## assign a single field of a mask by index
  case index:
  of 0: mask.major = value
  of 1: mask.minor = value
  of 2: mask.patch = value

iterator items*[T: Version | VersionMask](version: T): auto =
  ## yield each field of a version or mask, major first
  for i in VersionIndex.low .. VersionIndex.high:
    yield version.at(i)

iterator pairs*[T: Version | VersionMask](version: T): auto =
  ## yield (index, field) for each field of a version or mask
  for i in VersionIndex.low .. VersionIndex.high:
    yield (index: i, field: version.at(i))

proc isSpecific*(release: Release): bool =
  ## if the version/match specifies a full X.Y.Z version
  if release.kind in {Equal, AtLeast, NotMore} and release.isValid:
    result = true
  elif release.kind in Wildlings and release.accepts.patch.isSome:
    result = true

proc specifically*(release: Release): Version =
  ## a full X.Y.Z version the release will match;
  ## raises a Defect if the release isn't specific
  if not release.isSpecific:
    let emsg = &"release {release} is not specific" # noqa
    raise newException(Defect, emsg)
  if release.kind in Wildlings:
    result = (major: release.accepts.major.get,
              minor: release.accepts.minor.get,
              patch: release.accepts.patch.get)
  else:
    result = release.version

proc effectively*(mask: VersionMask): Version =
  ## replace * with 0 in wildcard masks
  if mask.major.isNone:
    result = (0'u, 0'u, 0'u)
  elif mask.minor.isNone:
    result = (mask.major.get, 0'u, 0'u)
  elif mask.patch.isNone:
    result = (mask.major.get, mask.minor.get, 0'u)
  else:
    result = (mask.major.get, mask.minor.get, mask.patch.get)

proc effectively*(release: Release): Version =
  ## convert a release to a version for rough comparisons
  case release.kind:
  of Tag:
    let parsed = parseVersionLoosely(release.reference)
    if parsed.isNone:
      # FIX: an unparseable tag now reliably maps to 0.0.0; previously
      # `parsed.get` could still be reached for a none() and raise
      result = (0'u, 0'u, 0'u)
    elif parsed.get.kind == Tag:
      raise newException(Defect, "inconceivable!")
    else:
      result = parsed.get.effectively
  of Wildlings:
    result = release.accepts.effectively
  of Equal:
    result = release.version
  else:
    raise newException(Defect, "not implemented")

proc hash*(field: VersionMaskField): Hash =
  ## help hash version masks; a wild field hashes as '*'
  var h: Hash = 0
  if field.isNone:
    h = h !& '*'.hash
  else:
    h = h !& field.get.hash
  result = !$h

proc hash*(mask: VersionMask): Hash =
  ## uniquely identify a version mask
  var h: Hash = 0
  h = h !& mask.major.hash
  h = h !& mask.minor.hash
  h = h !& mask.patch.hash
  result = !$h

proc hash*(release: Release): Hash =
  ## uniquely identify a release; the kind participates in the hash
  var h: Hash = 0
  h = h !& release.kind.hash
  case release.kind:
  of Tag:
    h = h !& release.reference.hash
  of Wild, Tilde, Caret:
    h = h !& release.accepts.hash
  of Equal, AtLeast, Over, Under, NotMore:
    h = h !& release.version.hash
  result = !$h

proc toMask*(version: Version): VersionMask =
  ## populate a versionmask with values from a version
  for i, field in version.pairs:
    result[i] = field.some

proc importName*(target: Target): string =
  ## a uniform name usable in code for imports
  assert target.repo.len > 0
  result = target.repo.pathToImport.importName

iterator likelyTags*(version: Version): string =
  ## produce tags with/without silly `v`
prefixes
  let rendered = $version
  yield rendered
  yield "v" & rendered
  yield "V" & rendered
  yield "v." & rendered
  yield "V." & rendered

iterator semanticVersionStrings*(mask: VersionMask): string =
  ## emit 3, 3.1, 3.1.4 (if possible)
  # each longer form is only available when every shorter field is concrete
  var accumulated: string
  if mask.major.isSome:
    accumulated = $mask.major.get
    yield accumulated
    if mask.minor.isSome:
      accumulated &= "." & $mask.minor.get
      yield accumulated
      if mask.patch.isSome:
        yield accumulated & "." & $mask.patch.get

iterator semanticVersionStrings*(version: Version): string =
  ## emit 3, 3.1, 3.1.4
  yield $version.major
  yield $version.major & "." & $version.minor
  yield $version.major & "." & $version.minor & "." & $version.patch
--------------------------------------------------------------------------------
/src/nimph/versiontags.nim:
--------------------------------------------------------------------------------
import std/strutils
import std/sets
import std/options
import std/hashes
import std/strtabs
import std/tables

import bump
import gittyup

import nimph/spec
import nimph/version

import nimph/group
export group

type
  ## a group mapping concrete versions to their git objects
  VersionTags* = Group[Version, GitThing]

proc addName*(group: var VersionTags; mask: VersionMask; thing: GitThing) =
  ## add a versionmask to the group; note that this overwrites semvers
  for symbol in mask.semanticVersionStrings:
    group.imports[symbol] = $thing.oid

proc addName*(group: var VersionTags; version: Version; thing: GitThing) =
  ## add a version to the group; note that this overwrites semvers
  for symbol in version.semanticVersionStrings:
    group.imports[symbol] = $thing.oid

proc add*(group: var VersionTags; ver: Version; thing: GitThing) =
  ## add a version to the group; note that this overwrites semvers
  group.table[ver] = thing
  group.addName ver, thing

proc del*(group: var VersionTags; ver: Version) =
  ## remove a version from the group; note that this doesn't rebind semvers
  if group.table.hasKey(ver):
    group.delName $group.table[ver].oid
    group.table.del ver

proc `[]=`*(group: var VersionTags; ver: Version; thing: GitThing) =
  ## set a key to a single value
  group.del ver
  group.add ver, thing

proc `[]`*(group: VersionTags; ver: Version): var GitThing =
  ## get a git thing by version
  result = group.table[ver]

proc `[]`*(group: VersionTags; ver: VersionMask): var GitThing =
  ## get a git thing by versionmask; the first semver match wins
  for symbol in ver.semanticVersionStrings:
    if group.imports.hasKey(symbol):
      let dotted = group.imports[symbol]
      result = group.table[parseDottedVersion(dotted)]
      break

proc newVersionTags*(flags = defaultFlags): VersionTags =
  ## create a new versiontags group
  result = VersionTags(flags: flags)
  result.init(flags, mode = modeStyleInsensitive)

iterator richen*(tags: GitTagTable): tuple[release: Release; thing: GitThing] =
  ## yield releases that match the tags and the things they represent
  if tags == nil:
    raise newException(Defect, "are you lost?")
  # we're yielding #someoid, #tag, and whatever we can parse (version, mask)
  for tag, thing in tags.pairs:
    # someoid
    yield (release: newRelease($thing.oid, operator = Tag), thing: thing)
    # tag
    yield (release: newRelease(tag, operator = Tag), thing: thing)
    let parsed = parseVersionLoosely(tag)
    if parsed.isSome:
      # 3.1.4 or 3.1.*
      yield (release: parsed.get, thing: thing)

proc releaseHashes*(release: Release; head = ""): HashSet[Hash] =
  ## a set of hashes that should match valid values for the release
  result.incl hash(release)
  case release.kind:
  of Tag:
    # someNiceTag
    result.incl hash(release.reference)
    # perform the #head->oid substitution here
    if head != "" and cmpIgnoreCase(release.reference, "head") == 0:
      result.incl head.hash
      result.incl "head".hash
      result.incl "HEAD".hash
  of Wildlings:
    # 3, 3.1, 3.1.4 ... as available
    let effective = release.accepts.effectively
    for semantic in effective.semanticVersionStrings:
      result.incl hash(semantic)
    for semantic in release.accepts.semanticVersionStrings:
      result.incl hash(semantic)
    result.incl hash(effective)
    result.incl hash($effective)
  else:
    # 3, 3.1, 3.1.4 ... as available
    for semantic in release.version.semanticVersionStrings:
      result.incl hash(semantic)
    result.incl hash(release.version)
    result.incl hash($release.version)

proc releaseHashes*(release: Release; thing: GitThing; head = ""): HashSet[Hash] =
  ## a set of hashes that should match valid values for the release;
  ## the thing is presumed to be an associated tag/commit/etc and we
  ## should include useful hashes for it
  result = release.releaseHashes(head = head)
  # when we have a commit, we'll add the hash of the commit and its oid string
  result.incl hash(thing)
  result.incl hash($thing.oid)

iterator matches*(tags: GitTagTable; against: HashSet[Hash];
                  head: string = ""):
  tuple[release: Release; thing: GitThing] =
  ## see if any of the releases in the tag table will match `against`
  ## if so, yield the release and thing
  if tags == nil:
    raise newException(Defect, "are you lost?")
  for release, thing in tags.richen:
    # compute hashes to match against
    var candidates = release.releaseHashes(thing, head = head)
    # see if we scored any matches
    if against.intersection(candidates).len > 0:
      yield (release: release, thing: thing)
--------------------------------------------------------------------------------
/src/rest.nim:
--------------------------------------------------------------------------------
import times
import httpclient
import httpcore
import json
import strutils
import uri

export httpcore.HttpMethod, is2xx, is3xx, is4xx, is5xx

type
  KeyVal = tuple[key: string; val: string]

  RestClientObj = object of RootObj
    keepalive: bool
    http: HttpClient
    headers: HttpHeaders
  RestClient* = ref RestClientObj

  RestCall* = ref object of RootObj
    client*: RestClient
    name*: string
    meth*: HttpMethod
    url*: Uri

  Recallable* = ref object of RootObj
    ## a handle on input/output of a re-issuable API call
    headers*: HttpHeaders
    client*: RestClient
    url*: string
    json*: JsonNode
    body*: string
    retries*: int
    began*: Time
    took*: Duration
    meth*: HttpMethod
  RestError* = object of CatchableError ## base for REST errors
  RetriesExhausted* = object of RestError ## ran outta retries
  CallRequestError* = object of RestError ## HTTP [45]00 status code

proc massageHeaders*(node: JsonNode): seq[KeyVal] =
  ## flatten a JObject of string values into key/value tuples;
  ## nil, non-object, or empty input yields an empty seq
  if node == nil or node.kind != JObject or node.len == 0:
    return @[]
  for k, v in node.pairs:
    assert v.kind == JString
    result.add (key: k, val: v.getStr)

method `$`*(e: ref RestError): string
  {.base, raises: [].}=
  ## render a rest error with its type name
  result = $typeof(e) & " " & e.msg

method `$`*(c: RestCall): string
  {.base, raises: [].}=
  ## render a call as, e.g., `GET someName`
  result = toUpperAscii($c.meth) & " " & c.name

method initRestClient*(self: RestClient) {.base.} =
  ## prepare the client's underlying HttpClient
  self.http = newHttpClient()

proc newRestClient*(): RestClient =
  ## create a new rest client
  new result
  result.initRestClient()

method newRecallable*(call: RestCall; url: Uri; headers: HttpHeaders;
                      body: string): Recallable
  {.base, raises: [Exception].} =
  ## make a new HTTP request that we can reissue if desired
  new result
  result.url = $url
  result.retries = 0
  result.body = body
  # reuse the call's client only when it asked for keepalive
  result.client =
    if call.client != nil and call.client.keepalive:
      call.client
    else:
      newRestClient()
  result.headers = headers
  result.client.headers = result.headers
  result.client.http.headers = result.headers
  result.meth = call.meth

proc issueRequest*(rec: Recallable): Response
  {.raises: [RestError].} =
  ## submit a request and store some metrics
  assert rec.client != nil
  try:
    # lazily render a json payload into the body if one was supplied
    if rec.body == "" and rec.json != nil:
      rec.body = $rec.json
    rec.began = getTime()
    #
    # FIXME move this header-fu into something restClient-specific
    #
    if not rec.headers.isNil:
      rec.client.http.headers = rec.headers
    elif not rec.client.headers.isNil:
      rec.client.http.headers = rec.client.headers
    else:
      rec.client.http.headers = newHttpHeaders()
    result = rec.client.http.request(rec.url, rec.meth, body=rec.body)
  except CatchableError as e:
    raise newException(RestError, e.msg)
  except Exception as e:
    raise newException(RestError, e.msg)
--------------------------------------------------------------------------------
/tests/nim.cfg:
--------------------------------------------------------------------------------
--define:ssl
--path="../src"
#--d:npegGraph
#--d:npegTrace

# specify our preferred version of libgit2
--define:git2SetVer:"master"
#--define:git2SetVer:"v0.28.3"

# and our preferred method of retrieval
--define:git2Git

hint[XDeclaredButNotUsed]=off
--------------------------------------------------------------------------------
/tests/sample.cfg:
--------------------------------------------------------------------------------
--nimblePath="deps/pkgs"
--path="src/nimph"
-define:test2=foo
-d:test3:foo
-d:test4
--------------------------------------------------------------------------------
/tests/test.nim:
--------------------------------------------------------------------------------
# nimph's integration test-suite; exercises config parsing, requirement
# parsing, dependency resolution, and git tag handling against this repo
import std/strtabs
import std/os
import std/strutils
import std/options
import std/tables
import std/uri

import bump
import gittyup
import balls

import nimph/spec
import nimph/config
import nimph/project
import nimph/nimble
import nimph/package
import nimph/version
import nimph/requirement
import nimph/dependency
import nimph/versiontags

proc v(loose: string): Version =
  ## convenience
  let release = parseVersionLoosely(loose)
  result = release.get.version

block:
  # let us shadow `project`
  suite "welcome to the nimph-o-matic 9000":
    const
      sample = "tests/sample.cfg"
      testcfg = newTarget(sample)
      # keep the fixture's original bytes so we can restore them later
      was = staticRead(sample.extractFilename)

    var project: Project
    var deps: DependencyGroup

    test "open the project":
      let target = findTarget(".")
      check "finding targets":
        target.found.isSome
      findProject(project, (get target.found).repo)

    test "load a nim.cfg":
      let loaded = parseConfigFile(sample)
      check loaded.isSome

    test "naive parse":
      let parsed = parseProjectCfg(testcfg)
      check parsed.ok
      check "nimblePath" in parsed.table
      checkpoint $parsed.table
      check parsed.table["path"].len == 1
      check parsed.table["path"][0].len > 1
      # every fixture switch must appear somewhere in the parsed table
      for find in ["test4", "test3:foo", "test2=foo"]:
        block found:
          for values in parsed.table.values:
            for value in values.items:
              if value == find:
                break found
          fail "missing config values from parse"

    test "add a line to a config":
      check testcfg.appendConfig("--clearNimblePath")
      let now = readFile(sample)
      check "splitlines":
        # check for empty trailing line
        was.splitLines.len + 2 == now.splitLines.len
        now.splitLines[^1] == ""
      # restore the fixture so other tests see the original content
      writeFile(sample, was)

    test "parse some dump output":
      let text = """oneline: "is fine"""" & "\n"
      let parsed = parseNimbleDump(text)
      check parsed.isSome

    test "via subprocess capture":
      let dumped = fetchNimbleDump(project.nimble.repo)
      check dumped.ok == true
      if dumped.ok:
        check dumped.table["Name"] == "nimph"

    const
      # how we'll render a release requirement like "package"
      anyRelease = "*"

    test "parse simple requires statements":
      let
        text1 = "nim >= 0.18.0, bump 1.8.6, github < 2.0.0"
        text2 = ""
        text3 = "nim #catsAndDogsLivingTogether"
        text4 = "goats"
        text5 = "goats ^1.2.3"
        text6 = "nim#catsAndDogsLivingTogether"
        text7 = "pigs 2.*.*"
        text8 = "git://github.com/disruptek/bump.git#1.8.8"
        text9 = "git://github.com/disruptek/bump.git"
        text10 = "pigs 2.*"
        text11 = "dogs ^3.2"
        text15 = "dogs ^=3.2"
        text12 = "owls ~4"
        text16 = "owls ~= 4"
        text13 = "owls any version"
        text14 = "owls >=1.0.0 &< 2"
        parsed1 = parseRequires(text1)
        parsed2 = parseRequires(text2)
        parsed3 = parseRequires(text3)
        parsed4 = parseRequires(text4)
        parsed5 = parseRequires(text5)
        parsed6 = parseRequires(text6)
        parsed7 = parseRequires(text7)
        parsed8 = parseRequires(text8)
        parsed9 = parseRequires(text9)
        parsed10 = parseRequires(text10)
        parsed11 = parseRequires(text11)
        parsed12 = parseRequires(text12)
        parsed13 = parseRequires(text13)
        parsed14 = parseRequires(text14)
        parsed15 = parseRequires(text15)
        parsed16 = parseRequires(text16)
      check parsed1.isSome
      check parsed2.isSome
      check parsed3.isSome
      check parsed4.isSome
      for req in parsed4.get.values:
        check $req.release == anyRelease
      check parsed5.isSome
      check parsed6.isSome
      for req in parsed6.get.values:
        check req.release.reference == "catsAndDogsLivingTogether"
      check parsed7.isSome
      for req in parsed7.get.values:
        check $req.release == "2"
      check parsed8.isSome
      for req in parsed8.get.values:
        check req.identity == "git://github.com/disruptek/bump.git"
        check req.release.reference == "1.8.8"
      for req in parsed9.get.values:
        check req.identity == "git://github.com/disruptek/bump.git"
        check $req.release == anyRelease
      for req in parsed10.get.values:
        check req.identity == "pigs"
      for req in parsed11.get.values:
        check req.identity == "dogs"
      # caret: >=3.2.0 and <4.0.0
      for req in parsed15.get.values:
        check req.identity == "dogs"
        check req.isSatisfiedBy newRelease"3.2.4"
        check req.isSatisfiedBy newRelease"3.2.0"
        check req.isSatisfiedBy newRelease"3.3.0"
        check not req.isSatisfiedBy newRelease"3.1.0"
        check not req.isSatisfiedBy newRelease"3.1.9"
        check not req.isSatisfiedBy newRelease"4.0.0"
      for req in parsed12.get.values:
        check req.identity == "owls"
      # tilde: >=4 and <5
      for req in parsed16.get.values:
        check req.identity == "owls"
        check req.isSatisfiedBy newRelease"4.0.0"
        check req.isSatisfiedBy newRelease"4.2.0"
        check not req.isSatisfiedBy newRelease"5.0.0"
        check not req.isSatisfiedBy newRelease"3.9.0"
      for req in parsed13.get.values:
        check $req.release == anyRelease
        check req.isSatisfiedBy newRelease"1.8.8"
      check parsed14.get.len == 2
      for req in parsed14.get.values:
        checkpoint $req

    test "parse nimph requires statement":
      project.fetchDump()
      let
        text = project.dump["requires"]
        parsed = parseRequires(text)
      check parsed.isSome

    test "naive package naming":
      check "nim_Somepack" == importName(parseUri"git@github.com:some/nim-Somepack.git/")
      check "nim_Somepack" == importName(parseUri"git@github.com:some/nim-Somepack.git")
      check "somepack" == importName("/some/other/somepack-1.2.3".pathToImport)

    test "get the official packages list":
      let
        parsed = getOfficialPackages(project.nimbleDir)
      check parsed.ok == true
      check "release" in parsed.packages["bump"].tags

    test "requirements versus versions":
      let
        # every requirement here should be satisfied by 1.2.3
        works = [
          newRequirement("a", Equal, "1.2.3"),
          newRequirement("a", AtLeast, "1.2.3"),
          newRequirement("a", NotMore, "1.2.3"),
          newRequirement("a", Caret, "1"),
          newRequirement("a", Caret, "1.2"),
          newRequirement("a", Caret, "1.2.3"),
          newRequirement("a", Tilde, "1"),
          newRequirement("a", Tilde, "1.2"),
          newRequirement("a", Tilde, "1.2.0"),
        ]
        # ...and none of these
        breaks = [
          newRequirement("a", Equal, "1.2.4"),
          newRequirement("a", AtLeast, "1.2.4"),
          newRequirement("a", NotMore, "1.2.2"),
          newRequirement("a", Caret, "2"),
          newRequirement("a", Caret, "1.3"),
          newRequirement("a", Caret, "1.2.4"),
          newRequirement("a", Tilde, "0"),
          newRequirement("a", Tilde, "1.1"),
          newRequirement("a", Tilde, "1.1.2"),
        ]
        one23 = newRelease("1.2.3")
      for req in works.items:
        check req.isSatisfiedBy one23
      for req in breaks.items:
        check not req.isSatisfiedBy one23

    test "parse version loosely":
      let
        works = [
          "v1.2.3",
          "V. 1.2.3",
          "1.2.3-rc2",
          "1.2.3a",
          "1.2.3",
          "1.2.3.4",
          "mary had a little l1.2.3mb whose fleece... ah you get the picture"
        ]
      for v in works.items:
        let parsed = v.parseVersionLoosely
        check parsed.isSome
        check $parsed.get == "1.2.3"
      check "".parseVersionLoosely.isNone

    block:
      ## load project config
      project.cfg = loadAllCfgs project.repo

    block:
      ## dependencies, path-for-name, project-for-path
      deps = newDependencyGroup(project, {Dry})
      check project.resolve(deps)
      var path = deps.pathForName "cutelog"
      check path.isSome
      check dirExists(get path)
      var proj = deps.projectForPath path.get
      check proj.isSome
      check (get proj).name == "cutelog"

    # gittyup's := opens the repo or runs the failure branch
    repository := openRepository project.gitDir:
      fail"unable to open the repo"

    test "roll between versions":
      returnToHeadAfter project:
        for ver in ["0.6.6", "0.6.5"]:
          let release = newRelease(ver, operator = Tag)
          let req = newRequirement($project.url, operator = Tag, release)
          if project.rollTowards(req):
            for stat in repository.status(GIT_STATUS_SHOW_INDEX_AND_WORKDIR):
              check stat.isOk
              check GIT_STATUS_INDEX_MODIFIED notin stat.get.flags

    test "project version changes":
      returnToHeadAfter project:
        let versioned = project.versionChangingCommits
        let required = project.requirementChangingCommits
        when false:
          for key, value in versioned.pairs:
            checkpoint "versioned ", key
          for key, value in required.pairs:
            checkpoint "required ", key
        check "version oids as expected":
          $versioned[v"0.6.5"].oid == "8937c0b998376944fd93d6d8e7b3cf4db91dfb9b"
          $versioned[v"0.6.6"].oid == "5a3de5a5fc9b83d5a9bba23f7e950b37a96d10e6"

    test "basic tag table fetch":
      fetchTagTable project
      check project.tags != nil, "tag fetch yielded no table"
      check project.tags.len > 0, "tag fetch created empty table"

    test "make sure richen finds a tag":
      check not project.tags.isNil, "tag fetch unsuccessful"
      block found:
        for release, thing in project.tags.richen:
          when false:
            checkpoint $release
            checkpoint $thing
          if release == newRelease("0.6.14", operator = Tag):
            break found
        fail"tag for 0.6.14 was not found"
--------------------------------------------------------------------------------
/tests/tspec.nim:
--------------------------------------------------------------------------------
# unit tests for nimph/spec's url munging and path helpers
import std/os
import std/strutils
import std/options
import std/uri

import pkg/bump
import pkg/balls

import nimph/spec
import nimph/version

suite "welcome to the nimph-o-matic 9000":
  proc v(loose: string): Version =
    let release = parseVersionLoosely(loose)
    result = release.get.version

  test "some url munging":
    let
      sshUrl = parseUri"git@github.com:disruptek/nimph.git"
      gitUrl = parseUri"git://github.com/disruptek/nimph.git"
      webUrl = parseUri"https://github.com/disruptek/nimph"
      bigUrl = parseUri"https://github.com/Vindaar/ginger"
      bagUrl = parseUri"https://githob.com/Vindaar/ginger"
    check "convert to git":
      $sshUrl.convertToGit == $gitUrl
      $gitUrl.convertToGit == $gitUrl
      $webUrl.convertToGit == $webUrl & ".git" # !!!
    #check "convert to ssh":
    checkpoint $sshUrl.convertToSsh
    checkpoint $gitUrl.convertToSsh
    checkpoint $webUrl.convertToSsh
    check $sshUrl.convertToSsh == $sshUrl
    check $gitUrl.convertToSsh == $sshUrl
    check $webUrl.convertToSsh == $sshUrl
    check "normalize path case (only) for github":
      $bigUrl.normalizeUrl == ($bigUrl).toLowerAscii
      $bagUrl.normalizeUrl == $bagUrl
    check $gitUrl.prepareForClone == $webUrl & ".git" # !!!

  test "fork targets":
    for url in [
      parseUri"git@github.com:disruptek/nimph.git",
      parseUri"git://github.com/disruptek/nimph.git",
      parseUri"https://github.com/disruptek/nimph",
    ].items:
      let fork {.used.} = url.forkTarget
      checkpoint $url
      #checkpoint fork.repr
      check fork.ok
      check fork.owner == "disruptek" and fork.repo == "nimph"

  test "url normalization":
    let
      sshUser = "git"
      sshUrl1 = "git@git.sr.ht:~kungtotte/dtt"
      sshHost1 = "git.sr.ht"
      sshPath1 = "~kungtotte/dtt"
      sshUrl2 = "git@github.com:disruptek/nimph.git"
      sshHost2 = "github.com"
      sshPath2 = "disruptek/nimph.git"
      normUrl1 = normalizeUrl(parseUri(sshUrl1))
      normUrl2 = normalizeUrl(parseUri(sshUrl2))

    check "more creepy urls":
      normUrl1.username == sshUser
      normUrl1.hostname == sshHost1
      normUrl1.path == sshPath1
      normUrl2.username == sshUser
      normUrl2.hostname == sshHost2
      normUrl2.path == sshPath2

  test "path joins":
    let
      p = "goats"
      o = "pigs/"
    check "slash attack":
      ///p == "goats/"
      ///o == "pigs/"
      //////p == "/goats/"
      //////o == "/pigs/"
--------------------------------------------------------------------------------