├── .github ├── FUNDING.yml └── workflows │ ├── ci.yml │ └── install.yml ├── .gitignore ├── Cargo.toml ├── LICENSE-APACHE ├── LICENSE-MIT ├── README.md ├── build.rs └── src ├── alloc.rs ├── arena.rs ├── args.rs ├── clean.rs ├── collect.rs ├── communication.rs ├── cratemap.rs ├── cratename.rs ├── dependency.rs ├── feature.rs ├── filter.rs ├── hidden.rs ├── hint.rs ├── id.rs ├── impls.rs ├── index.html ├── lib.rs ├── load.rs ├── log.rs ├── macros.rs ├── main.rs ├── matrix.rs ├── max.rs ├── mend.rs ├── present.rs ├── query.rs ├── render.rs ├── stream.rs ├── timestamp.rs ├── total.rs ├── trace.rs ├── user.rs └── version.rs /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: dtolnay 2 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | pull_request: 6 | workflow_dispatch: 7 | schedule: [cron: "40 1 * * *"] 8 | 9 | permissions: 10 | contents: read 11 | 12 | env: 13 | RUSTFLAGS: -Dwarnings 14 | 15 | jobs: 16 | pre_ci: 17 | uses: dtolnay/.github/.github/workflows/pre_ci.yml@master 18 | 19 | test: 20 | name: Rust ${{matrix.rust}} 21 | needs: pre_ci 22 | if: needs.pre_ci.outputs.continue 23 | runs-on: ubuntu-latest 24 | strategy: 25 | fail-fast: false 26 | matrix: 27 | rust: [nightly, beta, stable, 1.75.0] 28 | timeout-minutes: 45 29 | steps: 30 | - uses: actions/checkout@v4 31 | - uses: dtolnay/rust-toolchain@master 32 | with: 33 | toolchain: ${{matrix.rust}} 34 | - name: Enable type layout randomization 35 | run: echo RUSTFLAGS=${RUSTFLAGS}\ -Zrandomize-layout >> $GITHUB_ENV 36 | if: matrix.rust == 'nightly' 37 | - run: cargo check 38 | - run: cargo test 39 | - uses: actions/upload-artifact@v4 40 | if: matrix.rust == 'nightly' && always() 41 | with: 42 | name: Cargo.lock 43 | path: Cargo.lock 44 | continue-on-error: true 45 | 46 | 
windows: 47 | name: Windows 48 | needs: pre_ci 49 | if: needs.pre_ci.outputs.continue 50 | runs-on: windows-latest 51 | timeout-minutes: 45 52 | steps: 53 | - uses: actions/checkout@v4 54 | - uses: dtolnay/rust-toolchain@stable 55 | - run: cargo check 56 | 57 | clippy: 58 | name: Clippy 59 | runs-on: ubuntu-latest 60 | if: github.event_name != 'pull_request' 61 | timeout-minutes: 45 62 | steps: 63 | - uses: actions/checkout@v4 64 | - uses: dtolnay/rust-toolchain@clippy 65 | - run: cargo clippy -- -Dclippy::all -Dclippy::pedantic 66 | 67 | outdated: 68 | name: Outdated 69 | runs-on: ubuntu-latest 70 | if: github.event_name != 'pull_request' 71 | timeout-minutes: 45 72 | steps: 73 | - uses: actions/checkout@v4 74 | - uses: dtolnay/rust-toolchain@stable 75 | - uses: dtolnay/install@cargo-outdated 76 | - run: cargo outdated --workspace --exit-code 1 77 | -------------------------------------------------------------------------------- /.github/workflows/install.yml: -------------------------------------------------------------------------------- 1 | name: Install 2 | 3 | on: 4 | workflow_dispatch: 5 | schedule: [cron: "40 1 * * *"] 6 | push: {tags: ['*']} 7 | 8 | permissions: {} 9 | 10 | env: 11 | RUSTFLAGS: -Dwarnings 12 | 13 | jobs: 14 | install: 15 | name: Install 16 | uses: dtolnay/.github/.github/workflows/check_install.yml@master 17 | with: 18 | crate: cargo-tally 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /*.tar.gz 2 | /Cargo.lock 3 | /dataflow-graph/ 4 | /report.txt 5 | /target/ 6 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "cargo-tally" 3 | version = "1.0.64" 4 | authors = ["David Tolnay "] 5 | categories = ["development-tools::cargo-plugins"] 6 | description = "Cargo 
subcommand for drawing graphs of the number of dependencies on a crate over time" 7 | edition = "2021" 8 | keywords = ["cargo", "subcommand"] 9 | license = "MIT OR Apache-2.0" 10 | repository = "https://github.com/dtolnay/cargo-tally" 11 | rust-version = "1.70" 12 | 13 | [lib] 14 | path = "src/hidden.rs" 15 | 16 | [dependencies] 17 | abomonation = "0.7" 18 | anyhow = "1.0.79" 19 | atomic-take = "1.0" 20 | bytesize = "2" 21 | cargo-subcommand-metadata = "0.1" 22 | chrono = "0.4.35" 23 | clap = { version = "4", features = ["deprecated"] } 24 | db-dump = "0.7.11" 25 | differential-dataflow-master = { version = "=0.13.0-dev.1", default-features = false } 26 | foldhash = "0.1" 27 | minipre = "0.2" 28 | num_cpus = "1.0" 29 | opener = "0.8" 30 | ref-cast = "1.0" 31 | regex = { version = "1.9.2", default-features = false, features = ["perf", "std"] } 32 | semver = "1.0" 33 | serde = "1.0.194" 34 | sysinfo = { version = "0.35", default-features = false, features = ["system"] } 35 | termcolor = "1.1" 36 | thiserror = "2" 37 | timely-master = { version = "=0.13.0-dev.1", default-features = false } 38 | typed-arena = "2.0" 39 | 40 | [package.metadata.docs.rs] 41 | targets = ["x86_64-unknown-linux-gnu"] 42 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 
14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 
175 | 176 | END OF TERMS AND CONDITIONS 177 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | Permission is hereby granted, free of charge, to any 2 | person obtaining a copy of this software and associated 3 | documentation files (the "Software"), to deal in the 4 | Software without restriction, including without 5 | limitation the rights to use, copy, modify, merge, 6 | publish, distribute, sublicense, and/or sell copies of 7 | the Software, and to permit persons to whom the Software 8 | is furnished to do so, subject to the following 9 | conditions: 10 | 11 | The above copyright notice and this permission notice 12 | shall be included in all copies or substantial portions 13 | of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF 16 | ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED 17 | TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A 18 | PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT 19 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 20 | CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 21 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR 22 | IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 23 | DEALINGS IN THE SOFTWARE. 24 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Cargo tally 2 | 3 | Number of crates that depend directly on each regex version Fraction of crates that depend on failure vs anyhow and thiserror Fraction of crates.io that depends transitively on libc 4 | 5 | **`cargo tally` is a Cargo subcommand for drawing graphs of the number of crates 6 | that depend directly or indirectly on a crate over time.** 7 | 8 | ``` 9 | Usage: cargo tally [options] queries... 
10 | 11 | Options: 12 | --db Path to crates.io's database dump [default: ./db-dump.tar.gz] 13 | --jobs, -j Number of threads to run differential dataflow 14 | --relative Display as a fraction of total crates, not absolute number 15 | --transitive Count transitive dependencies, not just direct dependencies 16 | ``` 17 | 18 | [github](https://github.com/dtolnay/cargo-tally) 19 | [crates.io](https://crates.io/crates/cargo-tally) 20 | [build status](https://github.com/dtolnay/cargo-tally/actions?query=branch%3Amaster) 21 | 22 |
23 | 24 | ## Installation 25 | 26 | ```console 27 | $ wget https://static.crates.io/db-dump.tar.gz 28 | $ cargo install cargo-tally 29 | ``` 30 | 31 | - Data is drawn from crates.io database dumps, which are published nightly by 32 | automation running on crates.io. You can download a new dump whenever you feel 33 | like having fresh data. 34 | 35 | - The tally command accepts a list of which crates to tally. This can either be 36 | the name of a crate like `serde` or a name with arbitrary semver version 37 | specification like `serde:1.0`. If a version is not specified, dependencies on 38 | all versions of the crate are tallied together. 39 | 40 | - The generated graphs use [D3](https://d3js.org/); the cargo tally command 41 | should pop open a browser showing your graph. It uses the same mechanism that 42 | `cargo doc --open` uses so hopefully it works well on various systems. 43 | 44 | --- 45 | 46 |
47 | 48 | ## Examples 49 | 50 | - Number of crates that depend directly on each major version of the regex 51 | crate. 52 | 53 | **`$ cargo tally regex:0.1 regex:0.2 regex:1.0`** 54 | 55 | ![Number of crates that depend directly on each major version of regex][regex] 56 | 57 | --- 58 | 59 |
60 | 61 | - Fraction of crates.io that depends directly on each major version of the regex 62 | crate. This is the same graph as the previous, but scaled to the exponentially 63 | growing total number of crates on crates.io. 64 | 65 | 66 | **`$ cargo tally regex:0.1 regex:0.2 regex:1.0 --relative`** 67 | 68 | ![Fraction of crates.io that depends directly on each major version of regex][regex-relative] 69 | 70 | --- 71 | 72 |
73 | 74 | - Fraction of crates.io that depends directly on various error handling 75 | libraries. Note that crates are not double-counted; a crate that depends on 76 | *both* `anyhow` and `thiserror` counts as only one for the purpose of the 77 | `anyhow+thiserror` curve. 78 | 79 | **`$ cargo tally --relative failure anyhow thiserror anyhow+thiserror`** 80 | 81 | ![Fraction of crates.io that depends directly on various error handling libraries][failure-anyhow-thiserror] 82 | 83 | --- 84 | 85 |
86 | 87 | - Fraction of crates.io that depends transitively on libc. 88 | 89 | **`$ cargo tally --relative --transitive libc`** 90 | 91 | ![Fraction of crates.io that depends transitively on libc][libc] 92 | 93 | [regex]: https://user-images.githubusercontent.com/1940490/122184090-bc75d600-ce40-11eb-856b-affc568d2e15.png 94 | [regex-relative]: https://user-images.githubusercontent.com/1940490/122184174-d31c2d00-ce40-11eb-8c17-bde6f3015c28.png 95 | [failure-anyhow-thiserror]: https://user-images.githubusercontent.com/1940490/122184103-bf70c680-ce40-11eb-890c-988cd96f4428.png 96 | [libc]: https://user-images.githubusercontent.com/1940490/122184112-c13a8a00-ce40-11eb-8bdb-a7f6f03d2d91.png 97 | 98 | --- 99 | 100 |
101 | 102 | ## Credits 103 | 104 | The implementation is powered by [differential-dataflow]. 105 | 106 | 107 | 108 | [differential-dataflow]: https://github.com/TimelyDataflow/differential-dataflow 109 | 110 |
111 | 112 | #### License 113 | 114 | 115 | Licensed under either of Apache License, Version 116 | 2.0 or MIT license at your option. 117 | 118 | 119 |
120 | 121 | 122 | Unless you explicitly state otherwise, any contribution intentionally submitted 123 | for inclusion in this project by you, as defined in the Apache-2.0 license, 124 | shall be dual licensed as above, without any additional terms or conditions. 125 | 126 | -------------------------------------------------------------------------------- /build.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use std::fs; 3 | use std::path::Path; 4 | use std::process; 5 | 6 | const CARGO_TALLY_MEMORY_LIMIT: &str = "CARGO_TALLY_MEMORY_LIMIT"; 7 | 8 | fn main() { 9 | let limit = if let Some(value) = env::var_os(CARGO_TALLY_MEMORY_LIMIT) { 10 | let Some(value) = value.to_str() else { 11 | eprintln!("failed to parse ${CARGO_TALLY_MEMORY_LIMIT}"); 12 | process::exit(1); 13 | }; 14 | let value = match value.parse::() { 15 | Ok(int) => int, 16 | Err(err) => { 17 | eprintln!("failed to parse ${CARGO_TALLY_MEMORY_LIMIT}: {err}"); 18 | process::exit(1); 19 | } 20 | }; 21 | Some(value) 22 | } else { 23 | None 24 | }; 25 | 26 | let out_dir = env::var_os("OUT_DIR").unwrap(); 27 | let out = Path::new(&out_dir).join("limit.mem"); 28 | fs::write(out, format!("{limit:?}\n")).unwrap(); 29 | 30 | let host = env::var_os("HOST").unwrap(); 31 | if let Some("windows") = host.to_str().unwrap().split('-').nth(2) { 32 | println!("cargo:rustc-cfg=host_os=\"windows\""); 33 | } 34 | 35 | println!("cargo:rerun-if-env-changed={CARGO_TALLY_MEMORY_LIMIT}"); 36 | println!("cargo:rustc-check-cfg=cfg(host_os, values(\"windows\"))"); 37 | } 38 | -------------------------------------------------------------------------------- /src/alloc.rs: -------------------------------------------------------------------------------- 1 | use bytesize::ByteSize; 2 | use std::alloc::{self, GlobalAlloc, Layout, System}; 3 | use std::fmt::{self, Display}; 4 | use std::ptr; 5 | use std::sync::atomic::{AtomicU64, Ordering}; 6 | 7 | struct Allocator { 8 | alloc: A, 9 
| count: AtomicU64, 10 | total: AtomicU64, 11 | current: AtomicU64, 12 | peak: AtomicU64, 13 | } 14 | 15 | #[global_allocator] 16 | static ALLOC: Allocator = Allocator { 17 | alloc: System, 18 | count: AtomicU64::new(0), 19 | total: AtomicU64::new(0), 20 | current: AtomicU64::new(0), 21 | peak: AtomicU64::new(0), 22 | }; 23 | 24 | #[cfg(not(host_os = "windows"))] 25 | const LIMIT: Option = include!(concat!(env!("OUT_DIR"), "/limit.mem")); 26 | 27 | #[cfg(host_os = "windows")] 28 | const LIMIT: Option = include!(concat!(env!("OUT_DIR"), "\\limit.mem")); 29 | 30 | unsafe impl GlobalAlloc for Allocator 31 | where 32 | A: GlobalAlloc, 33 | { 34 | unsafe fn alloc(&self, layout: Layout) -> *mut u8 { 35 | self.count.fetch_add(1, Ordering::Relaxed); 36 | 37 | let size = layout.size() as u64; 38 | let prev = self.current.fetch_add(size, Ordering::Relaxed); 39 | self.total.fetch_add(size, Ordering::Relaxed); 40 | let peak = self 41 | .peak 42 | .fetch_max(prev + size, Ordering::Relaxed) 43 | .max(prev + size); 44 | 45 | if let Some(limit) = LIMIT { 46 | if peak > limit { 47 | alloc::handle_alloc_error(layout); 48 | } 49 | } 50 | 51 | unsafe { self.alloc.alloc(layout) } 52 | } 53 | 54 | unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { 55 | unsafe { self.alloc.dealloc(ptr, layout) }; 56 | 57 | let size = layout.size() as u64; 58 | self.current.fetch_sub(size, Ordering::Relaxed); 59 | } 60 | 61 | unsafe fn alloc_zeroed(&self, layout: Layout) -> *mut u8 { 62 | self.count.fetch_add(1, Ordering::Relaxed); 63 | 64 | let size = layout.size() as u64; 65 | let prev = self.current.fetch_add(size, Ordering::Relaxed); 66 | self.total.fetch_add(size, Ordering::Relaxed); 67 | let peak = self 68 | .peak 69 | .fetch_max(prev + size, Ordering::Relaxed) 70 | .max(prev + size); 71 | 72 | if let Some(limit) = LIMIT { 73 | if peak > limit { 74 | alloc::handle_alloc_error(layout); 75 | } 76 | } 77 | 78 | unsafe { self.alloc.alloc_zeroed(layout) } 79 | } 80 | 81 | unsafe fn realloc(&self, 
ptr: *mut u8, old_layout: Layout, new_size: usize) -> *mut u8 { 82 | self.count.fetch_add(1, Ordering::Relaxed); 83 | 84 | let align = old_layout.align(); 85 | let new_layout = unsafe { Layout::from_size_align_unchecked(new_size, align) }; 86 | 87 | let new_ptr = unsafe { self.alloc.realloc(ptr, old_layout, new_size) }; 88 | let old_size = old_layout.size() as u64; 89 | let new_size = new_size as u64; 90 | 91 | let peak = if ptr::eq(new_ptr, ptr) { 92 | if new_size > old_size { 93 | self.total.fetch_add(new_size - old_size, Ordering::Relaxed); 94 | let prev = self 95 | .current 96 | .fetch_add(new_size - old_size, Ordering::Relaxed); 97 | self.peak 98 | .fetch_max(prev + new_size - old_size, Ordering::Relaxed) 99 | .max(prev + new_size - old_size) 100 | } else { 101 | self.current 102 | .fetch_sub(old_size - new_size, Ordering::Relaxed); 103 | 0 104 | } 105 | } else { 106 | self.total.fetch_add(new_size, Ordering::Relaxed); 107 | let prev = if new_size > old_size { 108 | self.current 109 | .fetch_add(new_size - old_size, Ordering::Relaxed) 110 | } else { 111 | self.current 112 | .fetch_sub(old_size - new_size, Ordering::Relaxed) 113 | }; 114 | self.peak 115 | .fetch_max(prev + new_size, Ordering::Relaxed) 116 | .max(prev + new_size) 117 | }; 118 | 119 | if let Some(limit) = LIMIT { 120 | if peak > limit { 121 | alloc::handle_alloc_error(new_layout); 122 | } 123 | } 124 | 125 | new_ptr 126 | } 127 | } 128 | 129 | pub(crate) struct AllocStat { 130 | count: u64, 131 | total: ByteSize, 132 | peak: ByteSize, 133 | } 134 | 135 | pub(crate) fn stat() -> AllocStat { 136 | AllocStat { 137 | count: ALLOC.count.load(Ordering::Relaxed), 138 | total: ByteSize::b(ALLOC.total.load(Ordering::Relaxed)), 139 | peak: ByteSize::b(ALLOC.peak.load(Ordering::Relaxed)), 140 | } 141 | } 142 | 143 | impl Display for AllocStat { 144 | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 145 | write!( 146 | formatter, 147 | "{} allocations, total {}, peak {}", 148 | self.count, 
self.total, self.peak, 149 | ) 150 | } 151 | } 152 | -------------------------------------------------------------------------------- /src/arena.rs: -------------------------------------------------------------------------------- 1 | use foldhash::HashMap; 2 | use std::any::TypeId; 3 | use std::fmt::{self, Debug}; 4 | use std::iter::Copied; 5 | use std::slice::Iter; 6 | use std::sync::OnceLock; 7 | use std::sync::{Mutex, PoisonError}; 8 | use typed_arena::Arena; 9 | 10 | #[derive(Ord, PartialOrd, Eq, PartialEq, Hash)] 11 | pub struct Slice { 12 | contents: &'static [T], 13 | } 14 | 15 | impl Slice 16 | where 17 | T: 'static, 18 | { 19 | pub const EMPTY: Self = Slice { contents: &[] }; 20 | 21 | pub fn new(slice: &[T]) -> Self 22 | where 23 | T: Send + Clone, 24 | { 25 | slice.iter().cloned().collect() 26 | } 27 | 28 | pub const fn from(contents: &'static [T]) -> Self { 29 | Slice { contents } 30 | } 31 | 32 | pub fn iter(&self) -> impl Iterator 33 | where 34 | T: Copy, 35 | { 36 | (*self).into_iter() 37 | } 38 | 39 | pub fn iter_ref(&self) -> impl Iterator { 40 | self.contents.iter() 41 | } 42 | 43 | pub fn is_empty(&self) -> bool { 44 | self.contents.is_empty() 45 | } 46 | } 47 | 48 | impl Copy for Slice where T: 'static {} 49 | 50 | impl Clone for Slice 51 | where 52 | T: 'static, 53 | { 54 | fn clone(&self) -> Self { 55 | *self 56 | } 57 | } 58 | 59 | impl FromIterator for Slice 60 | where 61 | T: 'static + Send + Clone, 62 | { 63 | #[allow(invalid_reference_casting)] // false positive: https://github.com/rust-lang/rust/issues/121074 64 | fn from_iter(iter: I) -> Self 65 | where 66 | I: IntoIterator, 67 | { 68 | let iter = iter.into_iter(); 69 | if iter.size_hint() == (0, Some(0)) { 70 | return Slice::EMPTY; 71 | } 72 | 73 | static ARENA: OnceLock>>> = OnceLock::new(); 74 | 75 | let mut map = ARENA 76 | .get_or_init(Mutex::default) 77 | .lock() 78 | .unwrap_or_else(PoisonError::into_inner); 79 | let arena: &Box = map 80 | .entry(TypeId::of::()) 81 | 
.or_insert_with(|| Box::new(Arena::::new())); 82 | let arena = unsafe { &*(&**arena as *const dyn Send as *const Arena) }; 83 | Slice { 84 | contents: arena.alloc_extend(iter), 85 | } 86 | } 87 | } 88 | 89 | impl IntoIterator for Slice 90 | where 91 | T: 'static + Copy, 92 | { 93 | type Item = T; 94 | type IntoIter = Copied>; 95 | 96 | fn into_iter(self) -> Self::IntoIter { 97 | self.contents.iter().copied() 98 | } 99 | } 100 | 101 | impl Debug for Slice 102 | where 103 | T: 'static + Debug, 104 | { 105 | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 106 | Debug::fmt(self.contents, formatter) 107 | } 108 | } 109 | -------------------------------------------------------------------------------- /src/args.rs: -------------------------------------------------------------------------------- 1 | use crate::{cratename, user}; 2 | use clap::builder::{ArgAction, ValueParser}; 3 | use clap::{Arg, Command}; 4 | use regex::Regex; 5 | use semver::VersionReq; 6 | use std::env; 7 | use std::ffi::{OsStr, OsString}; 8 | use std::path::PathBuf; 9 | use std::str::FromStr; 10 | use thiserror::Error; 11 | 12 | #[derive(Debug)] 13 | pub(crate) struct Opt { 14 | pub db: PathBuf, 15 | pub exclude: Vec, 16 | pub jobs: usize, 17 | pub relative: bool, 18 | pub title: Option, 19 | pub transitive: bool, 20 | pub queries: Vec, 21 | } 22 | 23 | const USAGE: &str = "\ 24 | cargo tally [OPTIONS] QUERIES... 
25 | cargo tally serde:1.0 'anyhow:^1.0 + thiserror'"; 26 | 27 | const TEMPLATE: &str = "\ 28 | {bin} {version} 29 | David Tolnay 30 | https://github.com/dtolnay/cargo-tally 31 | 32 | {usage-heading} 33 | {usage} 34 | 35 | {all-args}\ 36 | "; 37 | 38 | fn app(jobs_help: &String) -> Command { 39 | let mut app = Command::new("cargo-tally") 40 | .override_usage(USAGE) 41 | .help_template(TEMPLATE) 42 | .arg(arg_db()) 43 | .arg(arg_exclude()) 44 | .arg(arg_jobs(jobs_help)) 45 | .arg(arg_relative()) 46 | .arg(arg_title()) 47 | .arg(arg_transitive()) 48 | .arg(arg_queries()); 49 | if let Some(version) = option_env!("CARGO_PKG_VERSION") { 50 | app = app.version(version); 51 | } 52 | app 53 | } 54 | 55 | const DB: &str = "db"; 56 | const EXCLUDE: &str = "exclude"; 57 | const JOBS: &str = "jobs"; 58 | const RELATIVE: &str = "relative"; 59 | const TITLE: &str = "title"; 60 | const TRANSITIVE: &str = "transitive"; 61 | const QUERIES: &str = "queries"; 62 | 63 | pub(crate) fn parse() -> Opt { 64 | // | threads | duration | allocated | peak | 65 | // |---------|----------|-----------|---------| 66 | // | 1 | 38.6 s | 55.2 GB | 11.0 GB | 67 | // | 2 | 24.8 s | 55.4 GB | 10.2 GB | 68 | // | 4 | 14.2 s | 55.8 GB | 8.8 GB | 69 | // | 8 | 12.7 s | 58.4 GB | 8.3 GB | 70 | // | 16 | 12.6 s | 59.2 GB | 8.2 GB | 71 | // | 32 | 12.8 s | 63.2 GB | 8.4 GB | 72 | // | 64 | 14.0 s | 69.5 GB | 11.1 GB | 73 | let default_jobs = num_cpus::get().min(32); 74 | let jobs_help = format!( 75 | "Number of threads to run differential dataflow [default: {}]", 76 | default_jobs, 77 | ); 78 | 79 | let mut args: Vec<_> = env::args_os().collect(); 80 | if let Some(first) = args.get_mut(0) { 81 | *first = OsString::from("cargo-tally"); 82 | } 83 | if args.get(1).map(OsString::as_os_str) == Some(OsStr::new("tally")) { 84 | args.remove(1); 85 | } 86 | let matches = app(&jobs_help).get_matches_from(args); 87 | 88 | let db = PathBuf::from(matches.get_one::(DB).unwrap()); 89 | 90 | let exclude = matches 91 | 
.get_many::(EXCLUDE) 92 | .unwrap_or_default() 93 | .cloned() 94 | .collect(); 95 | 96 | let jobs = matches 97 | .get_one::(JOBS) 98 | .copied() 99 | .unwrap_or(default_jobs); 100 | 101 | let title = matches.get_one::(TITLE).map(String::clone); 102 | 103 | let relative = matches.get_flag(RELATIVE); 104 | let transitive = matches.get_flag(TRANSITIVE); 105 | 106 | let queries = matches 107 | .get_many::(QUERIES) 108 | .unwrap() 109 | .map(String::clone) 110 | .collect(); 111 | 112 | Opt { 113 | db, 114 | exclude, 115 | jobs, 116 | relative, 117 | title, 118 | transitive, 119 | queries, 120 | } 121 | } 122 | 123 | fn arg_db() -> Arg { 124 | Arg::new(DB) 125 | .long(DB) 126 | .num_args(1) 127 | .value_name("PATH") 128 | .default_value("./db-dump.tar.gz") 129 | .value_parser(ValueParser::path_buf()) 130 | .help("Path to crates.io's database dump") 131 | } 132 | 133 | fn arg_exclude() -> Arg { 134 | Arg::new(EXCLUDE) 135 | .long(EXCLUDE) 136 | .hide(true) 137 | .action(ArgAction::Append) 138 | .value_name("REGEX") 139 | .value_parser(Regex::from_str) 140 | .help("Ignore a dependency coming from any crates matching regex") 141 | } 142 | 143 | fn arg_jobs(help: &String) -> Arg { 144 | Arg::new(JOBS) 145 | .long(JOBS) 146 | .short('j') 147 | .num_args(1) 148 | .value_name("N") 149 | .value_parser(usize::from_str) 150 | .help(help) 151 | } 152 | 153 | fn arg_relative() -> Arg { 154 | Arg::new(RELATIVE) 155 | .long(RELATIVE) 156 | .num_args(0) 157 | .help("Display as a fraction of total crates, not absolute number") 158 | } 159 | 160 | fn arg_title() -> Arg { 161 | Arg::new(TITLE) 162 | .long(TITLE) 163 | .hide(true) 164 | .num_args(1) 165 | .value_name("TITLE") 166 | .value_parser(ValueParser::string()) 167 | .help("Graph title") 168 | } 169 | 170 | fn arg_transitive() -> Arg { 171 | Arg::new(TRANSITIVE) 172 | .long(TRANSITIVE) 173 | .num_args(0) 174 | .help("Count transitive dependencies, not just direct dependencies") 175 | } 176 | 177 | fn arg_queries() -> Arg { 178 | 
Arg::new(QUERIES) 179 | .required(true) 180 | .num_args(0..) 181 | .value_name("QUERIES") 182 | .value_parser(validate_query) 183 | .help("Queries") 184 | .hide(true) 185 | } 186 | 187 | #[derive(Error, Debug)] 188 | enum Error { 189 | #[error("invalid crates.io username")] 190 | InvalidUsername, 191 | #[error("invalid crate name according to crates.io")] 192 | InvalidCrateName, 193 | #[error(transparent)] 194 | Semver(#[from] semver::Error), 195 | } 196 | 197 | fn validate_query(string: &str) -> Result { 198 | for predicate in string.split('+') { 199 | let predicate = predicate.trim(); 200 | 201 | if let Some(username) = predicate.strip_prefix('@') { 202 | if username.split('/').all(user::valid) { 203 | continue; 204 | } else { 205 | return Err(Error::InvalidUsername); 206 | } 207 | } 208 | 209 | let (name, req) = if let Some((name, req)) = predicate.split_once(':') { 210 | (name, Some(req)) 211 | } else { 212 | (predicate, None) 213 | }; 214 | 215 | if !cratename::valid(name.trim()) { 216 | return Err(Error::InvalidCrateName); 217 | } 218 | 219 | if let Some(req) = req { 220 | VersionReq::from_str(req)?; 221 | } 222 | } 223 | 224 | Ok(string.to_owned()) 225 | } 226 | 227 | #[test] 228 | fn test_cli() { 229 | let jobs_help = String::new(); 230 | app(&jobs_help).debug_assert(); 231 | } 232 | -------------------------------------------------------------------------------- /src/clean.rs: -------------------------------------------------------------------------------- 1 | use crate::cratemap::CrateMap; 2 | use cargo_tally::arena::Slice; 3 | use cargo_tally::id::{CrateId, VersionId}; 4 | use cargo_tally::version::Version; 5 | use cargo_tally::{DbDump, Dependency}; 6 | use semver::{Comparator, Op}; 7 | use std::cmp; 8 | use std::collections::btree_map::{BTreeMap as Map, Entry}; 9 | 10 | pub(crate) fn clean(db_dump: &mut DbDump, crates: &CrateMap) { 11 | let mut crate_max_version: Map = Map::new(); 12 | let mut dependencies_per_version: Map> = Map::new(); 13 | 14 | for 
dep in &mut db_dump.dependencies { 15 | dependencies_per_version 16 | .entry(dep.version_id) 17 | .or_insert_with(Vec::new) 18 | .push(dep); 19 | } 20 | 21 | for rel in &db_dump.releases { 22 | match crate_max_version.entry(rel.crate_id) { 23 | Entry::Vacant(entry) => { 24 | entry.insert(&rel.num); 25 | } 26 | Entry::Occupied(entry) => { 27 | let entry = entry.into_mut(); 28 | *entry = cmp::max(entry, &rel.num); 29 | } 30 | } 31 | 32 | let mut no_dependencies = Vec::new(); 33 | let dependencies = dependencies_per_version 34 | .get_mut(&rel.id) 35 | .unwrap_or(&mut no_dependencies); 36 | let mut i = 0; 37 | while let Some(dep) = dependencies.get_mut(i) { 38 | if !crate_max_version.contains_key(&dep.crate_id) { 39 | // If every published version of a crate is a prerelease, Cargo 40 | // will resolve a `*` wildcard dependency to the max prerelease, 41 | // which we don't track. 42 | // 43 | // Other times, crates just go missing from the index, maybe for 44 | // legal reasons or because of leaked secrets. 45 | // https://github.com/rust-lang/crates.io-index/commit/a95f8bff541de7461638b5e4f75ee58747829ea3 46 | if crate::trace::VERBOSE { 47 | eprintln!( 48 | "unresolved dep {} {} on {} {}", 49 | crates.name(rel.crate_id).unwrap(), 50 | rel.num, 51 | crates.name(dep.crate_id).unwrap(), 52 | dep.req, 53 | ); 54 | } 55 | dependencies.remove(i); 56 | continue; 57 | } 58 | let max_version = crate_max_version[&dep.crate_id]; 59 | let mut incompatible_version = Version(semver::Version { 60 | major: 0, 61 | minor: 0, 62 | patch: 0, 63 | pre: semver::Prerelease::EMPTY, 64 | build: semver::BuildMetadata::EMPTY, 65 | }); 66 | // Produce a synthetic version which is semver incompatible with the 67 | // highest version currently published. 
68 | if max_version.major > 0 { 69 | incompatible_version.major = max_version.major + 1; 70 | } else if max_version.minor > 0 { 71 | incompatible_version.minor = max_version.minor + 1; 72 | } else { 73 | incompatible_version.patch = max_version.patch + 1; 74 | } 75 | if dep.req.matches(&incompatible_version) { 76 | // If the declared dependency requirement claims this crate 77 | // works with the incompatible future release, we deem the 78 | // dependency silly and constrain it to remain compatible with 79 | // the current max published. This affects reqs like `0.*`. 80 | dep.req.comparators = Slice::new(&[Comparator { 81 | op: Op::Caret, 82 | major: max_version.major, 83 | minor: Some(max_version.minor), 84 | patch: Some(max_version.patch), 85 | pre: semver::Prerelease::EMPTY, 86 | }]); 87 | } 88 | i += 1; 89 | } 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /src/collect.rs: -------------------------------------------------------------------------------- 1 | use differential_dataflow::collection::Collection; 2 | use differential_dataflow::difference::Semigroup; 3 | use std::mem; 4 | use std::sync::{Arc, Mutex, PoisonError}; 5 | use timely::dataflow::Scope; 6 | use timely::Data; 7 | 8 | pub(crate) trait Collect { 9 | fn collect_into(&self, result: &Emitter); 10 | } 11 | 12 | pub(crate) struct ResultCollection { 13 | out: Arc>>, 14 | } 15 | 16 | pub(crate) struct Emitter { 17 | out: Arc>>, 18 | } 19 | 20 | impl ResultCollection { 21 | pub(crate) fn new() -> Self { 22 | let out = Arc::new(Mutex::new(Vec::new())); 23 | ResultCollection { out } 24 | } 25 | 26 | pub(crate) fn emitter(&self) -> Emitter { 27 | let out = Arc::clone(&self.out); 28 | Emitter { out } 29 | } 30 | } 31 | 32 | impl ResultCollection<(D, T, R)> 33 | where 34 | T: Ord, 35 | { 36 | pub(crate) fn sort(&self) { 37 | self.out 38 | .lock() 39 | .unwrap_or_else(PoisonError::into_inner) 40 | .sort_by( 41 | |(_ldata, ltimestamp, _ldiff), (_rdata, 
rtimestamp, _rdiff)| { 42 | ltimestamp.cmp(rtimestamp) 43 | }, 44 | ); 45 | } 46 | } 47 | 48 | impl Collect<(D, G::Timestamp, R)> for Collection 49 | where 50 | G: Scope, 51 | D: Data, 52 | R: Semigroup, 53 | G::Timestamp: Data, 54 | { 55 | fn collect_into(&self, result: &Emitter<(D, G::Timestamp, R)>) { 56 | let out = Arc::clone(&result.out); 57 | self.inspect_batch(move |_timestamp, slice| { 58 | out.lock() 59 | .unwrap_or_else(PoisonError::into_inner) 60 | .extend_from_slice(slice); 61 | }); 62 | } 63 | } 64 | 65 | impl IntoIterator for ResultCollection { 66 | type Item = T; 67 | type IntoIter = as IntoIterator>::IntoIter; 68 | 69 | fn into_iter(self) -> Self::IntoIter { 70 | let mut out = self.out.lock().unwrap_or_else(PoisonError::into_inner); 71 | mem::take(&mut *out).into_iter() 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/communication.rs: -------------------------------------------------------------------------------- 1 | // As far as I can tell, timely dataflow uses abomonation only for interprocess 2 | // communication. Within a single process, it uses the Clone impl instead. We 3 | // stub out the Abomonation impl since it will never be called. 4 | macro_rules! do_not_abomonate { 5 | ($($path:ident)::+ $(<$param:ident>)? $(where $($clause:tt)*)?) => { 6 | impl $(<$param>)? abomonation::Abomonation for $($path)::+ $(<$param>)? $(where $($clause)*)? { 7 | unsafe fn entomb(&self, _write: &mut W) -> std::io::Result<()> { 8 | unimplemented!("unexpected abomonation entomb"); 9 | } 10 | unsafe fn exhume<'a>(&mut self, _bytes: &'a mut [u8]) -> Option<&'a mut [u8]> { 11 | // Unwinding here is unsound because abomonation would have 12 | // blitted the source bytes into the destination with dangling 13 | // pointers, and is now relying on exhume to fix it up into a 14 | // valid object. We abort instead. 
15 | std::process::exit(1); 16 | } 17 | fn extent(&self) -> usize { 18 | unimplemented!("unexpected abomonation extent"); 19 | } 20 | } 21 | 22 | impl $(<$param>)? serde::Serialize for $($path)::+ $(<$param>)? $(where $($clause)*)? { 23 | fn serialize(&self, _serializer: S) -> Result 24 | where 25 | S: serde::Serializer, 26 | { 27 | unimplemented!("unexpected serde serialize"); 28 | } 29 | } 30 | 31 | impl<'de, $($param)?> serde::Deserialize<'de> for $($path)::+ $(<$param>)? $(where $($clause)*)? { 32 | fn deserialize(_deserializer: D) -> Result 33 | where 34 | D: serde::Deserializer<'de>, 35 | { 36 | unimplemented!("unexpected serde deserialize"); 37 | } 38 | } 39 | }; 40 | } 41 | 42 | do_not_abomonate!(crate::Dependency); 43 | do_not_abomonate!(crate::Query); 44 | do_not_abomonate!(crate::Release); 45 | do_not_abomonate!(crate::arena::Slice where T: 'static); 46 | do_not_abomonate!(crate::feature::CrateFeature); 47 | do_not_abomonate!(crate::feature::DefaultFeatures); 48 | do_not_abomonate!(crate::feature::FeatureId); 49 | do_not_abomonate!(crate::feature::VersionFeature); 50 | do_not_abomonate!(crate::id::CrateId); 51 | do_not_abomonate!(crate::id::DependencyId); 52 | do_not_abomonate!(crate::id::QueryId); 53 | do_not_abomonate!(crate::id::VersionId); 54 | do_not_abomonate!(crate::max::Max); 55 | do_not_abomonate!(crate::present::Present); 56 | do_not_abomonate!(crate::timestamp::DateTime); 57 | do_not_abomonate!(crate::version::Version); 58 | do_not_abomonate!(crate::version::VersionReq); 59 | -------------------------------------------------------------------------------- /src/cratemap.rs: -------------------------------------------------------------------------------- 1 | use crate::cratename::{CrateName, CrateNameQuery}; 2 | use crate::user::User; 3 | use cargo_tally::id::CrateId; 4 | use db_dump::crate_owners::OwnerId; 5 | use ref_cast::RefCast; 6 | use std::collections::BTreeMap as Map; 7 | 8 | #[derive(Default)] 9 | pub struct CrateMap { 10 | names: Map, 
11 | ids: Map, 12 | pub(crate) users: Map, 13 | pub(crate) owners: Map>, 14 | } 15 | 16 | impl CrateMap { 17 | pub fn new() -> Self { 18 | CrateMap::default() 19 | } 20 | 21 | pub fn insert(&mut self, id: CrateId, name: String) { 22 | assert!(!self.ids.contains_key(CrateNameQuery::ref_cast(&name))); 23 | assert!(!self.names.contains_key(&id)); 24 | self.ids.insert(CrateName::new(name.clone()), id); 25 | self.names.insert(id, name); 26 | } 27 | 28 | pub fn name(&self, id: CrateId) -> Option<&str> { 29 | self.names.get(&id).map(String::as_str) 30 | } 31 | 32 | pub fn id(&self, name: &str) -> Option { 33 | self.ids.get(CrateNameQuery::ref_cast(name)).copied() 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/cratename.rs: -------------------------------------------------------------------------------- 1 | use ref_cast::RefCast; 2 | use std::borrow::Borrow; 3 | use std::cmp::Ordering; 4 | 5 | pub const MAX_NAME_LENGTH: usize = 64; 6 | 7 | // Mirrored from https://github.com/rust-lang/crates.io/blob/54a3f10794db7f57e3602426389c369290a8a3d5/src/models/krate.rs 8 | pub fn valid(name: &str) -> bool { 9 | let under_max_length = name.chars().take(MAX_NAME_LENGTH + 1).count() <= MAX_NAME_LENGTH; 10 | valid_ident(name) && under_max_length 11 | } 12 | 13 | fn valid_ident(name: &str) -> bool { 14 | valid_feature_prefix(name) && name.chars().next().is_some_and(char::is_alphabetic) 15 | } 16 | 17 | fn valid_feature_prefix(name: &str) -> bool { 18 | !name.is_empty() 19 | && name 20 | .chars() 21 | .all(|c| c.is_ascii_alphanumeric() || c == '_' || c == '-') 22 | } 23 | 24 | pub(crate) struct CrateName(String); 25 | 26 | impl CrateName { 27 | pub(crate) fn new(string: String) -> Self { 28 | CrateName(string) 29 | } 30 | } 31 | 32 | impl Ord for CrateName { 33 | fn cmp(&self, rhs: &Self) -> Ordering { 34 | CrateNameQuery::ref_cast(&self.0).cmp(CrateNameQuery::ref_cast(&rhs.0)) 35 | } 36 | } 37 | 38 | impl PartialOrd for CrateName 
{ 39 | fn partial_cmp(&self, rhs: &Self) -> Option { 40 | Some(self.cmp(rhs)) 41 | } 42 | } 43 | 44 | impl Eq for CrateName {} 45 | 46 | impl PartialEq for CrateName { 47 | fn eq(&self, rhs: &Self) -> bool { 48 | CrateNameQuery::ref_cast(&self.0).eq(CrateNameQuery::ref_cast(&rhs.0)) 49 | } 50 | } 51 | 52 | #[derive(RefCast)] 53 | #[repr(transparent)] 54 | pub(crate) struct CrateNameQuery(str); 55 | 56 | impl Borrow for CrateName { 57 | fn borrow(&self) -> &CrateNameQuery { 58 | CrateNameQuery::ref_cast(&self.0) 59 | } 60 | } 61 | 62 | impl Ord for CrateNameQuery { 63 | fn cmp(&self, rhs: &Self) -> Ordering { 64 | self.0 65 | .bytes() 66 | .map(SeparatorAgnosticByte) 67 | .cmp(rhs.0.bytes().map(SeparatorAgnosticByte)) 68 | } 69 | } 70 | 71 | impl PartialOrd for CrateNameQuery { 72 | fn partial_cmp(&self, rhs: &Self) -> Option { 73 | Some(self.cmp(rhs)) 74 | } 75 | } 76 | 77 | impl Eq for CrateNameQuery {} 78 | 79 | impl PartialEq for CrateNameQuery { 80 | fn eq(&self, rhs: &Self) -> bool { 81 | self.0 82 | .bytes() 83 | .map(SeparatorAgnosticByte) 84 | .eq(rhs.0.bytes().map(SeparatorAgnosticByte)) 85 | } 86 | } 87 | 88 | struct SeparatorAgnosticByte(u8); 89 | 90 | impl Ord for SeparatorAgnosticByte { 91 | fn cmp(&self, rhs: &Self) -> Ordering { 92 | let lhs = if self.0 == b'_' { b'-' } else { self.0 }; 93 | let rhs = if rhs.0 == b'_' { b'-' } else { rhs.0 }; 94 | lhs.cmp(&rhs) 95 | } 96 | } 97 | 98 | impl PartialOrd for SeparatorAgnosticByte { 99 | fn partial_cmp(&self, rhs: &Self) -> Option { 100 | Some(self.cmp(rhs)) 101 | } 102 | } 103 | 104 | impl Eq for SeparatorAgnosticByte {} 105 | 106 | impl PartialEq for SeparatorAgnosticByte { 107 | fn eq(&self, rhs: &Self) -> bool { 108 | self.cmp(rhs) == Ordering::Equal 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /src/dependency.rs: -------------------------------------------------------------------------------- 1 | #[derive(Copy, Clone, Debug)] 2 | pub enum 
DependencyKind { 3 | Normal, 4 | Build, 5 | Dev, 6 | } 7 | 8 | impl From for DependencyKind { 9 | fn from(dependency_kind: db_dump::dependencies::DependencyKind) -> Self { 10 | match dependency_kind { 11 | db_dump::dependencies::DependencyKind::Normal => DependencyKind::Normal, 12 | db_dump::dependencies::DependencyKind::Build => DependencyKind::Build, 13 | db_dump::dependencies::DependencyKind::Dev => DependencyKind::Dev, 14 | } 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /src/feature.rs: -------------------------------------------------------------------------------- 1 | use crate::arena::Slice; 2 | use crate::id::{CrateId, VersionId}; 3 | use std::collections::BTreeMap as Map; 4 | 5 | #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] 6 | #[repr(transparent)] 7 | pub struct FeatureId(pub u32); 8 | 9 | impl FeatureId { 10 | pub const CRATE: Self = FeatureId(0); 11 | pub const DEFAULT: Self = FeatureId(1); 12 | pub const TBD: Self = FeatureId(!0); 13 | } 14 | 15 | #[derive(Copy, Clone, Debug)] 16 | pub struct FeatureEnables { 17 | pub id: FeatureId, 18 | pub enables: Slice, 19 | pub weak_enables: Slice, 20 | } 21 | 22 | #[derive(Copy, Clone, Debug)] 23 | pub struct CrateFeature { 24 | pub crate_id: CrateId, 25 | pub feature_id: FeatureId, 26 | } 27 | 28 | #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] 29 | pub struct VersionFeature { 30 | pub version_id: VersionId, 31 | pub feature_id: FeatureId, 32 | } 33 | 34 | #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Debug)] 35 | pub struct DefaultFeatures(pub bool); 36 | 37 | pub struct FeatureNames { 38 | names: Vec, 39 | map: Map, 40 | } 41 | 42 | impl FeatureNames { 43 | pub fn new() -> Self { 44 | let mut feature_names = FeatureNames { 45 | names: Vec::new(), 46 | map: Map::new(), 47 | }; 48 | assert_eq!(feature_names.id(""), FeatureId::CRATE); 49 | assert_eq!(feature_names.id("default"), FeatureId::DEFAULT); 50 | 
feature_names 51 | } 52 | 53 | pub fn id(&mut self, name: &str) -> FeatureId { 54 | if let Some(id) = self.map.get(name) { 55 | *id 56 | } else { 57 | let new_id = FeatureId(u32::try_from(self.names.len()).unwrap()); 58 | self.names.push(name.to_owned()); 59 | self.map.insert(name.to_owned(), new_id); 60 | new_id 61 | } 62 | } 63 | 64 | pub fn name(&self, id: FeatureId) -> &str { 65 | &self.names[id.0 as usize] 66 | } 67 | } 68 | 69 | impl Default for FeatureNames { 70 | fn default() -> Self { 71 | FeatureNames::new() 72 | } 73 | } 74 | 75 | pub struct FeatureIter { 76 | krate: bool, 77 | default: bool, 78 | other: as IntoIterator>::IntoIter, 79 | } 80 | 81 | impl FeatureIter { 82 | pub fn new(default_features: DefaultFeatures, features: Slice) -> Self { 83 | FeatureIter { 84 | krate: !default_features.0 && features.is_empty(), 85 | default: default_features.0, 86 | other: features.into_iter(), 87 | } 88 | } 89 | } 90 | 91 | impl Iterator for FeatureIter { 92 | type Item = FeatureId; 93 | 94 | fn next(&mut self) -> Option { 95 | if self.krate { 96 | self.krate = false; 97 | Some(FeatureId::CRATE) 98 | } else if self.default { 99 | self.default = false; 100 | Some(FeatureId::DEFAULT) 101 | } else { 102 | self.other.next() 103 | } 104 | } 105 | } 106 | -------------------------------------------------------------------------------- /src/filter.rs: -------------------------------------------------------------------------------- 1 | use crate::cratemap::CrateMap; 2 | use cargo_tally::DbDump; 3 | use regex::Regex; 4 | 5 | pub(crate) fn filter(db_dump: &mut DbDump, crates: &CrateMap, exclude: &[Regex]) { 6 | if exclude.is_empty() { 7 | return; 8 | } 9 | db_dump.releases.retain(|rel| { 10 | let crate_name = crates.name(rel.crate_id).unwrap(); 11 | exclude.iter().all(|exclude| !exclude.is_match(crate_name)) 12 | }); 13 | } 14 | -------------------------------------------------------------------------------- /src/hidden.rs: 
-------------------------------------------------------------------------------- 1 | // There is no library public API. Only the command line tool is considered 2 | // public API. 3 | 4 | #[path = "lib.rs"] 5 | mod lib; 6 | 7 | #[doc(hidden)] 8 | pub use crate::lib::*; 9 | -------------------------------------------------------------------------------- /src/hint.rs: -------------------------------------------------------------------------------- 1 | use differential_dataflow::collection::Collection; 2 | use differential_dataflow::difference::Semigroup; 3 | use timely::dataflow::Scope; 4 | 5 | #[allow(non_snake_case)] 6 | pub(crate) trait TypeHint: Sized { 7 | type Element; 8 | 9 | fn T(self) -> Self 10 | where 11 | Self: TypeHint, 12 | { 13 | self 14 | } 15 | 16 | fn KV(self) -> Self 17 | where 18 | Self: TypeHint, 19 | { 20 | self 21 | } 22 | } 23 | 24 | impl TypeHint for Collection 25 | where 26 | G: Scope, 27 | R: Semigroup, 28 | { 29 | type Element = D; 30 | } 31 | 32 | impl TypeHint for &Collection 33 | where 34 | G: Scope, 35 | R: Semigroup, 36 | { 37 | type Element = D; 38 | } 39 | -------------------------------------------------------------------------------- /src/id.rs: -------------------------------------------------------------------------------- 1 | #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] 2 | #[repr(transparent)] 3 | pub struct QueryId(pub u8); 4 | 5 | #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] 6 | #[repr(transparent)] 7 | pub struct CrateId(pub u32); 8 | 9 | #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] 10 | #[repr(transparent)] 11 | pub struct VersionId(pub u32); 12 | 13 | #[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash, Debug)] 14 | #[repr(transparent)] 15 | pub struct DependencyId(pub u32); 16 | 17 | impl From for CrateId { 18 | fn from(id: db_dump::crates::CrateId) -> Self { 19 | CrateId(id.0) 20 | } 21 | } 22 | 23 | impl From for VersionId { 24 | fn from(id: 
db_dump::versions::VersionId) -> Self { 25 | VersionId(id.0) 26 | } 27 | } 28 | 29 | impl From for DependencyId { 30 | fn from(id: u32) -> Self { 31 | DependencyId(id) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/impls.rs: -------------------------------------------------------------------------------- 1 | use crate::{Dependency, Query, Release}; 2 | use std::cmp::Ordering; 3 | 4 | impl Ord for Query { 5 | fn cmp(&self, other: &Self) -> Ordering { 6 | self.id.cmp(&other.id) 7 | } 8 | } 9 | 10 | impl PartialOrd for Query { 11 | fn partial_cmp(&self, other: &Self) -> Option { 12 | Some(self.cmp(other)) 13 | } 14 | } 15 | 16 | impl Eq for Query {} 17 | 18 | impl PartialEq for Query { 19 | fn eq(&self, other: &Self) -> bool { 20 | self.id == other.id 21 | } 22 | } 23 | 24 | impl Ord for Release { 25 | fn cmp(&self, other: &Self) -> Ordering { 26 | self.id.cmp(&other.id) 27 | } 28 | } 29 | 30 | impl PartialOrd for Release { 31 | fn partial_cmp(&self, other: &Self) -> Option { 32 | Some(self.cmp(other)) 33 | } 34 | } 35 | 36 | impl Eq for Release {} 37 | 38 | impl PartialEq for Release { 39 | fn eq(&self, other: &Self) -> bool { 40 | self.id == other.id 41 | } 42 | } 43 | 44 | impl Ord for Dependency { 45 | fn cmp(&self, other: &Self) -> Ordering { 46 | self.id.cmp(&other.id) 47 | } 48 | } 49 | 50 | impl PartialOrd for Dependency { 51 | fn partial_cmp(&self, other: &Self) -> Option { 52 | Some(self.cmp(other)) 53 | } 54 | } 55 | 56 | impl Eq for Dependency {} 57 | 58 | impl PartialEq for Dependency { 59 | fn eq(&self, other: &Self) -> bool { 60 | self.id == other.id 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /src/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 21 | 22 | 23 | 233 | 234 | 235 | -------------------------------------------------------------------------------- /src/lib.rs: 
-------------------------------------------------------------------------------- 1 | #![deny(unsafe_op_in_unsafe_fn)] 2 | #![allow(non_camel_case_types)] 3 | #![allow( 4 | clippy::arc_with_non_send_sync, // https://github.com/rust-lang/rust-clippy/issues/11076 5 | clippy::borrow_as_ptr, 6 | clippy::borrowed_box, 7 | clippy::cast_possible_truncation, 8 | clippy::cast_precision_loss, 9 | clippy::cast_ptr_alignment, 10 | clippy::cast_sign_loss, 11 | clippy::elidable_lifetime_names, 12 | clippy::into_iter_without_iter, 13 | clippy::items_after_statements, 14 | clippy::iter_not_returning_iterator, // https://github.com/rust-lang/rust-clippy/issues/8285 15 | clippy::let_underscore_untyped, 16 | clippy::mismatching_type_param_order, // https://github.com/rust-lang/rust-clippy/issues/8962 17 | clippy::missing_errors_doc, 18 | clippy::missing_panics_doc, 19 | clippy::module_name_repetitions, 20 | clippy::must_use_candidate, 21 | clippy::needless_lifetimes, 22 | clippy::needless_pass_by_value, 23 | clippy::ptr_as_ptr, 24 | clippy::significant_drop_in_scrutinee, 25 | clippy::too_many_lines, 26 | clippy::uninlined_format_args, 27 | clippy::unseparated_literal_suffix 28 | )] 29 | 30 | #[macro_use] 31 | mod stream; 32 | 33 | pub mod arena; 34 | pub(crate) mod collect; 35 | mod communication; 36 | pub mod dependency; 37 | pub mod feature; 38 | pub(crate) mod hint; 39 | pub mod id; 40 | mod impls; 41 | pub mod matrix; 42 | pub(crate) mod max; 43 | pub(crate) mod present; 44 | pub mod timestamp; 45 | pub mod version; 46 | 47 | use crate::arena::Slice; 48 | use crate::collect::{Collect, Emitter, ResultCollection}; 49 | use crate::dependency::DependencyKind; 50 | use crate::feature::{ 51 | DefaultFeatures, FeatureEnables, FeatureId, FeatureIter, FeatureNames, VersionFeature, 52 | }; 53 | use crate::hint::TypeHint; 54 | use crate::id::{CrateId, DependencyId, QueryId, VersionId}; 55 | use crate::matrix::Matrix; 56 | use crate::max::MaxByKey; 57 | use crate::present::Present; 58 | use 
crate::timestamp::{DateTime, Duration}; 59 | use crate::version::{Version, VersionReq}; 60 | use atomic_take::AtomicTake; 61 | use differential_dataflow::input::InputSession; 62 | use differential_dataflow::operators::arrange::{ArrangeByKey, ArrangeBySelf}; 63 | use differential_dataflow::operators::iterate::Variable; 64 | use differential_dataflow::operators::{Join, JoinCore, Threshold}; 65 | use std::env; 66 | use std::iter::once; 67 | use std::net::TcpStream; 68 | use std::ops::Deref; 69 | use timely::communication::allocator::Process; 70 | use timely::dataflow::operators::capture::EventWriter; 71 | use timely::dataflow::scopes::Child; 72 | use timely::dataflow::Scope; 73 | use timely::logging::{BatchLogger, TimelyEvent}; 74 | use timely::order::Product; 75 | use timely::progress::Timestamp; 76 | use timely::worker::{Config as WorkerConfig, Worker}; 77 | 78 | #[derive(Default)] 79 | pub struct DbDump { 80 | pub releases: Vec, 81 | pub dependencies: Vec, 82 | pub features: FeatureNames, 83 | } 84 | 85 | #[derive(Clone, Debug)] 86 | pub struct Release { 87 | pub id: VersionId, 88 | pub crate_id: CrateId, 89 | pub num: Version, 90 | pub created_at: DateTime, 91 | pub features: Slice, 92 | } 93 | 94 | #[derive(Copy, Clone, Debug)] 95 | pub struct Dependency { 96 | pub id: DependencyId, 97 | pub version_id: VersionId, 98 | pub crate_id: CrateId, 99 | pub req: VersionReq, 100 | pub feature_id: FeatureId, 101 | pub default_features: DefaultFeatures, 102 | pub features: Slice, 103 | pub kind: DependencyKind, 104 | } 105 | 106 | #[derive(Copy, Clone, Debug)] 107 | pub struct Query { 108 | pub id: QueryId, 109 | pub predicates: Slice, 110 | } 111 | 112 | #[derive(Copy, Clone, Debug)] 113 | pub struct Predicate { 114 | pub crate_id: CrateId, 115 | pub req: Option, 116 | } 117 | 118 | #[derive(Default)] 119 | struct Input { 120 | db_dump: DbDump, 121 | queries: Vec, 122 | } 123 | 124 | pub fn run(db_dump: DbDump, jobs: usize, transitive: bool, queries: &[Query]) -> Matrix { 
125 | let num_queries = queries.len(); 126 | let queries = queries.to_owned(); 127 | let input = AtomicTake::new(Input { db_dump, queries }); 128 | let collection = ResultCollection::<(QueryId, DateTime, isize)>::new(); 129 | let results = collection.emitter(); 130 | 131 | let allocators = Process::new_vector(jobs); 132 | let other = Box::new(()); 133 | timely::communication::initialize_from(allocators, other, move |allocator| { 134 | let mut worker = Worker::new(WorkerConfig::default(), allocator); 135 | set_timely_worker_log(&worker); 136 | 137 | let mut queries = InputSession::::new(); 138 | let mut releases = InputSession::::new(); 139 | let mut dependencies = InputSession::::new(); 140 | 141 | worker.dataflow(|scope| { 142 | dataflow( 143 | scope, 144 | &mut queries, 145 | &mut releases, 146 | &mut dependencies, 147 | transitive, 148 | &results, 149 | ); 150 | }); 151 | 152 | let input = input.take().unwrap_or_default(); 153 | 154 | for query in input.queries { 155 | queries.update(query, Present); 156 | } 157 | queries.close(); 158 | 159 | for dep in input.db_dump.dependencies { 160 | dependencies.update(dep, Present); 161 | } 162 | dependencies.close(); 163 | 164 | for rel in input.db_dump.releases { 165 | releases.advance_to(rel.created_at); 166 | releases.update(rel, Present); 167 | } 168 | releases.close(); 169 | 170 | while worker.step_or_park(None) {} 171 | }) 172 | .unwrap(); 173 | 174 | let mut time = DateTime::minimum(); 175 | let mut values = vec![0u32; num_queries]; 176 | let mut matrix = Matrix::new(num_queries); 177 | collection.sort(); 178 | for (i, (query_id, timestamp, diff)) in collection.into_iter().enumerate() { 179 | if timestamp > time { 180 | if i > 0 { 181 | matrix.push(time, values.clone()); 182 | } 183 | time = timestamp; 184 | } 185 | let cell = &mut values[query_id.0 as usize]; 186 | if diff > 0 { 187 | *cell += diff as u32; 188 | } else { 189 | *cell = cell.checked_sub(-diff as u32).expect("value went negative"); 190 | } 191 | } 
192 | if match matrix.iter().next_back() { 193 | Some((_timestamp, last)) => values != **last, 194 | None => values.iter().any(|&n| n != 0), 195 | } { 196 | matrix.push(time, values); 197 | } 198 | matrix 199 | } 200 | 201 | fn set_timely_worker_log(worker: &Worker) { 202 | let Some(addr) = env::var_os("TIMELY_WORKER_LOG_ADDR") else { 203 | return; 204 | }; 205 | 206 | let stream = match TcpStream::connect(addr.to_str().unwrap()) { 207 | Ok(stream) => stream, 208 | Err(err) => panic!("Could not connect logging stream to {addr:?}: {err}"), 209 | }; 210 | 211 | worker.log_register().insert::("timely", { 212 | let writer = EventWriter::new(stream); 213 | let mut logger = BatchLogger::new(writer); 214 | move |time, data| logger.publish_batch(time, data) 215 | }); 216 | } 217 | 218 | fn dataflow( 219 | scope: &mut Child, DateTime>, 220 | queries: &mut InputSession, 221 | releases: &mut InputSession, 222 | dependencies: &mut InputSession, 223 | transitive: bool, 224 | results: &Emitter<(QueryId, DateTime, isize)>, 225 | ) { 226 | type queries<'a> = stream![Query; Present]; 227 | let queries: queries = queries.to_collection(scope); 228 | 229 | type releases<'a> = stream![Release; Present]; 230 | let releases: releases = releases.to_collection(scope); 231 | 232 | type dependencies<'a> = stream![Dependency; Present]; 233 | let dependencies: dependencies = dependencies.to_collection(scope); 234 | 235 | // the version ids and version numbers that exist of each crate 236 | type releases_by_crate_id<'a> = stream![CrateId => (VersionId, Version); Present]; 237 | let releases_by_crate_id: releases_by_crate_id = 238 | releases.map(|rel| (rel.crate_id, (rel.id, rel.num))); 239 | let releases_by_crate_id = releases_by_crate_id.arrange_by_key(); 240 | 241 | // for each dependency spec, what release does it refer to currently? 
242 | type resolved<'a> = stream![(CrateId, VersionReq) => VersionId; isize]; 243 | let resolved: resolved = dependencies 244 | .map(|dep| (dep.crate_id, dep.req)) 245 | .KV::() 246 | .join_core( 247 | &releases_by_crate_id, 248 | |crate_id, req, (version_id, version)| { 249 | req.matches(version) 250 | .then(|| ((*crate_id, *req), (version.clone(), *version_id))) 251 | }, 252 | ) 253 | .KV::<(CrateId, VersionReq), (Version, VersionId)>() 254 | .max_by_key() 255 | .KV::<(CrateId, VersionReq), (Version, VersionId)>() 256 | .map(|((crate_id, req), (_version, version_id))| ((crate_id, req), version_id)); 257 | let resolved = resolved.arrange_by_key(); 258 | 259 | // full dependency graph across all versions of all crates 260 | type dependency_edges<'a> = stream![VersionId => VersionId; isize]; 261 | let direct_dependency_edges: dependency_edges = dependencies 262 | .map(|dep| ((dep.crate_id, dep.req), dep.version_id)) 263 | .KV::<(CrateId, VersionReq), VersionId>() 264 | .join_core( 265 | &resolved, 266 | |(_crate_id, _req), from_version_id, to_version_id| { 267 | once((*from_version_id, *to_version_id)) 268 | }, 269 | ); 270 | 271 | // releases that are the most recent of their crate 272 | type most_recent_crate_version<'a> = stream![VersionId; isize]; 273 | let most_recent_crate_version: most_recent_crate_version = releases 274 | .map(|rel| { 275 | ( 276 | rel.crate_id, 277 | (rel.num.pre.is_empty(), rel.created_at, rel.id), 278 | ) 279 | }) 280 | .KV::() 281 | .max_by_key() 282 | .KV::() 283 | .map(|(_crate_id, (_not_prerelease, _created_at, version_id))| version_id); 284 | let most_recent_crate_version = most_recent_crate_version.arrange_by_self(); 285 | 286 | // releases that satisfy the predicate of each query 287 | type match_releases<'a> = stream![VersionId => QueryId; Present]; 288 | let match_releases: match_releases = queries 289 | .flat_map(|query| { 290 | query 291 | .predicates 292 | .iter() 293 | .map(move |pred| (pred.crate_id, (query.id, pred.req))) 
294 | }) 295 | .KV::)>() 296 | .join_core( 297 | &releases_by_crate_id, 298 | |_crate_id, (query_id, version_req), (version_id, version)| { 299 | let matches = match version_req { 300 | None => true, 301 | Some(req) => req.matches(version), 302 | }; 303 | matches.then_some((*version_id, *query_id)) 304 | }, 305 | ); 306 | 307 | // releases that contribute into the result of each query 308 | type query_results<'a> = stream![VersionId => QueryId; isize]; 309 | let mut query_results: query_results = direct_dependency_edges 310 | .join_core(&most_recent_crate_version, |edge_from, edge_to, ()| { 311 | once((*edge_to, *edge_from)) 312 | }) 313 | .KV::() 314 | .join_map(&match_releases, |_edge_to, edge_from, query_id| { 315 | (*edge_from, *query_id) 316 | }); 317 | 318 | if transitive { 319 | type dependency_edges<'a> = stream![VersionFeature => VersionFeature; isize]; 320 | 321 | // dependency edges arising from an entry under [dependencies] 322 | let dep_dependency_edges: dependency_edges = dependencies 323 | .flat_map(|dep| match dep.kind { 324 | DependencyKind::Normal | DependencyKind::Build => Some(( 325 | (dep.crate_id, dep.req), 326 | ( 327 | dep.version_id, 328 | dep.feature_id, 329 | dep.default_features, 330 | dep.features, 331 | ), 332 | )), 333 | DependencyKind::Dev => None, 334 | }) 335 | .KV::<(CrateId, VersionReq), (VersionId, FeatureId, DefaultFeatures, Slice)>( 336 | ) 337 | .join_core( 338 | &resolved, 339 | |(_crate_id, _req), 340 | (version_id, feature_id, default_features, features), 341 | resolved_version_id| { 342 | let edge_from = VersionFeature { 343 | version_id: *version_id, 344 | feature_id: *feature_id, 345 | }; 346 | let resolved_version_id = *resolved_version_id; 347 | FeatureIter::new(*default_features, *features).map(move |feature_id| { 348 | let edge_to = VersionFeature { 349 | version_id: resolved_version_id, 350 | feature_id, 351 | }; 352 | (edge_from, edge_to) 353 | }) 354 | }, 355 | ); 356 | 357 | // dependency edges from crate 
feature enabling other feature of same crate 358 | let feature_intracrate_edges: dependency_edges = releases.explode(|rel| { 359 | let version_id = rel.id; 360 | let crate_id = rel.crate_id; 361 | rel.features 362 | .iter() 363 | .flat_map(move |feature| { 364 | let edge_from = VersionFeature { 365 | version_id, 366 | feature_id: feature.id, 367 | }; 368 | feature 369 | .enables 370 | .into_iter() 371 | .filter_map(move |crate_feature| { 372 | if crate_feature.crate_id == crate_id { 373 | let edge_to = VersionFeature { 374 | version_id, 375 | feature_id: crate_feature.feature_id, 376 | }; 377 | Some((edge_from, edge_to)) 378 | } else { 379 | None 380 | } 381 | }) 382 | .chain({ 383 | if feature.id == FeatureId::DEFAULT { 384 | None 385 | } else { 386 | let edge_to = VersionFeature { 387 | version_id, 388 | feature_id: FeatureId::CRATE, 389 | }; 390 | Some((edge_from, edge_to)) 391 | } 392 | }) 393 | }) 394 | .chain({ 395 | let edge_from = VersionFeature { 396 | version_id, 397 | feature_id: FeatureId::DEFAULT, 398 | }; 399 | let edge_to = VersionFeature { 400 | version_id, 401 | feature_id: FeatureId::CRATE, 402 | }; 403 | once((edge_from, edge_to)) 404 | }) 405 | .map(|(edge_from, edge_to)| ((edge_from, edge_to), 1)) 406 | }); 407 | 408 | // dependency edges from crate feature enabling feature of other crate 409 | let feature_dependency_edges: dependency_edges = releases 410 | .flat_map(|rel| { 411 | let version_id = rel.id; 412 | let crate_id = rel.crate_id; 413 | rel.features.into_iter().flat_map(move |feature| { 414 | // TODO: also handle `weak_enables` 415 | // https://github.com/dtolnay/cargo-tally/issues/56 416 | feature 417 | .enables 418 | .into_iter() 419 | .filter_map(move |crate_feature| { 420 | if crate_feature.crate_id == crate_id { 421 | None 422 | } else { 423 | Some(( 424 | (version_id, crate_feature.crate_id), 425 | (feature.id, crate_feature.feature_id), 426 | )) 427 | } 428 | }) 429 | }) 430 | }) 431 | .KV::<(VersionId, CrateId), (FeatureId, 
FeatureId)>() 432 | .join_map( 433 | &dependencies 434 | .map(|dep| ((dep.version_id, dep.crate_id), dep.req)) 435 | .KV::<(VersionId, CrateId), VersionReq>(), 436 | |(version_id, crate_id), (from_feature, to_feature), req| { 437 | ((*crate_id, *req), (*version_id, *from_feature, *to_feature)) 438 | }, 439 | ) 440 | .KV::<(CrateId, VersionReq), (VersionId, FeatureId, FeatureId)>() 441 | .join_core( 442 | &resolved, 443 | |(_crate_id, _req), 444 | (from_version_id, from_feature_id, to_feature_id), 445 | to_version_id| { 446 | let edge_from = VersionFeature { 447 | version_id: *from_version_id, 448 | feature_id: *from_feature_id, 449 | }; 450 | let edge_to = VersionFeature { 451 | version_id: *to_version_id, 452 | feature_id: *to_feature_id, 453 | }; 454 | Some((edge_from, edge_to)) 455 | }, 456 | ); 457 | 458 | // full dependency graph across all versions of all crates 459 | let incoming_transitive_dependency_edges = dep_dependency_edges 460 | .concat(&feature_intracrate_edges) 461 | .concat(&feature_dependency_edges) 462 | .KV::() 463 | .map_in_place(|edge| { 464 | let (edge_from, edge_to) = *edge; 465 | *edge = (edge_to, edge_from); 466 | }) 467 | .KV::() 468 | .arrange_by_key(); 469 | 470 | // fixed point of transitive dependencies graph 471 | type addend_transitive_releases<'a> = stream![VersionId => QueryId; isize]; 472 | let addend_transitive_releases: addend_transitive_releases = scope 473 | .iterative::(|nested| { 474 | let match_releases = match_releases 475 | .KV::() 476 | .explode(|(version_id, query_id)| { 477 | let version_feature = VersionFeature { 478 | version_id, 479 | feature_id: FeatureId::CRATE, 480 | }; 481 | once(((version_feature, query_id), 1)) 482 | }) 483 | .KV::() 484 | .enter(nested); 485 | let summary = Product::new(Duration::default(), 1); 486 | let variable = Variable::new_from(match_releases, summary); 487 | let result = variable 488 | .deref() 489 | .KV::() 490 | .join_core( 491 | &incoming_transitive_dependency_edges.enter(nested), 
492 | |_edge_to, query_id, edge_from| Some((*edge_from, *query_id)), 493 | ) 494 | .KV::() 495 | .concat(&variable) 496 | .KV::() 497 | .distinct(); 498 | variable.set(&result).leave() 499 | }) 500 | .KV::() 501 | .map(|(version_feature, query_id)| (version_feature.version_id, query_id)); 502 | 503 | query_results = addend_transitive_releases 504 | .join_core(&most_recent_crate_version, |version_id, query_id, ()| { 505 | Some((*version_id, *query_id)) 506 | }) 507 | .KV::() 508 | .concat(&query_results); 509 | } 510 | 511 | query_results 512 | .distinct() 513 | .map(|(_version_id, query_id)| query_id) 514 | .consolidate() 515 | .collect_into(results); 516 | } 517 | -------------------------------------------------------------------------------- /src/load.rs: -------------------------------------------------------------------------------- 1 | use crate::cratemap::CrateMap; 2 | use crate::user::User; 3 | use anyhow::{bail, Result}; 4 | use cargo_tally::arena::Slice; 5 | use cargo_tally::dependency::DependencyKind; 6 | use cargo_tally::feature::{ 7 | CrateFeature, DefaultFeatures, FeatureEnables, FeatureId, FeatureNames, 8 | }; 9 | use cargo_tally::id::{CrateId, DependencyId, VersionId}; 10 | use cargo_tally::timestamp::DateTime; 11 | use cargo_tally::version::{Version, VersionReq}; 12 | use cargo_tally::{DbDump, Dependency, Release}; 13 | use db_dump::crate_owners::OwnerId; 14 | use std::cell::RefCell; 15 | use std::collections::{BTreeMap as Map, BTreeSet as Set}; 16 | use std::mem; 17 | use std::path::Path; 18 | 19 | pub(crate) fn load(path: impl AsRef) -> Result<(DbDump, CrateMap)> { 20 | let mut crates = CrateMap::new(); 21 | let mut users: Map = Map::new(); 22 | let mut teams: Map = Map::new(); 23 | let mut owners: Map> = Map::new(); 24 | let mut releases: Vec = Vec::new(); 25 | let mut dependencies: Vec = Vec::new(); 26 | let mut release_features: Vec, Vec)>> = 27 | Vec::new(); 28 | let mut dep_renames: Map = Map::new(); 29 | let mut dep_renames_resolve: 
Map<(VersionId, FeatureId), CrateId> = Map::new(); 30 | let feature_names = RefCell::new(FeatureNames::new()); 31 | 32 | db_dump::Loader::new() 33 | .crates(|row| { 34 | let crate_id = CrateId::from(row.id); 35 | crates.insert(crate_id, row.name); 36 | }) 37 | .users(|row| { 38 | users.insert(User::new(row.gh_login), OwnerId::User(row.id)); 39 | }) 40 | .teams(|row| { 41 | if let Some(team) = row.login.strip_prefix("github:") { 42 | if team.contains(':') { 43 | let team = team.replace(':', "/"); 44 | teams.insert(User::new(team), OwnerId::Team(row.id)); 45 | } 46 | } 47 | }) 48 | .crate_owners(|row| { 49 | owners 50 | .entry(row.owner_id) 51 | .or_insert_with(Vec::new) 52 | .push(CrateId::from(row.crate_id)); 53 | }) 54 | .versions(|row| { 55 | if row.yanked { 56 | return; 57 | } 58 | let crate_id = CrateId::from(row.crate_id); 59 | let mut features = Vec::new(); 60 | if !row.features.is_empty() { 61 | let mut feature_names = feature_names.borrow_mut(); 62 | for (feature, raw_enables) in &row.features { 63 | let feature_id = feature_names.id(feature); 64 | let mut enables = Vec::new(); 65 | let mut weak_enables = Vec::new(); 66 | for feature in raw_enables { 67 | let crate_id; 68 | let mut crate_feature_vec = &mut enables; 69 | let mut feature = feature.as_str(); 70 | if let Some(slash) = feature.find('/') { 71 | let mut crate_name = &feature[..slash]; 72 | if let Some(crate_name_weak) = crate_name.strip_suffix('?') { 73 | crate_name = crate_name_weak; 74 | crate_feature_vec = &mut weak_enables; 75 | } 76 | crate_id = feature_names.id(crate_name); 77 | feature = &feature[slash + 1..]; 78 | } else { 79 | crate_id = FeatureId::CRATE; 80 | } 81 | let feature_id = feature_names.id(feature); 82 | crate_feature_vec.push(CrateFeature { 83 | crate_id: CrateId(crate_id.0), 84 | feature_id, 85 | }); 86 | } 87 | features.push((feature_id, enables, weak_enables)); 88 | } 89 | } 90 | releases.push(Release { 91 | id: VersionId::from(row.id), 92 | crate_id, 93 | num: 
Version(row.num), 94 | created_at: DateTime::from(row.created_at), 95 | features: { 96 | release_features.push(features); 97 | Slice::EMPTY 98 | }, 99 | }); 100 | }) 101 | .dependencies(|row| { 102 | let dependency_id = DependencyId::from(row.id); 103 | let version_id = VersionId::from(row.version_id); 104 | let crate_id = CrateId::from(row.crate_id); 105 | let feature_id = if row.optional { 106 | FeatureId::TBD 107 | } else { 108 | FeatureId::CRATE 109 | }; 110 | let mut default_features = row.default_features; 111 | let mut features = Set::new(); 112 | if !row.features.is_empty() { 113 | let mut feature_names = feature_names.borrow_mut(); 114 | for feature in &row.features { 115 | let feature_id = feature_names.id(feature); 116 | if feature_id == FeatureId::DEFAULT { 117 | default_features = true; 118 | } else { 119 | features.insert(feature_id); 120 | } 121 | } 122 | } 123 | if let Some(explicit_name) = row.explicit_name { 124 | let mut feature_names = feature_names.borrow_mut(); 125 | dep_renames_resolve 126 | .insert((version_id, feature_names.id(&explicit_name)), crate_id); 127 | dep_renames.insert(dependency_id, explicit_name); 128 | } 129 | dependencies.push(Dependency { 130 | id: dependency_id, 131 | version_id, 132 | crate_id, 133 | req: VersionReq::from(row.req), 134 | feature_id, 135 | default_features: DefaultFeatures(default_features), 136 | features: Slice::from_iter(features), 137 | kind: DependencyKind::from(row.kind), 138 | }); 139 | }) 140 | .load(path)?; 141 | 142 | crate::mend::mend_crates(&mut crates); 143 | 144 | let known_broken = [(crates.id("modbus"), &Version::new(0, 1, 0), "test-server")]; 145 | 146 | let mut feature_names = mem::take(&mut *feature_names.borrow_mut()); 147 | let mut feature_buffer = Vec::new(); 148 | for (release, mut features) in releases.iter_mut().zip(release_features) { 149 | for (feature, enables, weak_enables) in &mut features { 150 | for crate_features in [&mut *enables, &mut *weak_enables] { 151 | for feature in 
crate_features { 152 | let feature_id = FeatureId(feature.crate_id.0); 153 | feature.crate_id = if feature_id == FeatureId::CRATE { 154 | release.crate_id 155 | } else if let Some(crate_id) = 156 | dep_renames_resolve.get(&(release.id, feature_id)) 157 | { 158 | *crate_id 159 | } else if let Some(crate_id) = { 160 | let name = feature_names.name(feature_id); 161 | crates.id(name) 162 | } { 163 | crate_id 164 | } else if known_broken.contains(&( 165 | Some(release.crate_id), 166 | &release.num, 167 | feature_names.name(feature_id), 168 | )) { 169 | release.crate_id 170 | } else { 171 | bail!( 172 | "{} v{} depends on {} which is not found", 173 | crates.name(release.crate_id).unwrap(), 174 | release.num, 175 | feature_names.name(feature_id), 176 | ); 177 | }; 178 | } 179 | } 180 | feature_buffer.push(FeatureEnables { 181 | id: *feature, 182 | enables: Slice::new(enables), 183 | weak_enables: Slice::new(weak_enables), 184 | }); 185 | } 186 | release.features = Slice::new(&feature_buffer); 187 | feature_buffer.clear(); 188 | } 189 | for dep in &mut dependencies { 190 | if dep.feature_id == FeatureId::TBD { 191 | dep.feature_id = feature_names.id(match dep_renames.get(&dep.id) { 192 | Some(explicit_name) => explicit_name, 193 | None => crates.name(dep.crate_id).unwrap(), 194 | }); 195 | } 196 | } 197 | 198 | let mut db_dump = DbDump { 199 | releases, 200 | dependencies, 201 | features: feature_names, 202 | }; 203 | 204 | crates.owners = owners; 205 | crates.users = users; 206 | crates.users.extend(teams); 207 | 208 | crate::mend::mend_releases(&mut db_dump, &crates); 209 | 210 | Ok((db_dump, crates)) 211 | } 212 | -------------------------------------------------------------------------------- /src/log.rs: -------------------------------------------------------------------------------- 1 | use std::fmt; 2 | use std::io::Write; 3 | use termcolor::{Color, ColorSpec, StandardStream, WriteColor}; 4 | 5 | pub trait Log { 6 | fn trace(&mut self) -> LogStream; 7 | fn 
warning(&mut self) -> LogStream; 8 | fn error(&mut self) -> LogStream; 9 | fn red(&mut self) -> LogStream; 10 | } 11 | 12 | impl Log for StandardStream { 13 | fn trace(&mut self) -> LogStream { 14 | let mut color = ColorSpec::new(); 15 | color.set_fg(Some(Color::Magenta)).set_dimmed(true); 16 | let _ = self.set_color(&color); 17 | LogStream(self) 18 | } 19 | 20 | fn warning(&mut self) -> LogStream { 21 | let mut color = ColorSpec::new(); 22 | color.set_fg(Some(Color::Yellow)); 23 | let _ = self.set_color(&color); 24 | LogStream(self) 25 | } 26 | 27 | fn error(&mut self) -> LogStream { 28 | let mut color = ColorSpec::new(); 29 | color.set_fg(Some(Color::Red)).set_bold(true); 30 | let _ = self.set_color(&color); 31 | let _ = write!(self, "error:"); 32 | let _ = self.reset(); 33 | let _ = write!(self, " "); 34 | LogStream(self) 35 | } 36 | 37 | fn red(&mut self) -> LogStream { 38 | let mut color = ColorSpec::new(); 39 | color.set_fg(Some(Color::Red)); 40 | let _ = self.set_color(&color); 41 | LogStream(self) 42 | } 43 | } 44 | 45 | pub struct LogStream<'a>(&'a mut StandardStream); 46 | 47 | impl<'a> LogStream<'a> { 48 | pub fn write_fmt(&mut self, args: fmt::Arguments) { 49 | let _ = self.0.write_fmt(args); 50 | } 51 | } 52 | 53 | impl<'a> Drop for LogStream<'a> { 54 | fn drop(&mut self) { 55 | let _ = self.0.reset(); 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/macros.rs: -------------------------------------------------------------------------------- 1 | macro_rules! const_assert_eq { 2 | ($left:expr, $right:expr) => { 3 | const _: [(); $left as usize] = [(); $right as usize]; 4 | }; 5 | } 6 | 7 | macro_rules! const_assert { 8 | ($($cond:expr),* $(,)?) => { 9 | const_assert_eq!($($cond)&&*, true); 10 | }; 11 | } 12 | 13 | macro_rules! version { 14 | ($major_minor:tt . $patch:tt) => {{ 15 | const major_minor: &'static [u8] = stringify!($major_minor).as_bytes(); 16 | const_assert! 
{ 17 | major_minor.len() == 3, 18 | major_minor[0] >= b'0' && major_minor[0] <= b'9', 19 | major_minor[1] == b'.', 20 | major_minor[2] >= b'0' && major_minor[2] <= b'9', 21 | } 22 | cargo_tally::version::Version(semver::Version { 23 | major: (major_minor[0] - b'0') as u64, 24 | minor: (major_minor[2] - b'0') as u64, 25 | patch: $patch, 26 | pre: semver::Prerelease::EMPTY, 27 | build: semver::BuildMetadata::EMPTY, 28 | }) 29 | }}; 30 | } 31 | 32 | macro_rules! version_req { 33 | (^ $major_minor:tt) => {{ 34 | const major_minor: &'static [u8] = stringify!($major_minor).as_bytes(); 35 | const_assert! { 36 | major_minor.len() == 3, 37 | major_minor[0] >= b'0' && major_minor[0] <= b'9', 38 | major_minor[1] == b'.', 39 | major_minor[2] >= b'0' && major_minor[2] <= b'9', 40 | } 41 | const comparators: &'static [semver::Comparator] = &[semver::Comparator { 42 | op: semver::Op::Caret, 43 | major: (major_minor[0] - b'0') as u64, 44 | minor: Some((major_minor[2] - b'0') as u64), 45 | patch: None, 46 | pre: semver::Prerelease::EMPTY, 47 | }]; 48 | cargo_tally::version::VersionReq { 49 | comparators: cargo_tally::arena::Slice::from(comparators), 50 | } 51 | }}; 52 | } 53 | 54 | macro_rules! datetime { 55 | ($day:tt $month:ident $year:tt $hour:tt : $min:tt : $sec:tt) => {{ 56 | const_assert! { 57 | $day >= 1 && $day <= 31, 58 | $year >= 2014, 59 | $hour >= 0 && $hour <= 23, 60 | $min >= 0 && $min <= 59, 61 | $sec >= 0 && $sec <= 60, 62 | } 63 | cargo_tally::timestamp::DateTime::new( 64 | chrono::NaiveDate::from_ymd_opt($year, month_number!($month), $day).unwrap(), 65 | chrono::NaiveTime::from_hms_opt($hour, $min, $sec).unwrap(), 66 | ) 67 | }}; 68 | } 69 | 70 | #[rustfmt::skip] 71 | #[allow(unknown_lints, unused_macro_rules)] 72 | macro_rules! 
month_number { 73 | (Jan) => { 1 }; 74 | (Feb) => { 2 }; 75 | (Mar) => { 3 }; 76 | (Apr) => { 4 }; 77 | (May) => { 5 }; 78 | (Jun) => { 6 }; 79 | (Jul) => { 7 }; 80 | (Aug) => { 8 }; 81 | (Sep) => { 9 }; 82 | (Oct) => { 10 }; 83 | (Nov) => { 11 }; 84 | (Dec) => { 12 }; 85 | } 86 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | #![deny(unsafe_op_in_unsafe_fn)] 2 | #![allow(non_upper_case_globals)] 3 | #![allow( 4 | clippy::cast_lossless, 5 | clippy::cast_possible_truncation, 6 | clippy::cast_precision_loss, 7 | clippy::collapsible_else_if, 8 | clippy::elidable_lifetime_names, 9 | clippy::expl_impl_clone_on_copy, 10 | clippy::let_underscore_untyped, 11 | clippy::manual_range_contains, 12 | clippy::map_clone, 13 | clippy::module_name_repetitions, 14 | clippy::needless_lifetimes, 15 | clippy::redundant_else, 16 | clippy::single_match_else, 17 | clippy::too_many_lines, 18 | clippy::type_complexity, 19 | clippy::unconditional_recursion, // https://github.com/rust-lang/rust-clippy/issues/12133 20 | clippy::uninlined_format_args, 21 | clippy::unwrap_or_default, 22 | clippy::zero_prefixed_literal 23 | )] 24 | 25 | #[macro_use] 26 | mod macros; 27 | 28 | mod alloc; 29 | mod args; 30 | mod clean; 31 | mod cratemap; 32 | mod cratename; 33 | mod filter; 34 | mod load; 35 | mod log; 36 | mod mend; 37 | mod query; 38 | mod render; 39 | mod total; 40 | mod trace; 41 | mod user; 42 | 43 | use crate::load::load; 44 | use crate::log::Log; 45 | use crate::total::Total; 46 | use anyhow::Result; 47 | use std::io::{self, IsTerminal, Write}; 48 | use std::process; 49 | use std::time::Instant; 50 | use termcolor::{ColorChoice, StandardStream}; 51 | 52 | cargo_subcommand_metadata::description!( 53 | "Draw graphs of the number of dependencies on a crate over time" 54 | ); 55 | 56 | fn main() { 57 | let mut stderr = StandardStream::stderr(ColorChoice::Auto); 58 
| if let Err(err) = try_main(&mut stderr) { 59 | writeln!(stderr.error(), "{}", err); 60 | process::exit(1); 61 | } 62 | } 63 | 64 | fn try_main(stderr: &mut StandardStream) -> Result<()> { 65 | let opt = args::parse(); 66 | 67 | if !opt.db.is_file() { 68 | write!(stderr.error(), "Database dump file does not exist: "); 69 | write!(stderr.red(), "{}", opt.db.display()); 70 | let _ = writeln!( 71 | stderr, 72 | "\nDownload one from https://static.crates.io/db-dump.tar.gz", 73 | ); 74 | process::exit(1); 75 | } 76 | 77 | let mut sysinfo = sysinfo::System::new(); 78 | sysinfo.refresh_memory(); 79 | let total_memory = sysinfo.total_memory(); 80 | let (min_memory, advised) = if opt.transitive { 81 | (10 * 1024 * 1024 * 1024, "12 GB") 82 | } else { 83 | (7 * 1024 * 1024 * 1024, "8 GB") 84 | }; 85 | if total_memory < min_memory && total_memory > 0 { 86 | writeln!( 87 | stderr.warning(), 88 | "warning: running with <{advised} memory is not advised.", 89 | ); 90 | } 91 | 92 | let stdout_isatty = io::stdout().is_terminal(); 93 | let stderr_isatty = io::stderr().is_terminal(); 94 | 95 | let instant = Instant::now(); 96 | let (mut db_dump, crates) = crate::load(&opt.db)?; 97 | crate::filter::filter(&mut db_dump, &crates, &opt.exclude); 98 | db_dump.releases.sort_by_key(|v| v.created_at); 99 | crate::clean::clean(&mut db_dump, &crates); 100 | let total = opt.relative.then(|| Total::index(&db_dump.releases)); 101 | if stderr_isatty { 102 | writeln!(stderr.trace(), "load time: {:.2?}", instant.elapsed()); 103 | } 104 | 105 | let query_strings = opt.queries.iter().map(String::as_str); 106 | let queries = query::parse(query_strings, &crates)?; 107 | let instant = Instant::now(); 108 | let results = cargo_tally::run(db_dump, opt.jobs, opt.transitive, &queries); 109 | if stderr_isatty { 110 | writeln!(stderr.trace(), "dataflow time: {:.2?}", instant.elapsed()); 111 | } 112 | 113 | let _ = stderr.flush(); 114 | let len = results.len(); 115 | let stdout = io::stdout(); 116 | let mut 
stdout = stdout.lock(); 117 | for (i, (timestamp, data)) in results.iter().enumerate() { 118 | if stdout_isatty && 10 + i == len && len > 20 { 119 | let _ = writeln!(stdout, "..."); 120 | } 121 | if !stdout_isatty || i < 10 || 10 + i >= len { 122 | if let Some(total) = &total { 123 | let total = total.eval(timestamp); 124 | let _ = writeln!(stdout, "{:?} {:?}", timestamp, data / total); 125 | } else { 126 | let _ = writeln!(stdout, "{:?} {:?}", timestamp, data); 127 | } 128 | } 129 | } 130 | let _ = stdout.flush(); 131 | 132 | if stdout_isatty { 133 | if results.is_empty() { 134 | writeln!(stderr.red(), "zero results"); 135 | } else { 136 | let labels = opt 137 | .queries 138 | .iter() 139 | .map(|query| query::format(query, &crates)) 140 | .collect::>(); 141 | let path = render::graph( 142 | opt.title.as_deref(), 143 | opt.transitive, 144 | &results, 145 | &labels, 146 | total.as_ref(), 147 | )?; 148 | if opener::open(&path).is_err() && stderr_isatty { 149 | let _ = writeln!(stderr, "graph written to {}", path.display()); 150 | } 151 | } 152 | } 153 | 154 | if stderr_isatty { 155 | writeln!(stderr.trace(), "{}", alloc::stat()); 156 | } 157 | 158 | Ok(()) 159 | } 160 | -------------------------------------------------------------------------------- /src/matrix.rs: -------------------------------------------------------------------------------- 1 | use crate::timestamp::DateTime; 2 | use ref_cast::RefCast; 3 | use std::fmt::{self, Debug}; 4 | use std::iter::Copied; 5 | use std::ops::{Deref, Div, Index}; 6 | use std::slice; 7 | 8 | pub struct Matrix { 9 | queries: usize, 10 | rows: Vec<(DateTime, Vec)>, 11 | } 12 | 13 | #[derive(RefCast)] 14 | #[repr(transparent)] 15 | pub struct Row([u32]); 16 | 17 | impl Matrix { 18 | pub(crate) fn new(queries: usize) -> Self { 19 | Matrix { 20 | queries, 21 | rows: Vec::new(), 22 | } 23 | } 24 | 25 | pub fn width(&self) -> usize { 26 | self.queries 27 | } 28 | 29 | pub fn is_empty(&self) -> bool { 30 | self.rows.is_empty() 31 | } 
32 | 33 | pub fn len(&self) -> usize { 34 | self.rows.len() 35 | } 36 | 37 | pub fn iter(&self) -> Iter { 38 | Iter(self.rows.iter()) 39 | } 40 | 41 | pub(crate) fn push(&mut self, timestamp: DateTime, data: Vec) { 42 | self.rows.push((timestamp, data)); 43 | } 44 | } 45 | 46 | impl<'a> IntoIterator for &'a Matrix { 47 | type Item = (DateTime, &'a Row); 48 | type IntoIter = Iter<'a>; 49 | 50 | fn into_iter(self) -> Self::IntoIter { 51 | self.iter() 52 | } 53 | } 54 | 55 | pub struct Iter<'a>(slice::Iter<'a, (DateTime, Vec)>); 56 | 57 | impl<'a> Iterator for Iter<'a> { 58 | type Item = (DateTime, &'a Row); 59 | 60 | fn next(&mut self) -> Option { 61 | self.0 62 | .next() 63 | .map(|(timestamp, data)| (*timestamp, Row::ref_cast(data))) 64 | } 65 | } 66 | 67 | impl<'a> DoubleEndedIterator for Iter<'a> { 68 | fn next_back(&mut self) -> Option { 69 | self.0 70 | .next_back() 71 | .map(|(timestamp, data)| (*timestamp, Row::ref_cast(data))) 72 | } 73 | } 74 | 75 | impl Index for Row { 76 | type Output = u32; 77 | 78 | fn index(&self, i: usize) -> &Self::Output { 79 | &self.0[i] 80 | } 81 | } 82 | 83 | impl<'a> IntoIterator for &'a Row { 84 | type Item = u32; 85 | type IntoIter = Copied>; 86 | 87 | fn into_iter(self) -> Self::IntoIter { 88 | self.0.iter().copied() 89 | } 90 | } 91 | 92 | impl Deref for Row { 93 | type Target = [u32]; 94 | 95 | fn deref(&self) -> &Self::Target { 96 | &self.0 97 | } 98 | } 99 | 100 | pub struct RelativeRow<'a> { 101 | row: &'a Row, 102 | total: u32, 103 | } 104 | 105 | impl<'a> Div for &'a Row { 106 | type Output = RelativeRow<'a>; 107 | 108 | fn div(self, rhs: u32) -> Self::Output { 109 | RelativeRow { 110 | row: self, 111 | total: rhs, 112 | } 113 | } 114 | } 115 | 116 | impl Debug for Row { 117 | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 118 | formatter.debug_list().entries(&self.0).finish() 119 | } 120 | } 121 | 122 | impl<'a> Debug for RelativeRow<'a> { 123 | fn fmt(&self, formatter: &mut fmt::Formatter) -> 
fmt::Result { 124 | let mut list = formatter.debug_list(); 125 | for value in self.row { 126 | list.entry(&(value as f32 / self.total as f32)); 127 | } 128 | list.finish() 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /src/max.rs: -------------------------------------------------------------------------------- 1 | use crate::hint::TypeHint; 2 | use crate::present::Present; 3 | use differential_dataflow::collection::Collection; 4 | use differential_dataflow::difference::{Multiply, Semigroup}; 5 | use differential_dataflow::lattice::Lattice; 6 | use differential_dataflow::operators::CountTotal; 7 | use differential_dataflow::ExchangeData; 8 | use std::fmt::Debug; 9 | use std::hash::Hash; 10 | use std::iter::once; 11 | use timely::dataflow::Scope; 12 | use timely::order::TotalOrder; 13 | 14 | pub(crate) trait MaxByKey 15 | where 16 | G: Scope, 17 | { 18 | fn max_by_key(&self) -> Collection; 19 | } 20 | 21 | impl MaxByKey for Collection 22 | where 23 | G: Scope, 24 | K: Clone + ExchangeData + Hash, 25 | V: Clone + Ord + ExchangeData + Debug, 26 | R: Semigroup, 27 | Max: Multiply>, 28 | G::Timestamp: TotalOrder + Lattice, 29 | { 30 | fn max_by_key(&self) -> Collection { 31 | self.explode(|(key, value)| once((key, Max { value }))) 32 | .T::() 33 | .count_total() 34 | .KV::>() 35 | .map(|(key, max)| (key, max.value)) 36 | } 37 | } 38 | 39 | #[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug)] 40 | pub(crate) struct Max { 41 | value: T, 42 | } 43 | 44 | impl Multiply for Max { 45 | type Output = Self; 46 | 47 | fn multiply(self, rhs: &Present) -> Self::Output { 48 | let _ = rhs; 49 | self 50 | } 51 | } 52 | 53 | impl Semigroup for Max 54 | where 55 | T: Ord + Clone + Debug + 'static, 56 | { 57 | fn plus_equals(&mut self, rhs: &Self) { 58 | if self.value < rhs.value { 59 | self.value = rhs.value.clone(); 60 | } 61 | } 62 | 63 | fn is_zero(&self) -> bool { 64 | false 65 | } 66 | } 67 | 
-------------------------------------------------------------------------------- /src/mend.rs: -------------------------------------------------------------------------------- 1 | //! Fill back in some deleted releases that cause nontrivial number of 2 | //! dependencies downstream to fail to resolve. 3 | 4 | use crate::cratemap::CrateMap; 5 | use cargo_tally::arena::Slice; 6 | use cargo_tally::dependency::DependencyKind; 7 | use cargo_tally::feature::{CrateFeature, DefaultFeatures, FeatureEnables, FeatureId}; 8 | use cargo_tally::id::{CrateId, DependencyId, VersionId}; 9 | use cargo_tally::{DbDump, Dependency, Release}; 10 | use std::collections::BTreeSet as Set; 11 | 12 | pub(crate) fn mend_crates(crates: &mut CrateMap) { 13 | let mut next_crate_id = CrateId(1); 14 | 15 | for crate_name in [ 16 | "futures", 17 | "git-version", 18 | "lazy_static", 19 | "partial-io", 20 | "quickcheck", 21 | "tokio-core", 22 | "tokio-io", 23 | "vela-utils", 24 | "xcm", 25 | "xcm-executor", 26 | ] { 27 | if crates.id(crate_name).is_none() { 28 | while crates.name(next_crate_id).is_some() { 29 | next_crate_id.0 += 1; 30 | } 31 | crates.insert(next_crate_id, crate_name.to_owned()); 32 | } 33 | } 34 | } 35 | 36 | pub(crate) fn mend_releases(db_dump: &mut DbDump, crates: &CrateMap) { 37 | let mut used_version_ids = Set::new(); 38 | let mut used_version_numbers = Set::new(); 39 | for rel in &db_dump.releases { 40 | used_version_ids.insert(rel.id); 41 | used_version_numbers.insert((rel.crate_id, rel.num.clone())); 42 | } 43 | 44 | let mut used_dependency_ids = Set::new(); 45 | for dep in &db_dump.dependencies { 46 | used_dependency_ids.insert(dep.id); 47 | } 48 | 49 | let mut next_version_id = VersionId(0); 50 | let mut next_version_id = || { 51 | while !used_version_ids.insert(next_version_id) { 52 | next_version_id.0 += 1; 53 | } 54 | next_version_id 55 | }; 56 | 57 | let mut next_dependency_id = DependencyId(0); 58 | let mut next_dependency_id = || { 59 | while 
!used_dependency_ids.insert(next_dependency_id) { 60 | next_dependency_id.0 += 1; 61 | } 62 | next_dependency_id 63 | }; 64 | 65 | let releases = &mut db_dump.releases; 66 | let mut push_release = |rel: Release| { 67 | assert!(used_version_numbers.insert((rel.crate_id, rel.num.clone()))); 68 | releases.push(rel); 69 | }; 70 | 71 | { 72 | let crate_id = crates.id("git-version").unwrap(); 73 | 74 | push_release(Release { 75 | id: next_version_id(), 76 | crate_id, 77 | num: version!(0.1.0), 78 | created_at: datetime!(18 Oct 2017 13:53:11), 79 | features: Slice::EMPTY, 80 | }); 81 | 82 | push_release(Release { 83 | id: next_version_id(), 84 | crate_id, 85 | num: version!(0.1.1), 86 | created_at: datetime!(18 Oct 2017 13:55:40), 87 | features: Slice::EMPTY, 88 | }); 89 | 90 | push_release(Release { 91 | id: next_version_id(), 92 | crate_id, 93 | num: version!(0.1.2), 94 | created_at: datetime!(18 Oct 2017 13:57:15), 95 | features: Slice::EMPTY, 96 | }); 97 | 98 | push_release(Release { 99 | id: next_version_id(), 100 | crate_id, 101 | num: version!(0.2.0), 102 | created_at: datetime!(5 Apr 2018 09:14:16), 103 | features: Slice::EMPTY, 104 | }); 105 | } 106 | 107 | { 108 | let crate_id = crates.id("partial-io").unwrap(); 109 | 110 | let features = Slice::new(&[FeatureEnables { 111 | id: db_dump.features.id("tokio"), 112 | enables: Slice::new(&[ 113 | CrateFeature { 114 | crate_id, 115 | feature_id: db_dump.features.id("tokio-io"), 116 | }, 117 | CrateFeature { 118 | crate_id, 119 | feature_id: db_dump.features.id("futures"), 120 | }, 121 | ]), 122 | weak_enables: Slice::new(&[]), 123 | }]); 124 | 125 | push_release({ 126 | let release = Release { 127 | id: next_version_id(), 128 | crate_id, 129 | num: version!(0.1.0), 130 | created_at: datetime!(26 May 2017 02:38:58), 131 | features, 132 | }; 133 | db_dump.dependencies.push(Dependency { 134 | id: next_dependency_id(), 135 | version_id: release.id, 136 | crate_id: crates.id("futures").unwrap(), 137 | req: 
version_req!(^0.1), 138 | feature_id: db_dump.features.id("futures"), 139 | default_features: DefaultFeatures(true), 140 | features: Slice::EMPTY, 141 | kind: DependencyKind::Normal, 142 | }); 143 | db_dump.dependencies.push(Dependency { 144 | id: next_dependency_id(), 145 | version_id: release.id, 146 | crate_id: crates.id("lazy_static").unwrap(), 147 | req: version_req!(^0.2), 148 | feature_id: FeatureId::CRATE, 149 | default_features: DefaultFeatures(true), 150 | features: Slice::EMPTY, 151 | kind: DependencyKind::Dev, 152 | }); 153 | db_dump.dependencies.push(Dependency { 154 | id: next_dependency_id(), 155 | version_id: release.id, 156 | crate_id: crates.id("quickcheck").unwrap(), 157 | req: version_req!(^0.4), 158 | feature_id: db_dump.features.id("quickcheck"), 159 | default_features: DefaultFeatures(true), 160 | features: Slice::EMPTY, 161 | kind: DependencyKind::Normal, 162 | }); 163 | db_dump.dependencies.push(Dependency { 164 | id: next_dependency_id(), 165 | version_id: release.id, 166 | crate_id: crates.id("quickcheck").unwrap(), 167 | req: version_req!(^0.4), 168 | feature_id: FeatureId::CRATE, 169 | default_features: DefaultFeatures(true), 170 | features: Slice::EMPTY, 171 | kind: DependencyKind::Dev, 172 | }); 173 | db_dump.dependencies.push(Dependency { 174 | id: next_dependency_id(), 175 | version_id: release.id, 176 | crate_id: crates.id("tokio-core").unwrap(), 177 | req: version_req!(^0.1), 178 | feature_id: FeatureId::CRATE, 179 | default_features: DefaultFeatures(true), 180 | features: Slice::EMPTY, 181 | kind: DependencyKind::Dev, 182 | }); 183 | db_dump.dependencies.push(Dependency { 184 | id: next_dependency_id(), 185 | version_id: release.id, 186 | crate_id: crates.id("tokio-io").unwrap(), 187 | req: version_req!(^0.1), 188 | feature_id: db_dump.features.id("tokio-io"), 189 | default_features: DefaultFeatures(true), 190 | features: Slice::EMPTY, 191 | kind: DependencyKind::Normal, 192 | }); 193 | release 194 | }); 195 | 196 | 
push_release({ 197 | let release = Release { 198 | id: next_version_id(), 199 | crate_id, 200 | num: version!(0.1.1), 201 | created_at: datetime!(27 May 2017 00:56:37), 202 | features, 203 | }; 204 | db_dump.dependencies.push(Dependency { 205 | id: next_dependency_id(), 206 | version_id: release.id, 207 | crate_id: crates.id("futures").unwrap(), 208 | req: version_req!(^0.1), 209 | feature_id: db_dump.features.id("futures"), 210 | default_features: DefaultFeatures(true), 211 | features: Slice::EMPTY, 212 | kind: DependencyKind::Normal, 213 | }); 214 | db_dump.dependencies.push(Dependency { 215 | id: next_dependency_id(), 216 | version_id: release.id, 217 | crate_id: crates.id("lazy_static").unwrap(), 218 | req: version_req!(^0.2), 219 | feature_id: FeatureId::CRATE, 220 | default_features: DefaultFeatures(true), 221 | features: Slice::EMPTY, 222 | kind: DependencyKind::Dev, 223 | }); 224 | db_dump.dependencies.push(Dependency { 225 | id: next_dependency_id(), 226 | version_id: release.id, 227 | crate_id: crates.id("quickcheck").unwrap(), 228 | req: version_req!(^0.4), 229 | feature_id: FeatureId::CRATE, 230 | default_features: DefaultFeatures(true), 231 | features: Slice::EMPTY, 232 | kind: DependencyKind::Dev, 233 | }); 234 | db_dump.dependencies.push(Dependency { 235 | id: next_dependency_id(), 236 | version_id: release.id, 237 | crate_id: crates.id("quickcheck").unwrap(), 238 | req: version_req!(^0.4), 239 | feature_id: db_dump.features.id("quickcheck"), 240 | default_features: DefaultFeatures(true), 241 | features: Slice::EMPTY, 242 | kind: DependencyKind::Normal, 243 | }); 244 | db_dump.dependencies.push(Dependency { 245 | id: next_dependency_id(), 246 | version_id: release.id, 247 | crate_id: crates.id("tokio-core").unwrap(), 248 | req: version_req!(^0.1), 249 | feature_id: FeatureId::CRATE, 250 | default_features: DefaultFeatures(true), 251 | features: Slice::EMPTY, 252 | kind: DependencyKind::Dev, 253 | }); 254 | db_dump.dependencies.push(Dependency { 255 | 
id: next_dependency_id(), 256 | version_id: release.id, 257 | crate_id: crates.id("tokio-io").unwrap(), 258 | req: version_req!(^0.1), 259 | feature_id: db_dump.features.id("tokio-io"), 260 | default_features: DefaultFeatures(true), 261 | features: Slice::EMPTY, 262 | kind: DependencyKind::Normal, 263 | }); 264 | release 265 | }); 266 | 267 | push_release({ 268 | let release = Release { 269 | id: next_version_id(), 270 | crate_id, 271 | num: version!(0.2.0), 272 | created_at: datetime!(30 May 2017 21:01:28), 273 | features, 274 | }; 275 | db_dump.dependencies.push(Dependency { 276 | id: next_dependency_id(), 277 | version_id: release.id, 278 | crate_id: crates.id("futures").unwrap(), 279 | req: version_req!(^0.1), 280 | feature_id: db_dump.features.id("futures"), 281 | default_features: DefaultFeatures(true), 282 | features: Slice::EMPTY, 283 | kind: DependencyKind::Normal, 284 | }); 285 | db_dump.dependencies.push(Dependency { 286 | id: next_dependency_id(), 287 | version_id: release.id, 288 | crate_id: crates.id("lazy_static").unwrap(), 289 | req: version_req!(^0.2), 290 | feature_id: FeatureId::CRATE, 291 | default_features: DefaultFeatures(true), 292 | features: Slice::EMPTY, 293 | kind: DependencyKind::Dev, 294 | }); 295 | db_dump.dependencies.push(Dependency { 296 | id: next_dependency_id(), 297 | version_id: release.id, 298 | crate_id: crates.id("quickcheck").unwrap(), 299 | req: version_req!(^0.4), 300 | feature_id: FeatureId::CRATE, 301 | default_features: DefaultFeatures(true), 302 | features: Slice::EMPTY, 303 | kind: DependencyKind::Dev, 304 | }); 305 | db_dump.dependencies.push(Dependency { 306 | id: next_dependency_id(), 307 | version_id: release.id, 308 | crate_id: crates.id("quickcheck").unwrap(), 309 | req: version_req!(^0.4), 310 | feature_id: db_dump.features.id("quickcheck"), 311 | default_features: DefaultFeatures(true), 312 | features: Slice::EMPTY, 313 | kind: DependencyKind::Normal, 314 | }); 315 | db_dump.dependencies.push(Dependency { 316 
| id: next_dependency_id(), 317 | version_id: release.id, 318 | crate_id: crates.id("tokio-core").unwrap(), 319 | req: version_req!(^0.1), 320 | feature_id: FeatureId::CRATE, 321 | default_features: DefaultFeatures(true), 322 | features: Slice::EMPTY, 323 | kind: DependencyKind::Dev, 324 | }); 325 | db_dump.dependencies.push(Dependency { 326 | id: next_dependency_id(), 327 | version_id: release.id, 328 | crate_id: crates.id("tokio-io").unwrap(), 329 | req: version_req!(^0.1), 330 | feature_id: db_dump.features.id("tokio-io"), 331 | default_features: DefaultFeatures(true), 332 | features: Slice::EMPTY, 333 | kind: DependencyKind::Normal, 334 | }); 335 | release 336 | }); 337 | 338 | push_release({ 339 | let release = Release { 340 | id: next_version_id(), 341 | crate_id, 342 | num: version!(0.2.1), 343 | created_at: datetime!(30 May 2017 21:47:41), 344 | features, 345 | }; 346 | db_dump.dependencies.push(Dependency { 347 | id: next_dependency_id(), 348 | version_id: release.id, 349 | crate_id: crates.id("futures").unwrap(), 350 | req: version_req!(^0.1), 351 | feature_id: db_dump.features.id("futures"), 352 | default_features: DefaultFeatures(true), 353 | features: Slice::EMPTY, 354 | kind: DependencyKind::Normal, 355 | }); 356 | db_dump.dependencies.push(Dependency { 357 | id: next_dependency_id(), 358 | version_id: release.id, 359 | crate_id: crates.id("lazy_static").unwrap(), 360 | req: version_req!(^0.2), 361 | feature_id: FeatureId::CRATE, 362 | default_features: DefaultFeatures(true), 363 | features: Slice::EMPTY, 364 | kind: DependencyKind::Dev, 365 | }); 366 | db_dump.dependencies.push(Dependency { 367 | id: next_dependency_id(), 368 | version_id: release.id, 369 | crate_id: crates.id("quickcheck").unwrap(), 370 | req: version_req!(^0.4), 371 | feature_id: FeatureId::CRATE, 372 | default_features: DefaultFeatures(true), 373 | features: Slice::EMPTY, 374 | kind: DependencyKind::Dev, 375 | }); 376 | db_dump.dependencies.push(Dependency { 377 | id: 
next_dependency_id(), 378 | version_id: release.id, 379 | crate_id: crates.id("quickcheck").unwrap(), 380 | req: version_req!(^0.4), 381 | feature_id: db_dump.features.id("quickcheck"), 382 | default_features: DefaultFeatures(true), 383 | features: Slice::EMPTY, 384 | kind: DependencyKind::Normal, 385 | }); 386 | db_dump.dependencies.push(Dependency { 387 | id: next_dependency_id(), 388 | version_id: release.id, 389 | crate_id: crates.id("tokio-core").unwrap(), 390 | req: version_req!(^0.1), 391 | feature_id: FeatureId::CRATE, 392 | default_features: DefaultFeatures(true), 393 | features: Slice::EMPTY, 394 | kind: DependencyKind::Dev, 395 | }); 396 | db_dump.dependencies.push(Dependency { 397 | id: next_dependency_id(), 398 | version_id: release.id, 399 | crate_id: crates.id("tokio-io").unwrap(), 400 | req: version_req!(^0.1), 401 | feature_id: db_dump.features.id("tokio-io"), 402 | default_features: DefaultFeatures(true), 403 | features: Slice::EMPTY, 404 | kind: DependencyKind::Normal, 405 | }); 406 | release 407 | }); 408 | 409 | push_release({ 410 | let release = Release { 411 | id: next_version_id(), 412 | crate_id, 413 | num: version!(0.2.2), 414 | created_at: datetime!(12 Jun 2017 05:26:52), 415 | features, 416 | }; 417 | db_dump.dependencies.push(Dependency { 418 | id: next_dependency_id(), 419 | version_id: release.id, 420 | crate_id: crates.id("futures").unwrap(), 421 | req: version_req!(^0.1), 422 | feature_id: db_dump.features.id("futures"), 423 | default_features: DefaultFeatures(true), 424 | features: Slice::EMPTY, 425 | kind: DependencyKind::Normal, 426 | }); 427 | db_dump.dependencies.push(Dependency { 428 | id: next_dependency_id(), 429 | version_id: release.id, 430 | crate_id: crates.id("lazy_static").unwrap(), 431 | req: version_req!(^0.2), 432 | feature_id: FeatureId::CRATE, 433 | default_features: DefaultFeatures(true), 434 | features: Slice::EMPTY, 435 | kind: DependencyKind::Dev, 436 | }); 437 | db_dump.dependencies.push(Dependency { 438 | 
id: next_dependency_id(), 439 | version_id: release.id, 440 | crate_id: crates.id("quickcheck").unwrap(), 441 | req: version_req!(^0.4), 442 | feature_id: FeatureId::CRATE, 443 | default_features: DefaultFeatures(true), 444 | features: Slice::EMPTY, 445 | kind: DependencyKind::Dev, 446 | }); 447 | db_dump.dependencies.push(Dependency { 448 | id: next_dependency_id(), 449 | version_id: release.id, 450 | crate_id: crates.id("quickcheck").unwrap(), 451 | req: version_req!(^0.4), 452 | feature_id: db_dump.features.id("quickcheck"), 453 | default_features: DefaultFeatures(true), 454 | features: Slice::EMPTY, 455 | kind: DependencyKind::Normal, 456 | }); 457 | db_dump.dependencies.push(Dependency { 458 | id: next_dependency_id(), 459 | version_id: release.id, 460 | crate_id: crates.id("tokio-core").unwrap(), 461 | req: version_req!(^0.1), 462 | feature_id: FeatureId::CRATE, 463 | default_features: DefaultFeatures(true), 464 | features: Slice::EMPTY, 465 | kind: DependencyKind::Dev, 466 | }); 467 | db_dump.dependencies.push(Dependency { 468 | id: next_dependency_id(), 469 | version_id: release.id, 470 | crate_id: crates.id("tokio-io").unwrap(), 471 | req: version_req!(^0.1), 472 | feature_id: db_dump.features.id("tokio-io"), 473 | default_features: DefaultFeatures(true), 474 | features: Slice::EMPTY, 475 | kind: DependencyKind::Normal, 476 | }); 477 | release 478 | }); 479 | 480 | push_release({ 481 | let release = Release { 482 | id: next_version_id(), 483 | crate_id, 484 | num: version!(0.2.3), 485 | created_at: datetime!(20 Jul 2017 20:01:22), 486 | features, 487 | }; 488 | db_dump.dependencies.push(Dependency { 489 | id: next_dependency_id(), 490 | version_id: release.id, 491 | crate_id: crates.id("futures").unwrap(), 492 | req: version_req!(^0.1), 493 | feature_id: db_dump.features.id("futures"), 494 | default_features: DefaultFeatures(true), 495 | features: Slice::EMPTY, 496 | kind: DependencyKind::Normal, 497 | }); 498 | db_dump.dependencies.push(Dependency { 499 | 
id: next_dependency_id(), 500 | version_id: release.id, 501 | crate_id: crates.id("lazy_static").unwrap(), 502 | req: version_req!(^0.2), 503 | feature_id: FeatureId::CRATE, 504 | default_features: DefaultFeatures(true), 505 | features: Slice::EMPTY, 506 | kind: DependencyKind::Dev, 507 | }); 508 | db_dump.dependencies.push(Dependency { 509 | id: next_dependency_id(), 510 | version_id: release.id, 511 | crate_id: crates.id("quickcheck").unwrap(), 512 | req: version_req!(^0.4), 513 | feature_id: FeatureId::CRATE, 514 | default_features: DefaultFeatures(true), 515 | features: Slice::EMPTY, 516 | kind: DependencyKind::Dev, 517 | }); 518 | db_dump.dependencies.push(Dependency { 519 | id: next_dependency_id(), 520 | version_id: release.id, 521 | crate_id: crates.id("quickcheck").unwrap(), 522 | req: version_req!(^0.4), 523 | feature_id: db_dump.features.id("quickcheck"), 524 | default_features: DefaultFeatures(true), 525 | features: Slice::EMPTY, 526 | kind: DependencyKind::Normal, 527 | }); 528 | db_dump.dependencies.push(Dependency { 529 | id: next_dependency_id(), 530 | version_id: release.id, 531 | crate_id: crates.id("tokio-core").unwrap(), 532 | req: version_req!(^0.1), 533 | feature_id: FeatureId::CRATE, 534 | default_features: DefaultFeatures(true), 535 | features: Slice::EMPTY, 536 | kind: DependencyKind::Dev, 537 | }); 538 | db_dump.dependencies.push(Dependency { 539 | id: next_dependency_id(), 540 | version_id: release.id, 541 | crate_id: crates.id("tokio-io").unwrap(), 542 | req: version_req!(^0.1), 543 | feature_id: db_dump.features.id("tokio-io"), 544 | default_features: DefaultFeatures(true), 545 | features: Slice::EMPTY, 546 | kind: DependencyKind::Normal, 547 | }); 548 | release 549 | }); 550 | 551 | push_release({ 552 | let release = Release { 553 | id: next_version_id(), 554 | crate_id, 555 | num: version!(0.2.4), 556 | created_at: datetime!(19 Aug 2017 23:37:51), 557 | features, 558 | }; 559 | db_dump.dependencies.push(Dependency { 560 | id: 
next_dependency_id(), 561 | version_id: release.id, 562 | crate_id: crates.id("futures").unwrap(), 563 | req: version_req!(^0.1), 564 | feature_id: db_dump.features.id("futures"), 565 | default_features: DefaultFeatures(true), 566 | features: Slice::EMPTY, 567 | kind: DependencyKind::Normal, 568 | }); 569 | db_dump.dependencies.push(Dependency { 570 | id: next_dependency_id(), 571 | version_id: release.id, 572 | crate_id: crates.id("lazy_static").unwrap(), 573 | req: version_req!(^0.2), 574 | feature_id: FeatureId::CRATE, 575 | default_features: DefaultFeatures(true), 576 | features: Slice::EMPTY, 577 | kind: DependencyKind::Dev, 578 | }); 579 | db_dump.dependencies.push(Dependency { 580 | id: next_dependency_id(), 581 | version_id: release.id, 582 | crate_id: crates.id("quickcheck").unwrap(), 583 | req: version_req!(^0.4), 584 | feature_id: FeatureId::CRATE, 585 | default_features: DefaultFeatures(true), 586 | features: Slice::EMPTY, 587 | kind: DependencyKind::Dev, 588 | }); 589 | db_dump.dependencies.push(Dependency { 590 | id: next_dependency_id(), 591 | version_id: release.id, 592 | crate_id: crates.id("quickcheck").unwrap(), 593 | req: version_req!(^0.4), 594 | feature_id: db_dump.features.id("quickcheck"), 595 | default_features: DefaultFeatures(true), 596 | features: Slice::EMPTY, 597 | kind: DependencyKind::Normal, 598 | }); 599 | db_dump.dependencies.push(Dependency { 600 | id: next_dependency_id(), 601 | version_id: release.id, 602 | crate_id: crates.id("tokio-core").unwrap(), 603 | req: version_req!(^0.1), 604 | feature_id: FeatureId::CRATE, 605 | default_features: DefaultFeatures(true), 606 | features: Slice::EMPTY, 607 | kind: DependencyKind::Dev, 608 | }); 609 | db_dump.dependencies.push(Dependency { 610 | id: next_dependency_id(), 611 | version_id: release.id, 612 | crate_id: crates.id("tokio-io").unwrap(), 613 | req: version_req!(^0.1), 614 | feature_id: db_dump.features.id("tokio-io"), 615 | default_features: DefaultFeatures(true), 616 | features: 
Slice::EMPTY, 617 | kind: DependencyKind::Normal, 618 | }); 619 | release 620 | }); 621 | 622 | push_release({ 623 | let release = Release { 624 | id: next_version_id(), 625 | crate_id, 626 | num: version!(0.2.5), 627 | created_at: datetime!(18 Nov 2017 02:26:25), 628 | features, 629 | }; 630 | db_dump.dependencies.push(Dependency { 631 | id: next_dependency_id(), 632 | version_id: release.id, 633 | crate_id: crates.id("futures").unwrap(), 634 | req: version_req!(^0.1), 635 | feature_id: db_dump.features.id("futures"), 636 | default_features: DefaultFeatures(true), 637 | features: Slice::EMPTY, 638 | kind: DependencyKind::Normal, 639 | }); 640 | db_dump.dependencies.push(Dependency { 641 | id: next_dependency_id(), 642 | version_id: release.id, 643 | crate_id: crates.id("lazy_static").unwrap(), 644 | req: version_req!(^0.2), 645 | feature_id: FeatureId::CRATE, 646 | default_features: DefaultFeatures(true), 647 | features: Slice::EMPTY, 648 | kind: DependencyKind::Dev, 649 | }); 650 | db_dump.dependencies.push(Dependency { 651 | id: next_dependency_id(), 652 | version_id: release.id, 653 | crate_id: crates.id("quickcheck").unwrap(), 654 | req: version_req!(^0.4), 655 | feature_id: FeatureId::CRATE, 656 | default_features: DefaultFeatures(true), 657 | features: Slice::EMPTY, 658 | kind: DependencyKind::Dev, 659 | }); 660 | db_dump.dependencies.push(Dependency { 661 | id: next_dependency_id(), 662 | version_id: release.id, 663 | crate_id: crates.id("quickcheck").unwrap(), 664 | req: version_req!(^0.4), 665 | feature_id: db_dump.features.id("quickcheck"), 666 | default_features: DefaultFeatures(true), 667 | features: Slice::EMPTY, 668 | kind: DependencyKind::Normal, 669 | }); 670 | db_dump.dependencies.push(Dependency { 671 | id: next_dependency_id(), 672 | version_id: release.id, 673 | crate_id: crates.id("tokio-core").unwrap(), 674 | req: version_req!(^0.1), 675 | feature_id: FeatureId::CRATE, 676 | default_features: DefaultFeatures(true), 677 | features: 
Slice::EMPTY, 678 | kind: DependencyKind::Dev, 679 | }); 680 | db_dump.dependencies.push(Dependency { 681 | id: next_dependency_id(), 682 | version_id: release.id, 683 | crate_id: crates.id("tokio-io").unwrap(), 684 | req: version_req!(^0.1), 685 | feature_id: db_dump.features.id("tokio-io"), 686 | default_features: DefaultFeatures(true), 687 | features: Slice::EMPTY, 688 | kind: DependencyKind::Normal, 689 | }); 690 | release 691 | }); 692 | 693 | push_release({ 694 | let release = Release { 695 | id: next_version_id(), 696 | crate_id, 697 | num: version!(0.3.0), 698 | created_at: datetime!(12 Jan 2018 22:15:15), 699 | features, 700 | }; 701 | db_dump.dependencies.push(Dependency { 702 | id: next_dependency_id(), 703 | version_id: release.id, 704 | crate_id: crates.id("futures").unwrap(), 705 | req: version_req!(^0.1), 706 | feature_id: db_dump.features.id("futures"), 707 | default_features: DefaultFeatures(true), 708 | features: Slice::EMPTY, 709 | kind: DependencyKind::Normal, 710 | }); 711 | db_dump.dependencies.push(Dependency { 712 | id: next_dependency_id(), 713 | version_id: release.id, 714 | crate_id: crates.id("lazy_static").unwrap(), 715 | req: version_req!(^1.0), 716 | feature_id: FeatureId::CRATE, 717 | default_features: DefaultFeatures(true), 718 | features: Slice::EMPTY, 719 | kind: DependencyKind::Dev, 720 | }); 721 | db_dump.dependencies.push(Dependency { 722 | id: next_dependency_id(), 723 | version_id: release.id, 724 | crate_id: crates.id("quickcheck").unwrap(), 725 | req: version_req!(^0.6), 726 | feature_id: FeatureId::CRATE, 727 | default_features: DefaultFeatures(true), 728 | features: Slice::EMPTY, 729 | kind: DependencyKind::Dev, 730 | }); 731 | db_dump.dependencies.push(Dependency { 732 | id: next_dependency_id(), 733 | version_id: release.id, 734 | crate_id: crates.id("quickcheck").unwrap(), 735 | req: version_req!(^0.6), 736 | feature_id: db_dump.features.id("quickcheck"), 737 | default_features: DefaultFeatures(true), 738 | features: 
Slice::EMPTY,
                kind: DependencyKind::Normal,
            });
            db_dump.dependencies.push(Dependency {
                id: next_dependency_id(),
                version_id: release.id,
                crate_id: crates.id("tokio-core").unwrap(),
                req: version_req!(^0.1),
                feature_id: FeatureId::CRATE,
                default_features: DefaultFeatures(true),
                features: Slice::EMPTY,
                kind: DependencyKind::Dev,
            });
            db_dump.dependencies.push(Dependency {
                id: next_dependency_id(),
                version_id: release.id,
                crate_id: crates.id("tokio-io").unwrap(),
                req: version_req!(^0.1),
                feature_id: db_dump.features.id("tokio-io"),
                default_features: DefaultFeatures(true),
                features: Slice::EMPTY,
                kind: DependencyKind::Normal,
            });
            release
        });
    }

    // Backfill the lone 0.0.0 release of `xcm`.
    {
        let crate_id = crates.id("xcm").unwrap();

        push_release(Release {
            id: next_version_id(),
            crate_id,
            num: version!(0.0.0),
            created_at: datetime!(9 Mar 2021 05:51:34),
            features: Slice::EMPTY,
        });
    }

    // Backfill the lone 0.0.0 release of `xcm-executor`.
    {
        let crate_id = crates.id("xcm-executor").unwrap();

        push_release(Release {
            id: next_version_id(),
            crate_id,
            num: version!(0.0.0),
            created_at: datetime!(9 Mar 2021 06:21:39),
            features: Slice::EMPTY,
        });
    }
}
--------------------------------------------------------------------------------
/src/present.rs:
--------------------------------------------------------------------------------
use differential_dataflow::difference::{Multiply, Semigroup};

/// Difference type that records mere presence of a fact, with no
/// multiplicity. Accumulating two `Present`s is still `Present`, and a
/// `Present` record is never considered zero (it never cancels out).
#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Debug)]
pub(crate) struct Present;

impl Semigroup for Present {
    fn plus_equals(&mut self, rhs: &Present) {
        // Present + Present = Present; nothing to accumulate.
        let _ = rhs;
    }

    fn is_zero(&self) -> bool {
        // A record that exists stays present; there is no zero element.
        false
    }
}

// NOTE(review): the generic arguments on the three `Multiply` impls below
// were stripped by text extraction; restored from the method signatures
// (`rhs: &Present` / `rhs: &isize`) — confirm against the repository.
//
// Present * Present = Present.
impl Multiply<Present> for Present {
    type Output = Present;

    fn multiply(self, rhs: &Present) -> Self::Output {
        let _ = rhs;
        Present
    }
}

// isize * Present = isize: presence does not scale a signed count.
impl Multiply<Present> for isize {
    type Output = isize;

    fn multiply(self, rhs: &Present) -> Self::Output {
        let _ = rhs;
        self
    }
}

// Present * isize = isize.
impl Multiply<isize> for Present {
    type Output = isize;

    fn multiply(self, rhs: &isize) -> Self::Output {
        *rhs
    }
}
--------------------------------------------------------------------------------
/src/query.rs:
--------------------------------------------------------------------------------
use crate::cratemap::CrateMap;
use crate::user::UserQuery;
use anyhow::{bail, format_err, Error, Result};
use cargo_tally::arena::Slice;
use cargo_tally::id::QueryId;
use cargo_tally::version::VersionReq;
use cargo_tally::{Predicate, Query};
use ref_cast::RefCast;
use std::fmt::{self, Display};
use std::str::{FromStr, Split};

// for example &["serde:1.0", "anyhow:^1.0 + thiserror"]
//
// NOTE(review): the generic arguments in the two signatures below were
// stripped by text extraction; restored from usage (`&'a str` queries,
// `Vec` of `Query`, `Slice` of `Predicate`) — confirm against the repository.
pub fn parse<'a>(
    queries: impl IntoIterator<Item = &'a str>,
    crates: &CrateMap,
) -> Result<Vec<Query>> {
    queries
        .into_iter()
        .enumerate()
        .map(|(i, query)| {
            // Query ids are small; more than u8::MAX queries is a bug.
            let id = QueryId(u8::try_from(i).unwrap());
            match parse_predicates(query, crates) {
                Ok(predicates) => Ok(Query { id, predicates }),
                Err(err) => bail!("failed to parse query {:?}: {}", query, err),
            }
        })
        .collect()
}

/// Parse one query string into its `+`-separated predicates. An `@user`
/// predicate expands to one predicate per crate owned by that user/team.
fn parse_predicates(string: &str, crates: &CrateMap) -> Result<Slice<Predicate>> {
    let mut predicates = Vec::new();

    for predicate in IterPredicates::new(string, crates) {
        let predicate = predicate?;
        match predicate {
            RawPredicate::Crate(predicate) => predicates.push(predicate),
            RawPredicate::User(username) => {
                let Some(user_id) = crates.users.get(username) else {
                    let kind = if username.is_team() { "team" } else { "user" };
                    bail!("no crates owned by {} @{}", kind, username);
                };
                predicates.extend(
                    crates
                        .owners
                        .get(user_id)
.map(Vec::as_slice)
                        .unwrap_or_default()
                        .iter()
                        .map(|&crate_id| Predicate {
                            crate_id,
                            req: None,
                        }),
                );
            }
        }
    }

    Ok(Slice::new(&predicates))
}

/// Re-render a query string in canonical, human-readable form
/// (predicates joined with " or ", crate names in their original casing).
pub fn format(query: &str, crates: &CrateMap) -> String {
    DisplayQuery { query, crates }.to_string()
}

struct DisplayQuery<'a> {
    query: &'a str,
    crates: &'a CrateMap,
}

impl<'a> Display for DisplayQuery<'a> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        for (i, predicate) in IterPredicates::new(self.query, self.crates).enumerate() {
            if i > 0 {
                formatter.write_str(" or ")?;
            }

            // Display is only used on queries that already parsed successfully.
            let predicate = predicate.unwrap();
            match predicate {
                RawPredicate::Crate(predicate) => {
                    let original_name = self.crates.name(predicate.crate_id).unwrap();
                    formatter.write_str(original_name)?;
                    if let Some(req) = predicate.req {
                        write!(formatter, ":{}", req)?;
                    }
                }
                RawPredicate::User(username) => {
                    let (username, _user_id) = self.crates.users.get_key_value(username).unwrap();
                    write!(formatter, "@{}", username)?;
                }
            }
        }
        Ok(())
    }
}

/// A single parsed predicate: either a crate (with optional version req)
/// or an `@user` / `@team` owner reference.
enum RawPredicate<'a> {
    Crate(Predicate),
    User(&'a UserQuery),
}

/// Iterator over the `+`-separated predicates of one query string.
struct IterPredicates<'a> {
    split: Split<'a, char>,
    crates: &'a CrateMap,
}

impl<'a> IterPredicates<'a> {
    fn new(query: &'a str, crates: &'a CrateMap) -> Self {
        IterPredicates {
            split: query.split('+'),
            crates,
        }
    }
}

impl<'a> Iterator for IterPredicates<'a> {
    // NOTE(review): generic arguments restored after being stripped by text
    // extraction (`Result<RawPredicate<'a>>`, `Option<Self::Item>`).
    type Item = Result<RawPredicate<'a>>;

    fn next(&mut self) -> Option<Self::Item> {
        let predicate = self.split.next()?.trim();

        if let Some(username) = predicate.strip_prefix('@') {
            return Some(Ok(RawPredicate::User(UserQuery::ref_cast(username))));
        }

        // "name:req" carries a version requirement; bare "name" matches all.
        let (name, req) = if let Some((name, req)) = predicate.split_once(':') {
            match VersionReq::from_str(req) {
                Ok(req) => (name, Some(req)),
                Err(err) => return Some(Err(Error::new(err))),
            }
        } else {
            (predicate, None)
        };

        let Some(crate_id) = self.crates.id(name) else {
            return Some(Err(format_err!("no crate named {}", name)));
        };

        Some(Ok(RawPredicate::Crate(Predicate { crate_id, req })))
    }
}
--------------------------------------------------------------------------------
/src/render.rs:
--------------------------------------------------------------------------------
use crate::total::Total;
use anyhow::Result;
use cargo_tally::matrix::Matrix;
use cargo_tally::timestamp::DateTime;
use std::cmp;
use std::env;
use std::fmt::{self, Display};
use std::fs;
use std::path::PathBuf;

/// Render the tally results into a self-contained HTML graph in the temp
/// directory and return the path of the written file.
///
/// `total` being `Some` switches the graph to relative mode (fractions of
/// all of crates.io rather than absolute counts).
// NOTE(review): return type `Result<PathBuf>` restored; the generic argument
// was stripped by text extraction.
pub(crate) fn graph(
    title: Option<&str>,
    transitive: bool,
    results: &Matrix,
    labels: &[String],
    total: Option<&Total>,
) -> Result<PathBuf> {
    let now = DateTime::now();

    let relative = total.is_some();
    let title = if let Some(title) = title {
        title
    } else if relative {
        if transitive {
            "fraction of crates.io depending transitively"
        } else {
            "fraction of crates.io depending directly"
        }
    } else {
        if transitive {
            "number of crates depending transitively"
        } else {
            "number of crates depending directly"
        }
    };

    // Build the JSON-ish data series consumed by the index.html template.
    let mut data = String::new();
    data += "[\n";
    for (i, label) in labels.iter().enumerate() {
        data += " {\"name\":\"";
        data += label;
        data += "\", \"values\":[\n";
        let mut prev = None;
        for (timestamp, row) in results {
            let value = row[i];
            if prev.is_none() {
                if value == 0 {
                    continue;
                }
                // Emit a leading zero point just before the first nonzero
                // value so the curve starts from the axis.
                let mut secs = timestamp.seconds();
                if timestamp.subsec_nanos() == 0 {
                    secs = secs.saturating_sub(1);
                }
                let timestamp =
DateTime::from_timestamp(secs, 0);
                data += &Row(timestamp, 0, total).to_string();
            } else if prev == Some(value) {
                // Unchanged value; the step chart does not need a new point.
                continue;
            }
            data += &Row(timestamp, value, total).to_string();
            prev = Some(value);
        }
        // Extend the last known value up to the present moment.
        let (timestamp, last) = results.iter().next_back().unwrap();
        if timestamp < now {
            data += &Row(now, last[i], total).to_string();
        }
        data += " ]},\n";
    }
    data += " ]";

    // Substitute title/data/relative-flag into the bundled HTML template.
    let template = include_str!("index.html");
    let mut preprocessor_context = minipre::Context::new();
    preprocessor_context
        .define("CARGO_TALLY_TITLE", format!("\"{}\"", title.escape_debug()))
        .define("CARGO_TALLY_DATA", data)
        .define("CARGO_TALLY_RELATIVE", (relative as usize).to_string());
    let html = minipre::process_str(template, &mut preprocessor_context)?;

    let dir = env::temp_dir().join("cargo-tally");
    fs::create_dir_all(&dir)?;
    let path = dir.join(format!("{}.html", now.millis()));
    fs::write(&path, html)?;
    Ok(path)
}

/// One data point: (time, count, optional denominator for relative graphs).
struct Row<'a>(DateTime, u32, Option<&'a Total>);

impl<'a> Display for Row<'a> {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str(" {\"time\":")?;
        write!(formatter, "{}", self.0.millis())?;
        formatter.write_str(", \"edges\":")?;
        if let Some(total) = self.2 {
            let total = total.eval(self.0);
            if total == 0 {
                formatter.write_str("0")?;
            } else if self.1 == total {
                // Bump a 100% down to 50%. The only graph affected by this is
                // `cargo tally --relative --transitive @alexcrichton` and while
                // 50% is not an accurate datum, this hack makes that graph more
                // readable by avoiding the y-axis getting extended all the way
                // to 100% in the first day of crates.io's existence.
                formatter.write_str("0.5")?;
            } else {
                let fraction = self.1 as f32 / total as f32;
                write_truncated(formatter, fraction)?;
            }
        } else {
            write!(formatter, "{}", self.1)?;
        }
        formatter.write_str("},\n")?;
        Ok(())
    }
}

/// Write `fraction` keeping at most 3 digits after the first nonzero digit
/// and dropping trailing zeros, e.g. 0.0123456 -> "0.01234" -> "0.01234".
fn write_truncated(formatter: &mut fmt::Formatter, fraction: f32) -> fmt::Result {
    let mut repr = fraction.to_string();
    let nonzero_digit = |ch: char| ch >= '1' && ch <= '9';
    if let Some(first_nonzero) = repr.find(nonzero_digit) {
        repr.truncate(cmp::min(first_nonzero + 4, repr.len()));
    }
    if let Some(last_nonzero) = repr.rfind(nonzero_digit) {
        repr.truncate(last_nonzero + 1);
    }
    formatter.write_str(&repr)
}
--------------------------------------------------------------------------------
/src/stream.rs:
--------------------------------------------------------------------------------
// Shorthand for a differential dataflow `Collection` in the dataflow scope
// used throughout this crate, keyed or unkeyed, with difference type `$r`.
macro_rules! stream {
    ($k:ty => $v:ty; $r:ty) => {
        stream![($k, $v); $r]
    };
    ($d:ty; $r:ty) => {
        // NOTE(review): the Worker's allocator type parameter was stripped by
        // text extraction; restored as timely's standard communication
        // Allocator — confirm against the repository.
        differential_dataflow::collection::Collection<
            timely::dataflow::scopes::Child<
                'a,
                timely::worker::Worker<timely::communication::Allocator>,
                crate::timestamp::DateTime,
            >,
            $d,
            $r,
        >
    };
}
--------------------------------------------------------------------------------
/src/timestamp.rs:
--------------------------------------------------------------------------------
use chrono::{NaiveDate, NaiveDateTime, NaiveTime, TimeZone, Utc};
use differential_dataflow::lattice::Lattice;
use std::cmp;
use std::fmt::{self, Debug, Display};
use timely::order::{PartialOrder, TotalOrder};
use timely::progress::timestamp::{PathSummary, Refines, Timestamp};

/// UTC instant used as the timely/differential logical timestamp.
// NOTE(review): the `<Utc>` parameter was stripped by text extraction;
// restored from the `From` impl and constructors below.
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
#[repr(transparent)]
pub struct DateTime(chrono::DateTime<Utc>);

/// Signed duration; the path-summary type for `DateTime`.
#[derive(Copy, Clone, Ord, PartialOrd, Eq, PartialEq)]
#[repr(transparent)]
pub
struct Duration(chrono::Duration); 15 | 16 | impl DateTime { 17 | pub fn new(date: NaiveDate, time: NaiveTime) -> Self { 18 | DateTime(Utc.from_utc_datetime(&NaiveDateTime::new(date, time))) 19 | } 20 | 21 | pub fn now() -> Self { 22 | DateTime(Utc::now()) 23 | } 24 | 25 | pub fn seconds(&self) -> i64 { 26 | self.0.timestamp() 27 | } 28 | 29 | pub fn millis(&self) -> i64 { 30 | self.0.timestamp_millis() 31 | } 32 | 33 | pub fn subsec_nanos(&self) -> u32 { 34 | self.0.timestamp_subsec_nanos() 35 | } 36 | 37 | pub fn from_timestamp(secs: i64, nanos: u32) -> Self { 38 | DateTime(chrono::DateTime::from_timestamp(secs, nanos).unwrap()) 39 | } 40 | } 41 | 42 | impl From> for DateTime { 43 | fn from(date_time: chrono::DateTime) -> Self { 44 | DateTime(date_time) 45 | } 46 | } 47 | 48 | impl Timestamp for DateTime { 49 | type Summary = Duration; 50 | 51 | fn minimum() -> Self { 52 | Self::from_timestamp(0, 0) 53 | } 54 | } 55 | 56 | impl Lattice for DateTime { 57 | fn join(&self, other: &Self) -> Self { 58 | cmp::max(*self, *other) 59 | } 60 | 61 | fn meet(&self, other: &Self) -> Self { 62 | cmp::min(*self, *other) 63 | } 64 | } 65 | 66 | impl PartialOrder for DateTime { 67 | fn less_than(&self, other: &Self) -> bool { 68 | self < other 69 | } 70 | 71 | fn less_equal(&self, other: &Self) -> bool { 72 | self <= other 73 | } 74 | } 75 | 76 | impl TotalOrder for DateTime {} 77 | 78 | impl PathSummary for Duration { 79 | fn results_in(&self, src: &DateTime) -> Option { 80 | src.0.checked_add_signed(self.0).map(DateTime) 81 | } 82 | 83 | fn followed_by(&self, other: &Self) -> Option { 84 | self.0.checked_add(&other.0).map(Duration) 85 | } 86 | } 87 | 88 | impl Refines<()> for DateTime { 89 | fn to_inner(_other: ()) -> Self { 90 | Self::minimum() 91 | } 92 | 93 | #[allow(clippy::unused_unit)] 94 | fn to_outer(self) -> () {} 95 | 96 | #[allow(clippy::unused_unit)] 97 | fn summarize(_path: ::Summary) -> () {} 98 | } 99 | 100 | impl PartialOrder for Duration { 101 | fn 
less_than(&self, other: &Self) -> bool { 102 | self < other 103 | } 104 | 105 | fn less_equal(&self, other: &Self) -> bool { 106 | self <= other 107 | } 108 | } 109 | 110 | impl Default for DateTime { 111 | fn default() -> Self { 112 | Self::minimum() 113 | } 114 | } 115 | 116 | impl Display for DateTime { 117 | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 118 | Display::fmt(&self.0, formatter) 119 | } 120 | } 121 | 122 | impl Debug for DateTime { 123 | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 124 | Debug::fmt(&self.0, formatter) 125 | } 126 | } 127 | 128 | impl Default for Duration { 129 | fn default() -> Self { 130 | Duration(chrono::Duration::nanoseconds(0)) 131 | } 132 | } 133 | 134 | impl Debug for Duration { 135 | fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result { 136 | Debug::fmt(&self.0, formatter) 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /src/total.rs: -------------------------------------------------------------------------------- 1 | use cargo_tally::timestamp::DateTime; 2 | use cargo_tally::Release; 3 | use std::collections::BTreeSet as Set; 4 | 5 | pub(crate) struct Total { 6 | times: Vec, 7 | } 8 | 9 | impl Total { 10 | pub(crate) fn index(releases: &[Release]) -> Self { 11 | let mut crate_ids = Set::new(); 12 | let mut times = Vec::new(); 13 | for release in releases { 14 | if crate_ids.insert(release.crate_id) { 15 | times.push(release.created_at); 16 | } 17 | } 18 | Total { times } 19 | } 20 | 21 | pub(crate) fn eval(&self, time: DateTime) -> u32 { 22 | match self.times.binary_search(&time) { 23 | Ok(i) => 1 + i as u32, 24 | Err(i) => i as u32, 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/trace.rs: -------------------------------------------------------------------------------- 1 | pub(crate) const VERBOSE: bool = false; 2 | 
// ==== src/user.rs ====

use ref_cast::RefCast;
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::fmt::{self, Display};

/// Validates a GitHub login: 1-39 characters, ASCII letters/digits/hyphens,
/// with no leading, trailing, or consecutive hyphens.
pub(crate) fn valid(name: &str) -> bool {
    // Cheap length checks first; `is_ascii_alphanumeric` covers exactly the
    // 0-9 / A-Z / a-z ranges the original compared by hand.
    !name.is_empty()
        && name.len() <= 39
        && name.chars().all(|ch| ch.is_ascii_alphanumeric() || ch == '-')
        && !name.starts_with('-')
        && !name.ends_with('-')
        && !name.contains("--")
}

/// Owned GitHub user (or team) name. Ordering and equality are ASCII
/// case-insensitive, delegated to `UserQuery`.
pub(crate) struct User(String);

impl User {
    pub(crate) fn new(string: String) -> Self {
        User(string)
    }
}

impl Ord for User {
    fn cmp(&self, rhs: &Self) -> Ordering {
        UserQuery::ref_cast(&self.0).cmp(UserQuery::ref_cast(&rhs.0))
    }
}

impl PartialOrd for User {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}

impl Eq for User {}

impl PartialEq for User {
    fn eq(&self, rhs: &Self) -> bool {
        UserQuery::ref_cast(&self.0).eq(UserQuery::ref_cast(&rhs.0))
    }
}

impl Display for User {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.0, formatter)
    }
}

/// Borrowed query key for `User` lookups. Its Ord/Eq agree with `User`'s
/// (both ASCII case-insensitive), which the `Borrow` impl below requires.
#[derive(RefCast)]
#[repr(transparent)]
pub(crate) struct UserQuery(str);

impl UserQuery {
    /// Teams are written "org/team", so a slash distinguishes a team query
    /// from a plain user query.
    pub(crate) fn is_team(&self) -> bool {
        self.0.contains('/')
    }
}

impl Borrow<UserQuery> for User {
    fn borrow(&self) -> &UserQuery {
        UserQuery::ref_cast(&self.0)
    }
}

impl Ord for UserQuery {
    fn cmp(&self, rhs: &Self) -> Ordering {
        // Byte-wise comparison after ASCII-lowercasing both sides.
        self.0
            .bytes()
            .map(CaseAgnosticByte)
            .cmp(rhs.0.bytes().map(CaseAgnosticByte))
    }
}

impl PartialOrd for UserQuery {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}

impl Eq for UserQuery {}

impl PartialEq for UserQuery {
    fn eq(&self, rhs: &Self) -> bool {
        // Equivalent to comparing CaseAgnosticByte streams, but the standard
        // library provides exactly this predicate.
        self.0.eq_ignore_ascii_case(&rhs.0)
    }
}

impl Display for UserQuery {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.0, formatter)
    }
}

/// A byte that orders and compares as its ASCII-lowercase form.
struct CaseAgnosticByte(u8);

impl Ord for CaseAgnosticByte {
    fn cmp(&self, rhs: &Self) -> Ordering {
        self.0.to_ascii_lowercase().cmp(&rhs.0.to_ascii_lowercase())
    }
}

impl PartialOrd for CaseAgnosticByte {
    fn partial_cmp(&self, rhs: &Self) -> Option<Ordering> {
        Some(self.cmp(rhs))
    }
}

impl Eq for CaseAgnosticByte {}

impl PartialEq for CaseAgnosticByte {
    fn eq(&self, rhs: &Self) -> bool {
        self.cmp(rhs) == Ordering::Equal
    }
}

// ==== src/version.rs ====

use crate::arena::Slice;
use semver::{Comparator, Op};
use std::cmp::Ordering;
use std::fmt::{self, Debug, Display};
use std::ops::{Deref, DerefMut};
use std::str::FromStr;

/// Semver version, newtyped so it can carry crate-local trait impls.
#[derive(Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
pub struct Version(pub semver::Version);

impl Version {
    pub const fn new(major: u64, minor: u64, patch: u64) -> Self {
        Version(semver::Version::new(major, minor, patch))
    }
}

/// Semver requirement stored as an arena slice of comparators, making the
/// type `Copy` (unlike `semver::VersionReq`).
#[derive(Copy, Clone, Eq, PartialEq, Hash)]
pub struct VersionReq {
    pub comparators: Slice<Comparator>,
}

impl VersionReq {
    /// True if `version` satisfies every comparator, subject to the
    /// prerelease compatibility rule (see `matches_req`).
    pub fn matches(&self, version: &Version) -> bool {
        matches_req(self.comparators, version)
    }
}

impl Deref for Version {
    type Target = semver::Version;

    fn
deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for Version {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

impl Display for Version {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        Display::fmt(&self.0, formatter)
    }
}

impl Debug for Version {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "Version({})", self)
    }
}

impl Ord for VersionReq {
    fn cmp(&self, other: &Self) -> Ordering {
        // Project each comparator onto an orderable key and compare the two
        // sequences lexicographically. `Iterator::cmp` already provides the
        // length tiebreak (an exhausted prefix orders Less), which the
        // previous hand-rolled lockstep loop reimplemented manually.
        fn key(c: &Comparator) -> (usize, u64, Option<u64>, Option<u64>, &semver::Prerelease) {
            (c.op as usize, c.major, c.minor, c.patch, &c.pre)
        }
        self.comparators
            .iter_ref()
            .map(key)
            .cmp(other.comparators.iter_ref().map(key))
    }
}

impl PartialOrd for VersionReq {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl From<semver::VersionReq> for VersionReq {
    fn from(req: semver::VersionReq) -> Self {
        let comparators = Slice::new(&req.comparators);
        VersionReq { comparators }
    }
}

impl FromStr for VersionReq {
    type Err = semver::Error;

    fn from_str(string: &str) -> Result<Self, Self::Err> {
        semver::VersionReq::from_str(string).map(VersionReq::from)
    }
}

impl Display for VersionReq {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        // An empty comparator list means "any version".
        if self.comparators.is_empty() {
            return formatter.write_str("*");
        }
        for (i, comparator) in self.comparators.iter_ref().enumerate() {
            if i > 0 {
                formatter.write_str(", ")?;
            }
            write!(formatter, "{}", comparator)?;
        }
        Ok(())
    }
}

impl Debug for VersionReq {
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        write!(formatter, "VersionReq({})", self)
    }
}

/// Port of `semver::VersionReq::matches` operating on the arena-allocated
/// comparator slice.
fn matches_req(comparators: Slice<Comparator>, ver: &Version) -> bool {
    for cmp in comparators.iter_ref() {
        if !matches_impl(cmp, ver) {
            return false;
        }
    }

    if ver.pre.is_empty() {
        return true;
    }

    // If a version has a prerelease tag (for example, 1.2.3-alpha.3) then it
    // will only be allowed to satisfy req if at least one comparator with the
    // same major.minor.patch also has a prerelease tag.
    for cmp in comparators.iter_ref() {
        if pre_is_compatible(cmp, ver) {
            return true;
        }
    }

    false
}

/// Dispatches on the comparator operator, mirroring semver's internal matcher.
fn matches_impl(cmp: &Comparator, ver: &Version) -> bool {
    match cmp.op {
        Op::Exact | Op::Wildcard => matches_exact(cmp, ver),
        Op::Greater => matches_greater(cmp, ver),
        Op::GreaterEq => matches_exact(cmp, ver) || matches_greater(cmp, ver),
        Op::Less => matches_less(cmp, ver),
        Op::LessEq => matches_exact(cmp, ver) || matches_less(cmp, ver),
        Op::Tilde => matches_tilde(cmp, ver),
        Op::Caret => matches_caret(cmp, ver),
        // `Op` is #[non_exhaustive]; fail loudly if semver grows a new one.
        _ => unimplemented!(),
    }
}

/// `=I.J.K`; components absent from the comparator are not constrained.
fn matches_exact(cmp: &Comparator, ver: &Version) -> bool {
    if ver.major != cmp.major {
        return false;
    }

    if let Some(minor) = cmp.minor {
        if ver.minor != minor {
            return false;
        }
    }

    if let Some(patch) = cmp.patch {
        if ver.patch != patch {
            return false;
        }
    }

    ver.pre == cmp.pre
}

/// `>I.J.K`
fn matches_greater(cmp: &Comparator, ver: &Version) -> bool {
    if ver.major != cmp.major {
        return ver.major > cmp.major;
    }

    match
cmp.minor {
        // `>I` with equal major: nothing within the same major is "greater".
        None => return false,
        Some(minor) => {
            if ver.minor != minor {
                return ver.minor > minor;
            }
        }
    }

    match cmp.patch {
        None => return false,
        Some(patch) => {
            if ver.patch != patch {
                return ver.patch > patch;
            }
        }
    }

    // major.minor.patch all equal: prerelease ordering decides.
    ver.pre > cmp.pre
}

/// `<I.J.K`; mirror image of `matches_greater`.
fn matches_less(cmp: &Comparator, ver: &Version) -> bool {
    if ver.major != cmp.major {
        return ver.major < cmp.major;
    }

    match cmp.minor {
        None => return false,
        Some(minor) => {
            if ver.minor != minor {
                return ver.minor < minor;
            }
        }
    }

    match cmp.patch {
        None => return false,
        Some(patch) => {
            if ver.patch != patch {
                return ver.patch < patch;
            }
        }
    }

    ver.pre < cmp.pre
}

/// `~I.J.K`: major and minor pinned (when present), patch may move forward.
fn matches_tilde(cmp: &Comparator, ver: &Version) -> bool {
    if ver.major != cmp.major {
        return false;
    }

    if let Some(minor) = cmp.minor {
        if ver.minor != minor {
            return false;
        }
    }

    if let Some(patch) = cmp.patch {
        if ver.patch != patch {
            // Same major.minor: any later patch satisfies the tilde.
            return ver.patch > patch;
        }
    }

    ver.pre >= cmp.pre
}

/// `^I.J.K`: compatible within the leftmost nonzero component.
fn matches_caret(cmp: &Comparator, ver: &Version) -> bool {
    if ver.major != cmp.major {
        return false;
    }

    let Some(minor) = cmp.minor else {
        // `^I` constrains only the major version.
        return true;
    };

    let Some(patch) = cmp.patch else {
        // `^I.J`: for major > 0 minor may move forward; for `^0.J` the minor
        // is the leftmost nonzero component and is pinned.
        return if cmp.major > 0 {
            ver.minor >= minor
        } else {
            ver.minor == minor
        };
    };

    if cmp.major > 0 {
        // `^I.J.K` with I > 0: minor/patch may move forward.
        if ver.minor != minor {
            return ver.minor > minor;
        } else if ver.patch != patch {
            return ver.patch > patch;
        }
    } else if minor > 0 {
        // `^0.J.K` with J > 0: minor pinned, patch may move forward.
        if ver.minor != minor {
            return false;
        } else if ver.patch != patch {
            return ver.patch > patch;
        }
    } else if ver.minor != minor || ver.patch != patch {
        // `^0.0.K`: everything pinned.
        return false;
    }

    ver.pre >= cmp.pre
}

/// A prerelease version can only satisfy a requirement if some comparator
/// with the identical major.minor.patch also carries a prerelease tag.
fn pre_is_compatible(cmp: &Comparator, ver: &Version) -> bool {
    cmp.major == ver.major
        && cmp.minor == Some(ver.minor)
        && cmp.patch == Some(ver.patch)
        && !cmp.pre.is_empty()
}