├── LICENSE-THIRD-PARTY ├── data └── tests │ └── fixtures │ ├── .gitkeep │ └── crates │ └── .gitkeep ├── rust-toolchain ├── CONTRIBUTING.md ├── SECURITY.md ├── Dockerfile ├── src ├── server │ ├── mod.rs │ ├── model.rs │ ├── git_protocol.rs │ └── file_server.rs ├── commands │ ├── command_prelude.rs │ ├── mod.rs │ ├── server.rs │ ├── rustup_init.rs │ ├── channel.rs │ └── crates.rs ├── main.rs ├── handler │ ├── mod.rs │ ├── rustup.rs │ ├── channel.rs │ ├── crates_file.rs │ └── index.rs ├── errors.rs ├── cloud │ ├── s3.rs │ └── mod.rs ├── config.default.toml ├── cli.rs ├── download.rs └── config.rs ├── .github ├── dependabot.yml └── workflows │ └── ci.yaml ├── .gitignore ├── LICENSE-MIT ├── Cargo.toml ├── README.md ├── CODE_OF_CONDUCT.md └── LICENSE-APACHE /LICENSE-THIRD-PARTY: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /data/tests/fixtures/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /rust-toolchain: -------------------------------------------------------------------------------- 1 | stable 2 | -------------------------------------------------------------------------------- /data/tests/fixtures/crates/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ## Contributing to Open Rust Initiative 2 | 3 | -------------------------------------------------------------------------------- /SECURITY.md: -------------------------------------------------------------------------------- 1 | # Security Policy 2 | 3 | ## Supported Versions 4 | 5 | ## Reporting a Vulnerability 6 | 
-------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | ARG RUNTIME_IMAGE 2 | FROM $RUNTIME_IMAGE 3 | COPY target/release/freighter /usr/local/bin 4 | ENTRYPOINT freighter 5 | -------------------------------------------------------------------------------- /src/server/mod.rs: -------------------------------------------------------------------------------- 1 | //! 2 | //! 3 | //! 4 | //! 5 | //! 6 | //! 7 | 8 | pub mod file_server; 9 | pub mod git_protocol; 10 | mod model; 11 | -------------------------------------------------------------------------------- /src/commands/command_prelude.rs: -------------------------------------------------------------------------------- 1 | //! provide common utils for add a flag to clap 2 | //! 3 | //! 4 | //! 5 | //! 6 | use clap::Command; 7 | pub use clap::{value_parser, Arg, ArgAction}; 8 | 9 | /// Add a custom flag to subcommand 10 | pub fn flag(name: &'static str, help: &'static str) -> Arg { 11 | Arg::new(name) 12 | .long(name) 13 | .help(help) 14 | .action(ArgAction::SetTrue) 15 | } 16 | 17 | pub fn subcommand(name: &'static str) -> Command { 18 | Command::new(name) 19 | } 20 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 
3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: "cargo" # See documentation for possible values 9 | directory: "/" # Location of package manifests 10 | schedule: 11 | interval: "weekly" -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Generated by Cargo 2 | # will have compiled files and executables 3 | /target/ 4 | 5 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries 6 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html 7 | Cargo.lock 8 | 9 | # These are backup files generated by rustfmt 10 | **/*.rs.bk 11 | 12 | 13 | # Added by cargo 14 | 15 | /target 16 | 17 | # Added by maintainer 18 | 19 | .idea 20 | 21 | # Added by cargo 22 | # 23 | # already existing elements were commented out 24 | 25 | # crates.io index and crates 26 | data/tests/fixtures/crates-io-index 27 | data/tests/fixtures/crates/*.crate 28 | data/** 29 | 30 | 31 | # VSCode 32 | .vscode -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | //! [Freighter](https://github.com/open-rust-initiative/freighter) is an open source project to help build the DevOps infrastructure for proxying the [crates.io](https://crates.io) 2 | //! and provide simple registry functionality for local development. 3 | //! 4 | //! 5 | //! 6 | //! 7 | mod cli; 8 | mod cloud; 9 | mod commands; 10 | mod config; 11 | mod download; 12 | mod errors; 13 | mod handler; 14 | mod server; 15 | 16 | /// 17 | /// Main entry point for the [Freighter](https://github.com/open-rust-initiative/freighter) application. 
18 | /// 19 | fn main() { 20 | let mut config = config::Config::new(); 21 | 22 | let result = cli::main(&mut config); 23 | 24 | if let Err(e) = result { 25 | e.print() 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/handler/mod.rs: -------------------------------------------------------------------------------- 1 | //! crates mod contains index, crates and rustup 2 | //! 3 | //! 4 | //! 5 | //! 6 | //! 7 | 8 | pub mod channel; 9 | pub mod crates_file; 10 | pub mod index; 11 | pub mod rustup; 12 | 13 | #[derive(Clone, Default, Debug)] 14 | pub enum DownloadMode { 15 | Init, 16 | // indicates this operation is fix error downloads 17 | Fix, 18 | #[default] 19 | Increment, 20 | } 21 | impl DownloadMode { 22 | pub fn new(init: bool, fix: bool) -> Self { 23 | if init { 24 | DownloadMode::Init 25 | } else if fix { 26 | DownloadMode::Fix 27 | } else { 28 | DownloadMode::Increment 29 | } 30 | } 31 | } 32 | 33 | pub mod utils { 34 | 35 | // the path rules of crates index file 36 | pub fn index_suffix(name: &str) -> String { 37 | match name.len() { 38 | 1..=2 => format!("{}/{}", name.len(), name), 39 | 3 => format!("{}/{}/{}", name.len(), &name[0..1], name), 40 | _ => format!("{}/{}/{}", &name[0..2], &name[2..4], name), 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /LICENSE-MIT: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2022 Open Rust Initiative 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the 
following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/commands/mod.rs: -------------------------------------------------------------------------------- 1 | //! The commands mod is a fork of [**cargo** commands struct](https://github.com/rust-lang/cargo/blob/master/src/bin/cargo/commands/mod.rs). 2 | //! 3 | //! 4 | //! 5 | //! 6 | 7 | use clap::ArgMatches; 8 | 9 | use crate::cli::App; 10 | use crate::config::Config; 11 | use crate::errors::FreightResult; 12 | 13 | pub mod channel; 14 | pub mod command_prelude; 15 | pub mod crates; 16 | pub mod rustup_init; 17 | pub mod server; 18 | 19 | /// The builtin function is the entry point of commands mod. Each subcommand is a 20 | /// `clap::Command<'static>` type, and the `exec` function is logic entry. 21 | /// Add the subcommand to the `Vec>` and will list in the **freighter** 22 | /// subcommands. 23 | /// 24 | /// Each subcommand is a mod in the `src/commands` directory, the `cli` function is the entry 25 | /// point and the `exec` function is logic entry. 
26 | /// 27 | pub fn builtin() -> Vec { 28 | vec![ 29 | crates::cli(), 30 | rustup_init::cli(), 31 | channel::cli(), 32 | server::cli(), 33 | ] 34 | } 35 | 36 | /// 37 | /// 38 | /// 39 | pub fn builtin_exec(cmd: &str) -> Option FreightResult> { 40 | let f = match cmd { 41 | "crates" => crates::exec, 42 | "rustup" => rustup_init::exec, 43 | "channel" => channel::exec, 44 | "server" => server::exec, 45 | _ => return None, 46 | }; 47 | 48 | Some(f) 49 | } 50 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "freighter-registry" 3 | version = "0.4.0" 4 | edition = "2021" 5 | license = "MIT" 6 | homepage = "https://github.com/open-rust-Initiative/freighter" 7 | repository = "https://github.com/open-rust-Initiative/freighter" 8 | description = "The Freighter's purpose is to help the community and company to build the proxy for crates.io and the registry for the development environment." 
9 | 10 | # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html 11 | 12 | [dependencies] 13 | anyhow = "1.0.79" 14 | thiserror = "1.0.56" 15 | clap = {version = "4.4.13", features = ["cargo"]} 16 | git2 = "0.18.1" 17 | git2-curl = "0.19" 18 | url = "2.5.0" 19 | serde = { version = "1.0.195", features = ["derive"] } 20 | serde_json = "1.0.111" 21 | walkdir = "2.4.0" 22 | reqwest = { version = "0.11.13", features = ["blocking"] } 23 | openssl = { version = "0.10.62", features = ["vendored"] } 24 | chrono = "0.4.31" 25 | sha2 = "0.10.8" 26 | dirs = "5.0.1" 27 | toml = "0.8.8" 28 | log4rs = {version = "1.2.0", features = ["toml_format"] } 29 | tokio = { version = "1.35.1", features = ["macros", "process"] } 30 | warp = { version = "0.3.6", features = ["tls"] } 31 | tokio-util = { version = "0.7.10", features = ["codec"] } 32 | tracing = "0.1.40" 33 | tracing-subscriber = "0.3.18" 34 | bytes = "1.5.0" 35 | async-trait = "0.1.77" 36 | tokio-test = "0.4.3" 37 | rayon = "1.8.0" 38 | 39 | 40 | [dev-dependencies] 41 | 42 | 43 | [build-dependencies] 44 | -------------------------------------------------------------------------------- /src/errors.rs: -------------------------------------------------------------------------------- 1 | //! 2 | //! 3 | //! 4 | //! 5 | //! 6 | //! 7 | 8 | pub type FreightResult = Result<(), FreighterError>; 9 | 10 | /// 11 | /// 12 | #[derive(Debug)] 13 | pub struct FreighterError { 14 | pub error: Option, 15 | pub code: i32, 16 | } 17 | 18 | /// The Freighter error is the error type used at Freighter's CLI and others. 
19 | /// 20 | impl FreighterError { 21 | pub fn new(error: anyhow::Error, code: i32) -> FreighterError { 22 | FreighterError { 23 | error: Some(error), 24 | code, 25 | } 26 | } 27 | 28 | pub fn unknown_command(cmd: String) -> FreighterError { 29 | FreighterError { 30 | error: anyhow::anyhow!("Unknown command: {}", cmd).into(), 31 | code: 1, 32 | } 33 | } 34 | 35 | pub fn code(code: i32) -> FreighterError { 36 | FreighterError { error: None, code } 37 | } 38 | 39 | pub fn print(&self) { 40 | tracing::info!("{}", self.error.as_ref().unwrap()); 41 | } 42 | } 43 | 44 | /// 45 | /// 46 | impl From for FreighterError { 47 | fn from(err: anyhow::Error) -> FreighterError { 48 | FreighterError::new(err, 101) 49 | } 50 | } 51 | 52 | /// 53 | /// 54 | impl From for FreighterError { 55 | fn from(err: clap::Error) -> FreighterError { 56 | let code = i32::from(err.use_stderr()); 57 | FreighterError::new(err.into(), code) 58 | } 59 | } 60 | 61 | /// 62 | /// 63 | impl From for FreighterError { 64 | fn from(err: std::io::Error) -> FreighterError { 65 | FreighterError::new(err.into(), 1) 66 | } 67 | } 68 | 69 | /// 70 | /// 71 | impl From for FreighterError { 72 | fn from(err: git2::Error) -> FreighterError { 73 | FreighterError::new(err.into(), 1) 74 | } 75 | } 76 | 77 | /// 78 | /// 79 | impl From for FreighterError { 80 | fn from(err: walkdir::Error) -> FreighterError { 81 | FreighterError::new(err.into(), 1) 82 | } 83 | } 84 | 85 | /// 86 | /// 87 | impl From for FreighterError { 88 | fn from(err: reqwest::Error) -> FreighterError { 89 | FreighterError::new(err.into(), 1) 90 | } 91 | } 92 | -------------------------------------------------------------------------------- /src/cloud/s3.rs: -------------------------------------------------------------------------------- 1 | //! 2 | //! 3 | //! 4 | //! 5 | //! 6 | //! 
7 | 8 | use std::{path::Path, process::Command}; 9 | 10 | use crate::errors::{FreightResult, FreighterError}; 11 | 12 | use super::CloudStorage; 13 | 14 | #[derive(Default, Clone)] 15 | pub struct S3cmd {} 16 | 17 | impl CloudStorage for S3cmd { 18 | fn upload_file(&self, file_path: &Path, s3_path: &str, bucket: &str) -> FreightResult { 19 | // cargo download url is https://crates.rust-lang.pub/crates/{name}/{version}/download 20 | // 21 | 22 | // Upload to the Digital Ocean Spaces with s3cmd 23 | // URL: s3://rust-lang/crates/{}/{} 24 | // cmd: s3cmd put {file_path} s3://rust-lang/crates/{s3_path} --acl-public 25 | // cmd: s3cmd put {file_path} s3://rust-lang/crates/{s3_path} --acl-public --no-mime-magic 26 | // cmd: s3cmd put {file_path} s3://rust-lang/crates/{s3_path} --acl-public --no-mime-magic --guess-mime-type 27 | // cmd: s3cmd put {file_path} s3://rust-lang/crates/{s3_path} --acl-public --no-mime-magic --guess-mime-type --add-header="Content-Type: application/octet-stream" 28 | let s3_full_path = format!("s3://{}/{}", bucket, s3_path); 29 | tracing::debug!("s3_full_path: {}", s3_full_path); 30 | let status = Command::new("s3cmd") 31 | .arg("put") 32 | .arg(file_path) 33 | .arg(s3_full_path) 34 | .arg("--acl-public") 35 | .status() 36 | .expect("failed to execute process"); 37 | if !status.success() { 38 | return Err(FreighterError::code(status.code().unwrap())); 39 | } 40 | Ok(()) 41 | } 42 | 43 | fn upload_folder(&self, folder: &str, bucket: &str) -> FreightResult { 44 | tracing::info!("syncing folder... 
{} to s3: {}", folder, bucket); 45 | let status = Command::new("s3cmd") 46 | .arg("sync") 47 | .arg(folder) 48 | .arg(format!("s3://{}/", bucket)) 49 | .arg("--acl-public") 50 | .status() 51 | .expect("failed to execute s3cmd sync"); 52 | if !status.success() { 53 | return Err(FreighterError::code(status.code().unwrap())); 54 | } 55 | Ok(()) 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /src/cloud/mod.rs: -------------------------------------------------------------------------------- 1 | //! provide common functionality for cloud operation 2 | //! 3 | //! 4 | 5 | use std::{ 6 | path::{Path, PathBuf}, 7 | sync::Arc, 8 | }; 9 | 10 | use rayon::ThreadPool; 11 | use walkdir::WalkDir; 12 | 13 | use crate::{errors::FreightResult, handler::crates_file::is_not_hidden}; 14 | 15 | use self::s3::S3cmd; 16 | 17 | pub mod s3; 18 | 19 | /// provide a common file upload interface 20 | pub trait CloudStorage { 21 | /// upload a single file to target storage 22 | fn upload_file(&self, file_path: &Path, s3_path: &str, bucket: &str) -> FreightResult; 23 | 24 | /// this operation will upload all files in folder 25 | fn upload_folder(&self, folder: &str, bucket: &str) -> FreightResult; 26 | } 27 | 28 | // this method is used to handle 'upload' subcommand for upload all files to obs server 29 | pub fn upload_with_pool( 30 | path: PathBuf, 31 | thread_pool: Arc, 32 | bucket_name: String, 33 | cloud_storage: S3cmd, 34 | ) -> FreightResult { 35 | let cloud = Arc::new(cloud_storage); 36 | let bucket_name = format!( 37 | "{}/{}", 38 | bucket_name, 39 | path.file_name().unwrap().to_str().unwrap() 40 | ); 41 | thread_pool.scope(|s| { 42 | WalkDir::new(path) 43 | .min_depth(1) 44 | .max_depth(1) 45 | .into_iter() 46 | .filter_entry(is_not_hidden) 47 | .filter_map(|v| v.ok()) 48 | .for_each(|x| { 49 | let bucket_name = bucket_name.clone(); 50 | let cloud_in = cloud.clone(); 51 | s.spawn(move |_| { 52 | let path = x.path(); 53 | cloud_in 54 | 
.upload_folder(path.to_str().unwrap(), &bucket_name) 55 | .unwrap(); 56 | }); 57 | }); 58 | }); 59 | Ok(()) 60 | } 61 | 62 | pub fn upload_single_dir( 63 | path: PathBuf, 64 | crates_name: String, 65 | bucket_name: String, 66 | cloud_storage: T, 67 | ) { 68 | let bucket_name = format!( 69 | "{}/{}", 70 | bucket_name, 71 | path.file_name().unwrap().to_str().unwrap() 72 | ); 73 | tracing::info!("bucket_path: {}", bucket_name); 74 | cloud_storage 75 | .upload_folder(path.join(crates_name).to_str().unwrap(), &bucket_name) 76 | .unwrap(); 77 | } 78 | -------------------------------------------------------------------------------- /src/commands/server.rs: -------------------------------------------------------------------------------- 1 | //! **server** subcommand focus on start a git proxy server and file server to fetch from local or upstream. The core 2 | //! function implemented in the `src/server/server`. 3 | //! 4 | //! 5 | //! **server** subcommand provide major functions include: 6 | //! 7 | //! Arguments: 8 | //! - __ip__: start server with ip address 9 | //! - __port__: start server with port 10 | //! 11 | //! # handle crates file requests 12 | //! - crates file is in the format of "/crates/crates-name/0.1.0/download" 13 | //! 14 | //! # handle rustup-init file requests 15 | //! - rustup-init file is in the format of "/rustup/dist/aarch64-fuchsia" 16 | //! 17 | //! # handle rust toolchain file requests 18 | //! - rust toolchain file is in the format of "/dist/2022-11-03/rust-1.65.0-aarch64-unknown-linux-gnu.tar.gz" 19 | //! 20 | //! # handle git client requests to crates.io-index 21 | //! 
- git client request is in the format of "/git/crates.io-index" 22 | 23 | use std::net::IpAddr; 24 | use std::path::PathBuf; 25 | 26 | use clap::{arg, ArgMatches}; 27 | 28 | use crate::commands::command_prelude::*; 29 | use crate::config::Config; 30 | use crate::errors::FreightResult; 31 | use crate::server::file_server::{self, FileServer}; 32 | 33 | pub fn cli() -> clap::Command { 34 | clap::Command::new("server") 35 | .arg( 36 | arg!(-i --"ip" "specify the ip address") 37 | .value_parser(value_parser!(IpAddr)) 38 | .default_value("127.0.0.1"), 39 | ) 40 | .arg( 41 | arg!(-p --"port" "specify the listening port") 42 | .value_parser(value_parser!(u16)) 43 | .default_value("8000"), 44 | ) 45 | .arg( 46 | arg!(-c --"cert-path" "Path to a TLS certificate file") 47 | .value_parser(value_parser!(PathBuf)), 48 | ) 49 | .arg( 50 | arg!(-k --"key-path" "Path to a TLS key file") 51 | .value_parser(value_parser!(PathBuf)), 52 | ) 53 | .about("Start git and file proxy server") 54 | .help_template( 55 | "\ 56 | Start the git proxy server and file http server. 57 | 58 | USAGE: 59 | {usage} 60 | 61 | OPTIONS: 62 | {options} 63 | 64 | EXAMPLES 65 | 1. Start server by port 8080 66 | 67 | freighter server -p 8080 68 | 69 | 2. 
Start server with certificate 70 | 71 | freighter server -p 443 --cert-path /home/cert --key-path /home/key 72 | 73 | \n", 74 | ) 75 | } 76 | 77 | /// 78 | /// 79 | /// 80 | /// 81 | pub fn exec(config: &mut Config, args: &ArgMatches) -> FreightResult { 82 | let addr: IpAddr = args.get_one::("ip").cloned().unwrap(); 83 | let port: u16 = args.get_one::("port").cloned().unwrap(); 84 | let cert_path: Option = args.get_one::("cert-path").cloned(); 85 | let key_path: Option = args.get_one::("key-path").cloned(); 86 | 87 | let file_server = &FileServer { 88 | cert_path, 89 | key_path, 90 | addr, 91 | port, 92 | }; 93 | 94 | file_server::start(config, file_server); 95 | Ok(()) 96 | } 97 | -------------------------------------------------------------------------------- /src/handler/rustup.rs: -------------------------------------------------------------------------------- 1 | //! 2 | //! 3 | //! 4 | //! 5 | //! 6 | //! 7 | 8 | use rayon::{ThreadPool, ThreadPoolBuilder}; 9 | use std::{path::PathBuf, sync::Arc}; 10 | use url::Url; 11 | 12 | use crate::{ 13 | config::ProxyConfig, 14 | config::RustUpConfig, 15 | download::{download_and_check_hash, download_file_with_sha, DownloadOptions}, 16 | errors::FreightResult, 17 | }; 18 | 19 | //rustup support platforms, see https://doc.rust-lang.org/beta/rustc/platform-support.html 20 | const PLATFORMS: &[&str] = &[ 21 | "aarch64-linux-android", 22 | "aarch64-unknown-linux-gnu", 23 | "arm-linux-androideabi", 24 | "arm-unknown-linux-gnueabi", 25 | "arm-unknown-linux-gnueabihf", 26 | "armv7-linux-androideabi", 27 | "armv7-unknown-linux-gnueabihf", 28 | "i686-apple-darwin", 29 | "i686-linux-android", 30 | "i686-unknown-linux-gnu", 31 | "mips-unknown-linux-gnu", 32 | "mips64-unknown-linux-gnuabi64", 33 | "mips64el-unknown-linux-gnuabi64", 34 | "mipsel-unknown-linux-gnu", 35 | "powerpc-unknown-linux-gnu", 36 | "powerpc64-unknown-linux-gnu", 37 | "powerpc64le-unknown-linux-gnu", 38 | "s390x-unknown-linux-gnu", 39 | "x86_64-apple-darwin", 40 
| "x86_64-linux-android", 41 | "x86_64-unknown-freebsd", 42 | "x86_64-unknown-linux-gnu", 43 | "x86_64-unknown-linux-musl", 44 | "x86_64-unknown-netbsd", 45 | "i686-pc-windows-gnu", 46 | "i686-pc-windows-msvc", 47 | "x86_64-pc-windows-gnu", 48 | "x86_64-pc-windows-msvc", 49 | ]; 50 | 51 | #[derive(Debug, Clone)] 52 | pub struct RustUpOptions { 53 | pub config: RustUpConfig, 54 | 55 | pub proxy: ProxyConfig, 56 | 57 | pub rustup_path: PathBuf, 58 | 59 | pub thread_pool: Arc, 60 | } 61 | 62 | impl Default for RustUpOptions { 63 | fn default() -> Self { 64 | let thread_pool = Arc::new(ThreadPoolBuilder::new().build().unwrap()); 65 | RustUpOptions { 66 | thread_pool, 67 | config: RustUpConfig::default(), 68 | proxy: ProxyConfig::default(), 69 | rustup_path: PathBuf::default(), 70 | } 71 | } 72 | } 73 | 74 | /// entrance function 75 | pub fn sync_rustup_init(opts: &RustUpOptions) -> FreightResult { 76 | let download_url = format!("{}/rustup/release-stable.toml", opts.config.domain); 77 | let file = opts.rustup_path.join("release-stable.toml"); 78 | let down_opts = &DownloadOptions { 79 | proxy: opts.proxy.clone(), 80 | url: Url::parse(&download_url).unwrap(), 81 | path: file, 82 | }; 83 | 84 | download_and_check_hash(down_opts, None, true).unwrap(); 85 | 86 | opts.thread_pool.scope(|s| { 87 | PLATFORMS.iter().for_each(|platform| { 88 | let rustup_path = opts.rustup_path.clone(); 89 | let file_name = if platform.contains("windows") { 90 | "rustup-init.exe".to_owned() 91 | } else { 92 | "rustup-init".to_owned() 93 | }; 94 | let domain = opts.config.domain.clone(); 95 | let proxy = opts.proxy.clone(); 96 | s.spawn(move |_| { 97 | let download_url = format!("{}/rustup/dist/{}/{}", domain, platform, file_name); 98 | let folder = rustup_path.join("dist").join(platform); 99 | download_file_with_sha(&download_url, &folder, &file_name, &proxy).unwrap(); 100 | }); 101 | }); 102 | }); 103 | 104 | Ok(()) 105 | } 106 | 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Freighter - Rust Proxy and Registry 2 | 3 | The Freighter's purpose is to help the community and company to build the proxy for crates.io and the registry for the development environment. 4 | 5 | ### Why need the Freighter? 6 | 7 | When developing a program using Rust in a company, we need to host a proxy for crates.io and private crates registry for the following reasons: 8 | 9 | - The developers will only be allowed use crates of the company with security and complete evaluation. 10 | - Some crates need to upgrade functions or fix bugs, and the new version does not allow developers to contribute upstream. 11 | - Some private crates share with different teams and products in the development process. 12 | 13 | ### What are the features? 14 | 15 | - The Freighter is a crate registry for private crates, public crates index and crates sync from crates.io. The registry can store the files in the local disk or storage service compliance S3. 16 | - The Freighter has an analytics engine for rating public crates. 17 | - The Freighter has a blacklist and whitelist of evaluated crates used in DevOps. 18 | 19 | ### How to use the Freighter? 20 | 21 | Freighter's functionality mainly consists of four parts: synchronizing crates index and crates; syncing the rustup-init files; syncing the rust toolchain files; providing a HTTP server that support static file server, parse the Git protocol, and offering API services such as crate publication. 22 | 23 | Freighter can be executed as a standalone executable program. You can build it using the **cargo build --release** command and then copy it to your `/usr/local/bin directory`. 24 | 25 | #### 1. Prerequisite 26 | * Freighter defaults to storing data in the **default directory /Users/${USERNAME}/freighter**. 
To customize the storage directory, modify the config.toml file. 27 | * The config file can be obtained by executing any command; Freighter copies the config.default.toml file to the default directory. 28 | * Alternatively, the config file can be manually copied from the source code directory src, which is named **config.default.toml**. 29 | * Customize the storage path for data by modifying configurations like **log_path, index_path, crates_path**, etc., in the config file. 30 | * Freighter uses the config.toml configuration in the default directory for its operations. To use a custom config path, add the -c parameter when executing a command. For example: 31 | ```bash 32 | freighter -c /path/to/config.toml 33 | ``` 34 | You can specify paths like /tmp/freighter/config.toml, /tmp/freighter, or /tmp, and Freighter will automatically interpret them as /tmp/freighter/config.toml. 35 | 36 | #### 2. Synchronizing Crates Index and Crates 37 | To sync crate files, Freighter needs to first sync the crates index. You can use the following command to sync the index file: 38 | 39 | ```bash 40 | freighter crates pull 41 | ``` 42 | 43 | This command will create a crates.io-index directory in the default path **/Users/${USERNAME}/freighter** and fetch the index. If the index already exists, it will attempt to update it. 44 | 45 | **Full download**: Next, you can use the download command with the init parameter to download the full set of crates files: 46 | 47 | ```bash 48 | freighter crates download --init 49 | ``` 50 | 51 | **Incremental update**: Without the init parameter, Freighter will compare log records in the **working directory** to determine the index and crates that need incremental updates: 52 | 53 | ```bash 54 | freighter crates download 55 | ``` 56 | 57 | #### 3.Syncing the rustup-init Files 58 | #### 4.Syncing the Rust Toolchain Files 59 | #### 5.Http Server 60 | 61 | ### How to contribute? 62 | 63 | This project enforces the [DCO](https://developercertificate.org). 
64 | 65 | Contributors sign-off that they adhere to these requirements by adding a Signed-off-by line to commit messages. 66 | 67 | ```bash 68 | This is my commit message 69 | 70 | Signed-off-by: Random J Developer 71 | ``` 72 | 73 | Git even has a -s command line option to append this automatically to your commit message: 74 | 75 | ```bash 76 | $ git commit -s -m 'This is my commit message' 77 | ``` 78 | 79 | ### License 80 | 81 | Freighter is licensed under this Licensed: 82 | 83 | * MIT LICENSE ( [LICENSE-MIT](LICENSE-MIT) or https://opensource.org/licenses/MIT) 84 | * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or https://www.apache.org/licenses/LICENSE-2.0) 85 | 86 | ### Acknowledgements 87 | -------------------------------------------------------------------------------- /src/config.default.toml: -------------------------------------------------------------------------------- 1 | 2 | 3 | [log] 4 | # The path which log file is saved 5 | log_path = "" 6 | 7 | # log format encoder, see https://docs.rs/log4rs/1.2.0/log4rs/append/file/struct.FileAppenderDeserializer.html#configuration 8 | # for more information 9 | encoder = "{d}:{l} - {m}{n}" 10 | 11 | # log level 12 | level = "info" 13 | 14 | # log file will be deleted and regenerated if exceeded the threshold, unit is MB 15 | limit = 100 16 | 17 | 18 | [crates] 19 | # The path which the crates index file is saved 20 | index_path = "" 21 | 22 | # The path which the crates file is saved 23 | crates_path = "" 24 | 25 | # download index from domain 26 | index_domain = "https://github.com/rust-lang/crates.io-index.git" 27 | 28 | # download crates from domain 29 | domain = "https://static.crates.io/crates" 30 | 31 | # Number of crates download threads 32 | download_threads = 16 33 | 34 | # When providing services, freighter will retrieve files from the specified location in the following sequence and 35 | # return them to the requesting client. 
localhost means read from local filesystem 36 | serve_domains = [ 37 | "https://rust-proxy.obs.cn-east-3.myhuaweicloud.com", 38 | # "localhost", 39 | # "https://rsproxy.cn", 40 | # "https://static.crates.io", 41 | ] 42 | 43 | #(optional) set up a git local path you want to serve 44 | serve_index = "/opt/rust/" 45 | 46 | [rustup] 47 | # The path which the rustup file is saved 48 | rustup_path = "" 49 | 50 | # The path which the rustup release sha file is saved 51 | dist_path = "" 52 | 53 | # which domain to download rustup from 54 | domain = "https://static.rust-lang.org" 55 | 56 | # Number of rust toolchain download threads 57 | download_threads = 16 58 | 59 | # released rust versions that you want to sync with 60 | sync_stable_versions = [ 61 | "1.9", 62 | "1.10", 63 | "1.11", 64 | "1.12", 65 | "1.12.1", 66 | "1.13", 67 | "1.14", 68 | "1.15", 69 | "1.15.1", 70 | "1.16", 71 | "1.17", 72 | "1.18", 73 | "1.19", 74 | "1.20", 75 | "1.21", 76 | "1.22", 77 | "1.22.1", 78 | "1.23", 79 | "1.24", 80 | "1.24.1", 81 | "1.25", 82 | "1.26", 83 | "1.26.1", 84 | "1.26.2", 85 | "1.27", 86 | "1.27.1", 87 | "1.27.2", 88 | "1.28", 89 | "1.29", 90 | "1.29.1", 91 | "1.29.2", 92 | "1.30", 93 | "1.30.1", 94 | "1.31", 95 | "1.31.1", 96 | "1.32", 97 | "1.33", 98 | "1.34", 99 | "1.34.1", 100 | "1.34.2", 101 | "1.35", 102 | "1.36", 103 | "1.37", 104 | "1.38", 105 | "1.39", 106 | "1.40", 107 | "1.41", 108 | "1.41.1", 109 | "1.42", 110 | "1.43", 111 | "1.43.1", 112 | "1.44", 113 | "1.44.1", 114 | "1.45", 115 | "1.45.1", 116 | "1.45.2", 117 | "1.46", 118 | "1.47", 119 | "1.48", 120 | "1.49", 121 | "1.50", 122 | "1.51", 123 | "1.52", 124 | "1.52.1", 125 | "1.53", 126 | "1.54", 127 | "1.55", 128 | "1.56", 129 | "1.56.1", 130 | "1.57", 131 | "1.58", 132 | "1.58.1", 133 | "1.59", 134 | "1.60", 135 | "1.61", 136 | "1.62", 137 | "1.62.1", 138 | "1.63", 139 | "1.64", 140 | "1.65", 141 | "1.66", 142 | "1.66.1", 143 | "1.67", 144 | "1.67.1", 145 | "1.68", 146 | "1.68.1", 147 | "1.68.2", 148 | "1.69", 
149 | "1.70", 150 | "1.71", 151 | "1.71.1", 152 | "1.72", 153 | "1.72.1", 154 | "1.73", 155 | "1.74", 156 | "1.74.1", 157 | "1.75", 158 | ] 159 | 160 | # days you want to keep for historical nightly version 161 | sync_nightly_days = 30 162 | 163 | # days you want to keep for historical beta version 164 | sync_beta_days = 30 165 | 166 | # When providing services, freighter will retrieve files from the specified location in the following sequence and 167 | # return them to the requesting client. localhost means read from local filesystem 168 | serve_domains = [ 169 | "https://rust-proxy.obs.cn-east-3.myhuaweicloud.com", 170 | # "localhost", 171 | # "https://rsproxy.cn", 172 | # "https://static.rust-lang.org", 173 | ] 174 | 175 | # (optional) set this value to sync historical nightly and beta version 176 | history_version_start_date = "2016-03-01" 177 | 178 | 179 | [proxy] 180 | # use true flag to enable proxy 181 | enable = false 182 | 183 | # used for git index proxy 184 | git_index_proxy = "http://127.0.0.1:6780" 185 | 186 | # used for crates and toolchain download proxy 187 | download_proxy = "http://127.0.0.1:6780" 188 | -------------------------------------------------------------------------------- /src/commands/rustup_init.rs: -------------------------------------------------------------------------------- 1 | //! **rustup** subcommand focus on download rustup init files and toolchains from upstream. The core 2 | //! function implemented in the `src/crates/rustup`. 3 | //! 4 | //! 5 | //! **rustup** subcommand provide major functions include: 6 | //! 7 | //! Arguments: 8 | //! - __domain__: you can choose your own upstream by adding this argument in command 9 | //! - __download-threads__: specify the download threads to parallel download, 10 | //! this param can be changed in the configuration file or pass it here 11 | //! 12 | //! # download subcommand 13 | //! - sync rustup init from upstream to local 14 | //! 
- download subcommand will fetch only the latest version of init file, and this can't be changed by config. 15 | //! - before each download, freighter will try to fetch the sha256 of the file and compare with local file if it exists 16 | //! and will skip downloading if they are matching. 17 | //! 18 | //! # upload subcommand 19 | //! upload file to Object Storage Service compatible with [AWS S3](https://aws.amazon.com/s3/) 20 | //! - Digitalocean Spaces 21 | //! - Huawei Cloud OBS 22 | //! - Alibaba Cloud OSS 23 | //! - Tencent Cloud COS 24 | //! - AWS S3 25 | //! - minio 26 | //! - Ceph 27 | //! 28 | //! Arguments: 29 | //! - __bucket__: set the s3 bucket you want to upload files to, you must provide this param before upload. 30 | //! 31 | 32 | use std::sync::Arc; 33 | 34 | use clap::{arg, ArgMatches}; 35 | use rayon::ThreadPoolBuilder; 36 | 37 | use crate::cloud::s3::S3cmd; 38 | use crate::cloud::CloudStorage; 39 | use crate::commands::command_prelude::*; 40 | use crate::config::Config; 41 | use crate::errors::FreightResult; 42 | use crate::handler::rustup::{sync_rustup_init, RustUpOptions}; 43 | 44 | pub fn cli() -> clap::Command { 45 | clap::Command::new("rustup") 46 | .subcommand(subcommand("download")) 47 | .subcommand(subcommand("upload") 48 | .arg( 49 | arg!(-b --"bucket" "set the s3 bucket you want to upload files to") 50 | .required(true) 51 | )) 52 | .subcommand_required(true) 53 | .arg_required_else_help(true) 54 | .about("Sync the Rustup toolchain from the upstream to the local registry") 55 | .arg(arg!(-t --"download-threads" "specify the download thread count") 56 | .value_parser(value_parser!(usize)) 57 | ) 58 | .arg(arg!(-d --"domain" "specify the source you want to sync from")) 59 | .help_template( 60 | "\ 61 | Sync the rustup init files from the upstream(static.rust-lang.org) to the local filesystem, other cloud 62 | storage services, or other registries. 63 | 64 | USAGE: 65 | {usage} 66 | 67 | OPTIONS: 68 | {options} 69 | 70 | EXAMPLES 71 | 1. 
Download toolchains from source domain(not necessary, default from static.rust-lang.org) 72 | with 64 download threads 73 | 74 | freighter rustup -t 64 -d https://www.example.com download 75 | 76 | 2. Upload rustup init file to s3 bucket: 77 | 78 | freighter rustup upload -b bucket-name 79 | 80 | \n") 81 | } 82 | 83 | /// 84 | /// 85 | /// 86 | pub fn exec(config: &mut Config, args: &ArgMatches) -> FreightResult { 87 | crate::cli::init_log(&config.log, &config.log_path, "rustup").unwrap(); 88 | 89 | let mut opts = RustUpOptions { 90 | proxy: config.proxy.to_owned(), 91 | config: config.rustup.to_owned(), 92 | rustup_path: config.rustup_path.to_owned(), 93 | ..Default::default() 94 | }; 95 | 96 | if let Some(domain) = args.get_one::("domain").cloned() { 97 | opts.config.domain = domain; 98 | } 99 | 100 | if let Some(download_threads) = args.get_one::("download-threads").cloned() { 101 | opts.config.download_threads = download_threads; 102 | }; 103 | 104 | opts.thread_pool = Arc::new( 105 | ThreadPoolBuilder::new() 106 | .num_threads(opts.config.download_threads) 107 | .build() 108 | .unwrap(), 109 | ); 110 | 111 | tracing::info!("RustUpOptions info : {:#?}", opts); 112 | 113 | match args.subcommand() { 114 | Some(("download", _)) => sync_rustup_init(&opts)?, 115 | Some(("upload", args)) => { 116 | let bucket_name = args.get_one::("bucket").cloned().unwrap(); 117 | let s3cmd = S3cmd::default(); 118 | s3cmd 119 | .upload_folder(opts.rustup_path.to_str().unwrap(), &bucket_name) 120 | .unwrap(); 121 | } 122 | Some((cmd, _)) => { 123 | unreachable!("unexpected command {}", cmd) 124 | } 125 | None => { 126 | unreachable!("unexpected command") 127 | } 128 | }; 129 | 130 | Ok(()) 131 | } 132 | -------------------------------------------------------------------------------- /src/server/model.rs: -------------------------------------------------------------------------------- 1 | use std::collections::BTreeMap; 2 | 3 | use serde::{Deserialize, Serialize}; 4 | 5 | 
#[derive(Debug, Serialize, Deserialize)] 6 | pub struct CratesPublish { 7 | // List of strings of the authors. 8 | // May be empty. 9 | pub authors: Vec, 10 | // Optional object of "status" badges. Each value is an object of 11 | // arbitrary string to string mappings. 12 | // crates.io has special interpretation of the format of the badges. 13 | pub badges: Badge, 14 | // Array of strings of categories for the package. 15 | pub categories: Vec, 16 | // Array of direct dependencies of the package. 17 | pub deps: Vec, 18 | // Description field from the manifest. 19 | // May be null. crates.io requires at least some content. 20 | pub description: String, 21 | // String of the URL to the website for this package's documentation. 22 | // May be null. 23 | pub documentation: String, 24 | // Set of features defined for the package. 25 | // Each feature maps to an array of features or dependencies it enables. 26 | // Cargo does not impose limitations on feature names, but crates.io 27 | // requires alphanumeric ASCII, `_` or `-` characters. 28 | pub features: BTreeMap>, 29 | // String of the URL to the website for this package's home page. 30 | // May be null. 31 | pub homepage: String, 32 | // Array of strings of keywords for the package. 33 | pub keywords: Vec, 34 | // String of the license for the package. 35 | // May be null. crates.io requires either `license` or `license_file` to be set. 36 | pub license: String, 37 | // String of a relative path to a license file in the crate. 38 | // May be null. 39 | pub license_file: Option, 40 | // The `links` string value from the package's manifest, or null if not 41 | // specified. This field is optional and defaults to null. 42 | pub links: Option, 43 | pub name: String, 44 | // String of the content of the README file. 45 | // May be null. 46 | pub readme: String, 47 | // String of a relative path to a README file in the crate. 48 | // May be null. 
49 | pub readme_file: String, 50 | // String of the URL to the website for the source repository of this package. 51 | // May be null. 52 | pub repository: String, 53 | // The version of the package being published. 54 | pub vers: String, 55 | } 56 | 57 | #[derive(Debug, Serialize, Deserialize)] 58 | pub struct Dep { 59 | // Boolean of whether or not default features are enabled. 60 | pub default_features: bool, 61 | // Array of features (as strings) enabled for this dependency. 62 | pub features: Vec, 63 | // The dependency kind. 64 | // "dev", "build", or "normal". 65 | pub kind: String, 66 | // Name of the dependency. 67 | // If the dependency is renamed from the original package name, 68 | // this is the original name. The new package name is stored in 69 | // the `explicit_name_in_toml` field. 70 | pub name: String, 71 | // Boolean of whether or not this is an optional dependency. 72 | pub optional: bool, 73 | // The URL of the index of the registry where this dependency is 74 | // from as a string. If not specified or null, it is assumed the 75 | // dependency is in the current registry. 76 | pub registry: String, 77 | // The target platform for the dependency. 78 | // null if not a target dependency. 79 | // Otherwise, a string such as "cfg(windows)". 80 | pub target: Option, 81 | // The semver requirement for this dependency. 82 | pub version_req: String, 83 | // If the dependency is renamed, this is a string of the new 84 | // package name. If not specified or null, this dependency is not 85 | // renamed. 86 | pub explicit_name_in_toml: Option, 87 | } 88 | 89 | #[derive(Debug, Serialize, Deserialize)] 90 | pub struct Badge {} 91 | 92 | #[derive(Debug, Serialize, Deserialize, Default)] 93 | pub struct PublishRsp { 94 | // Optional object of warnings to display to the user. 
95 | pub warnings: Warning, 96 | } 97 | 98 | #[derive(Debug, Serialize, Deserialize, Default)] 99 | pub struct Warning { 100 | // Array of strings of categories that are invalid and ignored. 101 | pub invalid_categories: Vec, 102 | // Array of strings of badge names that are invalid and ignored. 103 | pub invalid_badges: Vec, 104 | // Array of strings of arbitrary warnings to display to the user. 105 | pub other: Vec, 106 | } 107 | 108 | #[derive(Debug, Serialize, Deserialize, Default)] 109 | pub struct Errors { 110 | // Array of errors to display to the user. 111 | pub errors: Vec, 112 | } 113 | 114 | impl Errors { 115 | pub fn new(detail: String) -> Errors { 116 | Errors { 117 | errors: vec![ErrorDetail { detail }], 118 | } 119 | } 120 | } 121 | 122 | #[derive(Debug, Serialize, Deserialize)] 123 | pub struct ErrorDetail { 124 | // The error message as a string. 125 | pub detail: String, 126 | } 127 | -------------------------------------------------------------------------------- /src/cli.rs: -------------------------------------------------------------------------------- 1 | //! parse config from config.toml and read work-dir argument if provided. 2 | //! 3 | //! 4 | //! Arguments: 5 | //! - __work-dir__(optional): specify the work dir, where to download crates, rust toolchains and storage logs, default: $HOME/.freighter 6 | //! 7 | //! example: 8 | //! 9 | //! ```bash 10 | //! freighter --work-dir /mnt/data/ 11 | //! or 12 | //! freighter -c /mnt/data/ 13 | //! ``` 14 | //! 
15 | 16 | use std::path::{PathBuf, Path}; 17 | use std::str::FromStr; 18 | 19 | use clap::{arg, crate_version, ArgMatches}; 20 | use log4rs::append::console::ConsoleAppender; 21 | use log4rs::append::rolling_file::policy::compound::roll::delete::DeleteRoller; 22 | use log4rs::append::rolling_file::policy::compound::trigger::size::SizeTrigger; 23 | use log4rs::append::rolling_file::policy::compound::CompoundPolicy; 24 | use log4rs::append::rolling_file::RollingFileAppender; 25 | use log4rs::config::runtime::Config as Log4rsConfig; 26 | use log4rs::config::Logger; 27 | use log4rs::config::{Appender, Root}; 28 | use log4rs::encode::pattern::PatternEncoder; 29 | use tracing::log::LevelFilter; 30 | 31 | use crate::commands::{self}; 32 | use crate::config::{Config, LogConfig}; 33 | use crate::errors::{FreightResult, FreighterError}; 34 | 35 | /// 36 | /// 37 | /// 38 | /// 39 | /// 40 | 41 | pub type App = clap::Command; 42 | 43 | /// 44 | /// 45 | pub fn main(config: &mut Config) -> FreightResult { 46 | // log4rs::init_file("log4rs.yaml", Default::default()).unwrap(); 47 | 48 | let args = cli().try_get_matches().unwrap_or_else(|e| e.exit()); 49 | 50 | let config_parent = args.get_one::("config-path").cloned().map(PathBuf::from); 51 | let mut config = config.load(config_parent); 52 | 53 | let (cmd, subcommand_args) = match args.subcommand() { 54 | Some((cmd, args)) => (cmd, args), 55 | _ => { 56 | // No subcommand provided. 
57 | cli().print_help()?; 58 | return Ok(()); 59 | } 60 | }; 61 | 62 | execute_subcommand(&mut config, cmd, subcommand_args) 63 | } 64 | 65 | /// 66 | /// 67 | fn cli() -> App { 68 | let usage = "freighter [SUBCOMMAND]"; 69 | 70 | App::new("freighter") 71 | .version(crate_version!()) 72 | .disable_colored_help(true) 73 | .disable_help_subcommand(true) 74 | .override_usage(usage) 75 | .author("Open Rust Initiative") 76 | .arg(arg!(-c --"config-path" "specify the config path, default: $HOME/freighter/config.toml") 77 | ) 78 | .help_template( 79 | "\ 80 | Freighter - A crate registry from the Open Rust Initiative Community 81 | 82 | USAGE: 83 | {usage} 84 | 85 | Some common freighter commands are (see all commands with --list): 86 | crates Sync the index and crate files from the upstream to local, cloud or registry 87 | rustup Sync the rustup files from the upstream to local, cloud or registry 88 | channel Sync the toolchain files from the upstream to local, cloud or registry 89 | server Start git and file http server 90 | 91 | See 'freighter help ' for more information on a specific command.\n" 92 | ) 93 | .subcommands(commands::builtin()) 94 | } 95 | 96 | /// 97 | /// 98 | pub fn execute_subcommand(config: &mut Config, cmd: &str, args: &ArgMatches) -> FreightResult { 99 | if let Some(f) = commands::builtin_exec(cmd) { 100 | f(config, args) 101 | } else { 102 | Err(FreighterError::unknown_command(cmd.to_string())) 103 | } 104 | } 105 | /// read values(log format encoder, log limit and level) from config file 106 | /// and then initialize config for log4rs, log will preserve in /log_path/log by default 107 | pub fn init_log(config: &LogConfig, log_path: &Path, sub_command: &str) -> FreightResult { 108 | // attach file name 109 | let binding = log_path.join(format!("{}.log", sub_command)); 110 | let log_path = binding.to_str().unwrap(); 111 | let level = LevelFilter::from_str(&config.level).unwrap(); 112 | 113 | let encoder = PatternEncoder::new(&config.encoder); 114 | 
115 | let stdout = ConsoleAppender::builder() 116 | .encoder(Box::new(encoder.clone())) 117 | .build(); 118 | 119 | let policy = CompoundPolicy::new( 120 | Box::new(SizeTrigger::new(config.limit * 1024 * 1024)), 121 | Box::::default(), 122 | ); 123 | 124 | let file = RollingFileAppender::builder() 125 | .encoder(Box::new(encoder)) 126 | .build(log_path, Box::new(policy)) 127 | .unwrap(); 128 | 129 | let log4rs_config = Log4rsConfig::builder() 130 | .appender(Appender::builder().build("stdout", Box::new(stdout))) 131 | .appender(Appender::builder().build("file", Box::new(file))) 132 | .logger( 133 | Logger::builder() 134 | .appender("file") 135 | .additive(false) 136 | .build("app::file", level), 137 | ) 138 | .build( 139 | Root::builder() 140 | .appender("stdout") 141 | .appender("file") 142 | .build(level), 143 | ) 144 | .unwrap(); 145 | 146 | log4rs::init_config(log4rs_config).unwrap(); 147 | 148 | Ok(()) 149 | } 150 | -------------------------------------------------------------------------------- /.github/workflows/ci.yaml: -------------------------------------------------------------------------------- 1 | name: Continuous Integration (CI) 2 | on: 3 | push: 4 | branches: 5 | - main 6 | 7 | jobs: 8 | test: 9 | runs-on: ubuntu-22.04 10 | timeout-minutes: 10 11 | steps: 12 | - uses: actions/checkout@v3 13 | with: 14 | submodules: true 15 | - uses: Swatinem/rust-cache@v2 16 | - name: cargo test 17 | run: | 18 | rustup component add rustfmt 19 | cargo check 20 | cargo fmt --check 21 | cargo test --workspace 22 | release: 23 | name: release ${{ matrix.binary.name }} ${{ matrix.arch.name }} 24 | needs: [test] 25 | runs-on: ubuntu-22.04 26 | strategy: 27 | fail-fast: false 28 | matrix: 29 | binary: 30 | - workspace: . 
31 | name: freighter 32 | arch: 33 | - name: amd64 34 | runtime-image: "debian@sha256:9b42b2e7eddd84eaddb67b45567cdc0e03ec826bf252352f300147cfb8ce5a6d" 35 | - name: aarch64 36 | runtime-image: "debian@sha256:c583ed77e10b69b167c09cba3d82f903a9a7af481cbb75166d5307135f7b2c77" 37 | steps: 38 | - name: Checkout 39 | uses: actions/checkout@v3 40 | with: 41 | submodules: true 42 | - name: Setup QEMU 43 | if: matrix.arch.name != 'amd64' 44 | run: docker run --rm --privileged multiarch/qemu-user-static --reset -p yes 45 | - name: Setup Rust 46 | run: | 47 | rm -rf ~/.cargo 48 | docker run -d --name=init \ 49 | -v $PWD/${{ matrix.binary.workspace }}:/repo/ -w /repo \ 50 | ghcr.io/innopals/rust-centos7:$(cat rust-toolchain)-${{ matrix.arch.name }} \ 51 | tail -f /etc/issue 52 | docker cp init:/root/.cargo ~/.cargo 53 | mkdir -p ${HOME}/.local/bin 54 | cd ${HOME}/.local/bin 55 | echo '#!/bin/bash' > rustc 56 | echo 'docker exec init rustc $@' > rustc 57 | echo '#!/bin/bash' > cargo 58 | echo 'docker exec init cargo $@' > cargo 59 | chmod +x * 60 | rustc -vV 61 | - uses: Swatinem/rust-cache@v2 62 | with: 63 | key: ${{ matrix.binary.workspace }}/${{ matrix.binary.name }}-${{ matrix.arch.name }} 64 | cache-on-failure: true 65 | workspaces: ${{ matrix.binary.workspace }} -> target 66 | - name: Start Build Container 67 | run: | 68 | docker run -d --name=build \ 69 | -v $PWD/${{ matrix.binary.workspace }}:/repo/ -v $HOME/.cargo:/root/.cargo -w /repo \ 70 | ghcr.io/innopals/rust-centos7:$(cat rust-toolchain)-${{ matrix.arch.name }} \ 71 | tail -f /etc/issue 72 | - name: Build Artifacts 73 | run: | 74 | docker exec build bash -c 'set -e 75 | cargo build --release 76 | strip target/release/${{ matrix.binary.name }} 77 | cp target/release/${{ matrix.binary.name }} target/release/${{ matrix.binary.name }}-${{ matrix.arch.name }} 78 | ' 79 | sudo chown -R runner:docker ${{ matrix.binary.workspace }}/target 80 | sudo chown -R runner:docker $HOME/.cargo 81 | - name: Publish Artifacts 82 | 
if: startsWith(github.ref, 'refs/tags/') 83 | uses: softprops/action-gh-release@v1 84 | with: 85 | files: | 86 | ${{ matrix.binary.workspace }}/target/release/${{ matrix.binary.name }}-${{ matrix.arch.name }} 87 | - name: Draft Artifacts 88 | if: ${{ !startsWith(github.ref, 'refs/tags/') }} 89 | uses: softprops/action-gh-release@v1 90 | with: 91 | draft: true 92 | name: "latest" 93 | tag_name: "latest" 94 | files: | 95 | ${{ matrix.binary.workspace }}/target/release/${{ matrix.binary.name }}-${{ matrix.arch.name }} 96 | - name: Log in to the Container registry 97 | uses: docker/login-action@v2 98 | with: 99 | registry: ghcr.io 100 | username: ${{ github.actor }} 101 | password: ${{ secrets.GITHUB_TOKEN }} 102 | - name: Nightly Docker Image 103 | run: | 104 | export IMAGE_TAG="ghcr.io/${{ github.repository }}-nightly:${{ matrix.arch.name }}-${{ github.sha }}" 105 | echo "Building image $IMAGE_TAG" 106 | docker buildx build \ 107 | --push \ 108 | --build-arg RUNTIME_IMAGE=${{ matrix.arch.runtime-image }} \ 109 | -t $IMAGE_TAG . 
110 | release-image: 111 | if: startsWith(github.ref, 'refs/tags/') 112 | name: release docker image 113 | needs: [release] 114 | runs-on: ubuntu-22.04 115 | steps: 116 | - name: Log in to the Container registry 117 | uses: docker/login-action@v2 118 | with: 119 | registry: ghcr.io 120 | username: ${{ github.actor }} 121 | password: ${{ secrets.GITHUB_TOKEN }} 122 | - name: Publish Docker Image 123 | run: | 124 | docker pull ghcr.io/${{ github.repository }}-nightly:amd64-${{ github.sha }} 125 | docker pull ghcr.io/${{ github.repository }}-nightly:aarch64-${{ github.sha }} 126 | export IMAGE_TAG="ghcr.io/${{ github.repository }}:${{ github.ref_name }}" 127 | docker manifest create $IMAGE_TAG \ 128 | --amend ghcr.io/${{ github.repository }}-nightly:amd64-${{ github.sha }} \ 129 | --amend ghcr.io/${{ github.repository }}-nightly:aarch64-${{ github.sha }} 130 | docker manifest push --purge $IMAGE_TAG 131 | -------------------------------------------------------------------------------- /src/server/git_protocol.rs: -------------------------------------------------------------------------------- 1 | #![allow(incomplete_features)] 2 | use bytes::{Buf, BufMut, BytesMut}; 3 | use std::{collections::HashMap, path::PathBuf, process::Stdio}; 4 | use tokio::{ 5 | io::{AsyncBufReadExt, AsyncReadExt, AsyncWriteExt, BufReader}, 6 | process::{ChildStdout, Command}, 7 | }; 8 | 9 | use warp::{ 10 | http, 11 | hyper::{body::Sender, Body, Response}, 12 | Rejection, 13 | }; 14 | 15 | use crate::errors::FreighterError; 16 | 17 | #[derive(Default)] 18 | pub struct GitCommand {} 19 | 20 | /// ### References Codes 21 | /// 22 | /// - [conduit-git-http-backend][]. 
23 | /// 24 | /// 25 | /// handle request from git client 26 | impl GitCommand { 27 | pub async fn git_info_refs( 28 | &self, 29 | mut body: impl Buf, 30 | work_dir: PathBuf, 31 | ) -> Result, Rejection> { 32 | let mut cmd = Command::new("git"); 33 | // git 数据检查 34 | cmd.args([ 35 | "upload-pack", 36 | // "--http-backend-info-refs", 37 | "--stateless-rpc", 38 | "--advertise-refs", 39 | work_dir.join("crates.io-index").to_str().unwrap(), 40 | ]); 41 | cmd.stdin(Stdio::piped()).stdout(Stdio::piped()); 42 | 43 | let p = cmd.spawn().unwrap(); 44 | let mut git_input = p.stdin.unwrap(); 45 | 46 | while body.has_remaining() { 47 | git_input.write_all_buf(&mut body.chunk()).await.unwrap(); 48 | 49 | let cnt = body.chunk().len(); 50 | body.advance(cnt); 51 | } 52 | let git_output = BufReader::new(p.stdout.unwrap()); 53 | let mut headers = HashMap::new(); 54 | headers.insert( 55 | "Content-Type".to_string(), 56 | "application/x-git-upload-pack-advertisement".to_string(), 57 | ); 58 | headers.insert( 59 | "Cache-Control".to_string(), 60 | "no-cache, max-age=0, must-revalidate".to_string(), 61 | ); 62 | tracing::info!("headers: {:?}", headers); 63 | let mut resp = Response::builder(); 64 | for (key, val) in headers { 65 | resp = resp.header(&key, val); 66 | } 67 | 68 | let (sender, body) = Body::channel(); 69 | tokio::spawn(send(sender, git_output, true)); 70 | 71 | let resp = resp.body(body).unwrap(); 72 | Ok(resp) 73 | } 74 | 75 | pub async fn git_upload_pack( 76 | &self, 77 | mut body: impl Buf, 78 | work_dir: PathBuf, 79 | method: http::Method, 80 | content_type: Option, 81 | ) -> Result, Rejection> { 82 | let mut cmd = Command::new("git"); 83 | cmd.arg("http-backend"); 84 | cmd.env("GIT_PROJECT_ROOT", &work_dir); 85 | cmd.env("PATH_INFO", "/crates.io-index/git-upload-pack"); 86 | cmd.env("REQUEST_METHOD", method.as_str()); 87 | // cmd.env("QUERY_STRING", query); 88 | if let Some(content_type) = content_type { 89 | cmd.env("CONTENT_TYPE", content_type); 90 | } 91 | 
cmd.env("GIT_HTTP_EXPORT_ALL", "true"); 92 | cmd.stderr(Stdio::inherit()); 93 | cmd.stdout(Stdio::piped()); 94 | cmd.stdin(Stdio::piped()); 95 | 96 | let p = cmd.spawn().unwrap(); 97 | let mut git_input = p.stdin.unwrap(); 98 | 99 | while body.has_remaining() { 100 | git_input.write_all_buf(&mut body.chunk()).await.unwrap(); 101 | 102 | let cnt = body.chunk().len(); 103 | println!( 104 | "request body: {:?}", 105 | String::from_utf8(body.chunk().to_vec()).unwrap() 106 | ); 107 | body.advance(cnt); 108 | } 109 | 110 | let mut git_output = BufReader::new(p.stdout.unwrap()); 111 | 112 | let mut headers = HashMap::new(); 113 | loop { 114 | let mut line = String::new(); 115 | git_output.read_line(&mut line).await.unwrap(); 116 | let line = line.trim_end(); 117 | if line.is_empty() { 118 | break; 119 | } 120 | if let Some((key, value)) = line.split_once(": ") { 121 | headers.insert(key.to_string(), value.to_string()); 122 | } 123 | } 124 | tracing::info!("headers: {:?}", headers); 125 | let mut resp = Response::builder(); 126 | for (key, val) in headers { 127 | resp = resp.header(&key, val); 128 | } 129 | 130 | let (sender, body) = Body::channel(); 131 | tokio::spawn(send(sender, git_output, false)); 132 | let resp = resp.body(body).unwrap(); 133 | Ok(resp) 134 | } 135 | } 136 | 137 | async fn send( 138 | mut sender: Sender, 139 | mut git_output: BufReader, 140 | add_refs: bool, 141 | ) -> Result<(), FreighterError> { 142 | if add_refs { 143 | let mut buf = BytesMut::new(); 144 | buf.put(&b"001e# service=git-upload-pack\n0000"[..]); 145 | sender.send_data(buf.freeze()).await.unwrap(); 146 | } 147 | 148 | loop { 149 | let mut bytes_out = BytesMut::new(); 150 | let res = git_output.read_buf(&mut bytes_out).await; 151 | tracing::info!("reading output :{:?}", res); 152 | if bytes_out.is_empty() { 153 | tracing::info!("send:empty"); 154 | return Ok(()); 155 | } 156 | if add_refs { 157 | tracing::info!("send: bytes_out: {:?}", bytes_out.clone().freeze()); 158 | } 159 | 
sender.send_data(bytes_out.freeze()).await.unwrap(); 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Contributor Covenant Code of Conduct 2 | 3 | ## Our Pledge 4 | 5 | We as members, contributors, and leaders pledge to make participation in our 6 | community a harassment-free experience for everyone, regardless of age, body 7 | size, visible or invisible disability, ethnicity, sex characteristics, gender 8 | identity and expression, level of experience, education, socio-economic status, 9 | nationality, personal appearance, race, religion, or sexual identity 10 | and orientation. 11 | 12 | We pledge to act and interact in ways that contribute to an open, welcoming, 13 | diverse, inclusive, and healthy community. 14 | 15 | ## Our Standards 16 | 17 | Examples of behavior that contributes to a positive environment for our 18 | community include: 19 | 20 | * Demonstrating empathy and kindness toward other people 21 | * Being respectful of differing opinions, viewpoints, and experiences 22 | * Giving and gracefully accepting constructive feedback 23 | * Accepting responsibility and apologizing to those affected by our mistakes, 24 | and learning from the experience 25 | * Focusing on what is best not just for us as individuals, but for the 26 | overall community 27 | 28 | Examples of unacceptable behavior include: 29 | 30 | * The use of sexualized language or imagery, and sexual attention or 31 | advances of any kind 32 | * Trolling, insulting or derogatory comments, and personal or political attacks 33 | * Public or private harassment 34 | * Publishing others' private information, such as a physical or email 35 | address, without their explicit permission 36 | * Other conduct which could reasonably be considered inappropriate in a 37 | professional setting 38 | 39 | ## Enforcement Responsibilities 40 | 41 | 
Community leaders are responsible for clarifying and enforcing our standards of 42 | acceptable behavior and will take appropriate and fair corrective action in 43 | response to any behavior that they deem inappropriate, threatening, offensive, 44 | or harmful. 45 | 46 | Community leaders have the right and responsibility to remove, edit, or reject 47 | comments, commits, code, wiki edits, issues, and other contributions that are 48 | not aligned to this Code of Conduct, and will communicate reasons for moderation 49 | decisions when appropriate. 50 | 51 | ## Scope 52 | 53 | This Code of Conduct applies within all community spaces, and also applies when 54 | an individual is officially representing the community in public spaces. 55 | Examples of representing our community include using an official e-mail address, 56 | posting via an official social media account, or acting as an appointed 57 | representative at an online or offline event. 58 | 59 | ## Enforcement 60 | 61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 62 | reported to the community leaders responsible for enforcement at 63 | eli@patch.sh. 64 | All complaints will be reviewed and investigated promptly and fairly. 65 | 66 | All community leaders are obligated to respect the privacy and security of the 67 | reporter of any incident. 68 | 69 | ## Enforcement Guidelines 70 | 71 | Community leaders will follow these Community Impact Guidelines in determining 72 | the consequences for any action they deem in violation of this Code of Conduct: 73 | 74 | ### 1. Correction 75 | 76 | **Community Impact**: Use of inappropriate language or other behavior deemed 77 | unprofessional or unwelcome in the community. 78 | 79 | **Consequence**: A private, written warning from community leaders, providing 80 | clarity around the nature of the violation and an explanation of why the 81 | behavior was inappropriate. A public apology may be requested. 82 | 83 | ### 2. 
Warning 84 | 85 | **Community Impact**: A violation through a single incident or series 86 | of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No 89 | interaction with the people involved, including unsolicited interaction with 90 | those enforcing the Code of Conduct, for a specified period of time. This 91 | includes avoiding interactions in community spaces as well as external channels 92 | like social media. Violating these terms may lead to a temporary or 93 | permanent ban. 94 | 95 | ### 3. Temporary Ban 96 | 97 | **Community Impact**: A serious violation of community standards, including 98 | sustained inappropriate behavior. 99 | 100 | **Consequence**: A temporary ban from any sort of interaction or public 101 | communication with the community for a specified period of time. No public or 102 | private interaction with the people involved, including unsolicited interaction 103 | with those enforcing the Code of Conduct, is allowed during this period. 104 | Violating these terms may lead to a permanent ban. 105 | 106 | ### 4. Permanent Ban 107 | 108 | **Community Impact**: Demonstrating a pattern of violation of community 109 | standards, including sustained inappropriate behavior, harassment of an 110 | individual, or aggression toward or disparagement of classes of individuals. 111 | 112 | **Consequence**: A permanent ban from any sort of public interaction within 113 | the community. 114 | 115 | ## Attribution 116 | 117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], 118 | version 2.0, available at 119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. 120 | 121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct 122 | enforcement ladder](https://github.com/mozilla/diversity). 
123 | 124 | [homepage]: https://www.contributor-covenant.org 125 | 126 | For answers to common questions about this code of conduct, see the FAQ at 127 | https://www.contributor-covenant.org/faq. Translations are available at 128 | https://www.contributor-covenant.org/translations. 129 | -------------------------------------------------------------------------------- /src/download.rs: -------------------------------------------------------------------------------- 1 | //! 2 | //! 3 | //! 4 | //! 5 | //! 6 | //! 7 | 8 | use std::{ 9 | fs::{self, File}, 10 | io::{self, BufWriter}, 11 | path::{Path, PathBuf}, 12 | }; 13 | 14 | use crate::config::ProxyConfig; 15 | use crate::errors::FreighterError; 16 | 17 | use sha2::{Digest, Sha256}; 18 | use url::form_urlencoded::byte_serialize; 19 | use url::Url; 20 | 21 | pub trait Download { 22 | /// download file to a folder with given url and path 23 | /// return false if connect success but download failed 24 | fn download_to_folder(&self, msg: &str) -> Result; 25 | } 26 | 27 | /// use reqwest to handle https download requests 28 | pub struct BlockingReqwest { 29 | pub opts: DownloadOptions, 30 | } 31 | 32 | #[derive(Clone)] 33 | pub struct DownloadOptions { 34 | pub proxy: ProxyConfig, 35 | pub url: Url, 36 | pub path: PathBuf, 37 | } 38 | 39 | impl Download for BlockingReqwest { 40 | fn download_to_folder(&self, prefix_msg: &str) -> Result { 41 | let DownloadOptions { proxy, url, path } = &self.opts; 42 | 43 | let client_builder = reqwest::blocking::Client::builder(); 44 | let reqwest_client = if proxy.enable { 45 | let proxy = reqwest::Proxy::all(proxy.download_proxy.clone()).unwrap(); 46 | client_builder.proxy(proxy).build().unwrap() 47 | } else { 48 | client_builder.build().unwrap() 49 | }; 50 | let mut url = url.clone(); 51 | encode_huaweicloud_url(&mut url); 52 | let mut resp = reqwest_client.get(url.clone()).send()?; 53 | if resp.status().is_success() { 54 | // generate parent folder if not exist 55 | if let 
Some(parent) = path.parent() { 56 | if !parent.exists() { 57 | fs::create_dir_all(parent).unwrap(); 58 | } 59 | } 60 | let mut out = BufWriter::new(File::create(path).unwrap()); 61 | io::copy(&mut resp, &mut out).unwrap(); 62 | tracing::info!("{} {:?}", prefix_msg, out.get_ref()); 63 | } else { 64 | tracing::error!( 65 | "download failed, Please check your url: {}", 66 | url.to_string() 67 | ); 68 | return Ok(false); 69 | } 70 | Ok(true) 71 | } 72 | } 73 | 74 | // download remote sha file and then download file for hash check 75 | pub fn download_file_with_sha( 76 | url: &str, 77 | file_folder: &Path, 78 | file_name: &str, 79 | proxy: &ProxyConfig, 80 | ) -> Result { 81 | let sha_url = format!("{}{}", url, ".sha256"); 82 | let sha_name = format!("{}{}", file_name, ".sha256"); 83 | let sha_path = file_folder.join(sha_name); 84 | //always update sha256 file 85 | let down_sha = &DownloadOptions { 86 | proxy: proxy.clone(), 87 | url: Url::parse(&sha_url).unwrap(), 88 | path: sha_path, 89 | }; 90 | let res = download_and_check_hash(down_sha, None, true).unwrap(); 91 | if res { 92 | let content = fs::read_to_string(&down_sha.path).unwrap(); 93 | let sha256 = &content[..64]; 94 | let down_file = &DownloadOptions { 95 | proxy: proxy.clone(), 96 | url: Url::parse(url).unwrap(), 97 | path: file_folder.join(file_name), 98 | }; 99 | download_and_check_hash(down_file, Some(sha256), false) 100 | } else { 101 | Ok(false) 102 | } 103 | } 104 | 105 | /// download file from remote and calculate it's hash 106 | /// return true if download and success, return false if file already exists 107 | /// -- check_sum: weather need to check hash before download 108 | /// -- is_override: override file if check_sum is none 109 | pub fn download_and_check_hash( 110 | opts: &DownloadOptions, 111 | check_sum: Option<&str>, 112 | is_override: bool, 113 | ) -> Result { 114 | let br = BlockingReqwest { 115 | opts: opts.to_owned(), 116 | }; 117 | let path = &opts.path; 118 | if path.is_file() && 
path.exists() { 119 | let mut hasher = Sha256::new(); 120 | let mut file = File::open(path)?; 121 | io::copy(&mut file, &mut hasher)?; 122 | let result = hasher.finalize(); 123 | let hex = format!("{:x}", result); 124 | 125 | //if need to calculate hash 126 | if check_sum.is_some() { 127 | return if hex == check_sum.unwrap() { 128 | tracing::info!("###[ALREADY] \t{:?}", file); 129 | Ok(false) 130 | } else { 131 | tracing::warn!("!!![REMOVE] \t\t {:?} !", file); 132 | fs::remove_file(path)?; 133 | br.download_to_folder("!!![REMOVED DOWNLOAD] \t\t ") 134 | }; 135 | } else if !is_override { 136 | tracing::info!( 137 | "file exist but not pass check_sum, skipping download {}", 138 | path.display() 139 | ); 140 | return Ok(false); 141 | } 142 | } 143 | br.download_to_folder("&&&[NEW] \t\t ") 144 | } 145 | 146 | pub fn encode_huaweicloud_url(url: &mut Url) { 147 | if let Some(domain) = url.domain() { 148 | if domain.contains("myhuaweicloud.com") && url.path().starts_with("/crates") { 149 | let mut path = PathBuf::from(url.path()); 150 | let encode_path: String = 151 | byte_serialize(path.file_name().unwrap().to_str().unwrap().as_bytes()).collect(); 152 | path.pop(); 153 | path.push(&encode_path); 154 | url.set_path(path.to_str().unwrap()); 155 | } 156 | } 157 | } 158 | 159 | #[cfg(test)] 160 | mod tests { 161 | 162 | use reqwest::Url; 163 | 164 | use crate::download; 165 | 166 | #[test] 167 | fn test_huaweicloud_url_serial() { 168 | let mut url = Url::parse("https://rust-proxy.obs.cn-east-3.myhuaweicloud.com/crates/google-coordinate1/google-coordinate1-0.1.1+20141215.crate").unwrap(); 169 | download::encode_huaweicloud_url(&mut url); 170 | assert_eq!(url.to_string(), "https://rust-proxy.obs.cn-east-3.myhuaweicloud.com/crates/google-coordinate1/google-coordinate1-0.1.1%2B20141215.crate"); 171 | 172 | // Skip routes that don't start with /crates 173 | let mut url = 
Url::parse("https://rust-proxy.obs.cn-east-3.myhuaweicloud.com/dist/2023-06-05/google-coordinate1-0.1.1+20141215.crate").unwrap(); 174 | download::encode_huaweicloud_url(&mut url); 175 | assert_eq!(url.to_string(), "https://rust-proxy.obs.cn-east-3.myhuaweicloud.com/dist/2023-06-05/google-coordinate1-0.1.1+20141215.crate"); 176 | } 177 | } 178 | -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | //! 2 | //! 3 | //! 4 | //! 5 | //! 6 | 7 | use serde::{Deserialize, Serialize}; 8 | use std::{ 9 | fs, 10 | io::ErrorKind, 11 | path::{Path, PathBuf}, 12 | }; 13 | 14 | /// parse config from file 15 | #[derive(Serialize, Deserialize, Debug, Default, Clone)] 16 | pub struct Config { 17 | #[serde(default = "default_value_for_path")] 18 | pub index_path: PathBuf, 19 | #[serde(default = "default_value_for_path")] 20 | pub crates_path: PathBuf, 21 | #[serde(default = "default_value_for_path")] 22 | pub log_path: PathBuf, 23 | #[serde(default = "default_value_for_path")] 24 | pub rustup_path: PathBuf, 25 | #[serde(default = "default_value_for_path")] 26 | pub dist_path: PathBuf, 27 | 28 | pub crates: CratesConfig, 29 | pub rustup: RustUpConfig, 30 | pub log: LogConfig, 31 | pub proxy: ProxyConfig, 32 | } 33 | 34 | #[derive(Serialize, Deserialize, Debug, Default, Clone)] 35 | pub struct LogConfig { 36 | #[serde(deserialize_with = "path_option_from_str")] 37 | pub log_path: Option, 38 | pub encoder: String, 39 | pub level: String, 40 | pub limit: u64, 41 | } 42 | 43 | #[derive(Serialize, Deserialize, Debug, Clone, Default)] 44 | pub struct CratesConfig { 45 | #[serde(deserialize_with = "path_option_from_str")] 46 | pub index_path: Option, 47 | #[serde(deserialize_with = "path_option_from_str")] 48 | pub crates_path: Option, 49 | pub index_domain: String, 50 | pub domain: String, 51 | pub download_threads: usize, 52 | pub serve_domains: Option>, 53 | 
pub serve_index: Option, 54 | } 55 | 56 | /// config for rustup mirror sync 57 | #[derive(Serialize, Deserialize, Debug, Clone, Default)] 58 | pub struct RustUpConfig { 59 | #[serde(deserialize_with = "path_option_from_str")] 60 | pub rustup_path: Option, 61 | #[serde(deserialize_with = "path_option_from_str")] 62 | pub dist_path: Option, 63 | pub domain: String, 64 | pub download_threads: usize, 65 | pub sync_stable_versions: Vec, 66 | pub sync_nightly_days: i64, 67 | pub sync_beta_days: i64, 68 | pub serve_domains: Option>, 69 | pub history_version_start_date: Option, 70 | } 71 | 72 | #[derive(Serialize, Deserialize, Debug, Clone, Default)] 73 | pub struct ProxyConfig { 74 | pub enable: bool, 75 | pub git_index_proxy: String, 76 | pub download_proxy: String, 77 | } 78 | 79 | // deserialize a string from a TOML file into an Option 80 | fn path_option_from_str<'de, D>(deserializer: D) -> Result, D::Error> 81 | where 82 | D: serde::Deserializer<'de>, 83 | { 84 | let s: String = Deserialize::deserialize(deserializer)?; 85 | Ok(if s.is_empty() { 86 | None 87 | } else { 88 | Some(Path::new(&s).to_path_buf()) 89 | }) 90 | } 91 | 92 | fn default_value_for_path() -> PathBuf { 93 | PathBuf::new() 94 | } 95 | 96 | /// 97 | impl Config { 98 | pub fn new() -> Config { 99 | Config { 100 | index_path: PathBuf::new(), 101 | crates_path: PathBuf::new(), 102 | log_path: PathBuf::new(), 103 | rustup_path: PathBuf::new(), 104 | dist_path: PathBuf::new(), 105 | rustup: RustUpConfig::default(), 106 | crates: CratesConfig::default(), 107 | log: LogConfig::default(), 108 | proxy: ProxyConfig::default(), 109 | } 110 | } 111 | 112 | pub fn load(&self, config_parent: Option) -> Config { 113 | let config_path = format_path(&config_parent, "config.toml"); 114 | let mut config = Self::read_config_or_init(&config_path); 115 | 116 | config.index_path = format_path(&config.crates.index_path, "crates.io-index"); 117 | config.crates_path = format_path(&config.crates.crates_path, "crates"); 118 | 
config.log_path = format_path(&config.log.log_path, "log"); 119 | config.rustup_path = format_path(&config.rustup.rustup_path, "rustup"); 120 | config.dist_path = format_path(&config.rustup.dist_path, "dist"); 121 | config 122 | } 123 | 124 | // read from config file, default config file will be created if not exist 125 | pub fn read_config_or_init(config_path: &Path) -> Config { 126 | let content = match fs::read_to_string(config_path) { 127 | Ok(content) => content, 128 | Err(err) => match err.kind() { 129 | ErrorKind::NotFound => { 130 | if let Some(parent) = config_path.parent() { 131 | if !parent.exists() { 132 | fs::create_dir_all(parent).unwrap(); 133 | } 134 | } 135 | //rewrite config.default.toml under config_path 136 | fs::write(config_path, include_str!("config.default.toml")).unwrap(); 137 | fs::read_to_string(config_path).unwrap() 138 | } 139 | other_error => panic!("Can't read config file: {}", other_error), 140 | }, 141 | }; 142 | match toml::from_str(&content) { 143 | Ok(config) => config, 144 | Err(err) => panic!("Config file doesn't match, maybe it's outdated or you have provided a invalid value, 145 | you can manually delete it and try again. 
146 | Caused by {}", err), 147 | } 148 | } 149 | } 150 | 151 | pub fn format_path(config_path: &Option, name: &str) -> PathBuf { 152 | let default_dir = dirs::home_dir().unwrap().join("freighter"); 153 | let path = match config_path { 154 | Some(path) => path, 155 | None => &default_dir, 156 | }; 157 | let path_str = path.to_str().unwrap(); 158 | if !path_str.contains("freighter") { 159 | return path.join("freighter").join(name); 160 | } else if !path_str.contains(name) { 161 | return path.join(name); 162 | } 163 | path.to_path_buf() 164 | } 165 | 166 | #[cfg(test)] 167 | mod tests { 168 | use std::path::PathBuf; 169 | 170 | use crate::config::format_path; 171 | 172 | #[test] 173 | fn test_format_path() { 174 | // test config path 175 | let default = dirs::home_dir().unwrap(); 176 | assert_eq!(format_path(&None, "config.toml"), default.join("freighter/config.toml")); 177 | assert_eq!(format_path(&Some("/tmp".into()), "config.toml"), PathBuf::from("/tmp/freighter/config.toml")); 178 | 179 | // test index path 180 | assert_eq!(format_path(&None, "index"), default.join("freighter/index")); 181 | assert_eq!(format_path(&Some("/tmp".into()), "index"), PathBuf::from("/tmp/freighter/index")); 182 | assert_eq!(format_path(&Some("/tmp/freighter".into()), "index"), PathBuf::from("/tmp/freighter/index")); 183 | assert_eq!(format_path(&Some("/tmp/freighter/index".into()), "index"), PathBuf::from("/tmp/freighter/index")); 184 | } 185 | } 186 | -------------------------------------------------------------------------------- /src/commands/channel.rs: -------------------------------------------------------------------------------- 1 | //! **channel** subcommand focus on download rust toolchains from upstream. The core 2 | //! function implemented in the `src/crates/channel`. 3 | //! 4 | //! 5 | //! **channel** subcommand provide major functions include: 6 | //! 7 | //! Arguments: 8 | //! - __domain__: you can choose your own upstream by adding this argument in command 9 | //! 
- __download-threads__: specify the download threads to parallel download, 10 | //! this param can be changed in the configuration file or pass it here 11 | //! - __no-progressbar__: not implemented 12 | //! 13 | //! # download subcommand 14 | //! - before each download, freighter will try to fetch the sha256 of the file and compare with local file if it exists 15 | //! and will skip downloading if they are matching. 16 | //! 17 | //! - sync server rust toolchains version from upstream to local 18 | //! - by default, this subcommand will fetch latest stable, beta, nightly and 19 | //! the specified version in your toml config file: __rustup.sync_stable_versions__ 20 | //! - if you are using --version arguments in subcommand, freighter will only download the version you specified, 21 | //! learn more about [rust release info](https://forge.rust-lang.org/index.html) here 22 | //! - in the download process, freighter will first download the channel file, for example: channel-rust-1.29.toml 23 | //! 24 | //! Arguments: 25 | //! - __clean__: clean history files read by config file after download successfully. 26 | //! - __version__: only download the version you specified, 27 | //! you can provide any version format supported by rust-org, such as stable, beta or nightly-2022-07-31. 28 | //! 29 | //! # upload subcommand 30 | //! upload file to Object Storage Service compatible with [AWS S3](https://aws.amazon.com/s3/) 31 | //! - Digitalocean Spaces 32 | //! - Huawei Cloud OBS 33 | //! - Alibaba Cloud OSS 34 | //! - Tencent Cloud COS 35 | //! - AWS S3 36 | //! - minio 37 | //! - Ceph 38 | //! 39 | //! Arguments: 40 | //! - __bucket__: set the s3 bucket you want to upload files to, you must provide this param before upload. 41 | //! 
42 | 43 | use std::sync::Arc; 44 | 45 | use clap::{arg, ArgMatches}; 46 | use rayon::ThreadPoolBuilder; 47 | 48 | use crate::cloud; 49 | use crate::cloud::s3::S3cmd; 50 | use crate::commands::command_prelude::*; 51 | use crate::config::Config; 52 | use crate::errors::FreightResult; 53 | use crate::handler::channel::{sync_rust_toolchain, ChannelOptions}; 54 | 55 | pub fn cli() -> clap::Command { 56 | clap::Command::new("channel") 57 | .subcommand(subcommand("download") 58 | .arg(flag("clean", "clean up historical versions")) 59 | .arg(arg!(-v --"version" "only download the version you specified")) 60 | .arg(flag("init", "this command will download the histoey release stable version which you matain in your config file")) 61 | .arg(flag("upload", "upload every crate file after download")) 62 | .arg(flag("history", "only sync history nightly and beta versions")) 63 | .arg(arg!(-b --"bucket" "set the s3 bucket name you want to upload files")) 64 | .arg(flag("delete-after-upload", "this will delete file after upload")) 65 | ) 66 | .subcommand(subcommand("upload") 67 | .arg(arg!(-b --"bucket" "set the s3 bucket name you want to upload files") 68 | .required(true) 69 | )) 70 | .subcommand_required(true) 71 | .arg_required_else_help(true) 72 | .about("Sync the Rust toolchain from the upstream to the local registry") 73 | .arg(flag("no-progressbar", "Hide progressbar when start sync")) 74 | .arg(arg!(-t --"download-threads" "specify the download thread count") 75 | .value_parser(value_parser!(usize)) 76 | ) 77 | .arg(arg!(-d --"domain" "specify the source you want to sync from")) 78 | .help_template( 79 | "\ 80 | Sync the rust toolchain files from the upstream(static.rust-lang.org) to the local filesystem, other cloud 81 | storage services, or other registries. 82 | 83 | USAGE: 84 | {usage} 85 | 86 | OPTIONS: 87 | {options} 88 | 89 | EXAMPLES 90 | 1. 
Download toolchains from source domain(not necessary, default from static.rust-lang.org) 91 | with 64 download threads and then clean historical files 92 | 93 | freighter channel -t 64 -d https://www.example.com download --clean 94 | 95 | 2. Upload rust toolchains to s3 bucket: 96 | 97 | freighter channel upload -b bucket-name 98 | 99 | 3. Download specify version: 100 | 101 | freighter channel download -v nightly-2022-07-31 102 | 103 | \n") 104 | } 105 | 106 | /// 107 | /// 108 | /// 109 | pub fn exec(config: &mut Config, args: &ArgMatches) -> FreightResult { 110 | crate::cli::init_log(&config.log, &config.log_path, "channel").unwrap(); 111 | 112 | let mut opts = ChannelOptions { 113 | config: config.rustup.to_owned(), 114 | proxy: config.proxy.to_owned(), 115 | dist_path: config.dist_path.to_owned(), 116 | ..Default::default() 117 | }; 118 | 119 | if let Some(domain) = args.get_one::("domain").cloned() { 120 | opts.config.domain = domain; 121 | } 122 | 123 | if let Some(download_threads) = args.get_one::("download-threads").cloned() { 124 | opts.config.download_threads = download_threads; 125 | }; 126 | 127 | opts.thread_pool = Arc::new( 128 | ThreadPoolBuilder::new() 129 | .num_threads(opts.config.download_threads) 130 | .build() 131 | .unwrap(), 132 | ); 133 | 134 | tracing::info!("Default ChannelOptions : {:#?}", opts); 135 | 136 | match args.subcommand() { 137 | Some(("download", args)) => { 138 | let down_opts = &ChannelOptions { 139 | upload: args.get_flag("upload"), 140 | bucket: args.get_one::("bucket").cloned(), 141 | clean: args.get_flag("clean"), 142 | version: args.get_one::("version").cloned(), 143 | delete_after_upload: args.get_flag("delete-after-upload"), 144 | sync_history: args.get_flag("history"), 145 | init: args.get_flag("init"), 146 | ..opts 147 | }; 148 | if down_opts.upload && down_opts.bucket.is_none() { 149 | unreachable!("can not upload with empty bucket name") 150 | } 151 | sync_rust_toolchain(down_opts)? 
152 | } 153 | Some(("upload", args)) => { 154 | let bucket_name = args.get_one::("bucket").cloned().unwrap(); 155 | let s3cmd = S3cmd::default(); 156 | cloud::upload_with_pool(opts.dist_path, opts.thread_pool, bucket_name, s3cmd).unwrap(); 157 | } 158 | Some((cmd, _)) => { 159 | unreachable!("unexpected command {}", cmd) 160 | } 161 | None => { 162 | unreachable!("unexpected command") 163 | } 164 | }; 165 | 166 | Ok(()) 167 | } 168 | -------------------------------------------------------------------------------- /src/commands/crates.rs: -------------------------------------------------------------------------------- 1 | //! **crates** subcommand focus on the sync crates index and crate files from upstream. The core 2 | //! function implemented in the `src/crates/index`. 3 | //! 4 | //! **crates** subcommand provide major functions include: 5 | //! 6 | //! Arguments: 7 | //! - __domain__: you can choose your own upstream by adding this argument in command, 8 | //! this param can be changed in the configuration file or pass it here 9 | //! - __download-threads__: specify the download threads to parallel download, 10 | //! this param can be changed in the configuration file or pass it here 11 | //! - __no-progressbar__: Whether to hide progress bar when start downloading 12 | //! 13 | //! # pull subcommand 14 | //! 15 | //! sync crates index from upstream to local: 16 | //! 17 | //! - The crates index is a git repository, and **cargo** clone and update from [GitHub](https://github.com/rust-lang/crates.io-index). 18 | //! - The clone use `bare` mode, more details in the [cargo guide](https://github.com/rust-lang/cargo/blob/6b6b0b486d73c03ed952591d880debec1d47c534/src/doc/src/guide/cargo-home.md#directories) 19 | //! 20 | //! # download subcommand 21 | //! sync crate file from upstream to local: 22 | //! 23 | //! 
- The crate file of upstream location detail from [crates.io-index](https://github.com/rust-lang/crates.io-index/blob/master/.github/workflows/update-dl-url.yml) 24 | //! ```YAML 25 | //! env: 26 | //! URL_api: "https://crates.io/api/v1/crates" 27 | //! URL_cdn: "https://static.crates.io/crates/sync{crate}/{crate}-{version}.crate" 28 | //! URL_s3_primary: "https://crates-io.s3-us-west-1.amazonaws.com/crates/{crate}/{crate}-{version}.crate" 29 | //! URL_s3_fallback: "https://crates-io-fallback.s3-eu-west-1.amazonaws.com/crates/{crate}/{crate}-{version}.crate" 30 | //! ``` 31 | //! 32 | //! Arguments: 33 | //! - __init__: Whether to download all the crates files for initialization. 34 | //! - __upload__: Whether to upload single file to s3 after download success. 35 | //! - __bucket__: set the s3 bucket you want to upload files to, you must provide this param before upload. 36 | //! - __delete-after-upload__: This optional parameter will be used to delete files after upload. 37 | //! 38 | //! # upload subcommand 39 | //! 40 | //! - Sync crate file to Object Storage Service compatible with [AWS S3](https://aws.amazon.com/s3/) 41 | //! - Digitalocean Spaces 42 | //! - Huawei Cloud OBS 43 | //! - Alibaba Cloud OSS 44 | //! - Tencent Cloud COS 45 | //! - AWS S3 46 | //! - minio 47 | //! - Ceph 48 | //! Arguments: 49 | //! - __bucket__: set the s3 bucket you want to upload files to, you must provide this param before upload. 50 | //! 
51 | 52 | use std::sync::Arc; 53 | 54 | use clap::{arg, ArgMatches}; 55 | use rayon::ThreadPoolBuilder; 56 | 57 | use crate::commands::command_prelude::*; 58 | use crate::config::Config; 59 | use crate::errors::FreightResult; 60 | use crate::handler::crates_file::{download, upload_to_s3, CratesOptions}; 61 | use crate::handler::index::{pull, CrateIndex}; 62 | use crate::handler::DownloadMode; 63 | 64 | /// The __crates__ subcommand 65 | /// 66 | 67 | pub fn cli() -> clap::Command { 68 | clap::Command::new("crates") 69 | .arg(flag("no-progressbar", "Hide progressbar when start sync")) 70 | .arg(arg!(-t --"download-threads" "specify the download threads to parallel download, 71 | this param can be changed in the configuration file or pass it here") 72 | .value_parser(value_parser!(usize)) 73 | ) 74 | .arg(arg!(-d --"domain" "specify the source you want to sync from, 75 | this param can be changed in the configuration file or pass it here")) 76 | .subcommand(subcommand("pull")) 77 | .subcommand(subcommand("upload") 78 | .arg(arg!(-b --"bucket" "set the s3 bucket name you want to upload files").required(true)) 79 | .arg(arg!(--"name" "only upload specify crates")) 80 | ) 81 | .subcommand(subcommand("download") 82 | .arg(flag("init", "Start init download of crates file, this will traverse all index for full download")) 83 | .arg(flag("fix", "Handle the crates file that download failed, this opetion will traverse error log")) 84 | .arg(arg!(--"name" "only fix the crates you specified, this command will try to re-download the crates")) 85 | .arg(flag("upload", "upload every crate file after download")) 86 | .arg(arg!(-b --"bucket" "set the s3 bucket name you want to upload files")) 87 | .arg(flag("delete-after-upload", "this will delete file after upload")) 88 | ) 89 | .subcommand_required(true) 90 | .arg_required_else_help(true) 91 | .about("Sync the crates from the upstream(crates.io) to the local registry") 92 | .help_template( 93 | "\ 94 | Sync the crates index and 
crate files from the upstream(crates.io) to the local filesystem, other cloud 95 | storage services, or other registries. 96 | 97 | USAGE: 98 | {usage} 99 | 100 | OPTIONS: 101 | {options} 102 | 103 | EXAMPLES 104 | 1. Sync the crates index with specify config path 105 | 106 | freighter -c /mnt/freighter/config.toml crates pull 107 | 108 | 2. Download all crates file and unload: 109 | 110 | freighter crates download --init --upload --bucket crates 111 | 112 | 3. Download crates file with multi-thread: 113 | 114 | freighter crates -t 32 download --init 115 | 116 | \n") 117 | } 118 | 119 | /// 120 | /// 121 | /// 122 | pub fn exec(config: &mut Config, args: &ArgMatches) -> FreightResult { 123 | crate::cli::init_log(&config.log, &config.log_path, "crates").unwrap(); 124 | 125 | let opts = &mut CratesOptions { 126 | config: config.crates.to_owned(), 127 | proxy: config.proxy.to_owned(), 128 | index: CrateIndex::new(&config.crates.index_domain, config.index_path.to_owned()), 129 | no_progressbar: args.get_flag("no-progressbar"), 130 | crates_path: config.crates_path.to_owned(), 131 | log_path: config.log_path.to_owned(), 132 | ..Default::default() 133 | }; 134 | let domain = args.get_one::("domain").cloned(); 135 | 136 | match args.get_one::("download-threads").cloned() { 137 | Some(download_threads) => opts.config.download_threads = download_threads, 138 | None => tracing::info!("use default thread count: {}", opts.config.download_threads), 139 | }; 140 | opts.thread_pool = Arc::new( 141 | ThreadPoolBuilder::new() 142 | .num_threads(opts.config.download_threads) 143 | .build() 144 | .unwrap(), 145 | ); 146 | 147 | tracing::info!("CratesOptions info : {:#?}", opts); 148 | 149 | match args.subcommand() { 150 | Some(("pull", _args)) => { 151 | if let Some(source) = domain { 152 | config.crates.index_domain = source; 153 | } 154 | pull(opts)? 
155 | } 156 | Some(("download", args)) => { 157 | opts.upload = args.get_flag("upload"); 158 | opts.download_mode = DownloadMode::new(args.get_flag("init"), args.get_flag("fix")); 159 | opts.delete_after_upload = args.get_flag("delete-after-upload"); 160 | opts.crates_name = args.get_one::("name").cloned(); 161 | if opts.upload { 162 | if let Some(bucket_name) = args.get_one::("bucket").cloned() { 163 | opts.bucket_name = bucket_name 164 | } else { 165 | unreachable!("can not upload with empty bucket name") 166 | } 167 | } 168 | if let Some(source) = domain { 169 | config.crates.domain = source; 170 | } 171 | download(opts)? 172 | } 173 | Some(("upload", args)) => { 174 | opts.bucket_name = args.get_one::("bucket").cloned().unwrap(); 175 | opts.crates_name = args.get_one::("name").cloned(); 176 | upload_to_s3(opts)? 177 | } 178 | Some((cmd, _)) => { 179 | unreachable!("unexpected command {}", cmd) 180 | } 181 | None => { 182 | unreachable!("unexpected command") 183 | } 184 | }; 185 | 186 | Ok(()) 187 | } 188 | -------------------------------------------------------------------------------- /LICENSE-APACHE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | https://www.apache.org/licenses/LICENSE-2.0 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. 
For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 
47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. 
Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright 2022 Open Rust Initiative 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | https://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /src/handler/channel.rs: -------------------------------------------------------------------------------- 1 | //! 2 | //! 3 | //! 4 | //! 5 | //! 6 | //! 

// NOTE(review): angle-bracketed generic arguments in this chunk were eaten by
// the dump's tag-stripping (e.g. `HashMap<String, Pkg>` appeared as bare
// `HashMap`). The type parameters below are reconstructed from how each value
// is used — confirm against the original source.

use std::{
    collections::HashMap,
    fs::{self, DirEntry},
    path::{Path, PathBuf},
    sync::Arc,
};

use chrono::{Duration, NaiveDate, Utc};
use rayon::{
    prelude::{IntoParallelRefIterator, ParallelIterator},
    ThreadPool, ThreadPoolBuilder,
};
use serde::Deserialize;
use url::Url;
use walkdir::WalkDir;

use crate::{
    cloud::{s3::S3cmd, CloudStorage},
    config::{ProxyConfig, RustUpConfig},
    download::{download_and_check_hash, download_file_with_sha, DownloadOptions},
    errors::{FreightResult, FreighterError},
};

/// Deserialized form of a `channel-rust-*.toml` release manifest.
#[derive(Debug, Deserialize)]
pub struct Channel {
    #[serde(alias = "manifest-version")]
    pub manifest_version: String,
    /// Channel release date, formatted `YYYY-MM-DD`.
    pub date: String,
    /// Package name -> package entry.
    pub pkg: HashMap<String, Pkg>,
}

/// One package entry of a channel manifest; keyed by target triple.
#[derive(Debug, Deserialize)]
pub struct Pkg {
    pub version: String,
    pub target: HashMap<String, Target>,
}

/// Per-target artifact URLs and hashes from the channel manifest.
// NOTE(review): `skip_serializing_if` has no effect on these structs — they
// only derive `Deserialize`, never `Serialize`.
#[derive(Debug, Deserialize)]
pub struct Target {
    pub available: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub hash: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub xz_url: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub xz_hash: Option<String>,
}

/// Options controlling a rustup/toolchain mirror sync run.
#[derive(Debug, Clone)]
pub struct ChannelOptions {
    pub config: RustUpConfig,

    pub proxy: ProxyConfig,

    /// Whether to clean historical versions.
    pub clean: bool,
    /// only sync that version
    pub version: Option<String>,

    /// Local root directory mirroring the upstream `…/dist` tree.
    pub dist_path: PathBuf,

    /// S3 bucket name; unwrapped when `upload` is true, so it must be set then.
    pub bucket: Option<String>,

    pub upload: bool,

    pub delete_after_upload: bool,

    pub sync_history: bool,

    pub init: bool,

    /// Shared rayon pool used for the parallel artifact downloads.
    pub thread_pool: Arc<ThreadPool>,
}

impl Default for ChannelOptions {
    fn default() -> Self {
        let thread_pool = Arc::new(ThreadPoolBuilder::new().build().unwrap());
        ChannelOptions {
            thread_pool,
            config: RustUpConfig::default(),
            proxy: ProxyConfig::default(),
            clean: false,
            version: None,
            dist_path: PathBuf::default(),
            bucket: None,
            upload: false,
            delete_after_upload: false,
            sync_history: false,
            init: false,
        }
    }
}

/// entrance function
///
/// Dispatches on the options: a single explicit version, a historical
/// back-fill, or the three rolling channels (plus optional pinned stable
/// versions), then optionally prunes old local channel files.
pub fn sync_rust_toolchain(opts: &ChannelOptions) -> FreightResult {
    let config = &opts.config;
    if let Some(version) = &opts.version {
        // step 1 : sync specified channel version
        sync_channel(opts, version)?;
    } else if opts.sync_history {
        // step 2: sync historical nightly and beta versions
        if let Some(date) = config.history_version_start_date.clone() {
            let start_date = NaiveDate::parse_from_str(&date, "%Y-%m-%d").unwrap();
            tracing::info!(
                "step 2: sync historical nightly and beta versions from {}",
                start_date
            );
            let today = Utc::now().date_naive();
            if today >= start_date {
                // `take(duration_days)` stops the day before `today`; today's
                // builds are picked up by the rolling-channel path instead.
                let duration_days = (today - start_date).num_days().try_into().unwrap();
                for day in start_date.iter_days().take(duration_days) {
                    sync_channel(opts, &format!("beta-{}", day))?;
                    sync_channel(opts, &format!("nightly-{}", day))?;
                }
            } else {
                tracing::error!("start date {} is after today {}", start_date, today);
            }
        }
    } else {
        // step 3.1: sync latest stable, beta and nightly channel
        tracing::info!("step 3.1: sync latest stable, beta and nightly channel");
        sync_channel(opts, "stable")?;
        sync_channel(opts, "beta")?;
        sync_channel(opts, "nightly")?;
        if opts.init {
            // step 3.2: sync specified channel version by config file
            tracing::info!("step 3.2:(optional) sync specified channel version by config file");
            config.sync_stable_versions.iter().for_each(|channel| {
                sync_channel(opts, channel).unwrap();
            });
        }
    }
    // step 3: clean local historical channel files if needed
    if opts.clean {
        let channels = [
            ("beta", config.sync_beta_days),
            ("nightly", config.sync_nightly_days),
        ];
        for channel in channels {
            clean_historical_version(&opts.dist_path, channel).unwrap();
        }
    }
    Ok(())
}

// sync the latest toolchain by given a channel name(stable, beta, nightly) or history version by version number
pub fn sync_channel(opts: &ChannelOptions, channel: &str) -> FreightResult {
    let channel_name;
    let channel_url;
    let channel_folder;
    tracing::info!("starting download channel: {}", channel);
    // Dated channels ("nightly-YYYY-MM-DD"/"beta-YYYY-MM-DD") live under a
    // per-date subfolder; everything else goes straight into dist_path.
    if let Some(date) = channel.strip_prefix("nightly-") {
        channel_name = String::from("channel-rust-nightly.toml");
        channel_url = format!("{}/dist/{}/{}", opts.config.domain, date, channel_name);
        channel_folder = opts.dist_path.to_owned().join(date);
    } else if let Some(date) = channel.strip_prefix("beta-") {
        channel_name = String::from("channel-rust-beta.toml");
        channel_url = format!("{}/dist/{}/{}", opts.config.domain, date, channel_name);
        channel_folder = opts.dist_path.to_owned().join(date);
    } else {
        channel_name = format!("channel-rust-{}.toml", channel);
        channel_url = format!("{}/dist/{}", opts.config.domain, channel_name);
        channel_folder = opts.dist_path.to_owned();
    }
    match download_file_with_sha(&channel_url, &channel_folder, &channel_name, &opts.proxy) {
        Ok(res) => {
            let channel_toml = &channel_folder.join(channel_name);
            // `res == false` means the manifest was not (re)downloaded; only
            // proceed if a previously-downloaded copy exists on disk.
            if !res && !channel_toml.exists() {
                tracing::error!("skipping channel: {}", channel);
                return Ok(());
            }
            // parse_channel_file and download;
            let download_list = parse_channel_file(channel_toml).unwrap();
            let s3cmd = Arc::new(S3cmd::default());
            opts.thread_pool.install(|| {
                download_list.par_iter().for_each(|(url, hash)| {
                    // example: https://static.rust-lang.org/dist/2022-11-03/rust-1.65.0-i686-pc-windows-gnu.msi
                    // these code was used to remove url prefix "https://static.rust-lang.org/dist"
                    // and get "2022-11-03/rust-1.65.0-i686-pc-windows-gnu.msi"
                    // NOTE(review): the `[4..]` slice panics if the URL has
                    // fewer than 5 `/`-separated segments.
                    let path: PathBuf = std::iter::once(opts.dist_path.to_owned())
                        .chain(
                            url.split('/').map(PathBuf::from).collect::<Vec<PathBuf>>()[4..]
                                .to_owned(),
                        )
                        .collect();
                    let (upload, dist_path, bucket, delete_after_upload) = (
                        opts.upload,
                        opts.dist_path.to_owned(),
                        opts.bucket.to_owned(),
                        opts.delete_after_upload,
                    );

                    // Rewrite the manifest's host to the configured mirror
                    // domain, keeping the original path.
                    let mut url = Url::parse(url).unwrap();
                    url.set_host(Url::parse(&opts.config.domain).unwrap().host_str())
                        .unwrap();

                    let down_opts = &DownloadOptions {
                        proxy: opts.proxy.clone(),
                        url,
                        path,
                    };
                    let path = &down_opts.path;
                    let downloaded = download_and_check_hash(down_opts, Some(hash), false).unwrap();
                    if downloaded && upload {
                        // S3 key: "dist" + path relative to the local dist root.
                        let s3_path = format!(
                            "dist{}",
                            path.to_str()
                                .unwrap()
                                .replace(dist_path.to_str().unwrap(), "")
                        );
                        let uploaded = s3cmd.upload_file(path, &s3_path, &bucket.unwrap());
                        if uploaded.is_ok() && delete_after_upload {
                            fs::remove_file(path).unwrap();
                        }
                    };
                });
            });

            replace_toml_and_sha(opts, s3cmd, channel_toml);
        }
        Err(_err) => {
            tracing::info!("skipping download channel:{}", channel);
        }
    }
    Ok(())
}

// upload toml file and sha256 after all files handle success
pub fn replace_toml_and_sha(opts: &ChannelOptions, s3cmd: Arc<S3cmd>, channel_toml: &Path) {
    let shafile = channel_toml.with_extension("toml.sha256");
    let files: Vec<&Path> = vec![channel_toml, &shafile];
    if opts.upload {
        for file in files {
            let s3_path = format!(
                "dist{}",
                file.to_str()
                    .unwrap()
                    .replace(opts.dist_path.to_str().unwrap(), "")
            );
            s3cmd
                .upload_file(file, &s3_path, &opts.bucket.clone().unwrap())
                .unwrap();
        }
    }
}
// parse channel file to get download url and hash
//
// Returns every (url, hash) pair found in the manifest: the xz variant when
// both xz fields are present, plus the plain variant when url/hash are
// present and non-empty.
pub fn parse_channel_file(path: &Path) -> Result<Vec<(String, String)>, FreighterError> {
    let content = fs::read_to_string(path).unwrap();
    let channel: Channel = toml::from_str(&content).unwrap();
    let res: Vec<(String, String)> = channel
        .pkg
        .into_iter()
        .flat_map(|(_, pkg)| {
            pkg.target
                .into_iter()
                .flat_map(|(_, target)| -> Vec<(String, String)> {
                    let mut result: Vec<(String, String)> = Vec::new();
                    if target.xz_url.is_some() && target.xz_hash.is_some() {
                        result.push((target.xz_url.unwrap(), target.xz_hash.unwrap()));
                    }
                    if target.url.is_some() && target.hash.is_some() {
                        let url = target.url.unwrap();
                        let hash = target.hash.unwrap();
                        if !url.is_empty() && !hash.is_empty() {
                            result.push((url, hash));
                        }
                    }
                    result
                })
        })
        .collect();
    Ok(res)
}

/// Delete channel files older than `sync_days` under per-date directories.
///
/// Only directories whose name parses as `YYYY-MM-DD` and is older than the
/// cutoff are visited (see `compare_date`); within them, files whose name
/// contains the channel string are removed, and the directory itself is
/// removed once empty.
pub fn clean_historical_version(dist_path: &PathBuf, channels: (&str, i64)) -> FreightResult {
    let (channel, sync_days) = channels;
    // filter dir less than sync_nightly_days ago
    fs::read_dir(dist_path)
        .unwrap()
        .filter_map(|v| v.ok())
        .filter(|entry| compare_date(entry, sync_days))
        .for_each(|entry| {
            WalkDir::new(entry.path())
                .into_iter()
                .filter_map(|f| f.ok())
                .for_each(|entry| {
                    let file_name = entry.file_name().to_str().unwrap();
                    if file_name.contains(channel) {
                        fs::remove_file(entry.path()).unwrap();
                        tracing::info!("!!![REMOVE] \t\t {:?} !", entry.path());
                    }
                });
            // remove whole directory when it's empty
            if entry.path().read_dir().unwrap().next().is_none() {
                fs::remove_dir_all(entry.path()).unwrap();
                tracing::info!("!!![REMOVE] \t\t {:?} !", entry.path());
            }
        });

    Ok(())
}

/// True when `entry` is a directory named `YYYY-MM-DD` that is more than
/// `sync_days` days old. Non-directories and unparsable names return false,
/// so they are never cleaned.
pub fn compare_date(entry: &DirEntry, sync_days: i64) -> bool {
    if entry.file_type().unwrap().is_dir() {
        let date = match NaiveDate::parse_from_str(entry.file_name().to_str().unwrap(), "%Y-%m-%d")
        {
            Ok(date) => date,
            Err(_) => {
                tracing::error!(
                    "can't parse dir :{} and skipping... ",
                    entry.path().display()
                );
                return false;
            }
        };
        Utc::now().date_naive() - date > Duration::days(sync_days)
    } else {
        false
    }
}
--------------------------------------------------------------------------------
/src/handler/crates_file.rs:
--------------------------------------------------------------------------------
//!
//!
//!
//!
//!
//!

// NOTE(review): angle-bracketed generic arguments in this chunk were eaten by
// the dump's tag-stripping; the type parameters below are reconstructed from
// usage — confirm against the original source.

use std::io::Write;

use std::collections::{BTreeMap, HashSet};
use std::fs::{self, File, OpenOptions};
use std::io::{BufRead, BufReader, ErrorKind};
use std::path::{Path, PathBuf};
use std::str;
use std::sync::{Arc, Mutex};

use chrono::Utc;
use rayon::{Scope, ThreadPool, ThreadPoolBuilder};
use serde::{Deserialize, Serialize};
use url::Url;
use walkdir::{DirEntry, WalkDir};

use crate::cloud::s3::S3cmd;
use crate::cloud::{self, CloudStorage};
use crate::config::{CratesConfig, ProxyConfig};
use crate::download::{download_and_check_hash, DownloadOptions};
use crate::errors::FreightResult;
use crate::handler::index;

use super::index::CrateIndex;
use super::{utils, DownloadMode};

/// CratesOptions preserve the sync subcommand config
#[derive(Clone, Debug)]
pub struct CratesOptions {
    pub config: CratesConfig,

    pub proxy: ProxyConfig,

    pub index: CrateIndex,

    /// Whether to hide progressbar when start sync.
    pub no_progressbar: bool,

    /// start traverse all directories
    pub download_mode: DownloadMode,

    pub upload: bool,

    /// Local root directory holding downloaded `.crate` files.
    pub crates_path: PathBuf,

    // handle a single crate with name
    pub crates_name: Option<String>,

    /// Directory containing pull-record and error logs.
    pub log_path: PathBuf,

    pub bucket_name: String,

    pub delete_after_upload: bool,

    /// Shared rayon pool used for the parallel crate downloads.
    pub thread_pool: Arc<ThreadPool>,
}

impl Default for CratesOptions {
    fn default() -> Self {
        let thread_pool = Arc::new(ThreadPoolBuilder::new().build().unwrap());
        CratesOptions {
            thread_pool,
            config: CratesConfig::default(),
            proxy: ProxyConfig::default(),
            index: CrateIndex::default(),
            no_progressbar: false,
            download_mode: DownloadMode::default(),
            upload: false,
            crates_path: PathBuf::default(),
            crates_name: None,
            log_path: PathBuf::default(),
            bucket_name: String::default(),
            delete_after_upload: false,
        }
    }
}

impl CratesOptions {
    // the path rules of craes index file
    pub fn get_index_path(&self, name: &str) -> PathBuf {
        let suffix = utils::index_suffix(name);
        self.index.path.join(suffix)
    }
}

/// Crate preserve the crates info parse from registry json file
#[derive(Serialize, Deserialize, Debug)]
pub struct IndexFile {
    pub name: String,
    pub vers: String,
    pub deps: Vec<Dependency>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cksum: Option<String>,
    pub features: BTreeMap<String, Vec<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub features2: Option<BTreeMap<String, Vec<String>>>,
    pub yanked: Option<bool>,
    #[serde(default)]
    pub links: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub v: Option<u32>,
}

/// One line of the error log: a crate version whose download failed,
/// with the unix timestamp (as string) of the failure.
#[derive(Serialize, Deserialize, Debug)]
pub struct ErrorCrate {
    pub name: String,
    pub vers: String,
    pub time: String,
}

/// Dependencies maintain relationships between crate
///
///
#[derive(Serialize, Deserialize, Debug, PartialEq, PartialOrd, Ord, Eq)]
pub struct Dependency {
    pub name: String,
    pub req: String,
    pub features: Vec<String>,
    pub optional: bool,
    pub default_features: bool,
    pub target: Option<String>,
    pub kind: Option<DependencyKind>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub package: Option<String>,
}

/// DependencyKind represents which stage the current dependency is
///
///
#[derive(Copy, Clone, Serialize, Deserialize, Debug, PartialEq, PartialOrd, Ord, Eq)]
#[serde(rename_all = "lowercase")]
pub enum DependencyKind {
    Normal,
    Build,
    Dev,
}

/// full download and Incremental download from registry
pub fn download(opts: &CratesOptions) -> FreightResult {
    match opts.download_mode {
        DownloadMode::Init => full_downloads(opts).unwrap(),
        DownloadMode::Fix => fix_download(opts).unwrap(),
        DownloadMode::Increment => incremental_download(opts).unwrap(),
    }
    Ok(())
}

///
///
/// ```YAML
///env:
///  URL_api: "https://crates.io/api/v1/crates"
///  URL_cdn: "https://static.crates.io/crates/{crate}/{crate}-{version}.crate"
///  URL_s3_primary: "https://crates-io.s3-us-west-1.amazonaws.com/crates/{crate}/{crate}-{version}.crate"
///  URL_s3_fallback: "https://crates-io-fallback.s3-eu-west-1.amazonaws.com/crates/{crate}/{crate}-{version}.crate"
/// ```
// Walks the whole local index tree and schedules a download for every
// non-JSON index file found (i.e. every crate), on the shared thread pool.
pub fn full_downloads(opts: &CratesOptions) -> FreightResult {
    let err_record = open_file_with_mutex(&opts.log_path);
    opts.thread_pool.scope(|s| {
        WalkDir::new(&opts.index.path)
            .into_iter()
            .filter_entry(is_not_hidden)
            .filter_map(|v| v.ok())
            .for_each(|x| {
                if x.file_type().is_file() && x.path().extension().unwrap_or_default() != "json" {
                    parse_index_and_download(&x.path().to_path_buf(), opts, s, &err_record)
                        .unwrap();
                }
            });
    });
    Ok(())
}

/// Download only the crates touched since the last recorded pull.
///
/// Finds today's record file under `log_path`, reads its last line
/// (`commit_a,commit_b`), and diffs the index between those commits.
/// Panics if no record file exists (i.e. `freighter crates pull` not run).
pub fn incremental_download(opts: &CratesOptions) -> FreightResult {
    tracing::info!("{:?}", opts.thread_pool);
    let it = WalkDir::new(&opts.log_path)
        .into_iter()
        .filter_entry(|e| {
            e.file_name()
                .to_str()
                .unwrap()
                .contains(&Utc::now().date_naive().to_string())
                || e.file_type().is_dir()
        })
        .filter_map(|v| v.ok());
    let mut input = match it.last() {
        Some(dir) => {
            if dir.file_type().is_file() {
                OpenOptions::new()
                    .read(true)
                    .write(true)
                    .open(dir.path())
                    .unwrap()
            } else {
                panic!("Cannot get record file, run freighter crates pull before download")
            }
        }
        None => panic!("Did you forget to run freighter crates pull before download?"),
    };
    let buffered = BufReader::new(&mut input);
    tracing::info!("crates.io-index modified:");
    let err_record = open_file_with_mutex(&opts.log_path);
    // get last line of record file
    // (read all lines, reverse, then `first()` == original last line)
    let mut lines: Vec<String> = buffered.lines().map(|line| line.unwrap()).collect();
    lines.reverse();
    if let Some(line) = lines.first() {
        let vec: Vec<&str> = line.split(',').collect();
        tracing::info!("{:?}", line);
        index::git2_diff(opts, vec[0], vec[1], err_record).unwrap();
    }
    Ok(())
}

/// fix the previous error download crates
// Re-reads error-crates.log, retries each distinct crate name once, then
// deletes the log.
// NOTE(review): retries append new failures to the same error-crates.log that
// is removed unconditionally at the end, so failures seen during a fix run
// are lost — confirm whether that is intended.
pub fn fix_download(opts: &CratesOptions) -> FreightResult {
    let file_name = &opts.log_path.join("error-crates.log");

    let mut visited: HashSet<String> = HashSet::new();
    let err_record_with_mutex = open_file_with_mutex(&opts.log_path);

    opts.thread_pool.scope(|s| {
        if opts.crates_name.is_some() {
            // Single-crate mode: retry just the named crate.
            let index_path = opts.get_index_path(&opts.crates_name.clone().unwrap());
            parse_index_and_download(&index_path, opts, s, &err_record_with_mutex).unwrap();
        } else {
            let err_record = OpenOptions::new().read(true).open(file_name).unwrap();
            let buffered = BufReader::new(err_record);
            for line in buffered.lines() {
                let line = line.unwrap();
                let c: ErrorCrate = serde_json::from_str(&line).unwrap();
                let ErrorCrate {
                    name,
                    vers,
                    time: _,
                } = c;
                if !visited.contains(&name) {
                    // The whole index file for `name` is re-processed, so one
                    // pass covers every failed version of that crate.
                    let index_path = opts.get_index_path(&name);
                    parse_index_and_download(&index_path, opts, s, &err_record_with_mutex).unwrap();
                    visited.insert(name.to_owned());
                    tracing::info!("handle success: {}-{}", &name, &vers);
                } else {
                    // skipping visited
                    tracing::info!("skip different version of same crates: {}-{}", &name, &vers);
                }
            }
        }
    });

    if opts.crates_name.is_none() {
        fs::remove_file(file_name).unwrap();
    }
    Ok(())
}

/// Upload the local crates tree (or a single crate's directory) to S3.
pub fn upload_to_s3(opts: &CratesOptions) -> FreightResult {
    let s3cmd = S3cmd::default();
    if opts.crates_name.is_none() {
        cloud::upload_with_pool(
            opts.crates_path.clone(),
            opts.thread_pool.clone(),
            opts.bucket_name.clone(),
            s3cmd,
        )
        .unwrap();
    } else {
        cloud::upload_single_dir(
            opts.crates_path.clone(),
            opts.crates_name.clone().unwrap(),
            opts.bucket_name.clone(),
            s3cmd,
        )
    }
    Ok(())
}

/// open error record file with Mutex
// Opens log_path/error-crates.log in append mode, creating it if missing;
// the Mutex serializes writes from the download worker threads.
pub fn open_file_with_mutex(log_path: &Path) -> Arc<Mutex<File>> {
    let file_name = log_path.join("error-crates.log");
    let err_record = match OpenOptions::new().write(true).append(true).open(&file_name) {
        Ok(f) => Arc::new(Mutex::new(f)),
        Err(err) => match err.kind() {
            ErrorKind::NotFound => Arc::new(Mutex::new(File::create(&file_name).unwrap())),
            other_error => panic!("something wrong: {}", other_error),
        },
    };
    err_record
}

/// Check whether the directory is hidden
pub fn is_not_hidden(entry: &DirEntry) -> bool {
    entry
        .file_name()
        .to_str()
        .map(|s| entry.depth() == 0 || !s.starts_with('.'))
        .unwrap_or(false)
}

/// Parse one index file (one JSON line per published version) and spawn a
/// download task per version on the rayon scope. A missing index file is
/// logged and skipped, not an error.
pub fn parse_index_and_download(
    index_path: &PathBuf,
    opts: &CratesOptions,
    scope: &Scope,
    err_record: &Arc<Mutex<File>>,
) -> FreightResult {
    match File::open(index_path) {
        Ok(f) => {
            let buffered = BufReader::new(f);

            for line in buffered.lines() {
                let line = line.unwrap();
                let c: IndexFile = serde_json::from_str(&line).unwrap();
                let err_record = Arc::clone(err_record);
                let opts = opts.clone();

                let url = Url::parse(&format!(
                    "{}/{}/{}-{}.crate",
                    opts.config.domain, &c.name, &c.name, &c.vers
                ))
                .unwrap();

                let file = opts
                    .crates_path
                    .join(&c.name)
                    .join(format!("{}-{}.crate", &c.name, &c.vers));

                scope.spawn(move |_| {
                    download_crates_with_log(file, &opts, url, c, err_record).unwrap();
                });
            }
        }
        Err(err) => match err.kind() {
            ErrorKind::NotFound => {
                tracing::warn!(
                    "This file might have been removed from crates.io:{}",
                    &index_path.display()
                );
            }
            other_error => panic!("something wrong while open the index file: {}", other_error),
        },
    };
    Ok(())
}

/// Download one crate file, optionally upload it to S3, and append a JSON
/// line to the error log on failure.
// NOTE(review): `index_file.cksum.unwrap()` panics when the index entry has
// no checksum — confirm cksum is always present for entries reaching here.
pub fn download_crates_with_log(
    path: PathBuf,
    opts: &CratesOptions,
    url: Url,
    index_file: IndexFile,
    err_record: Arc<Mutex<File>>,
) -> FreightResult {
    let down_opts = &DownloadOptions {
        proxy: opts.proxy.clone(),
        url,
        path,
    };

    match download_and_check_hash(down_opts, Some(&index_file.cksum.unwrap()), false) {
        Ok(download_succ) => {
            let path = &down_opts.path;
            if download_succ && opts.upload {
                let s3 = S3cmd::default();
                // S3 key: "crates" + path relative to the local crates root.
                let s3_path = format!(
                    "crates{}",
                    path.to_str()
                        .unwrap()
                        .replace(opts.crates_path.to_str().unwrap(), "")
                );
                tracing::info!("s3_path: {}, {}", s3_path, opts.delete_after_upload);
                let uploaded = s3.upload_file(path, &s3_path, &opts.bucket_name);
                if uploaded.is_ok() && opts.delete_after_upload {
                    fs::remove_file(path).unwrap();
                }
            }
            Ok(())
        }
        Err(err) => {
            let mut err_file = err_record.lock().unwrap();
            let err_crate = ErrorCrate {
                name: index_file.name,
                vers: index_file.vers,
                time: Utc::now().timestamp().to_string(),
            };
            let json = serde_json::to_string(&err_crate).unwrap();
            // Write the JSON to the file
            err_file.write_all(json.as_bytes()).unwrap();
            err_file.write_all(b"\n")?;
            tracing::error!("{:?}", err);
            Err(err)
        }
    }
}
--------------------------------------------------------------------------------
/src/server/file_server.rs:
--------------------------------------------------------------------------------
//!
//!
//!
//!
//!
//!

// NOTE(review): angle-bracketed generic arguments in this chunk were eaten by
// the dump's tag-stripping (every `impl Filter + Clone` return type lost its
// `<Extract = …, Error = …>` binding). The bindings below are reconstructed
// in the usual warp idiom — confirm against the original source.

use std::{
    net::{IpAddr, SocketAddr},
    path::PathBuf,
};

use warp::{hyper::Uri, reject::Reject, Filter};

use crate::config::Config;

/// Custom rejection carrying the upstream URI a missing file should
/// redirect to.
#[derive(Debug, PartialEq, Clone)]
struct MissingFile {
    pub uri: Uri,
}
impl Reject for MissingFile {}

/// TLS paths plus bind address/port for the file server.
/// TLS is enabled only when BOTH cert_path and key_path are set.
#[derive(Debug)]
pub struct FileServer {
    pub cert_path: Option<PathBuf>,
    pub key_path: Option<PathBuf>,
    pub addr: IpAddr,
    pub port: u16,
}

/// start server
#[tokio::main]
pub async fn start(config: &Config, file_server: &FileServer) {
    tracing_subscriber::fmt::init();
    // storage::init().await;
    let routes = filters::build_route(config.to_owned())
        .recover(handlers::handle_rejection)
        .with(warp::trace::request());

    let (cert_path, key_path, addr, port) = (
        &file_server.cert_path,
        &file_server.key_path,
        file_server.addr,
        file_server.port,
    );

    match (cert_path, key_path) {
        (Some(cert_path), Some(key_path)) => {
            let socket_addr = SocketAddr::new(addr, port);
            warp::serve(routes)
                .tls()
                .cert_path(cert_path)
                .key_path(key_path)
                .run(socket_addr)
                .await;
        }
        (None, None) => {
            let socket_addr = SocketAddr::new(addr, port);
            warp::serve(routes).run(socket_addr).await
        }
        // Mismatched TLS config: log and return without serving.
        (Some(_), None) => {
            tracing::error!("set cert_path but not set key_path.")
        }
        (None, Some(_)) => {
            tracing::error!("set key_path but not set cert_path.")
        }
    }
}
mod filters {
    use std::path::PathBuf;

    use bytes::{Buf, Bytes};
    use warp::{Filter, Rejection};

    use crate::{
        config::Config,
        server::{
            file_server::utils,
            git_protocol::GitCommand,
            model::{CratesPublish, Errors, PublishRsp},
        },
    };

    use super::handlers;

    /// Compose every route served by the file server.
    pub fn build_route(
        config: Config,
    ) -> impl Filter<Extract = impl warp::Reply, Error = Rejection> + Clone {
        // GET /dist/... => ./dist/..
        dist(config.clone())
            .or(rustup(config.clone()))
            .or(crates(config.clone()))
            .or(git(config.clone()))
            .or(publish(config.clone()))
            .or(sparse_index(config))
    }

    /// `PUT/POST api/v1/crates/new` — cargo publish endpoint.
    ///
    /// The body is cargo's publish framing: 4-byte JSON length, JSON
    /// metadata, 4-byte crate length, crate file bytes.
    // NOTE(review): endianness of the length prefix is decided inside
    // utils::get_usize_from_bytes (not visible here) — confirm little-endian,
    // as the cargo protocol specifies.
    pub fn publish(
        config: Config,
    ) -> impl Filter<Extract = impl warp::Reply, Error = Rejection> + Clone {
        warp::path!("api" / "v1" / "crates" / "new")
            .and(warp::body::bytes())
            .and(with_config(config))
            .map(|mut body: Bytes, config: Config| {
                let json_len = utils::get_usize_from_bytes(body.copy_to_bytes(4));

                tracing::info!("json_len: {:?}", json_len);
                let json = body.copy_to_bytes(json_len);
                tracing::info!("raw json: {:?}", json);

                let parse_result = serde_json::from_slice::<CratesPublish>(json.as_ref());
                let crate_len = utils::get_usize_from_bytes(body.copy_to_bytes(4));
                let file_content = body.copy_to_bytes(crate_len);

                match parse_result {
                    Ok(result) => {
                        println!("JSON: {:?}", result);
                        utils::save_crate_index(
                            &result,
                            &file_content,
                            config.index_path,
                        );
                        utils::save_crate_file(&result, &file_content, config.crates_path);
                        // let std::fs::write();
                        // 1.verify name and version from local db
                        // 2.call remote server to check info in crates.io
                        warp::reply::json(&PublishRsp::default())
                    }
                    Err(err) => warp::reply::json(&Errors::new(err.to_string())),
                }
            })
    }

    /// `/index/*` — sparse-index protocol, served from the crates.io-index
    /// checkout under index_path.
    // NOTE(review): this uses config.rustup.serve_domains (not
    // config.crates.serve_domains) for index files — confirm intentional.
    pub fn sparse_index(
        config: Config,
    ) -> impl Filter<Extract = impl warp::Reply, Error = Rejection> + Clone {
        warp::path("index")
            .and(warp::path::tail())
            .and(with_config(config))
            .and_then(|tail: warp::path::Tail, config: Config| async move {
                handlers::return_files(
                    config.rustup.serve_domains.unwrap(),
                    config.index_path,
                    PathBuf::from("crates.io-index").join(tail.as_str()),
                    false,
                )
                .await
            })
    }

    // build '/dist/*' route, this route handle rust toolchian files request
    pub fn dist(
        config: Config,
    ) -> impl Filter<Extract = impl warp::Reply, Error = Rejection> + Clone {
        warp::path("dist")
            .and(warp::path::tail())
            .and(with_config(config))
            .and_then(|tail: warp::path::Tail, config: Config| async move {
                handlers::return_files(
                    config.rustup.serve_domains.unwrap(),
                    config.dist_path,
                    PathBuf::from("dist").join(tail.as_str()),
                    false,
                )
                .await
            })
            .recover(handlers::handle_missing_file)
    }

    // build '/rustup/*' route, this route handle rustup-init file request
    pub fn rustup(
        config: Config,
    ) -> impl Filter<Extract = impl warp::Reply, Error = Rejection> + Clone {
        warp::path("rustup")
            .and(warp::path::tail())
            .and(with_config(config))
            .and_then(move |tail: warp::path::Tail, config: Config| async move {
                handlers::return_files(
                    config.rustup.serve_domains.unwrap(),
                    config.rustup_path,
                    PathBuf::from("rustup").join(tail.as_str()),
                    false,
                )
                .await
            })
            .recover(handlers::handle_missing_file)
    }

    // build '/crates/*' route, this route handle crates file request
    pub fn crates(
        config: Config,
    ) -> impl Filter<Extract = impl warp::Reply, Error = Rejection> + Clone {
        // Two URL shapes map to the same (name, version) pair:
        //   /crates/{name}/{version}/download
        //   /crates/{name}/{name}-{version}.crate
        let crates_1 = warp::path!("crates" / String / String / "download")
            .map(|name: String, version: String| (name, version))
            .untuple_one();
        let crates_2 = warp::path!("crates" / String / String)
            .map(|name: String, file: String| {
                // Version is the last '-'-separated segment of the filename,
                // so crate names containing '-' still parse.
                let split: Vec<_> = file.split('-').collect();
                let version = split[split.len() - 1].replace(".crate", "");
                (name, version)
            })
            .untuple_one();

        crates_1
            .or(crates_2)
            .unify()
            .and(with_config(config))
            .and_then(|name: String, version: String, config: Config| async move {
                let file_path = PathBuf::from("crates")
                    .join(&name)
                    .join(format!("{}-{}.crate", name, version));
                handlers::return_files(
                    config.crates.serve_domains.unwrap(),
                    config.crates_path,
                    file_path,
                    true,
                )
                .await
            })
            .recover(handlers::handle_missing_file)
    }

    // build '/crate.io-index/(git protocol)' route, this route handle gti clone and git pull request
    pub fn git(
        // git_work_dir: PathBuf,
        config: Config,
    ) -> impl Filter<Extract = impl warp::Reply, Error = Rejection> + Clone {
        // Serve the index from serve_index when configured, else index_path.
        let git_work_dir = if let Some(path) = &config.crates.serve_index {
            PathBuf::from(path)
        } else {
            config.index_path
        };

        let git_upload_pack = warp::path!("git-upload-pack")
            .and(warp::path::tail())
            .and(warp::method())
            .and(warp::body::aggregate())
            .and(warp::header::optional::<String>("Content-Type"))
            .and(
                // Missing query string is normalized to "".
                warp::query::raw()
                    .or_else(|_| async { Ok::<(String,), Rejection>((String::new(),)) }),
            )
            .and(with_work_dir(git_work_dir.to_owned()))
            .and_then(
                |_tail, method, body, content_type, _query, work_dir| async move {
                    let git_protocol = GitCommand::default();
                    git_protocol
                        .git_upload_pack(body, work_dir, method, content_type)
                        .await
                },
            );

        let git_info_refs = warp::path!("info" / "refs")
            .and(warp::body::aggregate())
            .and(with_work_dir(git_work_dir))
            .and_then(|body, work_dir| async move {
                let git_protocol = GitCommand::default();
                git_protocol.git_info_refs(body, work_dir).await
            });

        warp::path("crates.io-index").and(git_upload_pack.or(git_info_refs))
    }

    /// Inject a clone of `config` into a filter chain.
    fn with_config(
        config: Config,
    ) -> impl Filter<Extract = (Config,), Error = std::convert::Infallible> + Clone {
        warp::any().map(move || config.clone())
    }

    /// Inject a clone of the git working directory into a filter chain.
    fn with_work_dir(
        work_dir: PathBuf,
    ) -> impl Filter<Extract = (PathBuf,), Error = std::convert::Infallible> + Clone {
        warp::any().map(move || work_dir.clone())
    }
}

mod handlers {
    use std::{borrow::BorrowMut, convert::Infallible, error::Error, path::PathBuf, str::FromStr};

    use reqwest::Url;
    use serde::Serialize;
    use tokio::{fs::File, io::AsyncWriteExt};
    use tokio_util::codec::{BytesCodec, FramedRead};
    use url::form_urlencoded::byte_serialize;
    use warp::{
        http,
        http::StatusCode,
        hyper::{Body, Response, Uri},
        reject, Rejection, Reply,
    };

    use crate::{
        download,
        errors::{FreightResult, FreighterError},
        server::file_server::MissingFile,
    };

    /// Stream a local file as an HTTP response with Content-Length set;
    /// any I/O failure is mapped to a 404 rejection.
    async fn download_local_files(full_path: &PathBuf) -> Result<Response<Body>, Rejection> {
        let file = File::open(full_path)
            .await
            .map_err(|_| reject::not_found())?;

        let meta = file.metadata().await.map_err(|_| reject::not_found())?;
        let stream = FramedRead::new(file, BytesCodec::new());

        let body = Body::wrap_stream(stream);

        let mut resp = Response::new(body);
        resp.headers_mut()
            .insert(http::header::CONTENT_LENGTH, meta.len().into());

        Ok(resp)
    }

    /// Try each configured serve domain in order: "localhost" means serve
    /// from the local mirror directory; any other domain produces an
    /// upstream URL (with huaweicloud-specific encoding for crate files).
    pub async fn return_files(
        serve_domains: Vec<String>,
        work_dir: PathBuf,
        mut file_path: PathBuf,
        is_crates: bool,
    ) -> Result<Response<Body>, Rejection> {
        for domain in serve_domains {
            if domain.eq("localhost") {
                let full_path = work_dir.join(file_path.clone());
                tracing::info!("try to fetch file from local: {}", full_path.display());
                let res = download_local_files(&full_path).await;
                if res.is_ok() {
                    return res;
                }
            } else {
                // url_path: crates/name/version/download or crates/name/version
                // file_path: crates/name/name-version.crate
                let mut url: Url = format!("{}/{}", domain, file_path.display())
                    .parse()
                    .unwrap();
                if is_crates && domain.contains("myhuaweicloud.com") {
                    download::encode_huaweicloud_url(&mut url);

                    // Percent-encode the crate filename for the huaweicloud CDN.
                    let name = file_path.file_name().unwrap().to_str().unwrap();
                    let encode: String = byte_serialize(name.as_bytes()).collect();
                    file_path.pop();
                    file_path.push(encode);
                    tracing::debug!("file path {:?}", file_path);
                }
                // NOTE(review): the dump is truncated here mid-expression;
                // the remainder of return_files (and the rest of this module)
                // is outside this chunk.
                return Ok(
345 | warp::redirect::found(Uri::from_str(url.as_str()).unwrap()).into_response() 346 | ); 347 | // return Err(reject::custom(MissingFile { uri })); 348 | } 349 | } 350 | Err(reject::not_found()) 351 | } 352 | 353 | /// An API error serializable to JSON. 354 | #[derive(Serialize)] 355 | struct ErrorMessage { 356 | code: u16, 357 | message: String, 358 | } 359 | 360 | /// ### References Codes 361 | /// 362 | /// - [warp]()'s rejections (example)[]. 363 | /// 364 | /// 365 | // This function receives a `Rejection` and tries to return a custom 366 | // value, otherwise simply passes the rejection along. 367 | pub async fn handle_rejection(err: Rejection) -> Result { 368 | let code; 369 | let message; 370 | if err.is_not_found() { 371 | code = StatusCode::NOT_FOUND; 372 | message = "NOT_FOUND"; 373 | } else if let Some(e) = err.find::() { 374 | // This error happens if the body could not be deserialized correctly 375 | // We can use the cause to analyze the error and customize the error message 376 | message = match e.source() { 377 | Some(cause) => { 378 | if cause.to_string().contains("denom") { 379 | "FIELD_ERROR: denom" 380 | } else { 381 | "BAD_REQUEST" 382 | } 383 | } 384 | None => "BAD_REQUEST", 385 | }; 386 | code = StatusCode::BAD_REQUEST; 387 | } else if err.find::().is_some() { 388 | // We can handle a specific error, here METHOD_NOT_ALLOWED, 389 | // and render it however we want 390 | code = StatusCode::METHOD_NOT_ALLOWED; 391 | message = "METHOD_NOT_ALLOWED"; 392 | } else { 393 | // We should have expected this... 
Just log and say its a 500 394 | tracing::info!("unhandled rejection: {:?}", err); 395 | code = StatusCode::INTERNAL_SERVER_ERROR; 396 | message = "UNHANDLED_REJECTION"; 397 | } 398 | 399 | let json = warp::reply::json(&ErrorMessage { 400 | code: code.as_u16(), 401 | message: message.into(), 402 | }); 403 | 404 | Ok(warp::reply::with_status(json, code)) 405 | } 406 | 407 | pub async fn handle_missing_file(err: Rejection) -> Result { 408 | if let Some(missing_file) = err.find::() { 409 | let uri = missing_file.uri.clone(); 410 | tracing::info!("redirect to: {}", uri); 411 | return Ok(warp::redirect::found(uri)); 412 | } 413 | Err(err) 414 | } 415 | 416 | #[allow(unused)] 417 | /// async download file from backup domain 418 | pub async fn download_from_remote(path: PathBuf, uri: &Uri) -> FreightResult { 419 | if let Some(parent) = path.parent() { 420 | if !parent.exists() { 421 | tokio::fs::create_dir_all(parent).await?; 422 | } 423 | } 424 | let mut resp = reqwest::get(uri.to_string()).await?; 425 | if resp.status() == 200 { 426 | let mut file = File::create(path).await?; 427 | while let Some(mut data) = resp.chunk().await? 
{ 428 | file.write_all_buf(data.borrow_mut()).await?; 429 | } 430 | tracing::info!("{} {:?}", "&&&[NEW] \t\t ", file); 431 | } else { 432 | tracing::error!("download failed, Please check your url: {}", uri); 433 | return Err(FreighterError::code(resp.status().as_u16().into())); 434 | } 435 | Ok(()) 436 | } 437 | } 438 | 439 | mod utils { 440 | use std::{fs, path::PathBuf}; 441 | 442 | use crate::{ 443 | handler::{crates_file::IndexFile, utils}, 444 | server::model::CratesPublish, 445 | }; 446 | use bytes::Bytes; 447 | use sha2::{Digest, Sha256}; 448 | 449 | pub fn get_usize_from_bytes(bytes: Bytes) -> usize { 450 | let mut fixed_array = [0u8; 8]; 451 | fixed_array[..4].copy_from_slice(&bytes[..4]); 452 | usize::from_le_bytes(fixed_array) 453 | } 454 | 455 | pub fn save_crate_index(json: &CratesPublish, content: &Bytes, work_dir: PathBuf) { 456 | let suffix = utils::index_suffix(&json.name); 457 | let index_path = work_dir.join(suffix); 458 | //convert publish json to index file 459 | let mut index_file: IndexFile = 460 | serde_json::from_str(&serde_json::to_string(&json).unwrap()).unwrap(); 461 | 462 | let mut hasher = Sha256::new(); 463 | hasher.update(content); 464 | index_file.cksum = Some(format!("{:x}", hasher.finalize())); 465 | fs::write(index_path, serde_json::to_string(&index_file).unwrap()).unwrap(); 466 | } 467 | 468 | pub fn save_crate_file(json: &CratesPublish, content: &Bytes, work_dir: PathBuf) { 469 | let crates_dir = work_dir.join(&json.name); 470 | if !crates_dir.exists() { 471 | fs::create_dir_all(&crates_dir).unwrap(); 472 | } 473 | let crates_file = crates_dir.join(format!("{}-{}.crate", json.name, json.vers)); 474 | fs::write(crates_file, content).unwrap(); 475 | } 476 | } 477 | -------------------------------------------------------------------------------- /src/handler/index.rs: -------------------------------------------------------------------------------- 1 | /// 2 | /// 3 | /// ### References Codes 4 | /// 5 | /// - 
/// `CrateIndex` is a wrapper `Git Repository` that crates-io index.
///
/// Pairs the remote git URL of the crates.io index with the local
/// checkout path it is mirrored to.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct CrateIndex {
    // Remote git URL of the crates.io index repository.
    pub url: Url,
    /// /${HOME}/freighter/crates.io-index
    pub path: PathBuf,
}

/// State contains the progress when download index file
///
/// Shared between git2's transfer-progress and checkout-progress callbacks
/// so a single combined progress line can be rendered.
pub struct State {
    // Latest network transfer snapshot; None until the first callback fires.
    pub progress: Option<Progress<'static>>,
    // Total items to check out (checkout phase).
    pub total: usize,
    // Items checked out so far (checkout phase).
    pub current: usize,
    // Path of the file currently being checked out, if any.
    pub path: Option<PathBuf>,
    // Whether the "resolving deltas" phase has already emitted its line break.
    pub newline: bool,
}

impl Default for CrateIndex {
    // Defaults to the upstream crates.io index cloned under
    // ${HOME}/freighter/crates.io-index.
    fn default() -> CrateIndex {
        let home_path = dirs::home_dir().unwrap();
        CrateIndex {
            url: Url::parse("https://github.com/rust-lang/crates.io-index.git").unwrap(),
            path: home_path.join("freighter/crates.io-index"),
        }
    }
}
    // use default name origin
    const REMOTE_NAME: &'static str = "origin";
    /// Create a new `CrateIndex` from a `Work dir`.
    ///
    /// # Panics
    /// Panics if `domain` is not a parseable URL.
    pub fn new(domain: &str, path: PathBuf) -> Self {
        Self {
            path,
            url: Url::parse(domain).unwrap(),
        }
    }

    /// Check the destination path is a git repository and pull
    ///
    /// Records the (local tip, fetched tip) pair in the commit log before
    /// merging, so a later diff pass knows which commit range is new.
    pub fn git_pull(&self, opts: &CratesOptions) -> FreightResult {
        let repo = get_repo(self.path.clone());

        let mut remote = repo.find_remote(CrateIndex::REMOTE_NAME).unwrap();
        // Resolve the local tip of the branch *before* fetching, so `commit`
        // is the pre-pull state and `fetch_commit` the post-fetch remote tip.
        let object = repo.revparse_single(CrateIndex::REMOTE_BRANCH)?;
        let commit = object.peel_to_commit()?;
        let fetch_commit = do_fetch(&repo, &[CrateIndex::REMOTE_BRANCH], &mut remote, opts)?;

        self.save_commit_log(&opts.log_path, &commit.id(), &fetch_commit.id());
        tracing::info!(
            "commit id:{}, remote id :{}",
            commit.id(),
            &fetch_commit.id()
        );
        do_merge(&repo, CrateIndex::REMOTE_BRANCH, fetch_commit)
    }
    /// Clone the `CrateIndex` to a local directory.
    ///
    /// Progress for both the network transfer and the checkout is funnelled
    /// through a shared `State` cell so one combined progress line is printed.
    pub fn git_clone(&self, opts: &CratesOptions) -> FreightResult {
        tracing::info!("Starting git clone...");
        let state = RefCell::new(State {
            progress: None,
            total: 0,
            current: 0,
            path: None,
            newline: false,
        });

        let mut cb = RemoteCallbacks::new();
        // Network-side progress: stash the latest stats, optionally render.
        cb.transfer_progress(|stats| {
            let mut state = state.borrow_mut();
            state.progress = Some(stats.to_owned());
            if !opts.no_progressbar {
                print(&mut state);
            }
            true
        });

        let mut co = CheckoutBuilder::new();
        // Checkout-side progress: track current path and counters.
        co.progress(|path, cur, total| {
            let mut state = state.borrow_mut();
            state.path = path.map(|p| p.to_path_buf());
            state.current = cur;
            state.total = total;
            if !opts.no_progressbar {
                print(&mut state);
            }
        });

        let mut fo = FetchOptions::new();
        fo.remote_callbacks(cb);
        let repo = RepoBuilder::new()
            .fetch_options(fo)
            .with_checkout(co)
            .clone(self.url.as_ref(), self.path.as_path())?;

        let object = repo.revparse_single(CrateIndex::REMOTE_BRANCH)?;
        let mut revwalk = repo.revwalk()?;
        // REVERSE sorting walks oldest-first, so the first yielded commit
        // is the repository's root commit.
        revwalk.set_sorting(Sort::REVERSE)?;
        revwalk.push(object.id())?;
        let commit = object.peel_to_commit()?;
        // first commit of crates.io-index
        let first_commit_id: Oid = revwalk.next().unwrap().unwrap();
        self.save_commit_log(&opts.log_path, &first_commit_id, &commit.id());
        Ok(())
    }

    /// save commit record in record.log, it will write from first commit to current commit if command is git clone
    ///
    /// The log file is named `YYYY-MM-DD-record.log` inside `log_path`; both
    /// the directory and the file are created on first use.
    pub fn save_commit_log(&self, log_path: &PathBuf, from_commit: &Oid, to_commit: &Oid) {
        let now = Utc::now();
        let mut file_name = now.date_naive().to_string();
        file_name.push('-');
        file_name.push_str("record.log");
        let file_name = &log_path.join(file_name);
        let mut f = match OpenOptions::new().write(true).append(true).open(file_name) {
            Ok(f) => f,
            Err(err) => match err.kind() {
                // First run: the log directory/file does not exist yet.
                ErrorKind::NotFound => {
                    fs::create_dir_all(log_path).unwrap();
                    File::create(file_name).unwrap()
                }
                other_error => panic!("something wrong: {}", other_error),
            },
        };
        // Only record the pair when the commit ids actually differ.
        if from_commit != to_commit {
            writeln!(f, "{},{},{}", from_commit, to_commit, now.timestamp()).unwrap();
        }
    }
}

/// Print progressbar while clone data from git
///
/// Renders either the delta-resolution phase or a combined
/// network/index/checkout percentage line, overwriting in place with '\r'.
fn print(state: &mut State) {
    let stats = state.progress.as_ref().unwrap();
    let network_pct = (100 * stats.received_objects()) / stats.total_objects();
    let index_pct = (100 * stats.indexed_objects()) / stats.total_objects();
    let co_pct = if state.total > 0 {
        (100 * state.current) / state.total
    } else {
        0
    };

    let kb = stats.received_bytes() / 1024;

    if stats.received_objects() == stats.total_objects() {
        if !state.newline {
            print!("");
            state.newline = true;
        }
        print!(
            "Resolving deltas {}/{}\r",
            stats.indexed_deltas(),
            stats.total_deltas()
        );
    } else {
        print!(
            "net {:3}% ({:4} kb, {:5}/{:5}) / idx {:3}% ({:5}/{:5}) \
            / chk {:3}% ({:4}/{:4}) {}\r",
            network_pct,
            kb,
            stats.received_objects(),
            stats.total_objects(),
            index_pct,
            stats.indexed_objects(),
            stats.total_objects(),
            co_pct,
            state.current,
            state.total,
            state
                .path
                .as_ref()
                .map(|s| s.to_string_lossy().into_owned())
                .unwrap_or_default()
        )
    }

    io::stdout().flush().unwrap();
}
| if opts.no_progressbar { 225 | tracing::info!("no-progressbar has been set to true, it will not be displayed!"); 226 | } 227 | let index_dir = Path::new(index.path.as_path()); 228 | // try to remove index dir if it's empty 229 | if index_dir.exists() { 230 | if !index_dir 231 | .read_dir() 232 | .unwrap() 233 | .filter_map(|x| x.ok()) 234 | .any(|e| !e.file_name().to_str().unwrap().contains("git")) 235 | { 236 | tracing::warn!( 237 | "It seems last task has been broken and {} is empty, 238 | freighter had to removed this index, and then run init again", 239 | index_dir.display() 240 | ); 241 | match fs::remove_dir_all(index_dir) { 242 | Ok(_) => index.git_clone(opts).unwrap(), 243 | Err(e) => panic!("Remove index failed, try to delete it manually: {}", e), 244 | }; 245 | } else { 246 | index.git_pull(opts).unwrap(); 247 | } 248 | } else { 249 | index.git_clone(opts).unwrap(); 250 | } 251 | Ok(()) 252 | } 253 | 254 | /// get repo from path 255 | pub fn get_repo(path: PathBuf) -> Repository { 256 | let path = path.to_str().unwrap_or("."); 257 | match Repository::open(path) { 258 | Ok(repo) => repo, 259 | Err(e) => match e.code() { 260 | ErrorCode::NotFound => { 261 | panic!( 262 | "index path: {} not found, please execute freighter sync pull first", 263 | &path 264 | ); 265 | } 266 | _other_error => panic!("Target path is not a git repository: {}", e), 267 | }, 268 | } 269 | } 270 | 271 | pub fn git2_diff( 272 | options: &CratesOptions, 273 | from_oid: &str, 274 | to_oid: &str, 275 | file: Arc>, 276 | ) -> Result<(), anyhow::Error> { 277 | let index = &options.index; 278 | let repo = get_repo(index.path.clone()); 279 | let t1 = tree_to_treeish(&repo, from_oid)?; 280 | let t2 = tree_to_treeish(&repo, to_oid)?; 281 | let mut opts = DiffOptions::new(); 282 | let diff = repo.diff_tree_to_tree( 283 | t1.unwrap().as_tree(), 284 | t2.unwrap().as_tree(), 285 | Some(&mut opts), 286 | )?; 287 | 288 | diff.print(DiffFormat::NameOnly, |_d, _h, l| { 289 | handle_diff_line(l, 
options, &file) 290 | })?; 291 | 292 | Ok(()) 293 | } 294 | 295 | /// Traversing directories in diff lines 296 | fn handle_diff_line(line: DiffLine, opts: &CratesOptions, err_record: &Arc>) -> bool { 297 | let path_suffix = str::from_utf8(line.content()) 298 | .unwrap() 299 | .strip_suffix('\n') 300 | .unwrap(); 301 | if path_suffix.eq("config.json") { 302 | return true; 303 | } 304 | let index_path = opts.index.path.join(path_suffix); 305 | opts.thread_pool.scope(|s| { 306 | parse_index_and_download(&index_path, opts, s, err_record).unwrap(); 307 | }); 308 | true 309 | } 310 | 311 | /// ### References Codes 312 | /// 313 | /// - [git2-rs](https://github.com/rust-lang/git2-rs)'s clone (example)[]. 314 | fn tree_to_treeish<'a>( 315 | repo: &'a Repository, 316 | arg: &str, 317 | ) -> Result>, anyhow::Error> { 318 | let obj = repo.revparse_single(arg)?; 319 | let tree = obj.peel(ObjectType::Tree)?; 320 | Ok(Some(tree)) 321 | } 322 | 323 | /// fetch the remote commit and show callback progress 324 | fn do_fetch<'a>( 325 | repo: &'a Repository, 326 | refs: &[&str], 327 | remote: &'a mut git2::Remote, 328 | opts: &CratesOptions, 329 | ) -> Result, git2::Error> { 330 | let mut cb = RemoteCallbacks::new(); 331 | 332 | // Print out our transfer progress. 
333 | cb.transfer_progress(|stats| { 334 | if stats.received_objects() == stats.total_objects() { 335 | print!( 336 | "Resolving deltas {}/{}\r", 337 | stats.indexed_deltas(), 338 | stats.total_deltas() 339 | ); 340 | } else if stats.total_objects() > 0 { 341 | print!( 342 | "Received {}/{} objects ({}) in {} bytes\r", 343 | stats.received_objects(), 344 | stats.total_objects(), 345 | stats.indexed_objects(), 346 | stats.received_bytes() 347 | ); 348 | } 349 | io::stdout().flush().unwrap(); 350 | true 351 | }); 352 | 353 | let mut fo = FetchOptions::new(); 354 | if opts.proxy.enable { 355 | let mut proxy_op = ProxyOptions::new(); 356 | proxy_op.url(&opts.proxy.git_index_proxy); 357 | fo.proxy_options(proxy_op); 358 | } 359 | 360 | if !opts.no_progressbar { 361 | fo.remote_callbacks(cb); 362 | } 363 | 364 | // Always fetch all tags. 365 | // Perform a download and also update tips 366 | fo.download_tags(git2::AutotagOption::All); 367 | tracing::info!("Fetching {} for repo", remote.name().unwrap()); 368 | remote.fetch(refs, Some(&mut fo), None).unwrap(); 369 | 370 | // If there are local objects (we got a thin pack), then tell the user 371 | // how many objects we saved from having to cross the network. 
372 | let stats = remote.stats(); 373 | if stats.local_objects() > 0 { 374 | print!( 375 | "\rReceived {}/{} objects in {} bytes (used {} local \ 376 | objects)", 377 | stats.indexed_objects(), 378 | stats.total_objects(), 379 | stats.received_bytes(), 380 | stats.local_objects() 381 | ); 382 | } else { 383 | print!( 384 | "\rReceived {}/{} objects in {} bytes", 385 | stats.indexed_objects(), 386 | stats.total_objects(), 387 | stats.received_bytes() 388 | ); 389 | } 390 | 391 | let fetch_head = repo.find_reference("FETCH_HEAD")?; 392 | repo.reference_to_annotated_commit(&fetch_head) 393 | } 394 | 395 | /// Set repo head to the newest remote commit 396 | fn fast_forward( 397 | repo: &Repository, 398 | lb: &mut git2::Reference, 399 | rc: &git2::AnnotatedCommit, 400 | ) -> Result<(), git2::Error> { 401 | let name = match lb.name() { 402 | Some(s) => s.to_string(), 403 | None => String::from_utf8_lossy(lb.name_bytes()).to_string(), 404 | }; 405 | let msg = format!("Fast-Forward: Setting {} to id: {}", name, rc.id()); 406 | tracing::info!("{}", msg); 407 | lb.set_target(rc.id(), &msg)?; 408 | repo.set_head(&name)?; 409 | repo.checkout_head(Some( 410 | CheckoutBuilder::default() 411 | // For some reason the force is required to make the working directory actually get updated 412 | // I suspect we should be adding some logic to handle dirty working directory states 413 | // but this is just an example so maybe not. 414 | .force(), 415 | ))?; 416 | Ok(()) 417 | } 418 | 419 | /// Add a merge commit and set working tree to match head 420 | fn normal_merge( 421 | repo: &Repository, 422 | local: &git2::AnnotatedCommit, 423 | remote: &git2::AnnotatedCommit, 424 | ) -> Result<(), git2::Error> { 425 | let local_tree = repo.find_commit(local.id())?.tree()?; 426 | let remote_tree = repo.find_commit(remote.id())?.tree()?; 427 | let ancestor = repo 428 | .find_commit(repo.merge_base(local.id(), remote.id())?)? 
/// Add a merge commit and set working tree to match head
fn normal_merge(
    repo: &Repository,
    local: &git2::AnnotatedCommit,
    remote: &git2::AnnotatedCommit,
) -> Result<(), git2::Error> {
    let local_tree = repo.find_commit(local.id())?.tree()?;
    let remote_tree = repo.find_commit(remote.id())?.tree()?;
    // Three-way merge base between the local and remote tips.
    let ancestor = repo
        .find_commit(repo.merge_base(local.id(), remote.id())?)?
        .tree()?;
    let mut idx = repo.merge_trees(&ancestor, &local_tree, &remote_tree, None)?;

    if idx.has_conflicts() {
        // Leave the conflicted index checked out; no merge commit is made.
        tracing::info!("Merge conflicts detected...");
        repo.checkout_index(Some(&mut idx), None)?;
        return Ok(());
    }
    let result_tree = repo.find_tree(idx.write_tree_to(repo)?)?;
    // now create the merge commit
    let msg = format!("Merge: {} into {}", remote.id(), local.id());
    let sig = repo.signature()?;
    let local_commit = repo.find_commit(local.id())?;
    let remote_commit = repo.find_commit(remote.id())?;
    // Do our merge commit and set current branch head to that commit.
    let _merge_commit = repo.commit(
        Some("HEAD"),
        &sig,
        &sig,
        &msg,
        &result_tree,
        &[&local_commit, &remote_commit],
    )?;
    // Set working tree to match head.
    repo.checkout_head(Some(CheckoutBuilder::default().force()))?;
    Ok(())
}

/// Do a merge analysis to determine whether it should fast_forward or merge
fn do_merge<'a>(
    repo: &'a Repository,
    remote_branch: &str,
    fetch_commit: git2::AnnotatedCommit<'a>,
) -> FreightResult {
    // 1. do a merge analysis
    let analysis = repo.merge_analysis(&[&fetch_commit])?;

    // 2. Do the appropriate merge
    if analysis.0.is_fast_forward() {
        tracing::info!("Doing a fast forward");
        // do a fast forward
        let ref_name = format!("refs/heads/{}", remote_branch);
        match repo.find_reference(&ref_name) {
            Ok(mut r) => {
                fast_forward(repo, &mut r, &fetch_commit)?;
            }
            Err(_) => {
                // The branch doesn't exist so just set the reference to the
                // commit directly. Usually this is because you are pulling
                // into an empty repository.
                repo.reference(
                    &ref_name,
                    fetch_commit.id(),
                    true,
                    &format!("Setting {} to {}", remote_branch, fetch_commit.id()),
                )?;
                repo.set_head(&ref_name)?;
                repo.checkout_head(Some(
                    CheckoutBuilder::default()
                        .allow_conflicts(true)
                        .conflict_style_merge(true)
                        .force(),
                ))?;
            }
        };
    } else if analysis.0.is_normal() {
        // do a normal merge
        let head_commit = repo.reference_to_annotated_commit(&repo.head()?)?;
        normal_merge(repo, &head_commit, &fetch_commit)?;
    } else {
        // Already up to date (or unborn with nothing to do).
        tracing::info!("Nothing to do...");
    }

    Ok(())
}

#[cfg(test)]
mod tests {
    use std::path::PathBuf;

    // use crate::handler::crates_file::CratesOptions;

    // Construction-only test: verifies CrateIndex::new accepts the upstream
    // URL and a fixture path (the actual clone is left commented out).
    #[test]
    fn test_clone() {
        let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        path.push("data/tests/fixtures/");

        let _ =
            super::CrateIndex::new("https://github.com/rust-lang/crates.io-index.git", path);
        // index.git_clone(&mut CratesOptions::default()).unwrap();
    }

    // Construction-only test mirroring test_clone for the download path.
    #[test]
    fn test_downloads() {
        let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
        path.push("data/tests/fixtures/");

        let _ =
            super::CrateIndex::new("https://github.com/rust-lang/crates.io-index.git", path);
    }
}