", ...]
// }
//
// to get all the images, we basically iterate over p and b and concatenate them in this format:
//
// <root>-<p>-<b>
//
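// for example, with purely hypothetical values (just to show the shape, not real spec data):
//   root = ".../media-1a", p = ["p1", "p2"], b = ["b1"]
//   => ".../media-1a-p1-b1", ".../media-1a-p2-b1"
//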
// rinse and repeat for ALL the repos in the GCP bucket until we have absolutely everything
//
// todo: an API for the metadata and image search would be nice, since the JSON does contain that data

#[derive(ValueEnum, Clone, Debug, Default)]
enum OperatingMode {
    /// The original mode, which downloads all the wallpapers
    /// in every crop and resolution available from the API
    ///
    /// This mode is heavy on the API, network and storage,
    /// but will give you pre-cropped wallpapers for every form factor
    Verbose,
    /// A simplified mode that only downloads the wallpapers
    /// in their original form, using the CDN bucket
    ///
    /// This mode simply downloads the original wallpapers;
    /// recommended for those who just want the raw images
    /// and prefer to crop them themselves.
    #[default]
    Simple,
}

#[derive(clap::Parser)]
struct Cli {
    /// The operating mode (backend) to use
    #[clap(short, long)]
    #[clap(value_enum)]
    #[arg(default_value_t)]
    mode: OperatingMode,

    /// Dry-run mode; will not download anything
    #[clap(short, long, env = "DRY_RUN")]
    dry_run: bool,

    /// Skip preview images (may save space)
    ///
    /// With the verbose/complete mode, this will skip
    /// all images that are not in the "dhd" form factor
    #[clap(short = 'F', long, default_value = "false")]
    filter_previews: bool,
}

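// Example invocations (illustrative; the binary name `wallpaper-dl` is a
// placeholder for whatever this crate's binary is actually called):
//
//   wallpaper-dl --mode simple
//   wallpaper-dl --mode verbose --filter-previews
//   DRY_RUN=true wallpaper-dl --mode simple --dry-run
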
impl Cli {
    pub async fn download(&self) -> Result<(), Box<dyn std::error::Error>> {
        // set dry run mode
        if self.dry_run {
            std::env::set_var("DRY_RUN", "true");
        }

        if self.filter_previews {
            std::env::set_var("FILTER_PREVIEWS", "true");
        }

        match self.mode {
            OperatingMode::Verbose => verbose::download_verbose().await,
            OperatingMode::Simple => simple::download_simple().await,
        }
    }
}

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // println!("Hello, world!");

    let cli = Cli::parse();

    cli.download().await?;

    Ok(())
}

--------------------------------------------------------------------------------
/src/simple.rs:
--------------------------------------------------------------------------------
use std::path::Path;
use std::sync::Arc;

use crate::DOWNLOADS_DIR;
use serde::Deserialize;
use serde_json::Value;
use tokio::sync::Semaphore;

const DATE: &str = "20240730";
// https://storage.googleapis.com/panels-cdn/data/20240730/all.json
const CDN_URL: &str = "https://storage.googleapis.com/panels-cdn/data";

#[derive(Deserialize, Debug)]
struct Cdn {
    #[serde(flatten)]
    inner: serde_json::Value,
}

pub async fn download(url: &url::Url) -> Result<(), Box<dyn std::error::Error>> {
    // get the last two segments of the path

    let path = url.path_segments().unwrap().collect::<Vec<_>>();

    let filename = path[path.len() - 2..].join("/");

    let file_path = Path::new(DOWNLOADS_DIR).join(&filename);

    let dir = file_path.parent().unwrap();

    println!("Downloading {} to {}", url, filename);

    let dry_run = std::env::var("DRY_RUN").unwrap_or_else(|_| "false".to_string()) == "true";

    if dry_run {
        println!("Dry run: would download {} to {}", url, filename);
    } else {
        tokio::fs::create_dir_all(dir).await?;
        let res = reqwest::get(url.clone()).await?;
        let bytes = res.bytes().await?;
        println!(
            "Downloaded {} bytes ({})",
            bytes.len(),
            &file_path.display()
        );
        tokio::fs::write(&file_path, &bytes).await?;
    }

    Ok(())
}

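// A small, self-contained sanity check of the "last two path segments" naming
// used by `download` above. The URL here is made up purely to illustrate the
// slicing; it is not a real CDN path.
#[cfg(test)]
mod filename_tests {
    #[test]
    fn keeps_last_two_segments() {
        let url = url::Url::parse("https://example.com/content/abc123/image.jpg").unwrap();
        let path = url.path_segments().unwrap().collect::<Vec<_>>();
        let filename = path[path.len() - 2..].join("/");
        assert_eq!(filename, "abc123/image.jpg");
    }
}
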
pub async fn download_simple() -> Result<(), Box<dyn std::error::Error>> {
    let spec = Cdn::fetch().await?;
    // println!("{:#?}", spec);
    //
    println!("Finding urls...");

    let urls = spec
        .find_urls()
        .into_iter()
        .filter_map(|url| url::Url::parse(&url).ok())
        // keep a URL if preview filtering is disabled, or if it is not a "_preview" image
        .filter(|url| {
            !std::env::var("FILTER_PREVIEWS")
                .map(|val| val == "true")
                .unwrap_or(false)
                || !url.path().contains("_preview")
        })
        .collect::<Vec<_>>();

    download_urls(urls).await;

    // println!("{:#?}", urls);
    Ok(())
}

async fn download_urls(urls: Vec<url::Url>) {
    let semaphore = Arc::new(Semaphore::new(10)); // Limit concurrent downloads to 10
    let mut handles = vec![];

    for image in urls {
        // Acquire a permit before spawning, so the loop itself throttles task creation
        let permit = semaphore.clone().acquire_owned().await.unwrap();
        let handle = tokio::spawn(async move {
            let res = download(&image).await;
            drop(permit); // Release the permit
            if let Err(e) = res {
                eprintln!("Error downloading image: {:?}", e);
            }
        });
        handles.push(handle);
    }
    for handle in handles {
        handle.await.unwrap();
    }
}

impl Cdn {
    pub async fn fetch() -> Result<Self, reqwest::Error> {
        let res = reqwest::get(format!("{CDN_URL}/{DATE}/all.json")).await?;
        // println!("{:#?}", res);
        let spec = res.json::<Self>().await?;

        // println!("{:#?}", spec);
        Ok(spec)
    }

    pub fn find_urls(&self) -> Vec<String> {
        // find any key called "url" in the json recursively
        let mut urls = vec![];
        extract_urls(&self.inner, &mut urls);
        urls
    }
}

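// Note: the exact schema of all.json isn't modeled here on purpose. `Cdn` just
// captures the whole document as a `serde_json::Value` (via `#[serde(flatten)]`),
// and `find_urls` walks it for any nested string field named "url", so schema
// changes on the CDN side shouldn't break this path.
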
fn extract_urls(element: &Value, urls: &mut Vec<String>) {
    match element {
        Value::Object(map) => {
            map.iter().for_each(|(key, value)| {
                if key == "url" {
                    if let Some(url) = value.as_str() {
                        urls.push(url.to_string());
                    }
                } else {
                    extract_urls(value, urls);
                }
            });
        }
        Value::Array(arr) => {
            arr.iter().for_each(|item| extract_urls(item, urls));
        }
        _ => {}
    }
}

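// A minimal illustrative test of the recursive extraction above. The JSON
// fragment is invented for the test; it only mimics the rough shape of the
// CDN payload (nested objects/arrays with "url" string fields).
#[cfg(test)]
mod extract_urls_tests {
    use super::*;

    #[test]
    fn collects_nested_url_values() {
        let doc = serde_json::json!({
            "collections": [
                { "title": "example", "url": "https://example.com/a.jpg" },
                { "nested": { "url": "https://example.com/b.jpg" } }
            ]
        });
        let mut urls = vec![];
        extract_urls(&doc, &mut urls);
        assert_eq!(
            urls,
            vec!["https://example.com/a.jpg", "https://example.com/b.jpg"]
        );
    }
}
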
--------------------------------------------------------------------------------
/src/verbose.rs:
--------------------------------------------------------------------------------
use serde::Deserialize;
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::Semaphore;

use crate::{DATE, DOWNLOADS_DIR};

const SPEC_URL: &str = "https://storage.googleapis.com/panels-api/data";

pub async fn download_verbose() -> Result<(), Box<dyn std::error::Error>> {
    let spec = Spec::fetch().await?;
    let repos = spec.media.iterate_all();
    println!("{:#?}", repos);

    println!("Iterating through repos...");

    let repos_iter = repos.iter().map(|repo| Repo::new(repo)).collect::<Vec<_>>();

    let repos_iter = futures::future::join_all(repos_iter)
        .await
        .into_iter()
        .filter_map(Result::ok)
        .collect::<Vec<_>>();

    let filter_previews =
        std::env::var("FILTER_PREVIEWS").unwrap_or_else(|_| "false".to_string()) == "true";

    let images: Vec<_> = repos_iter
        .iter()
        .flat_map(|repo| {
            repo.data.iter().flat_map(move |(id, image)| {
                image.image.iter().filter_map(move |(form_factor, url)| {
                    if !filter_previews || form_factor == "dhd" {
                        Some(ImageDownload {
                            id: id.clone(),
                            repo_id: repo.repo.clone(),
                            form_factor: form_factor.clone(),
                            url: url.clone(),
                        })
                    } else {
                        None
                    }
                })
            })
        })
        .collect();

    // println!("{:#?}", images);

    download_images_flat(images).await;

    Ok(())
}

#[derive(Deserialize, Debug)]
pub struct Spec {
    // content: String,
    // search: String,
    pub media: PanelMedia,
}

impl Spec {
    pub async fn fetch() -> Result<Self, reqwest::Error> {
        let res = reqwest::get(format!("{SPEC_URL}/{DATE}/spec.json")).await?;
        let spec = res.json::<Self>().await?;
        Ok(spec)
    }
}

#[derive(Deserialize, Debug)]
pub struct PanelMedia {
    pub root: String,
    pub p: Vec<String>,
    pub b: Vec<String>,
}

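// For reference, `Spec` expects spec.json to look roughly like this
// (field values are illustrative, not actual spec data):
//
// {
//   "media": {
//     "root": "https://.../media",
//     "p": ["p1", "p2", ...],
//     "b": ["b1", "b2", ...]
//   }
// }
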
impl PanelMedia {
    /// Build every repo URL as the cross product of `p` and `b`: `{root}-{p}-{b}`
    pub fn iterate_all(&self) -> Vec<String> {
        self.p
            .iter()
            .flat_map(|p| {
                self.b
                    .iter()
                    .map(move |b| format!("{root}-{p}-{b}", root = self.root, p = p, b = b))
            })
            .collect()
    }
}

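// An illustrative check of the cross-product construction above, using made-up
// root/p/b values (the real ones come from spec.json):
#[cfg(test)]
mod iterate_all_tests {
    use super::*;

    #[test]
    fn builds_every_root_p_b_combination() {
        let media = PanelMedia {
            root: "https://example.com/media".to_string(),
            p: vec!["p1".to_string(), "p2".to_string()],
            b: vec!["b1".to_string()],
        };
        assert_eq!(
            media.iterate_all(),
            vec![
                "https://example.com/media-p1-b1",
                "https://example.com/media-p2-b1"
            ]
        );
    }
}
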
#[derive(Deserialize, Debug)]
pub struct Repo {
    #[serde(skip_deserializing)]
    repo: String,
    // keyed by image id; the value type (called `Image` here, name assumed) holds
    // the per-form-factor URL map described in the comment below
    data: HashMap<String, Image>,
}

impl Repo {
    pub async fn new(repo: &str) -> Result<Self, reqwest::Error> {
        println!("Fetching repo {}", repo);
        let res = reqwest::get(repo.to_string()).await?;

        // the repo id is the last path segment of the repo URL
        let repo_url_parsed = url::Url::parse(repo).unwrap();
        let repo = repo_url_parsed.path_segments().unwrap().last().unwrap();

        // println!("Got response: {:#?}", res);

        let panels = res.json::<Self>().await?;
        Ok(Self {
            repo: repo.to_string(),
            data: panels.data,
        })
    }
}

// The image is basically this:
// {
//   "<id>": {
//     "