├── .dockerignore ├── icon.png ├── web-app ├── images │ └── hide.png ├── fonts │ └── Inter-Regular.ttf ├── index.html ├── style.css └── script.js ├── .github ├── readme │ ├── banner.png │ └── screenshot.jpg └── workflows │ ├── scripts │ ├── upload-asset-to-release.sh │ ├── prep-build-env.sh │ └── translate-arch-to-rust-tripple.sh │ ├── scheduled-security-audit.yaml │ └── build-image.yaml ├── .gitignore ├── docker-compose.yaml ├── renovate.json ├── .releaserc ├── src ├── config.rs ├── utils.rs ├── web_app_endpoint.rs ├── config_endpoint.rs ├── weather_endpoint.rs ├── weather_processor.rs ├── resource_processor.rs ├── scheduler.rs ├── image_processor.rs ├── resource_processor_test.rs ├── integration_test_config_api.rs ├── integration_test_weather_api.rs ├── resource_reader.rs ├── exif_reader.rs ├── main.rs ├── geo_location.rs ├── filesystem_client.rs ├── resource_reader_test.rs ├── resource_endpoint.rs ├── resource_store.rs └── integration_test_resources_api.rs ├── .container └── stage-arch-bin.sh ├── LICENSE ├── Cargo.toml ├── Containerfile └── README.md /.dockerignore: -------------------------------------------------------------------------------- 1 | **/target/*/build 2 | **/target/*/deps -------------------------------------------------------------------------------- /icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RouHim/this-week-in-past/HEAD/icon.png -------------------------------------------------------------------------------- /web-app/images/hide.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RouHim/this-week-in-past/HEAD/web-app/images/hide.png -------------------------------------------------------------------------------- /.github/readme/banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RouHim/this-week-in-past/HEAD/.github/readme/banner.png -------------------------------------------------------------------------------- /.github/readme/screenshot.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RouHim/this-week-in-past/HEAD/.github/readme/screenshot.jpg -------------------------------------------------------------------------------- /web-app/fonts/Inter-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/RouHim/this-week-in-past/HEAD/web-app/fonts/Inter-Regular.ttf -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | .idea 3 | cache 4 | data 5 | test.sh 6 | test-image.sh 7 | index-* 8 | content-* 9 | *.db 10 | .env 11 | this-week-in-past 12 | flamegraph* 13 | perf.data* -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | 2 | services: 3 | this-week-in-past: 4 | image: rouhim/this-week-in-past 5 | build: 6 | dockerfile: Containerfile 7 | context: . 8 | volumes: 9 | - ~/Pictures/:/resources:ro # should be read only 10 | ports: 11 | - "8080:8080" -------------------------------------------------------------------------------- /renovate.json: -------------------------------------------------------------------------------- 1 | { 2 | "minimumReleaseAge": "7 days", 3 | "packageRules": [ 4 | { 5 | "matchUpdateTypes": [ 6 | "major", 7 | "minor", 8 | "patch", 9 | "pin", 10 | "digest" 11 | ], 12 | "automerge": true, 13 | "automergeType": "branch" 14 | } 15 | ] 16 | } 17 | -------------------------------------------------------------------------------- /.releaserc: -------------------------------------------------------------------------------- 1 | { 2 | "branches": [ 3 | "main" 4 | ], 5 | "tagFormat": "${version}", 6 | "plugins": [ 7 | ["@semantic-release/commit-analyzer", { 8 | "releaseRules": [ 9 | {"type": "chore", "release": "patch"} 10 | ] 11 | }], 12 | "@semantic-release/release-notes-generator", 13 | "@semantic-release/changelog", 14 | "@semantic-release/git", 15 | "@semantic-release/github" 16 | ] 17 | } -------------------------------------------------------------------------------- /src/config.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | 3 | pub fn get_slideshow_interval_value() -> usize { 4 | env::var("SLIDESHOW_INTERVAL") 5 | .unwrap_or_else(|_| "30".to_string()) 6 | .parse() 7 | .unwrap_or(30) 8 | } 9 | 10 | pub fn get_refresh_interval_value() -> usize { 11 | env::var("REFRESH_INTERVAL") 12 | .unwrap_or_else(|_| "360".to_string()) 13 | .parse() 14 | .unwrap_or(360) 15 | } 16 | 17 | pub fn get_weather_unit() -> String { 18 | env::var("WEATHER_UNIT").unwrap_or_else(|_| "metric".to_string()) 19 | } 20 | -------------------------------------------------------------------------------- /.github/workflows/scripts/upload-asset-to-release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/env bash 2 | set -e 3 | 4 | GITHUB_TOKEN=$1 5 | BIN_PATH=$2 6 | NAME=$3 7 | echo "Uploading $BIN_PATH to $NAME" 8 | RELEASE_ID=$(curl -s https://api.github.com/repos/RouHim/this-week-in-past/releases/latest | jq -r '.id' ) 9 | UPLOAD_URL="https://uploads.github.com/repos/RouHim/this-week-in-past/releases/${RELEASE_ID}/assets?name=${NAME}" 10 | curl -X POST \ 11 | -H "Content-Type: $(file -b --mime-type "$BIN_PATH")" \ 12 | -H "Authorization: token ${GITHUB_TOKEN}"\ 13 | -T "${BIN_PATH}" \ 14 | "${UPLOAD_URL}" -------------------------------------------------------------------------------- /src/utils.rs: -------------------------------------------------------------------------------- 1 | use chrono::{DateTime, NaiveDateTime}; 2 | use std::time::{SystemTime, UNIX_EPOCH}; 3 | 4 | /// Converts the type `SystemTime` to `NaiveDateTime` 5 | pub fn to_date_time(system_time: SystemTime) -> NaiveDateTime { 6 | DateTime::from_timestamp( 7 | system_time 8 | .duration_since(UNIX_EPOCH) 9 | .unwrap_or(std::time::Duration::new(0, 0)) 10 | .as_secs() as i64, 11 | 0, 12 | ) 13 | .unwrap() 14 | .naive_utc() 15 | } 16 | 17 | /// Returns a md5 string based on a given string 18 | pub fn md5(string: &str) -> String { 19 | format!("{:x}", md5::compute(string.as_bytes())) 20 | } 21 | -------------------------------------------------------------------------------- /.github/workflows/scripts/prep-build-env.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Description: 4 | # Provides a function to build a static binary for the specified cpu arch. 5 | # This script utilizes this project: https://github.com/rust-cross/rust-musl-cross 6 | # 7 | # Parameter: 8 | # $1 - CPU arch to build. Check all available docker image tags here: https://github.com/rust-cross/rust-musl-cross 9 | # 10 | # How to use: 11 | # Just source this file and then run: build-rust-static-bin 12 | # 13 | # Example: 14 | # build-rust-static-bin aarch64-musl 15 | # 16 | # # # # 17 | 18 | function build-rust-static-bin() { 19 | echo "Building arch: $1" 20 | docker run --rm -e CARGO_NET_GIT_FETCH_WITH_CLI=true -v "$(pwd)":/home/rust/src messense/rust-musl-cross:"${1}" cargo build --release 21 | } 22 | -------------------------------------------------------------------------------- /.github/workflows/scripts/translate-arch-to-rust-tripple.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Description: 4 | # Translates the current cpu architecture to a rust triple. 5 | # 6 | # # # # 7 | 8 | # Get input string 9 | input_string=$1 10 | 11 | # Convert the input string to a rust target triple 12 | function to_triple() { 13 | case $1 in 14 | "x86_64-musl") echo "x86_64-unknown-linux-musl";; 15 | "aarch64-musl") echo "aarch64-unknown-linux-musl";; 16 | "armv7-musleabihf") echo "armv7-unknown-linux-musleabihf";; 17 | "arm-musleabihf") echo "arm-unknown-linux-musleabihf";; 18 | *) echo "Error: Unsupported input string" && return 1;; 19 | esac 20 | } 21 | 22 | # Convert input string 23 | output=$(to_triple "$input_string") 24 | 25 | # check if the function returned an error 26 | if [ $? -eq 1 ]; then 27 | exit 1 28 | fi 29 | 30 | # Print the output 31 | echo "$output" 32 | -------------------------------------------------------------------------------- /.container/stage-arch-bin.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # 3 | # Description: 4 | # Stages the specified rust binary for the current cpu architecture to the current directory. 5 | # 6 | # Parameter: 7 | # $1 - Binary file name to stage 8 | # 9 | # Example: 10 | # ./stage-arch-bin.sh this-week-in-past 11 | # 12 | # # # # 13 | 14 | if [ -n "$1" ]; then 15 | echo "Staging binary file arch: $1" 16 | else 17 | echo "Binary file name to stage not supplied! First parameter is required." 18 | exit 1 19 | fi 20 | 21 | CURRENT_ARCH=$(uname -m) 22 | 23 | if [ "$CURRENT_ARCH" = "armv7l" ]; then 24 | CURRENT_ARCH="armv7" 25 | fi 26 | 27 | echo "Current arch is: $CURRENT_ARCH" 28 | 29 | find . -wholename "*release/${1}" -type f | while read arch_binary; do 30 | echo "Checking: $arch_binary" 31 | if [[ "$arch_binary" = *"$CURRENT_ARCH"* ]]; then 32 | echo " -> Binary for this cpu arch is: $arch_binary" 33 | file "$arch_binary" 34 | cp "$arch_binary" "${1}" 35 | realpath "${1}" 36 | chmod +x "${1}" 37 | exit 0 38 | else 39 | echo " -> No match" 40 | fi 41 | done 42 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | ----------- 3 | 4 | Copyright (c) 2022 Rouven Himmelstein 5 | Permission is hereby granted, free of charge, to any person 6 | obtaining a copy of this software and associated documentation 7 | files (the "Software"), to deal in the Software without 8 | restriction, including without limitation the rights to use, 9 | copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the 11 | Software is furnished to do so, subject to the following 12 | conditions: 13 | 14 | The above copyright notice and this permission notice shall be 15 | included in all copies or substantial portions of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 18 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 19 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 20 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 21 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 22 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 23 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 24 | OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /src/web_app_endpoint.rs: -------------------------------------------------------------------------------- 1 | use actix_web::get; 2 | use actix_web::HttpResponse; 3 | 4 | #[get("/")] 5 | pub async fn index() -> HttpResponse { 6 | let html = include_str!("../web-app/index.html"); 7 | HttpResponse::Ok().content_type("text/html").body(html) 8 | } 9 | 10 | #[get("/style.css")] 11 | pub async fn style_css() -> HttpResponse { 12 | let css = include_str!("../web-app/style.css"); 13 | HttpResponse::Ok().content_type("text/css").body(css) 14 | } 15 | 16 | #[get("/script.js")] 17 | pub async fn script_js() -> HttpResponse { 18 | let js = include_str!("../web-app/script.js"); 19 | HttpResponse::Ok().content_type("text/javascript").body(js) 20 | } 21 | 22 | #[get("/images/hide.png")] 23 | pub async fn hide_png() -> HttpResponse { 24 | let hide_icon: &[u8] = include_bytes!("../web-app/images/hide.png"); 25 | HttpResponse::Ok().content_type("image/png").body(hide_icon) 26 | } 27 | 28 | #[get("/icon.png")] 29 | pub async fn icon_png() -> HttpResponse { 30 | let icon: &[u8] = include_bytes!("../icon.png"); 31 | HttpResponse::Ok().content_type("image/png").body(icon) 32 | } 33 | 34 | #[get("/font.ttf")] 35 | pub async fn font() -> HttpResponse { 36 | let font: &[u8] = include_bytes!("../web-app/fonts/Inter-Regular.ttf"); 37 | HttpResponse::Ok().content_type("font/ttf").body(font) 38 | } 39 | -------------------------------------------------------------------------------- /src/config_endpoint.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | 3 | use crate::config; 4 | use actix_web::get; 5 | use actix_web::HttpResponse; 6 | 7 | #[get("interval/slideshow")] 8 | pub async fn get_slideshow_interval() -> HttpResponse { 9 | HttpResponse::Ok() 10 | .content_type("plain/text") 11 | .body(config::get_slideshow_interval_value().to_string()) 12 | } 13 | 14 | #[get("interval/refresh")] 15 | pub async fn get_refresh_interval() -> HttpResponse { 16 | HttpResponse::Ok() 17 | .content_type("plain/text") 18 | .body(config::get_refresh_interval_value().to_string()) 19 | } 20 | 21 | #[get("show-hide-button")] 22 | pub async fn get_hide_button_enabled() -> HttpResponse { 23 | HttpResponse::Ok() 24 | .content_type("plain/text") 25 | .body(env::var("SHOW_HIDE_BUTTON").unwrap_or_else(|_| "false".to_string())) 26 | } 27 | 28 | #[get("random-slideshow")] 29 | pub async fn get_random_slideshow_enabled() -> HttpResponse { 30 | HttpResponse::Ok() 31 | .content_type("plain/text") 32 | .body(env::var("RANDOM_SLIDESHOW").unwrap_or_else(|_| "false".to_string())) 33 | } 34 | 35 | #[get("preload-images")] 36 | pub async fn get_preload_images_enabled() -> HttpResponse { 37 | HttpResponse::Ok() 38 | .content_type("plain/text") 39 | .body(env::var("PRELOAD_IMAGES").unwrap_or_else(|_| "false".to_string())) 40 | } 41 | -------------------------------------------------------------------------------- /web-app/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | This week in past 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 |
16 |
17 | 18 | 19 |
20 |
21 | 22 |
23 |
24 |
25 |
26 |
27 |
28 |
29 |
30 |
31 |
32 |
33 | 34 | 35 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "this-week-in-past" 3 | version = "0.0.0" 4 | edition = "2021" 5 | license = "MIT" 6 | authors = ["Rouven Himmelstein"] 7 | description = "Aggregate images taken this week, from previous years and presents them on a web page with slideshow." 8 | readme = "README.md" 9 | homepage = "https://github.com/RouHim/this-week-in-past" 10 | repository = "https://github.com/RouHim/this-week-in-past" 11 | keywords = ["image", "gallery", "slideshow", "week"] 12 | 13 | [dependencies] 14 | lazy_static = "1.4" 15 | serde = { version = "1.0.200", features = ["derive"] } 16 | serde_json = "1.0.116" 17 | chrono = { version = "0.4.38", features = ["serde"] } 18 | md5 = "0.8" 19 | ureq = "3.0.0" 20 | actix-web = { version = "4.5.1", features = ["rustls"] } 21 | clokwerk = "0.4" 22 | kamadak-exif = "0.6" 23 | rayon = "1.10" 24 | regex = "1.10.4" 25 | image = "0.25.1" 26 | log = "0.4.21" 27 | env_logger = "0.11.3" 28 | rand = "0.9.0" 29 | mime_guess = "2.0.5" 30 | r2d2 = "0.8.10" 31 | rusqlite = { version = "0.37", features = ["bundled"] } 32 | r2d2_sqlite = "0.31" 33 | 34 | [target.'cfg(target_env = "musl")'.dependencies] 35 | mimalloc = "0.1.43" 36 | 37 | [dev-dependencies] 38 | assertor = "0.0.4" 39 | pretty_assertions = "1.4" 40 | actix-rt = "2.9" 41 | ureq = "3.0.0" 42 | 43 | [profile.release] 44 | panic = "abort" # Strip expensive panic clean-up logic 45 | codegen-units = 1 # Compile crates one after another so the compiler can optimize better 46 | lto = true # Enables link to optimizations 47 | strip = true # Strip debug symbols 48 | -------------------------------------------------------------------------------- /Containerfile: -------------------------------------------------------------------------------- 1 | # # # # # # # # # # # # # # # # # # # # 2 | # Builder 3 | # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # 4 | FROM docker.io/alpine AS builder 5 | 6 | # Create an empty directory that will be used in the final image 7 | RUN mkdir "/empty_dir" 8 | 9 | # Install required packages for the staging script 10 | RUN apk update && apk add --no-cache bash file 11 | 12 | # Copy all archs into this container 13 | RUN mkdir /work 14 | WORKDIR /work 15 | COPY target . 16 | COPY .container/stage-arch-bin.sh /work 17 | 18 | # This will copy the cpu arch corresponding binary to /target/this-week-in-past 19 | RUN bash stage-arch-bin.sh this-week-in-past 20 | 21 | # # # # # # # # # # # # # # # # # # # # 22 | # Run image 23 | # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # 24 | FROM scratch 25 | 26 | ENV USER="1337" 27 | ENV RESOURCE_PATHS="/resources" 28 | ENV DATA_FOLDER="/data" 29 | ENV RUST_LOG="info" 30 | 31 | # For performance reasons write data to docker volume instead of containers writeable fs layer 32 | VOLUME $DATA_FOLDER 33 | 34 | # Copy the empty directory as data and temp folder 35 | COPY --chown=$USER:$USER --from=builder /empty_dir $DATA_FOLDER 36 | COPY --chown=$USER:$USER --from=builder /empty_dir /tmp 37 | 38 | # Copy the built application from the build image to the run-image 39 | COPY --chown=$USER:$USER --from=builder /work/this-week-in-past /this-week-in-past 40 | 41 | EXPOSE 8080 42 | USER $USER 43 | 44 | ENTRYPOINT ["/this-week-in-past"] 45 | -------------------------------------------------------------------------------- /src/weather_endpoint.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | 3 | use actix_web::get; 4 | use actix_web::HttpResponse; 5 | 6 | use crate::{config, weather_processor}; 7 | 8 | #[get("")] 9 | pub async fn get_is_weather_enabled() -> HttpResponse { 10 | let is_weather_enabled = env::var("WEATHER_ENABLED").unwrap_or_else(|_| "false".to_string()); 11 | 12 | HttpResponse::Ok() 13 | .content_type("plain/text") 14 | .body(is_weather_enabled) 15 | } 16 | 17 | #[get("current")] 18 | pub async fn get_current_weather() -> HttpResponse { 19 | let weather_data = weather_processor::get_current_weather().await; 20 | 21 | if let Some(weather_data) = weather_data { 22 | HttpResponse::Ok() 23 | .content_type("application/json") 24 | .body(weather_data) 25 | } else { 26 | HttpResponse::InternalServerError().finish() 27 | } 28 | } 29 | 30 | #[get("homeassistant")] 31 | pub async fn get_is_home_assistant_enabled() -> HttpResponse { 32 | let is_home_assistant_enabled = env::var("HOME_ASSISTANT_BASE_URL").is_ok() 33 | && env::var("HOME_ASSISTANT_API_TOKEN").is_ok() 34 | && env::var("HOME_ASSISTANT_ENTITY_ID").is_ok(); 35 | 36 | HttpResponse::Ok() 37 | .content_type("plain/text") 38 | .body(is_home_assistant_enabled.to_string()) 39 | } 40 | 41 | #[get("homeassistant/temperature")] 42 | pub async fn get_home_assistant_entity_data() -> HttpResponse { 43 | let weather_data = weather_processor::get_home_assistant_data().await; 44 | 45 | if let Some(weather_data) = weather_data { 46 | HttpResponse::Ok() 47 | .content_type("plain/text") 48 | .body(weather_data) 49 | } else { 50 | HttpResponse::InternalServerError().finish() 51 | } 52 | } 53 | 54 | #[get("unit")] 55 | pub async fn get_weather_unit() -> HttpResponse { 56 | HttpResponse::Ok() 57 | .content_type("plain/text") 58 | .body(config::get_weather_unit()) 59 | } 60 | -------------------------------------------------------------------------------- /src/weather_processor.rs: -------------------------------------------------------------------------------- 1 | use crate::config; 2 | use std::env; 3 | 4 | /// Returns the current weather data provided by OpenWeatherMap 5 | /// The data is selected by the configured location 6 | /// Returns None if the data could not be retrieved or the weather json data 7 | pub async fn get_current_weather() -> Option { 8 | if env::var("OPEN_WEATHER_MAP_API_KEY").is_err() { 9 | return None; 10 | } 11 | 12 | let api_key: String = env::var("OPEN_WEATHER_MAP_API_KEY").unwrap(); 13 | let city: String = env::var("WEATHER_LOCATION").unwrap_or_else(|_| "Berlin".to_string()); 14 | let units: String = config::get_weather_unit(); 15 | let language: String = env::var("WEATHER_LANGUAGE").unwrap_or_else(|_| "en".to_string()); 16 | let response = ureq::get(format!( 17 | "https://api.openweathermap.org/data/2.5/weather?q={city}&appid={api_key}&units={units}&lang={language}" 18 | ).as_str()).call(); 19 | 20 | if let Ok(mut response) = response { 21 | response.body_mut().read_to_string().ok() 22 | } else { 23 | None 24 | } 25 | } 26 | 27 | /// Returns the current weather data provided by Home Assistant 28 | /// if the Home Assistant integration is enabled 29 | /// and entity_id is found 30 | pub async fn get_home_assistant_data() -> Option { 31 | let base_url = env::var("HOME_ASSISTANT_BASE_URL").ok(); 32 | let api_token = env::var("HOME_ASSISTANT_API_TOKEN").ok(); 33 | let entity_id = env::var("HOME_ASSISTANT_ENTITY_ID").ok(); 34 | 35 | if base_url.is_none() || api_token.is_none() || entity_id.is_none() { 36 | return None; 37 | } 38 | 39 | let response = 40 | ureq::get(format!("{}/api/states/{}", base_url.unwrap(), entity_id.unwrap()).as_str()) 41 | .header( 42 | "Authorization", 43 | format!("Bearer {}", api_token.unwrap()).as_str(), 44 | ) 45 | .call(); 46 | 47 | if let Ok(mut response) = response { 48 | response.body_mut().read_to_string().ok() 49 | } else { 50 | None 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/resource_processor.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | 3 | use crate::geo_location; 4 | use crate::resource_reader::ImageResource; 5 | use crate::resource_store::ResourceStore; 6 | 7 | /// Builds the display value for the specified resource 8 | /// The display value contains the date and location of a resource 9 | pub async fn build_display_value( 10 | resource: ImageResource, 11 | resource_store: &ResourceStore, 12 | ) -> String { 13 | let mut display_value: String = String::new(); 14 | 15 | // Append taken date 16 | if let Some(taken_date) = resource.taken { 17 | let date_format: String = 18 | env::var("DATE_FORMAT").unwrap_or_else(|_| "%d.%m.%Y".to_string()); 19 | display_value.push_str( 20 | taken_date 21 | .date() 22 | .format(date_format.as_str()) 23 | .to_string() 24 | .as_str(), 25 | ); 26 | }; 27 | 28 | // Append city name 29 | let city_name = get_city_name(&resource, resource_store).await; 30 | if let Some(city_name) = city_name { 31 | display_value.push_str(", "); 32 | display_value.push_str(city_name.as_str()); 33 | } 34 | 35 | display_value.trim().to_string() 36 | } 37 | 38 | /// Returns the city name for the specified resource 39 | /// The city name is taken from the cache, if available 40 | /// If not, the city name is taken from the geo location service 41 | async fn get_city_name(resource: &ImageResource, resource_store: &ResourceStore) -> Option { 42 | let resource_location = resource.location?; 43 | let resource_location_string = resource_location.to_string(); 44 | 45 | // Check if cache contains resource location 46 | if resource_store.location_exists(resource_location_string.as_str()) { 47 | resource_store.get_location(resource_location_string.as_str()) 48 | } else { 49 | // Get city name 50 | let city_name = geo_location::resolve_city_name(resource_location).await; 51 | 52 | if let Some(city_name) = &city_name { 53 | // Write to cache 54 | resource_store.add_location(resource_location_string, city_name.clone()); 55 | } 56 | 57 | city_name 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /src/scheduler.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::thread; 3 | 4 | use std::time::{Duration, Instant}; 5 | 6 | use clokwerk::{Job, ScheduleHandle, Scheduler, TimeUnits}; 7 | use log::info; 8 | 9 | use crate::resource_store::ResourceStore; 10 | use crate::ResourceReader; 11 | 12 | /// Schedules the cache indexer at every day at midnight 13 | pub fn schedule_indexer( 14 | resource_reader: ResourceReader, 15 | resource_store: ResourceStore, 16 | ) -> ScheduleHandle { 17 | let mut scheduler = Scheduler::new(); 18 | 19 | let resource_reader_clone = resource_reader.clone(); 20 | let resource_store_clone = resource_store.clone(); 21 | 22 | // Fetch resources at midnight 23 | scheduler 24 | .every(1.day()) 25 | .at("00:05") 26 | .run(move || index_resources(resource_reader.clone(), resource_store.clone())); 27 | 28 | // For the first time on application start 29 | thread::spawn(move || { 30 | index_resources(resource_reader_clone, resource_store_clone); 31 | }); 32 | 33 | // Check the thread every minute 34 | scheduler.watch_thread(Duration::from_secs(60)) 35 | } 36 | 37 | /// Fetches the resources from the configures paths and writes them to the resource store 38 | pub fn index_resources(resource_reader: ResourceReader, resource_store: ResourceStore) { 39 | let s = Instant::now(); 40 | info!("Begin resource indexing"); 41 | 42 | info!("Indexing resources, this may take some time depending on the amount of resources..."); 43 | let resources = resource_reader.read_all(); 44 | 45 | info!("Found {} resources", resources.len()); 46 | let map: HashMap = resources 47 | .iter() 48 | .map(|resource| { 49 | ( 50 | resource.id.clone(), 51 | serde_json::to_string(resource).unwrap(), 52 | ) 53 | }) 54 | .collect(); 55 | 56 | info!("Purging resources store"); 57 | resource_store.clear_resources(); 58 | 59 | info!("Cleanup cache"); 60 | resource_store.clear_data_cache(); 61 | 62 | info!("Inserting new resources"); 63 | resource_store.add_resources(map); 64 | 65 | info!("Cleanup database"); 66 | resource_store.vacuum(); 67 | 68 | info!("Job done in {:?}!", s.elapsed()); 69 | } 70 | -------------------------------------------------------------------------------- /src/image_processor.rs: -------------------------------------------------------------------------------- 1 | use std::io::Cursor; 2 | 3 | use image::imageops::FilterType; 4 | use image::ImageReader; 5 | use log::error; 6 | use serde::{Deserialize, Serialize}; 7 | 8 | /// Represents the orientation of an image in two dimensions 9 | /// rotation: 0, 90, 180 or 270 10 | /// mirror_vertically: true, if the image is mirrored vertically 11 | #[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq, Eq)] 12 | pub struct ImageOrientation { 13 | pub rotation: u16, 14 | pub mirror_vertically: bool, 15 | } 16 | 17 | /// Adjusts the image to fit optimal to the browser resolution 18 | /// Also fixes the orientation delivered by the exif image rotation 19 | /// src: 20 | pub fn adjust_image( 21 | resource_path: String, 22 | resource_data: Vec, 23 | display_width: u32, 24 | display_height: u32, 25 | image_orientation: Option, 26 | ) -> Option> { 27 | let read_result = ImageReader::new(Cursor::new(&resource_data)) 28 | .with_guessed_format() 29 | .unwrap() 30 | .decode(); 31 | 32 | if read_result.is_err() { 33 | error!("{resource_path} | Error: {}", read_result.unwrap_err()); 34 | return None; 35 | } 36 | 37 | // Resize the image to the needed display size 38 | let mut image = read_result.unwrap(); 39 | 40 | // Rotate or flip the image if needed 41 | image = if let Some(orientation) = image_orientation { 42 | let rotated = match orientation.rotation { 43 | 90 => image.rotate90(), 44 | 180 => image.rotate180(), 45 | 270 => image.rotate270(), 46 | _ => image, 47 | }; 48 | 49 | if orientation.mirror_vertically { 50 | rotated.flipv() 51 | } else { 52 | rotated 53 | } 54 | } else { 55 | image 56 | }; 57 | 58 | image = if display_height > 0 && display_width > 0 { 59 | image.resize(display_width, display_height, FilterType::Triangle) 60 | } else { 61 | image 62 | }; 63 | 64 | // Write the image to a buffer 65 | let mut bytes: Vec = Vec::new(); 66 | image 67 | .write_to(&mut Cursor::new(&mut bytes), image::ImageFormat::Png) 68 | .unwrap(); 69 | Some(bytes) 70 | } 71 | -------------------------------------------------------------------------------- /src/resource_processor_test.rs: -------------------------------------------------------------------------------- 1 | use assertor::*; 2 | 3 | use crate::geo_location; 4 | use crate::geo_location::GeoLocation; 5 | 6 | #[actix_rt::test] 7 | async fn resolve_koblenz() { 8 | // GIVEN are the geo coordinates for Koblenz 9 | let geo_location: GeoLocation = GeoLocation { 10 | latitude: 50.35357, 11 | longitude: 7.57883, 12 | }; 13 | 14 | // WHEN resolving the city name 15 | let city_name = geo_location::resolve_city_name(geo_location).await; 16 | 17 | // THEN the resolved city name should be Koblenz 18 | assert_that!(city_name).is_equal_to(Some("Koblenz".to_string())); 19 | } 20 | 21 | #[actix_rt::test] 22 | async fn resolve_amsterdam() { 23 | // GIVEN are the geo coordinates for Amsterdam 24 | let geo_location: GeoLocation = GeoLocation { 25 | latitude: 52.37403, 26 | longitude: 4.88969, 27 | }; 28 | 29 | // WHEN resolving the city name 30 | let city_name = geo_location::resolve_city_name(geo_location).await; 31 | 32 | // THEN the resolved city name should be Amsterdam 33 | assert_that!(city_name).is_equal_to(Some("Amsterdam".to_string())); 34 | } 35 | 36 | #[actix_rt::test] 37 | async fn resolve_kottenheim() { 38 | // GIVEN are the geo coordinates for Kottenheim 39 | let geo_location: GeoLocation = GeoLocation { 40 | latitude: 50.34604, 41 | longitude: 7.25359, 42 | }; 43 | 44 | // WHEN resolving the city name 45 | let city_name = geo_location::resolve_city_name(geo_location).await; 46 | 47 | // THEN the resolved city name should be Kottenheim 48 | assert_that!(city_name).is_equal_to(Some("Kottenheim".to_string())); 49 | } 50 | 51 | #[actix_rt::test] 52 | async fn resolve_negative_dms() { 53 | // GIVEN are the degree minutes seconds coordinates for San Bartolomé de Tirajana 54 | let lat = "27 deg 45 min 22.22 sec"; 55 | let long = "15 deg 34 min 13.76 sec"; 56 | let lat_ref = "N"; 57 | let long_ref = "W"; 58 | 59 | // WHEN resolving the city name 60 | let dms = geo_location::from_degrees_minutes_seconds(lat, long, lat_ref, long_ref); 61 | 62 | // THEN the resolved city name should be San Bartolomé de Tirajana 63 | let city_name = geo_location::resolve_city_name(dms.unwrap()).await; 64 | assert_that!(city_name).is_equal_to(Some("San Bartolomé de Tirajana".to_string())); 65 | } 66 | 67 | #[actix_rt::test] 68 | async fn resolve_invalid_data() { 69 | // GIVEN are invalid geo coordinates 70 | let geo_location: GeoLocation = GeoLocation { 71 | latitude: -100.0, 72 | longitude: -100.0, 73 | }; 74 | 75 | // WHEN resolving the city name 76 | let city_name = geo_location::resolve_city_name(geo_location).await; 77 | 78 | // THEN the resolved city name should be None 79 | assert_that!(city_name).is_equal_to(None); 80 | } 81 | -------------------------------------------------------------------------------- /.github/workflows/scheduled-security-audit.yaml: -------------------------------------------------------------------------------- 1 | name: Security Audits 2 | on: 3 | schedule: 4 | - cron: '0 0 * * *' # every day at midnight 5 | workflow_dispatch: # allow manual execution 6 | 7 | jobs: 8 | image-audit: 9 | name: Audit container image 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Checkout code 13 | uses: actions/checkout@v6 14 | 15 | - name: Run Trivy vulnerability scanner 16 | uses: aquasecurity/trivy-action@master 17 | with: 18 | image-ref: 'docker.io/rouhim/this-week-in-past:latest' 19 | format: 'sarif' 20 | output: 'trivy-results.sarif' 21 | exit-code: '0' 22 | ignore-unfixed: true 23 | severity: 'CRITICAL,HIGH' 24 | 25 | - name: Upload Trivy scan results to GitHub Security tab 26 | uses: github/codeql-action/upload-sarif@v4 27 | with: 28 | sarif_file: 'trivy-results.sarif' 29 | 30 | filesystem-audit: 31 | name: Audit repo filesystem 32 | runs-on: ubuntu-latest 33 | steps: 34 | - name: Checkout code 35 | uses: actions/checkout@v6 36 | 37 | - name: Run Trivy vulnerability scanner 38 | uses: aquasecurity/trivy-action@master 39 | with: 40 | scan-type: 'fs' 41 | format: 'sarif' 42 | output: 'trivy-results.sarif' 43 | exit-code: '0' 44 | ignore-unfixed: true 45 | severity: 'CRITICAL,HIGH' 46 | 47 | - name: Upload Trivy scan results to GitHub Security tab 48 | uses: github/codeql-action/upload-sarif@v4 49 | with: 50 | sarif_file: 'trivy-results.sarif' 51 | 52 | config-audit: 53 | name: Audit config files 54 | runs-on: ubuntu-latest 55 | steps: 56 | - name: Checkout code 57 | uses: actions/checkout@v6 58 | 59 | - name: Run Trivy vulnerability scanner 60 | uses: aquasecurity/trivy-action@master 61 | with: 62 | scan-type: 'config' 63 | hide-progress: false 64 | format: 'sarif' 65 | output: 'trivy-results.sarif' 66 | exit-code: '0' 67 | ignore-unfixed: true 68 | severity: 'CRITICAL,HIGH' 69 | 70 | - name: Upload Trivy scan results to GitHub Security tab 71 | uses: github/codeql-action/upload-sarif@v4 72 | with: 73 | sarif_file: 'trivy-results.sarif' 74 | 75 | check-unused-dependencies: 76 | name: Check for unused deps 77 | runs-on: ubuntu-latest 78 | steps: 79 | - name: Checkout code 80 | uses: actions/checkout@v6 81 | 82 | - name: Install rust toolchain 83 | uses: actions-rs/toolchain@v1 84 | with: 85 | profile: minimal 86 | toolchain: nightly 87 | 88 | - uses: Swatinem/rust-cache@v2 # use rust / cargo caching 89 | with: 90 | cache-on-failure: "true" 91 | 92 | - name: Install cargo-udeps 93 | run: cargo update -p proc-macro2 && cargo install cargo-udeps --locked 94 | 95 | - name: Analyze dependencies 96 | run: cargo +nightly udeps 97 | -------------------------------------------------------------------------------- /src/integration_test_config_api.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use std::{env, fs}; 4 | 5 | use actix_web::dev::{ServiceFactory, ServiceRequest, ServiceResponse}; 6 | use actix_web::{test, web, App, Error}; 7 | use assertor::{assert_that, EqualityAssertion}; 8 | use rand::Rng; 9 | 10 | use crate::{config_endpoint, resource_reader, resource_store, scheduler}; 11 | 12 | const TEST_FOLDER_NAME: &str = "integration_test_config_api"; 13 | 14 | #[actix_web::test] 15 | async fn test_get_random_slideshow() { 16 | // GIVEN is a running this-week-in-past instance 17 | let base_test_dir = create_temp_folder().await; 18 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 19 | 20 | // AND random slideshow is set 21 | let random_slideshow: String = rand::rng().random::().to_string(); 22 | env::set_var("RANDOM_SLIDESHOW", &random_slideshow); 23 | 24 | // WHEN requesting random slideshow 25 | let response: String = String::from_utf8( 26 | test::call_and_read_body( 27 | &app_server, 28 | test::TestRequest::get() 29 | .uri("/api/config/random-slideshow") 30 | .to_request(), 31 | ) 32 | .await 33 | .to_vec(), 34 | ) 35 | .unwrap(); 36 | 37 | // THEN the response should contain the correct interval 38 | assert_that!(response).is_equal_to(&random_slideshow); 39 | 40 | // cleanup 41 | cleanup(&base_test_dir).await; 42 | } 43 | 44 | fn build_app( 45 | base_test_dir: &str, 46 | ) -> App< 47 | impl ServiceFactory< 48 | ServiceRequest, 49 | Config = (), 50 | Response = ServiceResponse, 51 | Error = Error, 52 | InitError = (), 53 | >, 54 | > { 55 | let resource_reader = resource_reader::new(base_test_dir); 56 | let resource_store = resource_store::initialize(base_test_dir); 57 | scheduler::index_resources(resource_reader, resource_store.clone()); 58 | App::new().app_data(web::Data::new(resource_store)).service( 59 | web::scope("/api/config") 60 | .service(config_endpoint::get_slideshow_interval) 61 | .service(config_endpoint::get_refresh_interval) 62 | .service(config_endpoint::get_random_slideshow_enabled), 63 | ) 64 | } 65 | 66 | /// Creates a temp folder with the given name and returns its full path 67 | async fn create_temp_folder() -> PathBuf { 68 | let random_string = rand::rng().random::().to_string(); 69 | let test_dir: PathBuf = env::temp_dir().join(TEST_FOLDER_NAME).join(&random_string); 70 | 71 | if test_dir.exists() { 72 | fs::remove_dir_all(&test_dir).expect("Failed to remove test dir"); 73 | } 74 | 75 | fs::create_dir_all(&test_dir).unwrap(); 76 | 77 | let data_dir = format!("/tmp/cache/{}/{}", &random_string, TEST_FOLDER_NAME); 78 | env::set_var("DATA_FOLDER", &data_dir); 79 | fs::create_dir_all(&data_dir).unwrap(); 80 | 81 | test_dir 82 | } 83 | 84 | /// Removes the test folder after test run 85 | async fn cleanup(test_dir: &PathBuf) { 86 | let _ = fs::remove_dir_all(test_dir); 87 | } 88 | -------------------------------------------------------------------------------- /src/integration_test_weather_api.rs: -------------------------------------------------------------------------------- 1 | use std::path::PathBuf; 2 | 3 | use std::{env, fs}; 4 | 5 | use actix_web::dev::{ServiceFactory, ServiceRequest, ServiceResponse}; 6 | use actix_web::{test, web, App, Error}; 7 | use assertor::{assert_that, StringAssertion}; 8 | use rand::Rng; 9 | 10 | use crate::{resource_reader, resource_store, scheduler, weather_endpoint}; 11 | 12 | const TEST_FOLDER_NAME: &str = "integration_test_weather_api"; 13 | 14 | #[actix_web::test] 15 | async fn test_get_weather_current() { 16 | // GIVEN is a running this-week-in-past instance 17 | let base_test_dir = create_temp_folder().await; 18 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 19 | 20 | // WHEN requesting current weather 21 | let response: String = String::from_utf8( 22 | test::call_and_read_body( 23 | &app_server, 24 | test::TestRequest::get() 25 | .uri("/api/weather/current") 26 | .to_request(), 27 | ) 28 | .await 29 | .to_vec(), 30 | ) 31 | .unwrap(); 32 | 33 | // THEN the response should contain weather data 34 | assert_that!(response).contains("weather"); 35 | 36 | // cleanup 37 | cleanup(&base_test_dir).await; 38 | } 39 | 40 | #[actix_web::test] 41 | async fn test_get_is_weather_enabled() { 42 | // GIVEN is a running this-week-in-past instance 43 | let base_test_dir = create_temp_folder().await; 44 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 45 | 46 | // AND the weather is enabled via env var 47 | env::set_var("WEATHER_ENABLED", "true"); 48 | 49 | // WHEN requesting if weather is enabled 50 | let response: String = String::from_utf8( 51 | test::call_and_read_body( 52 | &app_server, 53 | test::TestRequest::get().uri("/api/weather").to_request(), 54 | ) 55 | .await 56 | .to_vec(), 57 | ) 58 | .unwrap(); 59 | 60 | // THEN the response should return if weather is enabled 61 | assert_that!(response).contains("true"); 62 | 63 | // cleanup 64 | cleanup(&base_test_dir).await; 65 | } 66 | 67 | fn build_app( 68 | base_test_dir: &str, 69 | ) -> App< 70 | impl ServiceFactory< 71 | ServiceRequest, 72 | Config = (), 73 | Response = ServiceResponse, 74 | Error = Error, 75 | InitError = (), 76 | >, 77 | > { 78 | let resource_reader = resource_reader::new(base_test_dir); 79 | let resource_store = resource_store::initialize(base_test_dir); 80 | scheduler::index_resources(resource_reader.clone(), resource_store.clone()); 81 | App::new() 82 | .app_data(web::Data::new(resource_store)) 83 | .app_data(web::Data::new(resource_reader)) 84 | .service( 85 | web::scope("/api/weather") 86 | .service(weather_endpoint::get_is_weather_enabled) 87 | .service(weather_endpoint::get_current_weather), 88 | ) 89 | } 90 | 91 | /// Creates a temp folder with the given name and returns its full path 92 | async fn create_temp_folder() -> PathBuf { 93 | let random_string = rand::rng().random::().to_string(); 94 | let test_dir: PathBuf = env::temp_dir().join(TEST_FOLDER_NAME).join(&random_string); 95 | 96 | if test_dir.exists() { 97 | fs::remove_dir_all(&test_dir).expect("Failed to remove test dir"); 98 | } 99 | 100 | fs::create_dir_all(&test_dir).unwrap(); 101 | 102 | let data_dir = format!("/tmp/cache/{}/{}", &random_string, TEST_FOLDER_NAME); 103 | env::set_var("DATA_FOLDER", &data_dir); 104 | fs::create_dir_all(&data_dir).unwrap(); 105 | 106 | test_dir 107 | } 108 | 109 | /// Removes the test folder after test run 110 | async fn cleanup(test_dir: &PathBuf) { 111 | let _ = fs::remove_dir_all(test_dir); 112 | } 113 | -------------------------------------------------------------------------------- /src/resource_reader.rs: -------------------------------------------------------------------------------- 1 | use std::path::Path; 2 | 3 | use chrono::{Local, NaiveDateTime}; 4 | use exif::Exif; 5 | use rayon::iter::IntoParallelRefIterator; 6 | use rayon::iter::ParallelIterator; 7 | use serde::{Deserialize, Serialize}; 8 | 9 | use crate::geo_location::GeoLocation; 10 | use crate::image_processor::ImageOrientation; 11 | use crate::{exif_reader, filesystem_client, ResourceReader}; 12 | 13 | /// Returns all available resources 14 | impl ResourceReader { 15 | pub fn read_all(&self) -> Vec { 16 | self.local_resource_paths 17 | .par_iter() 18 | .map(|path_str| Path::new(path_str.as_str())) 19 | .flat_map(filesystem_client::read_files_recursive) 20 | .map(|resource| filesystem_client::fill_exif_data(&resource)) 21 | .collect() 22 | } 23 | } 24 | 25 | /// A image resource that is available on the filesystem 26 | #[derive(Serialize, Deserialize, Debug, Clone)] 27 | pub struct ImageResource { 28 | pub id: String, 29 | pub path: String, 30 | pub content_type: String, 31 | pub name: String, 32 | pub content_length: u64, 33 | pub last_modified: NaiveDateTime, 34 | pub taken: Option, 35 | pub location: Option, 36 | pub orientation: Option, 37 | } 38 | 39 | impl ImageResource { 40 | pub fn with_taken_date(&self, taken_date: NaiveDateTime) -> ImageResource { 41 | let mut resource = self.clone(); 42 | resource.taken = Some(taken_date); 43 | resource 44 | } 45 | } 46 | 47 | /// Impl Default for ImageResource 48 | impl Default for ImageResource { 49 | fn default() -> Self { 50 | ImageResource { 51 | id: "".to_string(), 52 | path: "".to_string(), 53 | content_type: "".to_string(), 54 | name: "".to_string(), 55 | content_length: 0, 56 | last_modified: Local::now().naive_local(), 57 | taken: None, 58 | location: None, 59 | orientation: None, 60 | } 61 | } 62 | } 63 | 64 | /// Augments the provided resource with meta information 65 | /// The meta information is extracted from the exif data 66 | /// If the exif data is not available, the meta information is extracted from the gps data 67 | /// If the gps data is not available, the meta information is extracted from the file name 68 | pub fn fill_exif_data(resource: &ImageResource, maybe_exif_data: Option) -> ImageResource { 69 | let mut taken_date = None; 70 | let mut location = None; 71 | let mut orientation = None; 72 | 73 | if let Some(exif_data) = maybe_exif_data { 74 | taken_date = exif_reader::get_exif_date(&exif_data); 75 | location = exif_reader::detect_location(&exif_data); 76 | orientation = exif_reader::detect_orientation(&exif_data); 77 | } 78 | 79 | if taken_date.is_none() { 80 | taken_date = exif_reader::detect_date_by_name(&resource.path); 81 | } 82 | 83 | let mut augmented_resource = resource.clone(); 84 | augmented_resource.taken = taken_date; 85 | augmented_resource.location = location; 86 | augmented_resource.orientation = orientation; 87 | 88 | augmented_resource 89 | } 90 | 91 | /// Instantiates a new resource reader for the given paths 92 | pub fn new(resource_folder_paths: &str) -> ResourceReader { 93 | let local_resource_paths: Vec = resource_folder_paths 94 | .split(',') 95 | .map(|entry| entry.to_string()) 96 | .map(|entry| entry.trim().to_string()) 97 | .collect(); 98 | 99 | local_resource_paths.iter().for_each(|entry| verify(entry)); 100 | 101 | ResourceReader { 102 | local_resource_paths, 103 | } 104 | } 105 | 106 | /// Ensure that all folder exists 107 | fn verify(path: &str) { 108 | let folder_path = Path::new(path); 109 | let exists = folder_path.exists(); 110 | if !exists { 111 | panic!("{} does not exists", path); 112 | } 113 | 114 | let is_dir = folder_path.is_dir(); 115 | if !is_dir { 116 | panic!("{} is not a folder", path); 117 | } 118 | } 119 | -------------------------------------------------------------------------------- /web-app/style.css: -------------------------------------------------------------------------------- 1 | @font-face { 2 | font-family: 'Inter'; 3 | src: url('font.ttf') format('truetype'); 4 | } 5 | 6 | * { 7 | overflow: hidden; 8 | margin: 0; 9 | box-sizing: border-box; 10 | } 11 | 12 | 13 | /* ======================== */ 14 | /* STYLED IDs */ 15 | /* ======================== */ 16 | 17 | #slideshow-layout { 18 | background-color: #000000; 19 | display: flex; 20 | justify-content: center; 21 | align-items: center; 22 | height: 100vh; 23 | } 24 | 25 | 26 | #background-image { 27 | filter: blur(20px); 28 | -webkit-filter: blur(20px); 29 | height: 125vh; 30 | width: 100%; 31 | position: absolute; 32 | background-position: center; 33 | background-repeat: no-repeat; 34 | background-size: cover; 35 | top: 0; 36 | left: 0; 37 | z-index: 1; 38 | } 39 | 40 | #slideshow-image { 41 | width: 100%; 42 | height: 100%; 43 | object-fit: contain; /* This makes sure the image maintains its aspect ratio */ 44 | z-index: 2; 45 | } 46 | 47 | #hide-current-image { 48 | position: absolute; 49 | top: 1vh; 50 | right: 1vh; 51 | background: transparent; 52 | height: 25px; 53 | cursor: pointer; 54 | visibility: hidden; 55 | 56 | filter: drop-shadow(0px 0px 1px #FFFFFF); 57 | z-index: 2; 58 | } 59 | 60 | /* ======================== */ 61 | /* STYLE CLASSES */ 62 | /* ======================== */ 63 | 64 | .label { 65 | position: absolute; 66 | bottom: 3vh; 67 | left: 3vh; 68 | color: #FFFFFF; 69 | font-family: "Inter", serif; 70 | background: transparent; 71 | font-size: 30px; 72 | text-align: left; 73 | -webkit-text-stroke-width: 1px; 74 | -webkit-text-stroke-color: black; 75 | z-index: 2; 76 | } 77 | 78 | .weather { 79 | position: absolute; 80 | bottom: 3vh; 81 | right: 3vh; 82 | text-align: right; 83 | background: transparent; 84 | z-index: 2; 85 | } 86 | 87 | .weather-label { 88 | text-align: right; 89 | color: #FFFFFF; 90 | font-family: "Inter", serif; 91 | background: transparent; 92 | font-size: 30px; 93 | -webkit-text-stroke-width: 1px; 94 | -webkit-text-stroke-color: black; 95 | flex-grow: 0; 96 | z-index: 2; 97 | } 98 | 99 | .weather > * { 100 | background: transparent; 101 | } 102 | 103 | /* ======================== */ 104 | /* FADE IN ANIMATION */ 105 | /* ======================== */ 106 | 107 | .fade-in { 108 | animation: fadeIn ease 500ms forwards; 109 | -webkit-animation: fadeIn ease 500ms forwards; 110 | -moz-animation: fadeIn ease 500ms forwards; 111 | -o-animation: fadeIn ease 500ms forwards; 112 | -ms-animation: fadeIn ease 500ms forwards; 113 | } 114 | 115 | @keyframes fadeIn { 116 | 0% { 117 | opacity: 0; 118 | } 119 | 100% { 120 | opacity: 1; 121 | } 122 | } 123 | 124 | @-moz-keyframes fadeIn { 125 | 0% { 126 | opacity: 0; 127 | } 128 | 100% { 129 | opacity: 1; 130 | } 131 | } 132 | 133 | @-webkit-keyframes fadeIn { 134 | 0% { 135 | opacity: 0; 136 | } 137 | 100% { 138 | opacity: 1; 139 | } 140 | } 141 | 142 | @-o-keyframes fadeIn { 143 | 0% { 144 | opacity: 0; 145 | } 146 | 100% { 147 | opacity: 1; 148 | } 149 | } 150 | 151 | @-ms-keyframes fadeIn { 152 | 0% { 153 | opacity: 0; 154 | } 155 | 100% { 156 | opacity: 1; 157 | } 158 | } 159 | 160 | /* ======================== */ 161 | /* FADE OUT ANIMATION */ 162 | /* ======================== */ 163 | 164 | .fade-out { 165 | animation: fadeOut ease 500ms forwards; 166 | -webkit-animation: fadeOut ease 500ms forwards; 167 | -moz-animation: fadeOut ease 500ms forwards; 168 | -o-animation: fadeOut ease 500ms forwards; 169 | -ms-animation: fadeOut ease 500ms forwards; 170 | } 171 | 172 | @keyframes fadeOut { 173 | 0% { 174 | opacity: 1; 175 | } 176 | 100% { 177 | opacity: 0; 178 | } 179 | } 180 | 181 | @-moz-keyframes fadeOut { 182 | 0% { 183 | opacity: 1; 184 | } 185 | 100% { 186 | opacity: 0; 187 | } 188 | } 189 | 190 | @-webkit-keyframes fadeOut { 191 | 0% { 192 | opacity: 1; 193 | } 194 | 100% { 195 | opacity: 0; 196 | } 197 | } 198 | 199 | @-o-keyframes fadeOut { 200 | 0% { 201 | opacity: 1; 202 | } 203 | 100% { 204 | opacity: 0; 205 | } 206 | } 207 | 208 | @-ms-keyframes fadeOut { 209 | 0% { 210 | opacity: 1; 211 | } 212 | 100% { 213 | opacity: 0; 214 | } 215 | } 216 | 217 | #control-panel { 218 | position: absolute; 219 | bottom: 50px; 220 | top: 50px; 221 | width: 100%; 222 | display: flex; 223 | justify-content: space-between; 224 | padding: 0 3vh; 225 | z-index: 3; 226 | } 227 | 228 | #control-panel div { 229 | flex: 1; 230 | height: 100%; 231 | opacity: 0; /* Make the zones invisible */ 232 | cursor: pointer; 233 | } -------------------------------------------------------------------------------- /src/exif_reader.rs: -------------------------------------------------------------------------------- 1 | use chrono::{NaiveDate, NaiveDateTime}; 2 | use exif::{Exif, In, Tag}; 3 | 4 | use crate::geo_location; 5 | use crate::geo_location::GeoLocation; 6 | use crate::image_processor::ImageOrientation; 7 | 8 | /// Reads the exif date from a given exif data entry 9 | /// Primarily the exif date is used to determine the date the image was taken 10 | /// If the exif date is not available, the gps date is used 11 | pub fn get_exif_date(exif_data: &Exif) -> Option { 12 | let mut exif_date: Option = detect_exif_date( 13 | vec![Tag::DateTimeOriginal, Tag::DateTimeDigitized, Tag::DateTime], 14 | exif_data, 15 | ); 16 | 17 | if exif_date.is_none() { 18 | exif_date = get_gps_date(exif_data); 19 | }; 20 | 21 | exif_date 22 | } 23 | 24 | /// Reads the gps date from a given exif data entry 25 | /// The gps date is used to determine the date the image was taken 26 | fn get_gps_date(exif_data: &Exif) -> Option { 27 | exif_data 28 | .get_field(Tag::GPSDateStamp, In::PRIMARY) 29 | .and_then(|gps_date| { 30 | NaiveDate::parse_from_str(gps_date.display_value().to_string().as_str(), "%F").ok() 31 | }) 32 | .and_then(|gps_date| gps_date.and_hms_opt(0, 0, 0)) 33 | } 34 | 35 | /// Finds the exif date in for the given tags 36 | /// Returns the first date found or None if no date was found 37 | fn detect_exif_date(tags_to_evaluate: Vec, exif_data: &Exif) -> Option { 38 | let exit_dates: Vec = tags_to_evaluate 39 | .iter() 40 | .filter_map(|tag| exif_data.get_field(*tag, In::PRIMARY)) 41 | .filter_map(|exif_date| parse_exif_date(exif_date.display_value().to_string())) 42 | .collect(); 43 | 44 | if !exit_dates.is_empty() { 45 | Some(*exit_dates.first().unwrap()) 46 | } else { 47 | None 48 | } 49 | } 50 | 51 | /// Parses the exif date from a given string 52 | fn parse_exif_date(date: String) -> Option { 53 | NaiveDateTime::parse_from_str(date.as_str(), "%F %T").ok() 54 | } 55 | 56 | /// Detects the location from the exif data 57 | /// If the location is not found, the location is set to None 58 | pub fn detect_location(exif_data: &Exif) -> Option { 59 | let maybe_latitude = exif_data.get_field(Tag::GPSLatitude, In::PRIMARY); 60 | let maybe_latitude_ref = exif_data.get_field(Tag::GPSLatitudeRef, In::PRIMARY); 61 | let maybe_longitude = exif_data.get_field(Tag::GPSLongitude, In::PRIMARY); 62 | let maybe_longitude_ref = exif_data.get_field(Tag::GPSLongitudeRef, In::PRIMARY); 63 | 64 | if let (Some(latitude), Some(longitude), Some(latitude_ref), Some(longitude_ref)) = ( 65 | maybe_latitude, 66 | maybe_longitude, 67 | maybe_latitude_ref, 68 | maybe_longitude_ref, 69 | ) { 70 | return geo_location::from_degrees_minutes_seconds( 71 | &latitude.display_value().to_string(), 72 | &longitude.display_value().to_string(), 73 | &latitude_ref.display_value().to_string(), 74 | &longitude_ref.display_value().to_string(), 75 | ); 76 | } 77 | 78 | None 79 | } 80 | 81 | /// Detects the orientation from the exif data 82 | /// If the orientation is not found, the orientation is set to None 83 | /// Possible rotations are: 0, 90, 180, 270 84 | pub fn detect_orientation(exif_data: &Exif) -> Option { 85 | let maybe_orientation = exif_data 86 | .get_field(Tag::Orientation, In::PRIMARY) 87 | .and_then(|field| field.value.get_uint(0)); 88 | 89 | match maybe_orientation { 90 | Some(1) => Some(ImageOrientation { 91 | rotation: 0, 92 | mirror_vertically: false, 93 | }), 94 | Some(2) => Some(ImageOrientation { 95 | rotation: 0, 96 | mirror_vertically: true, 97 | }), 98 | Some(3) => Some(ImageOrientation { 99 | rotation: 180, 100 | mirror_vertically: false, 101 | }), 102 | Some(4) => Some(ImageOrientation { 103 | rotation: 180, 104 | mirror_vertically: true, 105 | }), 106 | Some(5) => Some(ImageOrientation { 107 | rotation: 90, 108 | mirror_vertically: true, 109 | }), 110 | Some(6) => Some(ImageOrientation { 111 | rotation: 90, 112 | mirror_vertically: false, 113 | }), 114 | Some(7) => Some(ImageOrientation { 115 | rotation: 270, 116 | mirror_vertically: true, 117 | }), 118 | Some(8) => Some(ImageOrientation { 119 | rotation: 270, 120 | mirror_vertically: false, 121 | }), 122 | _ => None, 123 | } 124 | } 125 | 126 | /// Detects the date from the file name 127 | /// If the date is not found, the date is set to None 128 | /// The chars '/', ' ', '.', '_' are replaced with '_' 129 | pub fn detect_date_by_name(resource_path: &str) -> Option { 130 | let parsed: Vec = resource_path 131 | .replace(['/', ' ', '.'], "_") 132 | .split('_') 133 | .filter_map(parse_from_str) 134 | .collect(); 135 | 136 | if parsed.is_empty() { 137 | None 138 | } else { 139 | Some(parsed.first().unwrap().and_hms_opt(0, 0, 0).unwrap()) 140 | } 141 | } 142 | 143 | /// Parses a string into a date 144 | /// Returns None if the string could not be parsed 145 | fn parse_from_str(shard: &str) -> Option { 146 | // https://docs.rs/chrono/latest/chrono/format/strftime/index.html 147 | let parse_results: Vec = [ 148 | "%F", // 2001-07-08 149 | "%Y%m%d", // 20010708 150 | "signal-%Y-%m-%d-%Z", 151 | ] 152 | .iter() 153 | .filter_map(|format| NaiveDate::parse_from_str(shard, format).ok()) 154 | .collect(); 155 | 156 | if parse_results.is_empty() { 157 | None 158 | } else { 159 | Some(*parse_results.first().unwrap()) 160 | } 161 | } 162 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | extern crate core; 2 | 3 | use std::env; 4 | 5 | use actix_web::{middleware, web, App, HttpRequest, HttpResponse, HttpServer}; 6 | use env_logger::Builder; 7 | use log::{info, warn, LevelFilter}; 8 | 9 | mod config; 10 | mod config_endpoint; 11 | mod exif_reader; 12 | mod filesystem_client; 13 | mod geo_location; 14 | mod image_processor; 15 | mod resource_endpoint; 16 | mod resource_processor; 17 | mod resource_reader; 18 | mod resource_store; 19 | mod scheduler; 20 | mod utils; 21 | mod weather_endpoint; 22 | mod weather_processor; 23 | mod web_app_endpoint; 24 | 25 | #[cfg(test)] 26 | mod integration_test_config_api; 27 | #[cfg(test)] 28 | mod integration_test_resources_api; 29 | #[cfg(test)] 30 | mod integration_test_weather_api; 31 | #[cfg(test)] 32 | mod resource_processor_test; 33 | #[cfg(test)] 34 | mod resource_reader_test; 35 | 36 | // Avoid musl's default allocator due to lackluster performance 37 | // https://nickb.dev/blog/default-musl-allocator-considered-harmful-to-performance 38 | #[cfg(target_env = "musl")] 39 | #[global_allocator] 40 | static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc; 41 | 42 | #[derive(Clone)] 43 | pub struct ResourceReader { 44 | /// Holds all specified local paths 45 | pub local_resource_paths: Vec, 46 | } 47 | 48 | #[actix_web::main] 49 | async fn main() -> std::io::Result<()> { 50 | // Configure logger 51 | let mut builder = Builder::from_default_env(); 52 | builder 53 | .filter(Some("actix_web::middleware::logger"), LevelFilter::Error) 54 | .init(); 55 | 56 | // Print cargo version to console 57 | info!( 58 | "👋 Welcome to this-week-in-past version {}", 59 | env!("CARGO_PKG_VERSION") 60 | ); 61 | 62 | // Print system date and time 63 | info!( 64 | "📅 System time: {}", 65 | chrono::Local::now().format("%Y-%m-%d %H:%M:%S") 66 | ); 67 | 68 | // Create a new resource reader based on the provided resources path 69 | let resource_reader = resource_reader::new( 70 | env::var("RESOURCE_PATHS") 71 | .expect("RESOURCE_PATHS is missing") 72 | .as_str(), 73 | ); 74 | 75 | // Initialize databases 76 | if env::var("CACHE_DIR").is_ok() { 77 | warn!("CACHE_DIR environment variable is deprecated, use DATA_FOLDER instead!") 78 | } 79 | let data_folder = env::var("DATA_FOLDER") 80 | .or_else(|_| env::var("CACHE_DIR")) 81 | .unwrap_or_else(|_| "./data".to_string()); 82 | let resource_store = resource_store::initialize(&data_folder); 83 | 84 | info!("📅 Database time: {}", resource_store.get_database_time()); 85 | 86 | // Start scheduler to run at midnight 87 | let scheduler_handle = 88 | scheduler::schedule_indexer(resource_reader.clone(), resource_store.clone()); 89 | 90 | let bind_address = format!( 91 | "0.0.0.0:{}", 92 | env::var("PORT").unwrap_or_else(|_| "8080".to_string()) 93 | ); 94 | // Run the actual web server and hold the main thread here 95 | info!("🚀 Launching webserver on http://{} 🚀", bind_address); 96 | let http_server_result = HttpServer::new(move || { 97 | App::new() 98 | .app_data(web::Data::new(resource_store.clone())) 99 | .app_data(web::Data::new(resource_reader.clone())) 100 | .wrap(middleware::Logger::default()) // enable logger 101 | .service(web_app_endpoint::index) 102 | .service(web_app_endpoint::style_css) 103 | .service(web_app_endpoint::script_js) 104 | .service(web_app_endpoint::hide_png) 105 | .service(web_app_endpoint::icon_png) 106 | .service(web_app_endpoint::font) 107 | .service( 108 | web::scope("/api/resources") 109 | .service(resource_endpoint::get_all_resources) 110 | .service(resource_endpoint::get_this_week_resources) 111 | .service(resource_endpoint::get_this_week_resources_count) 112 | .service(resource_endpoint::get_this_week_resources_metadata) 113 | .service(resource_endpoint::get_this_week_resource_image) 114 | .service(resource_endpoint::random_resources) 115 | .service(resource_endpoint::get_resource_by_id_and_resolution) 116 | .service(resource_endpoint::get_resource_metadata_by_id) 117 | .service(resource_endpoint::get_resource_metadata_description_by_id) 118 | .service(resource_endpoint::get_all_hidden_resources) 119 | .service(resource_endpoint::set_resource_hidden) 120 | .service(resource_endpoint::delete_resource_hidden), 121 | ) 122 | .service( 123 | web::scope("/api/weather") 124 | .service(weather_endpoint::get_is_weather_enabled) 125 | .service(weather_endpoint::get_current_weather) 126 | .service(weather_endpoint::get_is_home_assistant_enabled) 127 | .service(weather_endpoint::get_home_assistant_entity_data) 128 | .service(weather_endpoint::get_weather_unit), 129 | ) 130 | .service( 131 | web::scope("/api/config") 132 | .service(config_endpoint::get_slideshow_interval) 133 | .service(config_endpoint::get_refresh_interval) 134 | .service(config_endpoint::get_hide_button_enabled) 135 | .service(config_endpoint::get_random_slideshow_enabled) 136 | .service(config_endpoint::get_preload_images_enabled), 137 | ) 138 | .service(web::resource("/api/version").route(web::get().to( 139 | |_: HttpRequest, _: web::Payload| async move { 140 | Ok::<_, actix_web::Error>( 141 | HttpResponse::Ok() 142 | .content_type("plain/text") 143 | .body(env!("CARGO_PKG_VERSION")), 144 | ) 145 | }, 146 | ))) 147 | .service(web::resource("/api/health").route(web::get().to(HttpResponse::Ok))) 148 | }) 149 | .bind(bind_address)? 150 | .run() 151 | .await; 152 | 153 | // If the http server is terminated... 154 | 155 | // Cleanup database 156 | info!("Cleanup database 🧹"); 157 | resource_store::initialize(&data_folder).vacuum(); 158 | 159 | // Stop the scheduler 160 | info!("Stopping scheduler 🕐️"); 161 | scheduler_handle.stop(); 162 | 163 | // Done, let's get out here 164 | info!("Stopping Application 😵️"); 165 | http_server_result 166 | } 167 | -------------------------------------------------------------------------------- /src/geo_location.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::env; 3 | use std::fmt::{Display, Formatter}; 4 | 5 | use lazy_static::lazy_static; 6 | use regex::{Captures, Regex}; 7 | use serde::{Deserialize, Serialize}; 8 | use serde_json::Value; 9 | 10 | /// Struct representing a geo location 11 | #[derive(Serialize, Deserialize, Debug, Copy, Clone, PartialEq)] 12 | pub struct GeoLocation { 13 | pub latitude: f32, 14 | pub longitude: f32, 15 | } 16 | 17 | /// Display trait implementation for GeoLocation 18 | impl Display for GeoLocation { 19 | fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { 20 | write!(f, "[lat={} lon={}]", self.latitude, self.longitude,) 21 | } 22 | } 23 | 24 | /// Converts Degrees Minutes Seconds To Decimal Degrees 25 | /// See 26 | fn dms_to_dd(dms_string: &str, dms_ref: &str) -> Option { 27 | lazy_static! { 28 | static ref DMS_PARSE_PATTERN_1: Regex = Regex::new( 29 | // e.g.: 7 deg 33 min 55.5155 sec or 7 deg 33 min 55 sec 30 | r"(?P\d+) deg (?P\d+) min (?P\d+.?\d*) sec" 31 | ).unwrap(); 32 | static ref DMS_PARSE_PATTERN_2: Regex = Regex::new( 33 | // e.g.: 50/1, 25/1, 2519/100 34 | r"(?P\d+)/(?P\d+),\s*(?P\d+)/(?P\d+),\s*(?P\d+)/(?P\d+)" 35 | ).unwrap(); 36 | } 37 | 38 | let dms_pattern_1_match: Option = DMS_PARSE_PATTERN_1.captures(dms_string); 39 | let dms_pattern_2_match: Option = DMS_PARSE_PATTERN_2.captures(dms_string); 40 | 41 | // Depending on the dms ref the value has to be multiplied by -1 42 | let dms_ref_multiplier = match dms_ref { 43 | "S" | "W" => -1.0, 44 | _ => 1.0, 45 | }; 46 | 47 | if let Some(pattern_match) = dms_pattern_1_match { 48 | parse_pattern_1(pattern_match).map(|value| value * dms_ref_multiplier) 49 | } else if let Some(pattern_match) = dms_pattern_2_match { 50 | parse_pattern_2(pattern_match).map(|value| value * dms_ref_multiplier) 51 | } else { 52 | None 53 | } 54 | } 55 | 56 | /// Parses Degrees minutes seconds for the following example pattern: "7 deg 33 min 55.5155 sec" 57 | fn parse_pattern_1(caps: Captures) -> Option { 58 | let maybe_deg: Option = caps 59 | .name("deg") 60 | .map(|cap| cap.as_str().parse::().unwrap()); 61 | let maybe_min: Option = caps 62 | .name("min") 63 | .map(|cap| cap.as_str().parse::().unwrap()); 64 | let maybe_sec: Option = caps 65 | .name("sec") 66 | .map(|cap| cap.as_str().parse::().unwrap()); 67 | 68 | if let (Some(deg), Some(min), Some(sec)) = (maybe_deg, maybe_min, maybe_sec) { 69 | Some(deg + (min / 60.0) + (sec / 3600.0)) 70 | } else { 71 | None 72 | } 73 | } 74 | 75 | /// Parses Degrees minutes seconds for the following example pattern: "50/1, 25/1, 2519/100" 76 | fn parse_pattern_2(caps: Captures) -> Option { 77 | let maybe_deg: Option = caps 78 | .name("deg") 79 | .map(|cap| cap.as_str().parse::().unwrap()); 80 | let maybe_deg_fraction: Option = caps 81 | .name("deg_fraction") 82 | .map(|cap| cap.as_str().parse::().unwrap()); 83 | let maybe_min: Option = caps 84 | .name("min") 85 | .map(|cap| cap.as_str().parse::().unwrap()); 86 | let maybe_min_fraction: Option = caps 87 | .name("min_fraction") 88 | .map(|cap| cap.as_str().parse::().unwrap()); 89 | let maybe_sec: Option = caps 90 | .name("sec") 91 | .map(|cap| cap.as_str().parse::().unwrap()); 92 | let maybe_sec_fraction: Option = caps 93 | .name("sec_fraction") 94 | .map(|cap| cap.as_str().parse::().unwrap()); 95 | 96 | if let (Some(deg), Some(deg_frac), Some(min), Some(min_frac), Some(sec), Some(sec_frac)) = ( 97 | maybe_deg, 98 | maybe_deg_fraction, 99 | maybe_min, 100 | maybe_min_fraction, 101 | maybe_sec, 102 | maybe_sec_fraction, 103 | ) { 104 | Some((deg / deg_frac) + ((min / min_frac) / 60.0) + ((sec / sec_frac) / 3600.0)) 105 | } else { 106 | None 107 | } 108 | } 109 | 110 | /// Converts latitude and longitude to a GeoLocation 111 | /// If the latitude or longitude is not valid, None is returned 112 | /// This is done by converting the latitude and longitude to degrees minutes seconds 113 | pub fn from_degrees_minutes_seconds( 114 | latitude: &str, 115 | longitude: &str, 116 | latitude_ref: &str, 117 | longitude_ref: &str, 118 | ) -> Option { 119 | let maybe_dd_lat = dms_to_dd(latitude, latitude_ref); 120 | let maybe_dd_lon = dms_to_dd(longitude, longitude_ref); 121 | 122 | if let (Some(latitude), Some(longitude)) = (maybe_dd_lat, maybe_dd_lon) { 123 | Some(GeoLocation { 124 | latitude, 125 | longitude, 126 | }) 127 | } else { 128 | None 129 | } 130 | } 131 | 132 | /// Returns the city name for the specified geo location 133 | /// The city name is resolved from the geo location using the bigdatacloud api 134 | pub async fn resolve_city_name(geo_location: GeoLocation) -> Option { 135 | if env::var("BIGDATA_CLOUD_API_KEY").is_err() { 136 | return None; 137 | } 138 | 139 | let request_url = format!( 140 | "https://api.bigdatacloud.net/data/reverse-geocode?latitude={}&longitude={}&localityLanguage=de&key={}", 141 | geo_location.latitude, 142 | geo_location.longitude, 143 | env::var("BIGDATA_CLOUD_API_KEY").unwrap(), 144 | ); 145 | 146 | let response = ureq::get(request_url.as_str()).call(); 147 | 148 | if response.is_err() { 149 | return None; 150 | } 151 | 152 | let response_json = response 153 | .unwrap() 154 | .body_mut() 155 | .read_to_string() 156 | .ok() 157 | .and_then(|json_string| serde_json::from_str::>(&json_string).ok()); 158 | 159 | let mut city_name = response_json 160 | .as_ref() 161 | .and_then(|json_data| get_string_value("city", json_data)) 162 | .filter(|city_name| !city_name.trim().is_empty()); 163 | 164 | if city_name.is_none() { 165 | city_name = response_json 166 | .as_ref() 167 | .and_then(|json_data| get_string_value("locality", json_data)) 168 | .filter(|city_name| !city_name.trim().is_empty()); 169 | } 170 | 171 | city_name 172 | } 173 | 174 | /// Returns the string value for the specified key of an hash map 175 | fn get_string_value(field_name: &str, json_data: &HashMap) -> Option { 176 | json_data 177 | .get(field_name) 178 | .and_then(|field_value| field_value.as_str()) 179 | .map(|field_string_value| field_string_value.to_string()) 180 | } 181 | -------------------------------------------------------------------------------- /src/filesystem_client.rs: -------------------------------------------------------------------------------- 1 | use core::option::Option::None; 2 | use std::fs; 3 | use std::path::{Path, PathBuf}; 4 | 5 | use image::ImageFormat; 6 | use lazy_static::lazy_static; 7 | use log::{debug, error, info}; 8 | use regex::Regex; 9 | 10 | use crate::resource_reader::ImageResource; 11 | use crate::{resource_reader, utils}; 12 | 13 | /// Reads all files of a folder and returns all found resources 14 | /// The folder is recursively searched 15 | pub fn read_files_recursive(path: &Path) -> Vec { 16 | let folder_path = fs::File::open(path); 17 | 18 | if folder_path.is_err() { 19 | error!( 20 | "Could not open folder: {:?}. Error:\n{:?}", 21 | path, 22 | folder_path.err() 23 | ); 24 | return vec![]; 25 | } 26 | let folder_path = folder_path.unwrap(); 27 | let metadata = folder_path.metadata().unwrap_or_else(|error| { 28 | panic!( 29 | "Failed to read metadata for: {} Error:\n{}", 30 | path.to_str().unwrap(), 31 | error 32 | ) 33 | }); 34 | 35 | if metadata.is_file() { 36 | return vec![]; 37 | } 38 | 39 | // Checks if the folder should be skipped, because it is ignored or contains a .ignore file 40 | if should_skip_folder(path) { 41 | return vec![]; 42 | } 43 | 44 | let paths = fs::read_dir(path).unwrap_or_else(|error| { 45 | panic!( 46 | "Failed to read directory: {} Error:\n{}", 47 | path.to_str().unwrap(), 48 | error 49 | ) 50 | }); 51 | 52 | paths 53 | .flatten() 54 | .flat_map(|dir_entry| { 55 | let metadata = dir_entry.metadata().unwrap_or_else(|error| { 56 | panic!( 57 | "Failed to read metadata for: {} Error:\n{}", 58 | dir_entry.path().to_str().unwrap(), 59 | error 60 | ) 61 | }); 62 | 63 | if metadata.is_file() { 64 | read_resource(&dir_entry.path()) 65 | } else { 66 | read_files_recursive(&dir_entry.path()) 67 | } 68 | }) 69 | .collect() 70 | } 71 | 72 | /// Checks if the folder should be skipped, because it is ignored or contains certain .ignore file 73 | /// Returns true if the folder should be skipped 74 | /// Returns false if the folder should be processed 75 | fn should_skip_folder(path: &Path) -> bool { 76 | lazy_static! { 77 | static ref IGNORE_FOLDER_REGEX: Option = std::env::var("IGNORE_FOLDER_REGEX") 78 | .ok() 79 | .map(|ignore_folders| Regex::new(&ignore_folders).unwrap()); 80 | static ref IGNORE_FOLDER_MARKER_FILES: Vec = 81 | std::env::var("IGNORE_FOLDER_MARKER_FILES") 82 | .unwrap_or(".ignore".to_string()) 83 | .as_str() 84 | .trim() 85 | .split(',') 86 | .map(|s| s.trim().to_string()) 87 | .collect(); 88 | } 89 | 90 | let folder_name = path 91 | .file_name() 92 | .unwrap_or_else(|| panic!("Failed to get folder name for path: {}", path.display())) 93 | .to_str() 94 | .unwrap_or_else(|| { 95 | panic!( 96 | "Failed to convert folder name to string for path: {}", 97 | path.display() 98 | ) 99 | }); 100 | 101 | if IGNORE_FOLDER_REGEX.is_some() && IGNORE_FOLDER_REGEX.as_ref().unwrap().is_match(folder_name) 102 | { 103 | info!( 104 | "⏭️ Skipping folder: {:?} because it is ignored by regular expression {:?}", 105 | path, 106 | std::env::var("IGNORE_FOLDER_REGEX").unwrap() 107 | ); 108 | return true; 109 | } 110 | 111 | let contains_ignore_file = fs::read_dir(path) 112 | .unwrap_or_else(|error| { 113 | panic!( 114 | "Failed to read directory: {} Error:\n{}", 115 | path.display(), 116 | error 117 | ) 118 | }) 119 | .flatten() 120 | .any(|entry| { 121 | let metadata = entry.metadata().unwrap_or_else(|error| { 122 | panic!( 123 | "Failed to read metadata for: {} Error:\n{}", 124 | entry.path().display(), 125 | error 126 | ) 127 | }); 128 | metadata.is_file() 129 | && IGNORE_FOLDER_MARKER_FILES 130 | .contains(&entry.file_name().to_str().unwrap().to_string()) 131 | }); 132 | if contains_ignore_file { 133 | info!( 134 | "⏭️ Skipping folder: {:?} because it contains any of these files {:?}", 135 | path, 136 | std::env::var("IGNORE_FOLDER_MARKER_FILES") 137 | ); 138 | return true; 139 | } 140 | 141 | false 142 | } 143 | 144 | /// Reads a single file and returns the found resource 145 | /// Checks if the file is a supported resource currently all image types 146 | fn read_resource(file_path: &PathBuf) -> Vec { 147 | let absolute_file_path = file_path.to_str().unwrap(); 148 | let file_name = file_path.as_path().file_name().unwrap().to_str().unwrap(); 149 | 150 | let file = fs::File::open(file_path) 151 | .unwrap_or_else(|error| panic!("Failed to read file {}: {}", absolute_file_path, error)); 152 | 153 | let metadata = file.metadata().unwrap_or_else(|error| { 154 | panic!("Failed to read metadata {}: {}", absolute_file_path, error) 155 | }); 156 | 157 | // Cancel if folder 158 | if !metadata.is_file() { 159 | return vec![]; 160 | } 161 | 162 | let mime_type: &str = mime_guess::from_path(file_name).first_raw().unwrap_or(""); 163 | let image_format = ImageFormat::from_mime_type(mime_type); 164 | 165 | // Cancel and print error if no supported image format 166 | if image_format.is_none() { 167 | // If the mime type is image, but the format is not supported, log it 168 | if mime_type.starts_with("image") { 169 | debug!( 170 | "{absolute_file_path} | has unsupported image format: {}", 171 | mime_type 172 | ); 173 | } 174 | 175 | return vec![]; 176 | } 177 | 178 | vec![ImageResource { 179 | id: utils::md5(file_name), 180 | path: absolute_file_path.to_string(), 181 | content_type: mime_type.to_string(), 182 | name: file_name.to_string(), 183 | content_length: metadata.len(), 184 | last_modified: utils::to_date_time(metadata.modified().unwrap()), 185 | taken: None, 186 | location: None, 187 | orientation: None, 188 | }] 189 | } 190 | 191 | /// Reads the exif data from the file and augments the image resource with this information 192 | pub fn fill_exif_data(resource: &ImageResource) -> ImageResource { 193 | let file_path = resource.path.as_str(); 194 | let file = fs::File::open(file_path).unwrap_or_else(|error| { 195 | panic!( 196 | "Failed to read exif data from file {}: {}", 197 | file_path, error 198 | ); 199 | }); 200 | 201 | let mut bufreader = std::io::BufReader::new(&file); 202 | let exif_reader = exif::Reader::new(); 203 | let maybe_exif_data = exif_reader.read_from_container(&mut bufreader).ok(); 204 | 205 | resource_reader::fill_exif_data(resource, maybe_exif_data) 206 | } 207 | -------------------------------------------------------------------------------- /src/resource_reader_test.rs: -------------------------------------------------------------------------------- 1 | use std::io::Read; 2 | use std::path::{Path, PathBuf}; 3 | use std::{env, fs}; 4 | 5 | use chrono::NaiveDateTime; 6 | use rand::Rng; 7 | 8 | use crate::geo_location::GeoLocation; 9 | use crate::image_processor::ImageOrientation; 10 | use crate::{filesystem_client, utils}; 11 | 12 | const TEST_JPEG_EXIF_URL: &str = 13 | "https://raw.githubusercontent.com/ianare/exif-samples/master/jpg/gps/DSCN0010.jpg"; 14 | const TEST_JPEG_URL: &str = "https://www.w3.org/People/mimasa/test/imgformat/img/w3c_home.jpg"; 15 | const TEST_PNG_URL: &str = "https://www.w3.org/People/mimasa/test/imgformat/img/w3c_home.png"; 16 | const TEST_GIF_URL: &str = "https://www.w3.org/People/mimasa/test/imgformat/img/w3c_home.gif"; 17 | const TEST_FOLDER_NAME: &str = "resource_reader_test"; 18 | 19 | #[test] 20 | fn read_dir_recursive() { 21 | // GIVEN is a folder structure with two assets and another file type 22 | let base_test_dir = create_temp_folder(); 23 | create_test_image(&base_test_dir, "", "test_image_1.jpg", TEST_JPEG_URL); 24 | create_test_image(&base_test_dir, "sub1", "test_image_2.jpg", TEST_JPEG_URL); 25 | create_test_file(&base_test_dir, "sub2", "test_file.txt"); 26 | 27 | // WHEN reading resources from a folder 28 | let resources_read = filesystem_client::read_files_recursive(&base_test_dir); 29 | 30 | // THEN two resources should be found 31 | assert_eq!(resources_read.len(), 2); 32 | 33 | // cleanup 34 | cleanup(&base_test_dir); 35 | } 36 | 37 | #[test] 38 | fn read_jpg_image_resource() { 39 | // GIVEN is a folder with one jpg image 40 | let base_test_dir = create_temp_folder(); 41 | let test_image_name = "test_image_1.jpg"; 42 | let test_image_1_path = create_test_image(&base_test_dir, "", test_image_name, TEST_JPEG_URL); 43 | 44 | // WHEN reading resources from a folder 45 | let resources_read = filesystem_client::read_files_recursive(&base_test_dir); 46 | 47 | // THEN the resource info should be correct 48 | assert_eq!(resources_read.len(), 1); 49 | assert_eq!(resources_read[0].id, utils::md5(test_image_name)); 50 | assert_eq!(resources_read[0].path, test_image_1_path); 51 | assert_eq!(resources_read[0].content_type, "image/jpeg"); 52 | assert_eq!(resources_read[0].name, test_image_name); 53 | 54 | // cleanup 55 | cleanup(&base_test_dir); 56 | } 57 | 58 | #[test] 59 | fn read_jpg_with_exif_image_resource() { 60 | // GIVEN is a folder with one jpg image with exif and gps metadata 61 | let base_test_dir = create_temp_folder(); 62 | let test_image_name = "test_image_1.jpg"; 63 | create_test_image(&base_test_dir, "", test_image_name, TEST_JPEG_EXIF_URL); 64 | 65 | // WHEN reading resources from a folder 66 | let resources_read = filesystem_client::fill_exif_data( 67 | &filesystem_client::read_files_recursive(&base_test_dir)[0], 68 | ); 69 | 70 | // THEN the resource metadata should be correct 71 | assert_eq!( 72 | resources_read.taken, 73 | Some(NaiveDateTime::parse_from_str("2008-10-22T16:28:39", "%Y-%m-%dT%H:%M:%S").unwrap()) 74 | ); 75 | assert_eq!( 76 | resources_read.orientation, 77 | Some(ImageOrientation { 78 | rotation: 0, 79 | mirror_vertically: false, 80 | }) 81 | ); 82 | assert_eq!( 83 | resources_read.location, 84 | Some(GeoLocation { 85 | latitude: 43.46745, 86 | longitude: 11.885126, 87 | }) 88 | ); 89 | 90 | // cleanup 91 | cleanup(&base_test_dir); 92 | } 93 | 94 | #[test] 95 | fn read_png_image_resource() { 96 | // GIVEN is a folder with one png image 97 | let base_test_dir = create_temp_folder(); 98 | let test_image_name = "test_image_1.png"; 99 | let test_image_1_path = create_test_image(&base_test_dir, "", test_image_name, TEST_PNG_URL); 100 | 101 | // WHEN reading resources from a folder 102 | let resources_read = filesystem_client::read_files_recursive(&base_test_dir); 103 | 104 | // THEN the resource info should be correct 105 | assert_eq!(resources_read.len(), 1); 106 | assert_eq!(resources_read[0].id, utils::md5(test_image_name)); 107 | assert_eq!(resources_read[0].path, test_image_1_path); 108 | assert_eq!(resources_read[0].content_type, "image/png"); 109 | assert_eq!(resources_read[0].name, test_image_name); 110 | 111 | // cleanup 112 | cleanup(&base_test_dir); 113 | } 114 | 115 | #[test] 116 | fn read_gif_image_resource() { 117 | // GIVEN is a folder with one gif image 118 | let base_test_dir = create_temp_folder(); 119 | let test_image_name = "test_image_1.gif"; 120 | let test_image_1_path = create_test_image(&base_test_dir, "", test_image_name, TEST_GIF_URL); 121 | 122 | // WHEN reading resources from a folder 123 | let resources_read = filesystem_client::read_files_recursive(&base_test_dir); 124 | 125 | // THEN the resource info should be correct 126 | assert_eq!(resources_read.len(), 1); 127 | assert_eq!(resources_read[0].id, utils::md5(test_image_name)); 128 | assert_eq!(resources_read[0].path, test_image_1_path); 129 | assert_eq!(resources_read[0].content_type, "image/gif"); 130 | assert_eq!(resources_read[0].name, test_image_name); 131 | 132 | // cleanup 133 | cleanup(&base_test_dir); 134 | } 135 | 136 | #[test] 137 | fn read_no_images_dir() { 138 | // GIVEN is a folder structure with no assets 139 | let base_test_dir = create_temp_folder(); 140 | create_test_file(&base_test_dir, "", "test_file.txt"); 141 | 142 | // WHEN reading resources from a folder 143 | let resources_read = filesystem_client::read_files_recursive(&base_test_dir); 144 | 145 | // THEN two resources should be found 146 | assert_eq!(resources_read.len(), 0); 147 | 148 | // cleanup 149 | cleanup(&base_test_dir); 150 | } 151 | 152 | #[test] 153 | fn read_empty_dir() { 154 | // GIVEN is an empty folder 155 | let base_test_dir = create_temp_folder(); 156 | 157 | // WHEN reading resources from a folder 158 | let resources_read = filesystem_client::read_files_recursive(&base_test_dir); 159 | 160 | // THEN two resources should be found 161 | assert_eq!(resources_read.len(), 0); 162 | 163 | // cleanup 164 | cleanup(&base_test_dir); 165 | } 166 | 167 | #[test] 168 | fn read_non_existent_folder() { 169 | // GIVEN is a folder path that does not exist 170 | let base_test_dir = PathBuf::from("/some/non/existent/path"); 171 | 172 | // WHEN reading resources from a folder 173 | let resources_read = filesystem_client::read_files_recursive(&base_test_dir); 174 | 175 | // THEN two resources should be found 176 | assert_eq!(resources_read.len(), 0); 177 | 178 | // cleanup 179 | cleanup(&base_test_dir); 180 | } 181 | 182 | /// Creates a test image withing a folder 183 | fn create_test_image(base_dir: &Path, sub_dir: &str, file_name: &str, image_url: &str) -> String { 184 | let target_dir = base_dir.join(sub_dir); 185 | 186 | if !target_dir.exists() { 187 | fs::create_dir_all(&target_dir).unwrap(); 188 | } 189 | 190 | let test_image_path = target_dir.join(file_name); 191 | 192 | let mut response = ureq::get(image_url).call().unwrap(); 193 | 194 | let content_length = response.headers().get("Content-Length").unwrap(); 195 | let len: usize = content_length.to_str().unwrap().parse().unwrap(); 196 | 197 | let mut data: Vec = Vec::with_capacity(len); 198 | response 199 | .body_mut() 200 | .as_reader() 201 | .read_to_end(&mut data) 202 | .unwrap(); 203 | 204 | fs::write(&test_image_path, &data).unwrap_or_else(|_| { 205 | panic!( 206 | "error while writing test image {}", 207 | test_image_path.to_str().unwrap() 208 | ) 209 | }); 210 | 211 | test_image_path.to_str().unwrap().to_string() 212 | } 213 | 214 | /// Removes the test folder after test run 215 | fn cleanup(test_dir: &PathBuf) { 216 | let _ = fs::remove_dir_all(test_dir); 217 | } 218 | 219 | /// Creates a test file withing a folder 220 | fn create_test_file(base_dir: &Path, sub_dir: &str, file_name: &str) -> String { 221 | let target_dir = base_dir.join(sub_dir); 222 | 223 | if !target_dir.exists() { 224 | fs::create_dir_all(&target_dir).unwrap(); 225 | } 226 | 227 | let test_file_path = target_dir.join(file_name); 228 | 229 | fs::write(&test_file_path, b"test").unwrap_or_else(|_| { 230 | panic!( 231 | "error while writing test image {}", 232 | test_file_path.to_str().unwrap() 233 | ) 234 | }); 235 | 236 | test_file_path.to_str().unwrap().to_string() 237 | } 238 | 239 | /// Creates a temp folder with the given name and returns its full path 240 | fn create_temp_folder() -> PathBuf { 241 | let random_string = rand::rng().random::().to_string(); 242 | let test_dir: PathBuf = env::temp_dir().join(TEST_FOLDER_NAME).join(random_string); 243 | 244 | if test_dir.exists() { 245 | fs::remove_dir_all(&test_dir).expect("Failed to remove test dir"); 246 | } 247 | 248 | fs::create_dir_all(&test_dir).unwrap(); 249 | 250 | test_dir 251 | } 252 | -------------------------------------------------------------------------------- /src/resource_endpoint.rs: -------------------------------------------------------------------------------- 1 | use actix_web::delete; 2 | use actix_web::get; 3 | use actix_web::post; 4 | use actix_web::web; 5 | use actix_web::HttpResponse; 6 | use log::{debug, log_enabled}; 7 | use std::fs; 8 | 9 | use crate::resource_reader::ImageResource; 10 | use crate::resource_store::ResourceStore; 11 | use crate::{image_processor, resource_processor}; 12 | 13 | const CONTENT_TYPE_APPLICATION_JSON: &str = "application/json"; 14 | const CONTENT_TYPE_TEXT_PLAIN: &str = "text/plain"; 15 | const CONTENT_TYPE_IMAGE_PNG: &str = "image/png"; 16 | 17 | #[get("")] 18 | pub async fn get_all_resources(resource_store: web::Data) -> HttpResponse { 19 | let keys: Vec = resource_store.get_ref().get_all_resource_ids(); 20 | 21 | HttpResponse::Ok() 22 | .content_type(CONTENT_TYPE_APPLICATION_JSON) 23 | .body(serde_json::to_string(&keys).unwrap()) 24 | } 25 | 26 | #[get("week")] 27 | pub async fn get_this_week_resources(resource_store: web::Data) -> HttpResponse { 28 | let resource_ids = resource_store 29 | .as_ref() 30 | .get_resources_this_week_visible_random(); 31 | 32 | HttpResponse::Ok() 33 | .content_type(CONTENT_TYPE_APPLICATION_JSON) 34 | .body(serde_json::to_string(&resource_ids).unwrap()) 35 | } 36 | 37 | #[get("week/count")] 38 | pub async fn get_this_week_resources_count( 39 | resource_store: web::Data, 40 | ) -> HttpResponse { 41 | let resource_count = resource_store 42 | .as_ref() 43 | .get_resources_this_week_visible_count(); 44 | 45 | HttpResponse::Ok() 46 | .content_type(CONTENT_TYPE_TEXT_PLAIN) 47 | .body(resource_count.to_string()) 48 | } 49 | 50 | #[get("week/metadata")] 51 | pub async fn get_this_week_resources_metadata( 52 | resource_store: web::Data, 53 | ) -> HttpResponse { 54 | let resource_ids = resource_store 55 | .as_ref() 56 | .get_resources_this_week_visible_random(); 57 | 58 | // WARNING: this is not very efficient, but it's ok for a debug endpoint 59 | let resources_metadata: Vec = resource_ids 60 | .iter() 61 | .flat_map(|id| resource_store.as_ref().get_resource(id)) 62 | .map(|resource_string| { 63 | serde_json::from_str::(resource_string.as_str()).unwrap() 64 | }) 65 | .collect(); 66 | 67 | HttpResponse::Ok() 68 | .content_type(CONTENT_TYPE_APPLICATION_JSON) 69 | .body(serde_json::to_string(&resources_metadata).unwrap()) 70 | } 71 | 72 | #[get("week/image")] 73 | pub async fn get_this_week_resource_image( 74 | resource_store: web::Data, 75 | ) -> HttpResponse { 76 | let resource_image: Option = resource_store 77 | .as_ref() 78 | .get_resources_this_week_visible_random() 79 | .first() 80 | .and_then(|resource_id| resource_store.get_resource(resource_id)) 81 | .and_then(|resource_json_string| serde_json::from_str(resource_json_string.as_str()).ok()); 82 | 83 | if resource_image.is_none() { 84 | return HttpResponse::NotFound().finish(); 85 | } 86 | 87 | // Read the image data from the file system and adjust the image to the display 88 | let image_resource = resource_image.unwrap(); 89 | let resource_data = fs::read(&image_resource.path) 90 | .ok() 91 | .and_then(|resource_data| { 92 | image_processor::adjust_image( 93 | image_resource.path, 94 | resource_data, 95 | 0, 96 | 0, 97 | image_resource.orientation, 98 | ) 99 | }); 100 | 101 | if let Some(resource_data) = resource_data { 102 | HttpResponse::Ok() 103 | .content_type(CONTENT_TYPE_IMAGE_PNG) 104 | .body(resource_data) 105 | } else { 106 | HttpResponse::InternalServerError().finish() 107 | } 108 | } 109 | 110 | #[get("random")] 111 | pub async fn random_resources(resource_store: web::Data) -> HttpResponse { 112 | let resource_ids: Vec = resource_store.get_random_resources(); 113 | 114 | HttpResponse::Ok() 115 | .content_type(CONTENT_TYPE_APPLICATION_JSON) 116 | .body(serde_json::to_string(&resource_ids).unwrap()) 117 | } 118 | 119 | // TODO: Refactor me 120 | #[get("{resource_id}/{display_width}/{display_height}")] 121 | pub async fn get_resource_by_id_and_resolution( 122 | resources_id: web::Path<(String, u32, u32)>, 123 | resource_store: web::Data, 124 | ) -> HttpResponse { 125 | let path_params = resources_id.into_inner(); 126 | let resource_id = path_params.0.as_str(); 127 | let display_width = path_params.1; 128 | let display_height = path_params.2; 129 | 130 | // If RUST_LOG is DEBUG, print resource metadata 131 | if log_enabled!(log::Level::Debug) { 132 | let image_resource: Option = resource_store 133 | .get_resource(resource_id) 134 | .and_then(|resource_json_string| { 135 | serde_json::from_str(resource_json_string.as_str()).ok() 136 | }); 137 | if let Some(image_resource) = image_resource { 138 | debug!("Resource: {:?}", image_resource); 139 | } 140 | } 141 | 142 | // Check cache, if successful return it 143 | let cached_data = resource_store 144 | .get_ref() 145 | .get_data_cache_entry(format!("{resource_id}_{display_width}_{display_height}")); 146 | if let Some(cached_data) = cached_data { 147 | return HttpResponse::Ok() 148 | .content_type(CONTENT_TYPE_IMAGE_PNG) 149 | .body(cached_data); 150 | } 151 | 152 | // if not in cache, load resource metadata from database 153 | let image_resource: Option = resource_store 154 | .get_resource(resource_id) 155 | .and_then(|resource_json_string| serde_json::from_str(resource_json_string.as_str()).ok()); 156 | // If we can't find the requested resource by id, return with an error 157 | if image_resource.is_none() { 158 | return HttpResponse::NotFound().finish(); 159 | } 160 | 161 | // If we found the requested resource, read the image data and adjust the image to the display 162 | let image_resource = image_resource.unwrap(); 163 | let resource_data = fs::read(image_resource.path.clone()) 164 | .ok() 165 | .and_then(|resource_data| { 166 | image_processor::adjust_image( 167 | image_resource.path, 168 | resource_data, 169 | display_width, 170 | display_height, 171 | image_resource.orientation, 172 | ) 173 | }); 174 | 175 | // If image adjustments were successful, return the data, otherwise return with error 176 | if let Some(resource_data) = resource_data { 177 | resource_store.get_ref().add_data_cache_entry( 178 | format!("{resource_id}_{display_width}_{display_height}"), 179 | &resource_data, 180 | ); 181 | 182 | HttpResponse::Ok() 183 | .content_type(CONTENT_TYPE_IMAGE_PNG) 184 | .body(resource_data) 185 | } else { 186 | HttpResponse::InternalServerError().finish() 187 | } 188 | } 189 | 190 | #[get("{resource_id}/metadata")] 191 | pub async fn get_resource_metadata_by_id( 192 | resource_id: web::Path, 193 | resource_store: web::Data, 194 | ) -> HttpResponse { 195 | let metadata = resource_store.get_resource(resource_id.as_ref()); 196 | 197 | if let Some(metadata) = metadata { 198 | HttpResponse::Ok() 199 | .content_type(CONTENT_TYPE_APPLICATION_JSON) 200 | .body(metadata) 201 | } else { 202 | HttpResponse::InternalServerError().finish() 203 | } 204 | } 205 | 206 | #[get("{resource_id}/description")] 207 | pub async fn get_resource_metadata_description_by_id( 208 | resources_id: web::Path, 209 | resource_store: web::Data, 210 | ) -> HttpResponse { 211 | let resource = resource_store 212 | .get_resource(resources_id.as_str()) 213 | .and_then(|resource_json_string| serde_json::from_str(resource_json_string.as_str()).ok()); 214 | 215 | let display_value = resource 216 | .map(|resource| resource_processor::build_display_value(resource, resource_store.as_ref())); 217 | 218 | if let Some(display_value) = display_value { 219 | HttpResponse::Ok() 220 | .content_type("plain/text") 221 | .body(display_value.await) 222 | } else { 223 | HttpResponse::InternalServerError().finish() 224 | } 225 | } 226 | 227 | #[post("/hide/{resource_id}")] 228 | pub async fn set_resource_hidden( 229 | resources_id: web::Path, 230 | resource_store: web::Data, 231 | ) -> HttpResponse { 232 | resource_store.get_ref().add_hidden(resources_id.as_str()); 233 | HttpResponse::Ok().finish() 234 | } 235 | 236 | #[delete("/hide/{resource_id}")] 237 | pub async fn delete_resource_hidden( 238 | resources_id: web::Path, 239 | resource_store: web::Data, 240 | ) -> HttpResponse { 241 | resource_store 242 | .get_ref() 243 | .remove_hidden(resources_id.as_str()); 244 | HttpResponse::Ok().finish() 245 | } 246 | 247 | #[get("/hide")] 248 | pub async fn get_all_hidden_resources(resource_store: web::Data) -> HttpResponse { 249 | let hidden_ids: Vec = resource_store.as_ref().get_all_hidden(); 250 | HttpResponse::Ok() 251 | .content_type(CONTENT_TYPE_APPLICATION_JSON) 252 | .body(serde_json::to_string(&hidden_ids).unwrap()) 253 | } 254 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

2 | 3 |

4 | 5 |

6 | CI 7 | CI 8 | Docker Pulls 9 | Docker Image Size (tag) 10 | os-arch 11 | Online demo 12 | Donate me 13 | Awesome 14 |

15 | 16 |

17 | Aggregate images taken this week, from previous years and presents them on a web page with a slideshow. 18 |

19 | 20 |

21 | 22 |

23 | 24 | ## Motivation 25 | 26 | When I migrated my photo collection from google photos to a locally hosted instance of photoprism, I missed the 27 | automatically generated slideshow feature of google photos, here it is now. 28 | 29 | ## How it works 30 | 31 | The meta information of all images are read at startup and cached in memory. When the slideshow is opened, images from 32 | this calendar week from previous years are displayed. If no images from the calendar year are found, random images are 33 | displayed. 34 | 35 | ## Run the application 36 | 37 | ### Docker 38 | 39 | Docker Example: 40 | 41 | ```shell 42 | docker run -p 8080:8080 \ 43 | -v /path/to/pictures:/resources \ 44 | -e SLIDESHOW_INTERVAL=60 \ 45 | -e WEATHER_ENABLED=true \ 46 | -e OPEN_WEATHER_MAP_API_KEY= \ 47 | -e BIGDATA_CLOUD_API_KEY= \ 48 | rouhim/this-week-in-past 49 | ``` 50 | 51 | Docker compose example: 52 | 53 | ```shell 54 | services: 55 | this-week-in-past: 56 | image: rouhim/this-week-in-past 57 | volumes: 58 | - /path/to/pictures:/resources:ro # mount read only 59 | ports: 60 | - "8080:8080" 61 | environment: 62 | SLIDESHOW_INTERVAL: 10 63 | ``` 64 | 65 | 66 | ### Native execution 67 | 68 | Download the latest release for your system from 69 | the [releases page](https://github.com/RouHim/this-week-in-past/releases): 70 | 71 | ```shell 72 | # Assuming you run a x86/x64 system, if not adjust the binary name to download 73 | LATEST_VERSION=$(curl -L -s -H 'Accept: application/json' https://github.com/RouHim/this-week-in-past/releases/latest | \ 74 | sed -e 's/.*"tag_name":"\([^"]*\)".*/\1/') && \ 75 | curl -L -o this-week-in-past https://github.com/RouHim/this-week-in-past/releases/download/$LATEST_VERSION/this-week-in-past-x86_64-unknown-linux-musl && \ 76 | chmod +x this-week-in-past 77 | ``` 78 | 79 | Create a folder to store the application data: 80 | 81 | ```shell 82 | mkdir data 83 | ``` 84 | 85 | Start the application with: 86 | 87 | ```shell 88 | RESOURCE_PATHS=/path/to/pictures \ 89 | DATA_FOLDER=data \ 90 | SLIDESHOW_INTERVAL=60 \ 91 | ./this-week-in-past 92 | ``` 93 | 94 | > Since the binary is compiled [completely statically](https://github.com/rust-cross/rust-musl-cross), there are no 95 | > dependencies on system libraries like glibc. 96 | 97 | ## Configuration 98 | 99 | All configuration is done via environment variables: 100 | 101 | | Name | Description | Default value | Can be overwritten in URL | 102 | |----------------------------|------------------------------------------------------------------------------------------------------------|-------------------------------|---------------------------| 103 | | RESOURCE_PATHS | A list of folders from which the images should be loaded (comma separated) | `/resources` (Container only) | | 104 | | DATA_FOLDER | Path to a folder where the data should be stored, needs read/write access | `/data` (Container only) | | 105 | | PORT | Port on which the application should listen | `8080` | | 106 | | SLIDESHOW_INTERVAL | Interval of the slideshow in seconds | `30` | x | 107 | | REFRESH_INTERVAL | Interval how often the page should be reloaded in minutes (triggers a new slideshow playlist) | `360` (6h) | | 108 | | DATE_FORMAT | Date format of the image taken date (https://docs.rs/chrono/0.4.19/chrono/format/strftime/index.html) | `%d.%m.%Y` | | 109 | | BIGDATA_CLOUD_API_KEY | To resolve geo coordinates to city name. Obtain here: https://www.bigdatacloud.com | | | 110 | | OPEN_WEATHER_MAP_API_KEY | To receive weather live data. Obtain here: https://openweathermap.org/api | | | 111 | | WEATHER_ENABLED | Indicates if weather should be shown in the slideshow | `false` | x | 112 | | WEATHER_LOCATION | Name of a city | `Berlin` | | 113 | | WEATHER_LANGUAGE | Weather language ([ISO_639-1 two digit code](https://en.wikipedia.org/wiki/List_of_ISO_639-1_codes)) | `en` | | 114 | | WEATHER_UNIT | Weather units (`metric` or `imperial`) | `metric` | | 115 | | HOME_ASSISTANT_BASE_URL | Home assistant base url (e.g.: `http://192.168.0.123:8123`) | | | 116 | | HOME_ASSISTANT_ENTITY_ID | Home assistant entity id to load the weather from (e.g.: `sensor.outside_temperature`) | | | 117 | | HOME_ASSISTANT_API_TOKEN | Home assistant api access token | | | 118 | | SHOW_HIDE_BUTTON | Show the hide button on the slideshow | `false` | x | 119 | | RANDOM_SLIDESHOW | Show only random images instead of images from this week in previous years | `false` | x | 120 | | IGNORE_FOLDER_MARKER_FILES | A list of file names which causes the folder in which the file is located to be ignored. (comma separated) | `.ignore` | | 121 | | IGNORE_FOLDER_REGEX | A regular expression that causes the folder to be ignored if it matches | | | 122 | | PRELOAD_IMAGES | Indicates if images should be preloaded during the slideshow | `false` | | 123 | 124 | > Some parameters, as marked in the table, can be overwritten as URL parameter 125 | > e.g.: http://localhost:8080/?SLIDESHOW_INTERVAL=10&SHOW_HIDE_BUTTON=false 126 | 127 | ### Ignoring folders 128 | 129 | There are two ways to ignore folders: 130 | 131 | 1) By ignore file: If a folder contains a file with the name specified in `IGNORE_FOLDER_MARKER_FILES`, the folder is 132 | ignored. 133 | 2) By folder name: If a folder name matches the regular expression specified in `IGNORE_FOLDER_REGEX`, the folder is 134 | ignored. 135 | 136 | > If a folder is ignored, all its sub-elements (files and folders) are also ignored. 137 | 138 | ## Limitations 139 | 140 | * Due to [this issue](https://github.com/image-rs/image/issues/1375) of the image crate, there is currently no **HEIC** 141 | image support. 142 | 143 | ## Visual Interaction 144 | 145 | The slideshow can be controlled by clicking on invisible zones on the screen. These zones are divided into three areas: 146 | 147 | * **Previous Image**: Clicking on the left side of the screen will show the previous image. 148 | * **Pause/Resume**: Clicking in the middle of the screen will pause or resume the slideshow. 149 | * **Next Image**: Clicking on the right side of the screen will show the next image. 150 | 151 | > The slideshow will automatically continue after the `REFRESH_INTERVAL` has passed. 152 | 153 | ## Performance 154 | 155 | ### Example 1 156 | 157 | * Hardware: i3-12100T, 3xWD_BLACK SN750 (RAID-Z1), 32GB RAM 158 | * Photos: ~80k 159 | * Indexing: 6 seconds 160 | * Uncached slideshow change: < 1 second 161 | 162 | ### Example 2 163 | 164 | * Hardware: Raspberry Pi Model B, Class 10 SD Card, 1GHz (OC) 32-Bit arm/v6, 512MB RAM 165 | * Photos: ~6k 166 | * Indexing: 38 seconds 167 | * Uncached slideshow change: ~7 seconds 168 | 169 | ### Example 3 170 | 171 | * Hardware: LG G3 (Android Smartphone), Internal Storage, Snapdragon 801 4C 32-Bit arm/v7, 3GB RAM 172 | * Photos: ~8k 173 | * Indexing: 50 seconds 174 | * Uncached slideshow change: < 1 second 175 | 176 | > Indexing scales with storage performance 177 | 178 | > Slideshow change scales with CPU performance 179 | 180 | ## Resources 181 | 182 | * Compiling static Rust binaries - https://github.com/rust-cross/rust-musl-cross 183 | * Weather API - https://openweathermap.org/api 184 | * Resolve Geo coordinates - https://www.bigdatacloud.com 185 | * IntelliJ IDEA - https://www.jetbrains.com/idea 186 | * Serving ML at the speed of Rust - https://shvbsle.in/serving-ml-at-the-speed-of-rust 187 | * The Rust Performance Book - https://nnethercote.github.io/perf-book/#the-rust-performance-book 188 | -------------------------------------------------------------------------------- /web-app/script.js: -------------------------------------------------------------------------------- 1 | /* 2 | Disclaimer: 3 | Yes this is vanilla javascript, and no I'm not a professional web developer. 4 | */ 5 | 6 | 7 | const WEATHER_INTERVAL = 1800000; // 30 minutes 8 | 9 | let resourcesThisWeek; 10 | let currentIndex = 0; 11 | let maxIndex = 0; 12 | let current_resource_id; 13 | let intervalID; 14 | let forceRandomSlideshow = false; 15 | let isPaused = false; 16 | let slideshowId; 17 | let weatherUnit; 18 | 19 | /** 20 | * On page load, do the following things: 21 | * - Load the available images and initialize the slideshow with it 22 | * - Load and show the weather information 23 | * - Set a page reload interval for each hour 24 | */ 25 | window.addEventListener('load', () => { 26 | forceRandomSlideshow = shouldOnlyPlayRandom(); 27 | initSlideshow(); 28 | loadWeatherInformation(); 29 | initHideButton(); 30 | 31 | // Reload page every x minutes 32 | let refreshIntervalInMinutes = getRefreshInterval(); 33 | intervalID = setInterval(() => location.reload(), refreshIntervalInMinutes * 60000); 34 | }); 35 | 36 | function shouldOnlyPlayRandom() { 37 | const urlParams = new URLSearchParams(window.location.search); 38 | if (urlParams.has('RANDOM_SLIDESHOW')) { 39 | return urlParams.get('RANDOM_SLIDESHOW') === "true"; 40 | } 41 | 42 | let request = new XMLHttpRequest(); 43 | request.open('GET', `/api/config/random-slideshow`, false); 44 | request.send(null); 45 | 46 | return request.status === 200 && request.responseText === "true"; 47 | } 48 | 49 | /** 50 | * Checks if images should be preloaded. 51 | * @returns {boolean} true if images should be preloaded, false otherwise 52 | */ 53 | function shouldPreloadImages() { 54 | let request = new XMLHttpRequest(); 55 | request.open('GET', `/api/config/preload-images`, false); 56 | request.send(null); 57 | 58 | return request.status === 200 && request.responseText === "true"; 59 | } 60 | 61 | /** 62 | * Initializes a new slideshow, if random is active fetch a random playlist. 63 | * Otherwise, fetch the current week's playlist. 64 | * If no images are available, fetch a random playlist. 65 | * If the slideshow is forced to be random, fetch a random playlist. 66 | */ 67 | function initSlideshow() { 68 | if (forceRandomSlideshow) { 69 | console.log("Forcing random slideshow"); 70 | fetch('/api/resources/random') 71 | .then(response => response.json()) 72 | .then(resources => beginSlideshow(resources)) 73 | .catch(error => console.error('Error loading available images:', error)); 74 | } else { 75 | fetch('/api/resources/week/count') 76 | .then(response => response.json()) 77 | .then(count => { 78 | console.log("Available images this week:", count); 79 | if (count === 0) { 80 | console.log("No images available, starting random slideshow"); 81 | return fetch('/api/resources/random') 82 | .then(response => response.json()) 83 | .then(resources => beginSlideshow(resources)); 84 | } else { 85 | console.log("Starting this week's slideshow"); 86 | return fetch('/api/resources/week') 87 | .then(response => response.json()) 88 | .then(resources => beginSlideshow(resources)); 89 | } 90 | }) 91 | .catch(error => console.error('Error loading available images:', error)); 92 | } 93 | } 94 | 95 | /** 96 | * Starts the slideshow utilizing `setInterval` 97 | * The interval is set to the value returned from the backend API 98 | * @param foundResourcesOfThisWeek to start the slideshow with 99 | */ 100 | function beginSlideshow(foundResourcesOfThisWeek) { 101 | console.log("Starting slideshow with " + foundResourcesOfThisWeek.length + " images"); 102 | 103 | resourcesThisWeek = foundResourcesOfThisWeek; 104 | 105 | maxIndex = Object.keys(resourcesThisWeek).length - 1; 106 | slideshowTick(); 107 | 108 | // Load slideshow interval 109 | let intervalInSeconds = getSlideshowInterval(); 110 | 111 | // Start image slideshow 112 | slideshowId = setInterval(() => slideshowTick(), intervalInSeconds * 1000); 113 | } 114 | 115 | /** 116 | * Checks if the hidden button should be visible and appends the click event listener on it 117 | */ 118 | function initHideButton() { 119 | const urlParams = new URLSearchParams(window.location.search); 120 | if (urlParams.has('SHOW_HIDE_BUTTON')) { 121 | if (urlParams.get('SHOW_HIDE_BUTTON') === "true") { 122 | showHideButton(); 123 | } 124 | return; 125 | } 126 | 127 | fetch(`/api/config/show-hide-button`) 128 | .then(response => response.json()) 129 | .then(isHideButtonVisible => { 130 | if (isHideButtonVisible === true) { 131 | showHideButton(); 132 | } 133 | }) 134 | .catch(error => console.error("Error:", error)); 135 | } 136 | 137 | function showHideButton() { 138 | let hideCurrentImageBtn = document.getElementById("hide-current-image"); 139 | hideCurrentImageBtn.style.visibility = "visible"; 140 | hideCurrentImageBtn.addEventListener("click", hideCurrentImage); 141 | } 142 | 143 | /** 144 | * Adds the current visible image to the hidden list and reloads the slideshow (to hide it) 145 | */ 146 | function hideCurrentImage() { 147 | fetch("/api/resources/hide/" + current_resource_id, {method: "POST"}) 148 | .then(() => location.reload()) 149 | .catch(error => console.error("Error:", error)); 150 | } 151 | 152 | 153 | function fetchWeatherUnit() { 154 | fetch(`/api/weather/unit`) 155 | .then(response => response.text()) 156 | .then(unitText => { 157 | weatherUnit = unitText; 158 | }) 159 | .catch(error => console.error("Error:", error)); 160 | } 161 | 162 | /** 163 | * Checks if the weather information should be shown, if so load them 164 | */ 165 | function loadWeatherInformation() { 166 | // First check if the WEATHER_ENABLED was overwritten in the url 167 | const urlParams = new URLSearchParams(window.location.search); 168 | if (urlParams.has('WEATHER_ENABLED')) { 169 | if (urlParams.get('WEATHER_ENABLED') === "true") { 170 | loadCurrentWeather(); 171 | setInterval(loadCurrentWeather, WEATHER_INTERVAL); // Update weather every x minutes 172 | 173 | } 174 | return; 175 | } 176 | 177 | // If not, check in the app config if weather is enabled 178 | fetch(`/api/weather`) 179 | .then(response => response.json()) 180 | .then(showWeather => { 181 | if (showWeather === true) { 182 | loadCurrentWeather(); 183 | setInterval(loadCurrentWeather, WEATHER_INTERVAL); // Update weather every x minutes 184 | } 185 | }) 186 | .catch(error => console.error("Error:", error)); 187 | } 188 | 189 | function getWeatherUnit() { 190 | return weatherUnit === "metric" ? "°C" : "°F"; 191 | } 192 | 193 | /** 194 | * Loads the current weather from the rest api and shows it 195 | */ 196 | function loadCurrentWeather() { 197 | fetchWeatherUnit(); 198 | 199 | fetch(`/api/weather/current`) 200 | .then(response => response.json()) 201 | .then(data => { 202 | showCurrentWeather(data); 203 | }) 204 | .catch(error => console.error("Error:", error)); 205 | } 206 | 207 | /** 208 | * Shows the actual weather on the frontend. 209 | * If home assistant is enabled, the temperature is loaded from Home Assistant. 210 | * The weather icon is loaded from OpenWeatherMap. 211 | * @param data the weather data 212 | */ 213 | function showCurrentWeather(data) { 214 | const weather = data.weather[0]; 215 | const icon = weather.icon; 216 | 217 | document.getElementById("weather-label").textContent = weather.description + ","; 218 | document.getElementById("weather-icon").src = `https://openweathermap.org/img/w/${icon}.png`; 219 | 220 | isHomeAssistantEnabled().then((isHomeAssistantEnabled) => { 221 | let temperatureText; 222 | if (isHomeAssistantEnabled) { 223 | let homeAssistantData = JSON.parse(getCurrentTemperatureDataFromHomeAssistant()); 224 | temperatureText = Math.round(homeAssistantData.state) + homeAssistantData.attributes.unit_of_measurement; 225 | } else { 226 | temperatureText = Math.round(data.main.temp) + getWeatherUnit(); 227 | } 228 | document.getElementById("weather-temperature").innerText = temperatureText; 229 | }); 230 | } 231 | 232 | /** 233 | * @returns {Promise} true if Home Assistant is enabled 234 | */ 235 | async function isHomeAssistantEnabled() { 236 | try { 237 | const response = await fetch(`/api/weather/homeassistant`); 238 | const data = await response.json(); 239 | return data === true; 240 | } catch (error) { 241 | console.error("Error:", error); 242 | return false; 243 | } 244 | } 245 | 246 | 247 | /** 248 | * @returns {string} the current temperature from Home Assistant 249 | */ 250 | function getCurrentTemperatureDataFromHomeAssistant() { 251 | let request = new XMLHttpRequest(); 252 | request.open('GET', `/api/weather/homeassistant/temperature`, false); 253 | request.send(null); 254 | if (request.status === 200) { 255 | return request.response; 256 | } 257 | return "{}"; 258 | } 259 | 260 | /** 261 | * Sets the image url and its meta information to the frontend 262 | * This is done by fading out the current image and fading in the new image 263 | * The sleep function is used to prevent the slideshow from flickering 264 | * @param resource_id the id of the resource 265 | */ 266 | function setImage(resource_id) { 267 | console.log("Showing image: " + resource_id); 268 | 269 | // build the image url 270 | let screenWidth = window.screen.availWidth; 271 | let screenHeight = window.screen.availHeight; 272 | let imageUrl = `/api/resources/${resource_id}/${screenWidth}/${screenHeight}`; 273 | 274 | // obtain the image elements 275 | let backgroundImage = document.getElementById('background-image'); 276 | let slideshowImage = document.getElementById("slideshow-image"); 277 | let slideShowMetadata = document.getElementById("slideshow-metadata"); 278 | 279 | // start the fade out animation 280 | backgroundImage.classList.add("fade-out"); 281 | slideshowImage.classList.add("fade-out"); 282 | slideShowMetadata.classList.add("fade-out"); 283 | 284 | // wait for the fade out animation to end 285 | sleep(500).then(() => { 286 | 287 | // when the image is loaded, start the fade in animation 288 | slideshowImage.onload = () => { 289 | // fade images in 290 | backgroundImage.classList.replace("fade-out", "fade-in"); 291 | slideshowImage.classList.replace("fade-out", "fade-in"); 292 | slideShowMetadata.classList.replace("fade-out", "fade-in"); 293 | 294 | // wait for the fade in animation to end 295 | sleep(500).then(() => { 296 | backgroundImage.classList.remove("fade-in"); 297 | slideshowImage.classList.remove("fade-in"); 298 | slideShowMetadata.classList.remove("fade-in"); 299 | }); 300 | }; 301 | 302 | // set image and blurred background image 303 | backgroundImage.style.backgroundImage = `url(${imageUrl})`; 304 | slideshowImage.src = imageUrl; 305 | 306 | // set image description but fade in is done simultaneously with the fade in of the image, see above 307 | fetch(`/api/resources/${resource_id}/description`) 308 | .then(response => response.text()) 309 | .then(text => slideShowMetadata.innerText = text); 310 | 311 | // At last step, set the current resource id 312 | current_resource_id = resource_id; 313 | }); 314 | } 315 | 316 | /** 317 | * On slideshow tick interval. 318 | * Set the slideshow image and its meta information. 319 | */ 320 | function slideshowTick() { 321 | // Proceeds with the regular "this week" slideshow 322 | setImage(resourcesThisWeek[currentIndex]); 323 | 324 | currentIndex++; 325 | if (currentIndex > maxIndex) { 326 | currentIndex = 0; 327 | } 328 | 329 | // Preload next image if active 330 | if (shouldPreloadImages()) { 331 | preloadNextImage(resourcesThisWeek[currentIndex]); 332 | } 333 | } 334 | 335 | /** 336 | * Preloads the next image in the background, this done by requesting the image from the backend, because the backend caches the images. 337 | * Thus, consecutive requests for the same image are faster. 338 | * @param resource_id the id of the resource to preload 339 | */ 340 | function preloadNextImage(resource_id) { 341 | console.log("Preloading next image: " + resource_id); 342 | let screenWidth = window.screen.availWidth; 343 | let screenHeight = window.screen.availHeight; 344 | let request = new XMLHttpRequest(); 345 | request.open("GET", `/api/resources/${resource_id}/${screenWidth}/${screenHeight}`); 346 | request.send(); 347 | } 348 | 349 | /** 350 | * @returns {number} the slideshow interval in seconds 351 | */ 352 | function getSlideshowInterval() { 353 | // First check if the user overwrites the SLIDESHOW_INTERVAL as url parameter 354 | const urlParams = new URLSearchParams(window.location.search); 355 | if (urlParams.has('SLIDESHOW_INTERVAL')) { 356 | return parseInt(urlParams.get('SLIDESHOW_INTERVAL')) 357 | } 358 | 359 | // if no interval was found in the url, load the value from the config 360 | let request = new XMLHttpRequest(); 361 | request.open('GET', `/api/config/interval/slideshow`, false); 362 | request.send(null); 363 | if (request.status === 200) { 364 | return parseInt(request.responseText); 365 | } 366 | return 30; 367 | } 368 | 369 | /** 370 | * @returns {number} the refresh interval in minutes from the backend API 371 | */ 372 | function getRefreshInterval() { 373 | let request = new XMLHttpRequest(); 374 | request.open('GET', `/api/config/interval/refresh`, false); 375 | request.send(null); 376 | if (request.status === 200) { 377 | return parseInt(request.responseText) 378 | } 379 | return 180; 380 | } 381 | 382 | /** 383 | * Sleeps for the given amount of milliseconds and returns a promise that is resolved when the sleep is finished 384 | * @param ms the amount of milliseconds to sleep 385 | * @returns {Promise} 386 | */ 387 | function sleep(ms) { 388 | return new Promise(resolver => setTimeout(resolver, ms)); 389 | } 390 | 391 | /** 392 | * Shows the previous image in the slideshow. 393 | * Resets the slideshow interval to avoid takeover effect. 394 | */ 395 | function showPrevImage() { 396 | console.log("Showing previous image"); 397 | currentIndex--; 398 | if (currentIndex < 0) { 399 | currentIndex = maxIndex; 400 | } 401 | setImage(resourcesThisWeek[currentIndex]); 402 | 403 | // Reset the slideshow interval to avoid takeover effect 404 | clearInterval(slideshowId); 405 | slideshowId = setInterval(slideshowTick, getSlideshowInterval() * 1000); 406 | } 407 | 408 | /** 409 | * Pauses or resumes the slideshow. 410 | * Updates the button text to reflect the current state. 411 | */ 412 | function pauseResumeSlideshow() { 413 | if (isPaused) { 414 | console.log("Resuming slideshow"); 415 | } else { 416 | console.log("Pausing slideshow"); 417 | } 418 | isPaused = !isPaused; 419 | document.getElementById("pause-zone").innerText = isPaused ? "Resume" : "Pause"; 420 | if (isPaused) { 421 | clearInterval(slideshowId); 422 | } else { 423 | slideshowId = setInterval(slideshowTick, getSlideshowInterval() * 1000); 424 | } 425 | } 426 | 427 | /** 428 | * Shows the next image in the slideshow. 429 | * Resets the slideshow interval to avoid takeover effect. 430 | */ 431 | function showNextImage() { 432 | console.log("Showing next image"); 433 | slideshowTick(); 434 | 435 | // Reset the slideshow interval to avoid takeover effect 436 | clearInterval(slideshowId); 437 | slideshowId = setInterval(slideshowTick, getSlideshowInterval() * 1000); 438 | } -------------------------------------------------------------------------------- /.github/workflows/build-image.yaml: -------------------------------------------------------------------------------- 1 | name: CI/CD 2 | on: 3 | push: 4 | pull_request: 5 | types: 6 | - opened 7 | workflow_dispatch: # allow manual execution 8 | 9 | concurrency: 10 | group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }} 11 | cancel-in-progress: true 12 | 13 | env: 14 | IMAGE_NAME: rouhim/this-week-in-past 15 | 16 | jobs: 17 | # Use latest for main and PR number for the "ticket" string as prefix for PRs 18 | set_image_tag: 19 | name: Set image tag 20 | runs-on: ubuntu-latest 21 | steps: 22 | - name: Set image tag 23 | id: set_image_tag 24 | run: | 25 | IMAGE_TAG= 26 | 27 | if [[ "$GITHUB_REF" == "refs/heads/main" ]]; then 28 | IMAGE_TAG="latest" 29 | elif [ -n "${{ github.event.issue.number }}" ]; then 30 | echo "Using github.event.issue.number: ${{ github.event.issue.number }}" 31 | IMAGE_TAG="ISSUE-${{ github.event.issue.number }}" 32 | elif [ -n "${{ github.event.pull_request.number }}" ];then 33 | echo "Using github.event.pull_request.number: ${{ github.event.pull_request.number }}" 34 | IMAGE_TAG="PR-${{ github.event.pull_request.number }}" 35 | else 36 | echo "Using GITHUB_REF_NAME: $GITHUB_REF_NAME" 37 | IMAGE_TAG="$(echo -n $GITHUB_REF_NAME | md5sum | cut -c1-6)" 38 | fi 39 | 40 | echo "IMAGE_TAG: $IMAGE_TAG" 41 | echo "IMAGE_TAG=$IMAGE_TAG" >> $GITHUB_OUTPUT 42 | 43 | outputs: 44 | IMAGE_TAG: ${{ steps.set_image_tag.outputs.IMAGE_TAG }} 45 | 46 | check-oci-config: 47 | name: Check Containerfile 48 | runs-on: ubuntu-latest 49 | steps: 50 | - name: Checkout code 51 | uses: actions/checkout@v6 52 | 53 | - name: Run Trivy vulnerability scanner 54 | uses: aquasecurity/trivy-action@master 55 | with: 56 | scan-type: 'config' 57 | hide-progress: false 58 | format: 'table' 59 | exit-code: '1' 60 | ignore-unfixed: true 61 | severity: 'CRITICAL,HIGH' 62 | 63 | 64 | check-repo: 65 | name: Check git repository 66 | runs-on: ubuntu-latest 67 | steps: 68 | - name: Checkout code 69 | uses: actions/checkout@v6 70 | 71 | - name: Run Trivy vulnerability scanner 72 | uses: aquasecurity/trivy-action@master 73 | with: 74 | scan-type: 'fs' 75 | format: 'table' 76 | exit-code: '1' 77 | ignore-unfixed: true 78 | severity: 'CRITICAL,HIGH' 79 | 80 | 81 | check-code-style: 82 | name: Check code style 83 | runs-on: ubuntu-latest 84 | steps: 85 | - name: Checkout code 86 | uses: actions/checkout@v6 87 | 88 | - name: Install rust toolchain 89 | uses: actions-rs/toolchain@v1 90 | with: 91 | profile: minimal 92 | toolchain: stable 93 | components: rustfmt 94 | 95 | - uses: Swatinem/rust-cache@v2 # use rust / cargo caching 96 | with: 97 | cache-on-failure: "true" 98 | 99 | - name: Check the code style 100 | run: cargo fmt --all -- --check 101 | 102 | 103 | check-code: 104 | name: Check rust code 105 | runs-on: ubuntu-latest 106 | steps: 107 | - name: Checkout code 108 | uses: actions/checkout@v6 109 | 110 | - name: Install rust toolchain 111 | uses: actions-rs/toolchain@v1 112 | with: 113 | profile: minimal 114 | toolchain: stable 115 | components: clippy 116 | 117 | - uses: Swatinem/rust-cache@v2 # use rust / cargo caching 118 | with: 119 | cache-on-failure: "true" 120 | 121 | - name: Verify code 122 | run: cargo clippy 123 | 124 | 125 | test: 126 | name: Run application tests 127 | runs-on: ubuntu-latest 128 | env: 129 | BIGDATA_CLOUD_API_KEY: ${{ secrets.BIGDATA_CLOUD_API_KEY }} 130 | OPEN_WEATHER_MAP_API_KEY: ${{ secrets.OPEN_WEATHER_MAP_API_KEY }} 131 | steps: 132 | - name: Checkout code 133 | uses: actions/checkout@v6 134 | 135 | - name: Install rust toolchain 136 | uses: actions-rs/toolchain@v1 137 | with: 138 | profile: minimal 139 | toolchain: stable 140 | 141 | - uses: Swatinem/rust-cache@v2 # use rust / cargo caching 142 | with: 143 | cache-on-failure: "true" 144 | 145 | - name: Test code 146 | run: cargo test 147 | 148 | create-release: 149 | name: Create release 150 | needs: [ check-oci-config, check-repo, check-code-style, check-code, test ] 151 | runs-on: ubuntu-latest 152 | steps: 153 | - name: Checkout code 154 | if: github.ref == 'refs/heads/main' 155 | uses: actions/checkout@v6 156 | 157 | # Create a new release based on semantic versioning 158 | - name: Set up Node.js 159 | if: github.ref == 'refs/heads/main' 160 | uses: actions/setup-node@v6 161 | with: 162 | node-version: 24 163 | 164 | - name: Install Dependencies 165 | if: github.ref == 'refs/heads/main' 166 | run: | 167 | npm install -g \ 168 | semantic-release \ 169 | @semantic-release/git \ 170 | @semantic-release/gitlab \ 171 | @semantic-release/changelog \ 172 | @semantic-release/exec \ 173 | @semantic-release/commit-analyzer \ 174 | conventional-changelog-conventionalcommits 175 | 176 | - name: Generate Semantic Release Notes and Create Release 177 | if: github.ref == 'refs/heads/main' 178 | env: 179 | GITHUB_TOKEN: ${{ secrets.RELEASE_TOKEN }} 180 | run: npx semantic-release 181 | 182 | 183 | build-binaries: 184 | name: Build 185 | needs: [ check-oci-config, check-repo, check-code-style, check-code, test, create-release ] 186 | runs-on: ubuntu-latest 187 | # Run all targets in parallel 188 | strategy: 189 | matrix: 190 | target: [ x86_64-musl, aarch64-musl, armv7-musleabihf, arm-musleabihf ] 191 | steps: 192 | - name: Checkout code 193 | uses: actions/checkout@v6 194 | 195 | - name: Set version to environment 196 | if: github.ref == 'refs/heads/main' 197 | shell: bash 198 | run: | 199 | REPO="RouHim/this-week-in-past" 200 | LATEST_RELEASE_VERSION=$(curl --silent "https://api.github.com/repos/$REPO/releases/latest" | jq -r ".tag_name") 201 | echo "Latest release is $LATEST_RELEASE_VERSION" 202 | echo "VERSION=$LATEST_RELEASE_VERSION" >> $GITHUB_ENV 203 | 204 | # Ensure that version is valid 205 | if ! [[ $LATEST_RELEASE_VERSION =~ ^[0-9]+\.[0-9]+\.[0-9]+$ ]]; then 206 | echo "Latest release version is not invalid: $LATEST_RELEASE_VERSION" 207 | exit 1 208 | fi 209 | 210 | - name: Install rust toolchain 211 | uses: actions-rs/toolchain@v1 212 | with: 213 | toolchain: stable 214 | 215 | - name: Set version 216 | if: github.ref == 'refs/heads/main' 217 | shell: bash 218 | run: | 219 | sed -i "s/version = \"0.0.0\"/version = \"${{ env.VERSION }}\"/g" Cargo.toml 220 | echo "Cargo version is now" $(cargo metadata --no-deps --format-version 1 | jq -r ".packages[0].version") 221 | 222 | - uses: Swatinem/rust-cache@v2 # use rust / cargo caching 223 | with: 224 | cache-on-failure: "true" 225 | 226 | - name: Build static application binary 227 | run: | 228 | # Prepare rust-cross/rust-musl-cross for cross compiling 229 | source .github/workflows/scripts/prep-build-env.sh 230 | 231 | # Compile the desired target 232 | build-rust-static-bin ${{ matrix.target }} 233 | 234 | # Translate ${{ matrix.target }} to rust target triple 235 | export TARGET_TRIPPLE=$(bash .github/workflows/scripts/translate-arch-to-rust-tripple.sh ${{ matrix.target }}) 236 | echo "TARGET_TRIPPLE=${TARGET_TRIPPLE}" 237 | echo "TARGET_TRIPPLE=${TARGET_TRIPPLE}" >> $GITHUB_ENV 238 | 239 | - name: Upload static application binaries 240 | uses: actions/upload-artifact@v5 241 | with: 242 | name: ${{ matrix.target }} 243 | path: target/${{ env.TARGET_TRIPPLE }}/release/this-week-in-past 244 | 245 | 246 | test-container-image: 247 | name: Test container image 248 | needs: [ build-binaries, set_image_tag ] 249 | runs-on: ubuntu-latest 250 | steps: 251 | - name: Checkout code 252 | uses: actions/checkout@v6 253 | 254 | - name: Download x86_64-musl static application binary 255 | uses: actions/download-artifact@v6 256 | with: 257 | name: x86_64-musl 258 | path: target/x86_64-unknown-linux-musl/release/ 259 | 260 | - name: Set up Docker Buildx 261 | uses: docker/setup-buildx-action@v3 262 | 263 | - name: Build x86 image and load into local repo 264 | uses: docker/build-push-action@v6 265 | with: 266 | tags: ${{ env.IMAGE_NAME }}:${{ needs.set_image_tag.outputs.IMAGE_TAG }} 267 | context: . 268 | file: ./Containerfile 269 | load: true 270 | push: false 271 | 272 | - name: Prepare test env 273 | run: | 274 | mkdir -p /tmp/this-week-in-past-test 275 | wget https://raw.githubusercontent.com/ianare/exif-samples/master/jpg/gps/DSCN0010.jpg -O /tmp/this-week-in-past-test/DSCN0010.jpg 276 | 277 | - name: Start test container 278 | run: docker run --pull never -d --name test-container -p 8080:8080 -v /tmp/this-week-in-past-test:/resources ${{ env.IMAGE_NAME }}:${{ needs.set_image_tag.outputs.IMAGE_TAG }} 279 | 280 | - name: Test container web api 281 | run: wget -q -O /dev/null --timeout 5 --tries 3 http://localhost:8080/api/health 282 | 283 | - name: Cleanup 284 | run: docker kill test-container && docker rm test-container 285 | 286 | 287 | upload-artifacts: 288 | name: Release artifacts 289 | needs: [ build-binaries ] 290 | runs-on: ubuntu-latest 291 | if: github.event_name == 'push' && github.ref == 'refs/heads/main' 292 | steps: 293 | - name: Checkout code 294 | uses: actions/checkout@v6 295 | 296 | - name: Download x86_64-musl binary 297 | uses: actions/download-artifact@v6 298 | with: 299 | name: x86_64-musl 300 | path: target/x86_64-unknown-linux-musl/release/ 301 | - name: Download aarch64-musl binary 302 | uses: actions/download-artifact@v6 303 | with: 304 | name: aarch64-musl 305 | path: target/aarch64-unknown-linux-musl/release/ 306 | - name: Download armv7-musleabihf binary 307 | uses: actions/download-artifact@v6 308 | with: 309 | name: armv7-musleabihf 310 | path: target/armv7-unknown-linux-musleabihf/release/ 311 | - name: Download arm-musleabihf binary 312 | uses: actions/download-artifact@v6 313 | with: 314 | name: arm-musleabihf 315 | path: target/arm-unknown-linux-musleabihf/release/ 316 | 317 | - name: Upload the x86 built binaries to the release 318 | run: | 319 | bash .github/workflows/scripts/upload-asset-to-release.sh \ 320 | ${{ secrets.RELEASE_TOKEN }} \ 321 | "./target/x86_64-unknown-linux-musl/release/this-week-in-past" \ 322 | "this-week-in-past-x86_64-unknown-linux-musl" 323 | 324 | - name: Upload the aarch64 built binaries to the release 325 | run: | 326 | bash .github/workflows/scripts/upload-asset-to-release.sh \ 327 | ${{ secrets.RELEASE_TOKEN }} \ 328 | "./target/aarch64-unknown-linux-musl/release/this-week-in-past" \ 329 | "this-week-in-past-aarch64-unknown-linux-musl" 330 | 331 | - name: Upload the armv7 built binaries to the release 332 | run: | 333 | bash .github/workflows/scripts/upload-asset-to-release.sh \ 334 | ${{ secrets.RELEASE_TOKEN }} \ 335 | "./target/armv7-unknown-linux-musleabihf/release/this-week-in-past" \ 336 | "this-week-in-past-armv7-unknown-linux-musleabihf" 337 | 338 | - name: Upload the arm v6 built binaries to the release 339 | run: | 340 | bash .github/workflows/scripts/upload-asset-to-release.sh \ 341 | ${{ secrets.RELEASE_TOKEN }} \ 342 | "./target/arm-unknown-linux-musleabihf/release/this-week-in-past" \ 343 | "this-week-in-past-arm-unknown-linux-musleabihf" 344 | 345 | 346 | publish-container-images: 347 | name: Publish container images 348 | needs: [ test-container-image, set_image_tag ] 349 | runs-on: ubuntu-latest 350 | if: github.event_name == 'push' 351 | steps: 352 | - name: Checkout code 353 | uses: actions/checkout@v6 354 | 355 | - name: Set version to environment 356 | shell: bash 357 | if: github.ref == 'refs/heads/main' 358 | run: | 359 | REPO="RouHim/this-week-in-past" 360 | VERSION=$(curl --silent "https://api.github.com/repos/$REPO/releases/latest" | jq -r ".tag_name") 361 | MINOR_PART=$(echo $VERSION | cut -d. -f2) 362 | MAJOR_PART=$(echo $VERSION | cut -d. -f1) 363 | echo "Latest release is $VERSION" 364 | echo "VERSION=$VERSION" >> $GITHUB_ENV 365 | echo "MINOR_VERSION=$MAJOR_PART.$MINOR_PART" >> $GITHUB_ENV 366 | echo "MAJOR_VERSION=$MAJOR_PART" >> $GITHUB_ENV 367 | 368 | - name: Download x86_64-musl binary 369 | uses: actions/download-artifact@v6 370 | with: 371 | name: x86_64-musl 372 | path: target/x86_64-unknown-linux-musl/release/ 373 | - name: Download aarch64-musl binary 374 | uses: actions/download-artifact@v6 375 | with: 376 | name: aarch64-musl 377 | path: target/aarch64-unknown-linux-musl/release/ 378 | - name: Download armv7-musleabihf binary 379 | uses: actions/download-artifact@v6 380 | with: 381 | name: armv7-musleabihf 382 | path: target/armv7-unknown-linux-musleabihf/release/ 383 | - name: Download arm-musleabihf binary 384 | uses: actions/download-artifact@v6 385 | with: 386 | name: arm-musleabihf 387 | path: target/arm-unknown-linux-musleabihf/release/ 388 | 389 | - name: Set up Docker Buildx 390 | uses: docker/setup-buildx-action@v3 391 | 392 | - name: Login to Docker Hub 393 | uses: docker/login-action@v3 394 | with: 395 | username: ${{ secrets.DOCKERHUB_USERNAME }} 396 | password: ${{ secrets.DOCKERHUB_PASSWORD }} 397 | 398 | - name: Build and push multi platform container image 399 | uses: docker/build-push-action@v6 400 | with: 401 | tags: ${{ env.IMAGE_NAME }}:${{ needs.set_image_tag.outputs.IMAGE_TAG }} 402 | platforms: linux/amd64, linux/arm64/v8, linux/arm/v7, linux/arm/v6 403 | context: . 404 | file: ./Containerfile 405 | load: false 406 | push: true 407 | 408 | - name: Build and push versioned multi platform container image 409 | uses: docker/build-push-action@v6 410 | if: env.VERSION != '' 411 | with: 412 | tags: ${{ env.IMAGE_NAME }}:${{ env.VERSION }} 413 | platforms: linux/amd64, linux/arm64/v8, linux/arm/v7, linux/arm/v6 414 | context: . 415 | file: ./Containerfile 416 | load: false 417 | push: true 418 | 419 | - name: Build and push minor versioned multi platform container image 420 | uses: docker/build-push-action@v6 421 | if: env.VERSION != '' 422 | with: 423 | tags: ${{ env.IMAGE_NAME }}:${{ env.MINOR_VERSION }} 424 | platforms: linux/amd64, linux/arm64/v8, linux/arm/v7, linux/arm/v6 425 | context: . 426 | file: ./Containerfile 427 | load: false 428 | push: true 429 | 430 | - name: Build and push major versioned multi platform container image 431 | uses: docker/build-push-action@v6 432 | if: env.VERSION != '' 433 | with: 434 | tags: ${{ env.IMAGE_NAME }}:${{ env.MAJOR_VERSION }} 435 | platforms: linux/amd64, linux/arm64/v8, linux/arm/v7, linux/arm/v6 436 | context: . 437 | file: ./Containerfile 438 | load: false 439 | push: true 440 | 441 | - name: Delete old releases 442 | uses: dev-drprasad/delete-older-releases@v0.3.4 443 | with: 444 | keep_latest: 5 445 | delete_tags: true 446 | env: 447 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 448 | 449 | - name: Update docker hub description 450 | if: github.ref == 'refs/heads/main' 451 | uses: peter-evans/dockerhub-description@v5 452 | with: 453 | username: ${{ secrets.DOCKERHUB_USERNAME }} 454 | password: ${{ secrets.DOCKERHUB_PASSWORD }} 455 | repository: ${{ env.IMAGE_NAME }} 456 | -------------------------------------------------------------------------------- /src/resource_store.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use std::fs; 3 | use std::path::PathBuf; 4 | 5 | use crate::config; 6 | use chrono::Datelike; 7 | use log::{debug, error}; 8 | use r2d2::{Pool, PooledConnection}; 9 | use r2d2_sqlite::SqliteConnectionManager; 10 | use rand::seq::SliceRandom; 11 | 12 | #[derive(Clone)] 13 | pub struct ResourceStore { 14 | persistent_file_store_pool: Pool, 15 | } 16 | 17 | /// Implements all functions acting on the data store instance 18 | impl ResourceStore { 19 | /// Cleanup database 20 | pub fn vacuum(&self) { 21 | let connection = self.persistent_file_store_pool.get().unwrap(); 22 | let mut stmt = connection.prepare("VACUUM").unwrap(); 23 | stmt.execute([]).unwrap_or_else(|error| { 24 | error!("VACUUM failed. Error:\n{}", error); 25 | 0 26 | }); 27 | } 28 | 29 | /// Returns a list of all hidden resource ids 30 | pub fn get_all_hidden(&self) -> Vec { 31 | let connection = self.persistent_file_store_pool.get().unwrap(); 32 | let mut stmt = connection.prepare("SELECT id FROM hidden").unwrap(); 33 | let mut rows = stmt.query([]).unwrap(); 34 | let mut ids: Vec = Vec::new(); 35 | while let Some(row) = rows.next().unwrap() { 36 | ids.push(row.get(0).unwrap()); 37 | } 38 | ids 39 | } 40 | 41 | /// Gets a list of all visible resources for the current week 42 | /// Returns a list of resource ids 43 | pub fn get_resources_this_week_visible_random(&self) -> Vec { 44 | let connection = self.persistent_file_store_pool.get().unwrap(); 45 | 46 | // Check if we are in the new year week 47 | // If yes, we need to query differently 48 | if range_hits_new_year() { 49 | debug!("🎊 New year week detected"); 50 | let mut new_year_resources = [ 51 | execute_query(&connection, get_last_year_query()), 52 | execute_query(&connection, get_next_year_query()), 53 | ] 54 | .concat(); 55 | new_year_resources.shuffle(&mut rand::rng()); 56 | return new_year_resources; 57 | } 58 | 59 | // Otherwise, we can query normally 60 | let regular_week_query = r#" 61 | SELECT DISTINCT resources.id 62 | FROM resources, 63 | json_each(resources.value) json 64 | WHERE json.key = 'taken' 65 | AND json.value NOT NULL 66 | AND resources.id NOT IN (SELECT id FROM hidden) 67 | AND strftime('%m-%d', json.value) BETWEEN strftime('%m-%d', 'now', 'localtime', '-3 days') AND strftime('%m-%d', 'now', 'localtime', '+3 days') 68 | ORDER BY RANDOM() 69 | ;"#; 70 | execute_query(&connection, regular_week_query) 71 | } 72 | 73 | /// Returns the count of all visible resources for the current week 74 | pub fn get_resources_this_week_visible_count(&self) -> usize { 75 | let connection = self.persistent_file_store_pool.get().unwrap(); 76 | 77 | // Check if we are in the new year week 78 | // If yes, we need to query differently 79 | if range_hits_new_year() { 80 | debug!("🎊 New year week detected"); 81 | let new_year_resources_count = [ 82 | execute_count_query(&connection, get_last_year_count_query()), 83 | execute_count_query(&connection, get_next_year_count_query()), 84 | ] 85 | .iter() 86 | .sum(); 87 | return new_year_resources_count; 88 | } 89 | 90 | // Otherwise, we can query normally 91 | let regular_week_query = r#" 92 | SELECT COUNT(DISTINCT resources.id) 93 | FROM resources, 94 | json_each(resources.value) json 95 | WHERE json.key = 'taken' 96 | AND json.value NOT NULL 97 | AND resources.id NOT IN (SELECT id FROM hidden) 98 | AND strftime('%m-%d', json.value) BETWEEN strftime('%m-%d', 'now', 'localtime', '-3 days') AND strftime('%m-%d', 'now', 'localtime', '+3 days') 99 | ;"#; 100 | execute_count_query(&connection, regular_week_query) 101 | } 102 | 103 | /// Sets the specified resource id as hidden 104 | pub fn add_hidden(&self, resource_id: &str) { 105 | let connection = self.persistent_file_store_pool.get().unwrap(); 106 | let mut stmt = connection 107 | .prepare("INSERT OR IGNORE INTO hidden(id) VALUES(?)") 108 | .unwrap(); 109 | stmt.execute([resource_id]).unwrap(); 110 | } 111 | 112 | /// Removes the specified id from the hidden list 113 | pub fn remove_hidden(&self, resource_id: &str) { 114 | let connection = self.persistent_file_store_pool.get().unwrap(); 115 | let mut stmt = connection 116 | .prepare("DELETE FROM hidden WHERE ID = ?") 117 | .unwrap(); 118 | stmt.execute([resource_id]).unwrap(); 119 | } 120 | 121 | /// Adds an image cache entry, if an entry already exists it gets updated 122 | pub fn add_data_cache_entry(&self, id: String, data: &Vec) { 123 | let connection = self.persistent_file_store_pool.get().unwrap(); 124 | let mut stmt = connection 125 | .prepare("INSERT OR REPLACE INTO data_cache(id, data) VALUES(?, ?)") 126 | .unwrap(); 127 | stmt.execute((&id, data)) 128 | .unwrap_or_else(|error| panic!("Insertion of {id} failed:n{}", error)); 129 | } 130 | 131 | /// Get an image cache entry 132 | pub fn get_data_cache_entry(&self, id: String) -> Option> { 133 | let connection = self.persistent_file_store_pool.get().unwrap(); 134 | let mut stmt = connection 135 | .prepare("SELECT data FROM data_cache WHERE id = ?") 136 | .unwrap(); 137 | let mut rows = stmt.query([id]).unwrap(); 138 | 139 | let first_entry = rows.next(); 140 | 141 | if let Ok(first_entry) = first_entry { 142 | first_entry 143 | .map(|entry| entry.get(0)) 144 | .and_then(|entry| entry.ok()) 145 | } else { 146 | None 147 | } 148 | } 149 | 150 | /// Clears the complete image cache 151 | pub fn clear_data_cache(&self) { 152 | let connection = self.persistent_file_store_pool.get().unwrap(); 153 | let mut stmt = connection.prepare("DELETE FROM data_cache").unwrap(); 154 | stmt.execute(()) 155 | .unwrap_or_else(|error| panic!("Deletion of table 'data_cache' failed.\n{}", error)); 156 | } 157 | 158 | /// Returns an id list of all resources, including hidden resources 159 | pub fn get_all_resource_ids(&self) -> Vec { 160 | let connection = self.persistent_file_store_pool.get().unwrap(); 161 | let mut stmt = connection.prepare("SELECT id FROM resources").unwrap(); 162 | let mut rows = stmt.query([]).unwrap(); 163 | let mut ids: Vec = Vec::new(); 164 | while let Some(row) = rows.next().unwrap() { 165 | ids.push(row.get(0).unwrap()); 166 | } 167 | ids 168 | } 169 | 170 | /// Get a resource value by id entry 171 | /// Returns a optional resource value 172 | pub fn get_resource(&self, id: &str) -> Option { 173 | let connection = self.persistent_file_store_pool.get().unwrap(); 174 | let mut stmt = connection 175 | .prepare("SELECT value FROM resources WHERE id = ?") 176 | .unwrap(); 177 | let mut rows = stmt.query([id]).unwrap(); 178 | 179 | let first_entry = rows.next(); 180 | 181 | if let Ok(first_entry) = first_entry { 182 | first_entry 183 | .map(|entry| entry.get(0)) 184 | .and_then(|entry| entry.ok()) 185 | } else { 186 | None 187 | } 188 | } 189 | 190 | /// Returns random resources, non-hidden, resource id 191 | pub fn get_random_resources(&self) -> Vec { 192 | let connection = self.persistent_file_store_pool.get().unwrap(); 193 | // Request limit is calculated by: (60/SLIDESHOW_INTERVAL)*REFRESH_INTERVAL * 10% buffer 194 | let request_limit = (60. / config::get_slideshow_interval_value() as f32) 195 | * config::get_refresh_interval_value() as f32; 196 | let request_limit = (request_limit * 1.1) as usize; 197 | let mut stmt = connection 198 | .prepare(&format!( 199 | r#" 200 | SELECT id FROM resources 201 | WHERE id NOT IN (SELECT id FROM hidden) 202 | ORDER BY RANDOM() 203 | LIMIT {};"#, 204 | request_limit 205 | )) 206 | .unwrap(); 207 | let mut rows = stmt.query([]).unwrap(); 208 | let mut ids: Vec = Vec::new(); 209 | while let Some(row) = rows.next().unwrap() { 210 | ids.push(row.get(0).unwrap()); 211 | } 212 | ids 213 | } 214 | 215 | /// Clears the complete resources cache 216 | pub fn clear_resources(&self) { 217 | let connection = self.persistent_file_store_pool.get().unwrap(); 218 | let mut stmt = connection.prepare("DELETE FROM resources").unwrap(); 219 | stmt.execute(()) 220 | .unwrap_or_else(|error| panic!("Deletion of table 'resources' failed.\n{}", error)); 221 | } 222 | 223 | /// Batch inserts or updates resources 224 | pub fn add_resources(&self, resources: HashMap) { 225 | let mut connection = self.persistent_file_store_pool.get().unwrap(); 226 | let tx = connection 227 | .transaction() 228 | .expect("Failed to create transaction"); 229 | 230 | resources.iter().for_each(|(id, value)| { 231 | tx.execute( 232 | "INSERT OR REPLACE INTO resources(id, value) VALUES(?, ?)", 233 | (id.as_str(), value.as_str()), 234 | ) 235 | .unwrap_or_else(|error| panic!("Insertion of {id} failed.\n{}", error)); 236 | }); 237 | 238 | tx.commit().expect("Transaction commit failed"); 239 | } 240 | 241 | /// Adds a geo location cache entry, if an entry already exists it gets updated 242 | pub fn add_location(&self, id: String, value: String) { 243 | let connection = self.persistent_file_store_pool.get().unwrap(); 244 | let mut stmt = connection 245 | .prepare("INSERT OR REPLACE INTO geo_location_cache(id, value) VALUES(?, ?)") 246 | .unwrap(); 247 | stmt.execute((&id, value)) 248 | .unwrap_or_else(|error| panic!("Insertion of {id} failed:n{}", error)); 249 | } 250 | 251 | /// Get a geo location entry by id entry 252 | pub fn get_location(&self, id: &str) -> Option { 253 | let connection = self.persistent_file_store_pool.get().unwrap(); 254 | let mut stmt = connection 255 | .prepare("SELECT value FROM geo_location_cache WHERE id = ?") 256 | .unwrap(); 257 | let mut rows = stmt.query([id]).unwrap(); 258 | 259 | let first_entry = rows.next(); 260 | 261 | if let Ok(first_entry) = first_entry { 262 | first_entry 263 | .map(|entry| entry.get(0)) 264 | .and_then(|entry| entry.ok()) 265 | } else { 266 | None 267 | } 268 | } 269 | 270 | /// Checks if the specified geo location entry exists 271 | pub fn location_exists(&self, id: &str) -> bool { 272 | let connection = self.persistent_file_store_pool.get().unwrap(); 273 | let mut stmt = connection 274 | .prepare("SELECT COUNT(id) FROM geo_location_cache WHERE id = ?") 275 | .unwrap(); 276 | let mut rows = stmt.query([id]).unwrap(); 277 | 278 | let count: i32 = rows.next().unwrap().unwrap().get(0).unwrap(); 279 | 280 | count == 1 281 | } 282 | 283 | /// Returns the current time of the database 284 | pub fn get_database_time(&self) -> String { 285 | let connection = self.persistent_file_store_pool.get().unwrap(); 286 | let mut stmt = connection 287 | .prepare("SELECT datetime('now', 'localtime')") 288 | .unwrap(); 289 | let mut rows = stmt.query([]).unwrap(); 290 | 291 | let first_entry = rows.next(); 292 | 293 | if let Ok(first_entry) = first_entry { 294 | first_entry 295 | .map(|entry| entry.get(0)) 296 | .and_then(|entry| entry.ok()) 297 | .unwrap_or("N/A".to_string()) 298 | } else { 299 | "N/A".to_string() 300 | } 301 | } 302 | } 303 | 304 | /// Initializes a new datastore in the $DATA_FOLDER folder and returns the instance 305 | /// If no $DATA_FOLDER env var is configured, ./data/ is used 306 | /// Creates data folder if it does not exists 307 | /// Also creates all tables if needed 308 | pub fn initialize(data_folder: &str) -> ResourceStore { 309 | fs::create_dir_all(data_folder) 310 | .unwrap_or_else(|error| panic!("Could not create data folder: {}", error)); 311 | let database_path = PathBuf::from(data_folder).join("resources.db"); 312 | 313 | // Create persistent file store and enable WAL mode 314 | let sqlite_manager = SqliteConnectionManager::file(database_path).with_init(|c| { 315 | c.execute_batch( 316 | " 317 | PRAGMA journal_mode=WAL; -- better write-concurrency 318 | PRAGMA synchronous=NORMAL; -- fsync only in critical moments 319 | PRAGMA wal_autocheckpoint=1000; -- write WAL changes back every 1000 pages 320 | PRAGMA wal_checkpoint(TRUNCATE); -- free some space by truncating possibly massive WAL files from the last run 321 | ", 322 | ) 323 | }); 324 | 325 | let persistent_file_store_pool = Pool::new(sqlite_manager) 326 | .unwrap_or_else(|error| panic!("Could not create persistent file store: {}", error)); 327 | 328 | create_table_hidden(&persistent_file_store_pool); 329 | create_table_data_cache(&persistent_file_store_pool); 330 | create_table_geo_location_cache(&persistent_file_store_pool); 331 | create_table_resources(&persistent_file_store_pool); 332 | 333 | ResourceStore { 334 | persistent_file_store_pool, 335 | } 336 | } 337 | 338 | /// Creates the "hidden" database table 339 | fn create_table_hidden(pool: &Pool) { 340 | pool.get() 341 | .unwrap() 342 | .execute( 343 | "CREATE TABLE IF NOT EXISTS hidden (id TEXT PRIMARY KEY);", 344 | (), 345 | ) 346 | .unwrap_or_else(|error| panic!("table creation of 'hidden' failed.\n{}", error)); 347 | } 348 | 349 | /// Creates the "data_cache" database table 350 | fn create_table_data_cache(pool: &Pool) { 351 | pool.get() 352 | .unwrap() 353 | .execute( 354 | "CREATE TABLE IF NOT EXISTS data_cache (id TEXT PRIMARY KEY, data BLOB);", 355 | (), 356 | ) 357 | .unwrap_or_else(|error| panic!("table creation of 'data_cache' failed.\n{}", error)); 358 | } 359 | 360 | /// Creates the "geo_location_cache" database table 361 | fn create_table_geo_location_cache(pool: &Pool) { 362 | pool.get() 363 | .unwrap() 364 | .execute( 365 | "CREATE TABLE IF NOT EXISTS geo_location_cache (id TEXT PRIMARY KEY, value TEXT);", 366 | (), 367 | ) 368 | .unwrap_or_else(|error| { 369 | panic!("table creation of 'geo_location_cache' failed.\n{}", error) 370 | }); 371 | } 372 | 373 | /// Creates the "resources" database table 374 | fn create_table_resources(pool: &Pool) { 375 | pool.get() 376 | .unwrap() 377 | .execute( 378 | "CREATE TABLE IF NOT EXISTS resources (id TEXT PRIMARY KEY, value TEXT);", 379 | (), 380 | ) 381 | .unwrap_or_else(|error| panic!("table creation of 'resources' failed.\n{}", error)); 382 | } 383 | 384 | /// Checks if today +-3 hits new year 385 | fn range_hits_new_year() -> bool { 386 | let today = chrono::Local::now(); 387 | today.month() == 12 && today.day() >= 29 || today.month() == 1 && today.day() <= 3 388 | } 389 | 390 | /// Returns the week query for the next year 391 | fn get_next_year_query() -> &'static str { 392 | r#" 393 | SELECT DISTINCT resources.id 394 | FROM resources, 395 | json_each(resources.value) json 396 | WHERE json.key = 'taken' 397 | AND json.value NOT NULL 398 | AND resources.id NOT IN (SELECT id FROM hidden) 399 | AND strftime('%m-%d', json.value) BETWEEN '01-01' AND strftime('%m-%d', 'now', 'localtime', '+3 days') 400 | ;"# 401 | } 402 | 403 | /// Returns the week query for the last year 404 | fn get_last_year_query() -> &'static str { 405 | r#" 406 | SELECT DISTINCT resources.id 407 | FROM resources, 408 | json_each(resources.value) json 409 | WHERE json.key = 'taken' 410 | AND json.value NOT NULL 411 | AND resources.id NOT IN (SELECT id FROM hidden) 412 | AND strftime('%m-%d', json.value) BETWEEN strftime('%m-%d', 'now', 'localtime', '-3 days') AND '12-31' 413 | ;"# 414 | } 415 | 416 | /// Executes the specified query and returns a list of resource ids 417 | fn execute_query( 418 | connection: &PooledConnection, 419 | week_query: &str, 420 | ) -> Vec { 421 | let mut stmt = connection.prepare(week_query).unwrap(); 422 | let mut rows = stmt.query([]).unwrap(); 423 | let mut resources: Vec = Vec::new(); 424 | while let Ok(Some(row)) = rows.next() { 425 | let id = row.get(0).unwrap(); 426 | resources.push(id); 427 | } 428 | resources 429 | } 430 | 431 | /// Executes the specified query and returns the count of resource ids 432 | fn execute_count_query( 433 | connection: &PooledConnection, 434 | count_query: &str, 435 | ) -> usize { 436 | let mut stmt = connection.prepare(count_query).unwrap(); 437 | let mut rows = stmt.query([]).unwrap(); 438 | if let Ok(Some(row)) = rows.next() { 439 | row.get(0).unwrap() 440 | } else { 441 | 0 442 | } 443 | } 444 | 445 | /// Returns the count query for the next year 446 | fn get_next_year_count_query() -> &'static str { 447 | r#" 448 | SELECT COUNT(DISTINCT resources.id) 449 | FROM resources, 450 | json_each(resources.value) json 451 | WHERE json.key = 'taken' 452 | AND json.value NOT NULL 453 | AND resources.id NOT IN (SELECT id FROM hidden) 454 | AND strftime('%m-%d', json.value) BETWEEN '01-01' AND strftime('%m-%d', 'now', 'localtime', '+3 days') 455 | ;"# 456 | } 457 | 458 | /// Returns the count query for the last year 459 | fn get_last_year_count_query() -> &'static str { 460 | r#" 461 | SELECT COUNT(DISTINCT resources.id) 462 | FROM resources, 463 | json_each(resources.value) json 464 | WHERE json.key = 'taken' 465 | AND json.value NOT NULL 466 | AND resources.id NOT IN (SELECT id FROM hidden) 467 | AND strftime('%m-%d', json.value) BETWEEN strftime('%m-%d', 'now', 'localtime', '-3 days') AND '12-31' 468 | ;"# 469 | } 470 | -------------------------------------------------------------------------------- /src/integration_test_resources_api.rs: -------------------------------------------------------------------------------- 1 | use std::fs::File; 2 | use std::io::Read; 3 | use std::path::{Path, PathBuf}; 4 | 5 | use std::ops::{Add, Sub}; 6 | use std::{env, fs}; 7 | 8 | use actix_web::dev::{ServiceFactory, ServiceRequest, ServiceResponse}; 9 | use actix_web::{test, web, App, Error}; 10 | use assertor::{assert_that, EqualityAssertion, VecAssertion}; 11 | use chrono::{Duration, Local, NaiveDateTime}; 12 | use rand::Rng; 13 | use rusqlite::fallible_iterator::FallibleIterator; 14 | use test::TestRequest; 15 | 16 | use crate::geo_location::GeoLocation; 17 | use crate::resource_reader::ImageResource; 18 | use crate::{resource_endpoint, resource_reader, resource_store, scheduler, utils}; 19 | 20 | const TEST_JPEG_EXIF_URL: &str = 21 | "https://raw.githubusercontent.com/ianare/exif-samples/master/jpg/gps/DSCN0010.jpg"; 22 | const TEST_JPEG_URL: &str = "https://www.w3.org/People/mimasa/test/imgformat/img/w3c_home.jpg"; 23 | const TEST_FOLDER_NAME: &str = "integration_test_rest_api"; 24 | 25 | #[actix_web::test] 26 | async fn test_get_all_resources() { 27 | // GIVEN is a folder structure with two assets 28 | let base_test_dir = create_temp_folder().await; 29 | let test_image_1 = create_test_image( 30 | &base_test_dir, 31 | "sub1", 32 | "test_image_1.jpg", 33 | TEST_JPEG_EXIF_URL, 34 | ) 35 | .await; 36 | let test_image_2 = create_test_image( 37 | &base_test_dir, 38 | "sub2", 39 | "test_image_2.jpg", 40 | TEST_JPEG_EXIF_URL, 41 | ) 42 | .await; 43 | 44 | // AND a running this-week-in-past instance 45 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 46 | 47 | // WHEN requesting all resources 48 | let response: Vec = test::call_and_read_body_json( 49 | &app_server, 50 | TestRequest::get().uri("/api/resources").to_request(), 51 | ) 52 | .await; 53 | 54 | // THEN the response should contain the two resources 55 | assert_that!(response).contains_exactly(vec![ 56 | utils::md5(test_image_1.as_str()), 57 | utils::md5(test_image_2.as_str()), 58 | ]); 59 | 60 | // cleanup 61 | cleanup(&base_test_dir).await; 62 | } 63 | 64 | #[actix_web::test] 65 | async fn test_this_week_in_past_resources_end_range() { 66 | // GIVEN is one in week range 67 | let base_test_dir = create_temp_folder().await; 68 | let upper_bound = Local::now().add(Duration::days(3)); 69 | let today_date_string = upper_bound.date_naive().format("%Y%m%d").to_string(); 70 | let test_image_1 = create_test_image( 71 | &base_test_dir, 72 | "", 73 | format!("IMG_{}.jpg", today_date_string).as_str(), 74 | TEST_JPEG_URL, 75 | ) 76 | .await; 77 | let another_date_string = Local::now() 78 | .date_naive() 79 | .add(Duration::weeks(4)) 80 | .format("%Y%m%d") 81 | .to_string(); 82 | let _ = create_test_image( 83 | &base_test_dir, 84 | "", 85 | format!("IMG_{}.jpg", another_date_string).as_str(), 86 | TEST_JPEG_URL, 87 | ) 88 | .await; 89 | 90 | // AND a running this-week-in-past instance 91 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 92 | 93 | // WHEN requesting of this week in past resources 94 | let response: Vec = test::call_and_read_body_json( 95 | &app_server, 96 | TestRequest::get().uri("/api/resources/week").to_request(), 97 | ) 98 | .await; 99 | 100 | // THEN the response should contain the resource 101 | assert_that!(response).contains_exactly(vec![utils::md5(test_image_1.as_str())]); 102 | 103 | // cleanup 104 | cleanup(&base_test_dir).await; 105 | } 106 | 107 | #[actix_web::test] 108 | async fn test_this_week_in_past_resources_begin_range() { 109 | // GIVEN is one image in week rnage 110 | let base_test_dir = create_temp_folder().await; 111 | let lower_bound = Local::now().sub(Duration::days(3)); 112 | let today_date_string = lower_bound.date_naive().format("%Y%m%d").to_string(); 113 | let test_image_1 = create_test_image( 114 | &base_test_dir, 115 | "", 116 | format!("IMG_{}.jpg", today_date_string).as_str(), 117 | TEST_JPEG_URL, 118 | ) 119 | .await; 120 | let another_date_string = Local::now() 121 | .date_naive() 122 | .add(Duration::weeks(4)) 123 | .format("%Y%m%d") 124 | .to_string(); 125 | let _ = create_test_image( 126 | &base_test_dir, 127 | "", 128 | format!("IMG_{}.jpg", another_date_string).as_str(), 129 | TEST_JPEG_URL, 130 | ) 131 | .await; 132 | 133 | // AND a running this-week-in-past instance 134 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 135 | 136 | // WHEN requesting of this week in past resources 137 | let response: Vec = test::call_and_read_body_json( 138 | &app_server, 139 | TestRequest::get().uri("/api/resources/week").to_request(), 140 | ) 141 | .await; 142 | 143 | // THEN the response should contain the resource 144 | assert_that!(response).contains_exactly(vec![utils::md5(test_image_1.as_str())]); 145 | 146 | // cleanup 147 | cleanup(&base_test_dir).await; 148 | } 149 | 150 | #[actix_web::test] 151 | async fn test_this_week_in_past_resources_out_of_end_range() { 152 | // GIVEN is one image that is out of range 153 | let base_test_dir = create_temp_folder().await; 154 | let upper_bound = Local::now().add(Duration::days(4)); 155 | let today_date_string = upper_bound.date_naive().format("%Y%m%d").to_string(); 156 | let _test_image_1 = create_test_image( 157 | &base_test_dir, 158 | "", 159 | format!("IMG_{}.jpg", today_date_string).as_str(), 160 | TEST_JPEG_URL, 161 | ) 162 | .await; 163 | let another_date_string = Local::now() 164 | .date_naive() 165 | .add(Duration::weeks(4)) 166 | .format("%Y%m%d") 167 | .to_string(); 168 | let _ = create_test_image( 169 | &base_test_dir, 170 | "", 171 | format!("IMG_{}.jpg", another_date_string).as_str(), 172 | TEST_JPEG_URL, 173 | ) 174 | .await; 175 | 176 | // AND a running this-week-in-past instance 177 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 178 | 179 | // WHEN requesting of this week in past resources 180 | let response: Vec = test::call_and_read_body_json( 181 | &app_server, 182 | TestRequest::get().uri("/api/resources/week").to_request(), 183 | ) 184 | .await; 185 | 186 | // THEN the response should not contain the resource 187 | assert_that!(response).is_empty(); 188 | 189 | // cleanup 190 | cleanup(&base_test_dir).await; 191 | } 192 | 193 | #[actix_web::test] 194 | async fn test_this_week_in_past_resources_out_of_begin_range() { 195 | // GIVEN is a image that is out of range 196 | let base_test_dir = create_temp_folder().await; 197 | let lower_bound = Local::now().sub(Duration::days(4)); 198 | let today_date_string = lower_bound.date_naive().format("%Y%m%d").to_string(); 199 | let _test_image_1 = create_test_image( 200 | &base_test_dir, 201 | "", 202 | format!("IMG_{}.jpg", today_date_string).as_str(), 203 | TEST_JPEG_URL, 204 | ) 205 | .await; 206 | let another_date_string = Local::now() 207 | .date_naive() 208 | .add(Duration::weeks(4)) 209 | .format("%Y%m%d") 210 | .to_string(); 211 | let _ = create_test_image( 212 | &base_test_dir, 213 | "", 214 | format!("IMG_{}.jpg", another_date_string).as_str(), 215 | TEST_JPEG_URL, 216 | ) 217 | .await; 218 | 219 | // AND a running this-week-in-past instance 220 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 221 | 222 | // WHEN requesting of this week in past resources 223 | let response: Vec = test::call_and_read_body_json( 224 | &app_server, 225 | TestRequest::get().uri("/api/resources/week").to_request(), 226 | ) 227 | .await; 228 | 229 | // THEN the response should not contain the resource 230 | assert_that!(response).is_empty(); 231 | 232 | // cleanup 233 | cleanup(&base_test_dir).await; 234 | } 235 | 236 | #[actix_web::test] 237 | async fn test_get_random_resources() { 238 | // GIVEN is one exif image 239 | let base_test_dir = create_temp_folder().await; 240 | let test_image_1 = 241 | create_test_image(&base_test_dir, "", "test_image_1.jpg", TEST_JPEG_EXIF_URL).await; 242 | 243 | // AND a running this-week-in-past instance 244 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 245 | 246 | // WHEN requesting a random resource 247 | let response: Vec = test::call_and_read_body_json( 248 | &app_server, 249 | TestRequest::get().uri("/api/resources/random").to_request(), 250 | ) 251 | .await; 252 | 253 | // THEN the response should contain the random resources 254 | assert_that!(response).contains_exactly(vec![utils::md5(test_image_1.as_str())]); 255 | 256 | // cleanup 257 | cleanup(&base_test_dir).await; 258 | } 259 | 260 | #[actix_web::test] 261 | async fn test_get_resources_week_count() { 262 | // GIVEN is a folder structure with two assets in the week range, and one out of range 263 | let base_test_dir = create_temp_folder().await; 264 | let upper_bound = Local::now().add(Duration::days(3)); 265 | let today_date_string = upper_bound.date_naive().format("%Y%m%d").to_string(); 266 | let _test_image_1 = create_test_image( 267 | &base_test_dir, 268 | "", 269 | format!("IMG_{}.jpg", today_date_string).as_str(), 270 | TEST_JPEG_URL, 271 | ) 272 | .await; 273 | let lower_bound = Local::now().sub(Duration::days(3)); 274 | let another_date_string = lower_bound.date_naive().format("%Y%m%d").to_string(); 275 | let _test_image_2 = create_test_image( 276 | &base_test_dir, 277 | "", 278 | format!("IMG_{}.jpg", another_date_string).as_str(), 279 | TEST_JPEG_URL, 280 | ) 281 | .await; 282 | let out_of_range_date_string = Local::now() 283 | .sub(Duration::days(4)) 284 | .date_naive() 285 | .format("%Y%m%d") 286 | .to_string(); 287 | let _ = create_test_image( 288 | &base_test_dir, 289 | "", 290 | format!("IMG_{}.jpg", out_of_range_date_string).as_str(), 291 | TEST_JPEG_URL, 292 | ) 293 | .await; 294 | 295 | // AND a running this-week-in-past instance 296 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 297 | 298 | // WHEN requesting the count of this week resources (text/plain) 299 | let response = test::call_and_read_body( 300 | &app_server, 301 | TestRequest::get() 302 | .uri("/api/resources/week/count") 303 | .to_request(), 304 | ) 305 | .await; 306 | let result = String::from_utf8(response.to_vec()).unwrap(); 307 | let response = result.parse::().unwrap(); 308 | 309 | // THEN the response should contain the count of the resources 310 | assert_that!(response).is_equal_to(2); 311 | 312 | // cleanup 313 | cleanup(&base_test_dir).await; 314 | } 315 | 316 | #[actix_web::test] 317 | async fn test_get_resource_by_id_and_resolution() { 318 | // GIVEN is an exif image 319 | let base_test_dir = create_temp_folder().await; 320 | let test_image_1 = 321 | create_test_image(&base_test_dir, "", "test_image_1.jpg", TEST_JPEG_EXIF_URL).await; 322 | let test_image_1_id = utils::md5(test_image_1.as_str()); 323 | 324 | // AND a running this-week-in-past instance 325 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 326 | 327 | // WHEN requesting a random resource 328 | let response = test::call_and_read_body( 329 | &app_server, 330 | TestRequest::get() 331 | .uri(format!("/api/resources/{test_image_1_id}/10/10").as_str()) 332 | .to_request(), 333 | ) 334 | .await; 335 | 336 | // THEN the response should contain the resized image 337 | assert_that!(response.len()).is_equal_to(316); 338 | 339 | // cleanup 340 | cleanup(&base_test_dir).await; 341 | } 342 | 343 | #[actix_web::test] 344 | async fn test_get_resource_metadata_by_id() { 345 | // GIVEN is an exif image 346 | let base_test_dir = create_temp_folder().await; 347 | let test_image_1 = 348 | create_test_image(&base_test_dir, "", "test_image_1.jpg", TEST_JPEG_EXIF_URL).await; 349 | let test_image_1_id = utils::md5(test_image_1.as_str()); 350 | let test_image_1_path = format!("{}/{}", base_test_dir.to_str().unwrap(), test_image_1); 351 | 352 | // AND a running this-week-in-past instance 353 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 354 | 355 | // WHEN requesting a random resource 356 | let response: ImageResource = test::call_and_read_body_json( 357 | &app_server, 358 | TestRequest::get() 359 | .uri(format!("/api/resources/{test_image_1_id}/metadata").as_str()) 360 | .to_request(), 361 | ) 362 | .await; 363 | 364 | // THEN the response should contain the resized image 365 | assert_that!(response.id).is_equal_to(test_image_1_id); 366 | assert_that!(response.path).is_equal_to(&test_image_1_path); 367 | assert_that!(response.content_type).is_equal_to("image/jpeg".to_string()); 368 | assert_that!(response.name).is_equal_to("test_image_1.jpg".to_string()); 369 | assert_that!(response.content_length).is_equal_to( 370 | File::open(&test_image_1_path) 371 | .unwrap() 372 | .metadata() 373 | .unwrap() 374 | .len(), 375 | ); 376 | assert_that!(response.taken).is_equal_to(Some( 377 | NaiveDateTime::parse_from_str("2008-10-22T16:28:39", "%Y-%m-%dT%H:%M:%S").unwrap(), 378 | )); 379 | assert_that!(response.location).is_equal_to(Some(GeoLocation { 380 | latitude: 43.46745, 381 | longitude: 11.885126, 382 | })); 383 | 384 | // cleanup 385 | cleanup(&base_test_dir).await; 386 | } 387 | 388 | #[actix_web::test] 389 | async fn test_get_resource_description_by_id() { 390 | // GIVEN is an exif image 391 | let base_test_dir = create_temp_folder().await; 392 | let test_image_1 = 393 | create_test_image(&base_test_dir, "", "test_image_1.jpg", TEST_JPEG_EXIF_URL).await; 394 | let test_image_1_id = utils::md5(test_image_1.as_str()); 395 | 396 | // AND a running this-week-in-past instance 397 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 398 | 399 | // WHEN requesting a description resource 400 | let response = String::from_utf8( 401 | test::call_and_read_body( 402 | &app_server, 403 | TestRequest::get() 404 | .uri(format!("/api/resources/{test_image_1_id}/description").as_str()) 405 | .to_request(), 406 | ) 407 | .await 408 | .to_vec(), 409 | ) 410 | .unwrap(); 411 | 412 | // THEN the response should contain the resized image 413 | assert_that!(response).is_equal_to("22.10.2008, Arezzo".to_string()); 414 | 415 | // cleanup 416 | cleanup(&base_test_dir).await; 417 | } 418 | 419 | #[actix_web::test] 420 | async fn test_ignore_file_in_resources() { 421 | // GIVEN is a folder structure with two assets 422 | // AND a file with the name .ignore 423 | let base_test_dir = create_temp_folder().await; 424 | create_test_image( 425 | &base_test_dir, 426 | "sub1", 427 | "test_image_1.jpg", 428 | TEST_JPEG_EXIF_URL, 429 | ) 430 | .await; 431 | let test_image_2 = create_test_image( 432 | &base_test_dir, 433 | "sub2", 434 | "test_image_2.jpg", 435 | TEST_JPEG_EXIF_URL, 436 | ) 437 | .await; 438 | create_test_image(&base_test_dir, "sub1", ".ignore", TEST_JPEG_URL).await; 439 | 440 | // AND a running this-week-in-past instance 441 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 442 | 443 | // WHEN requesting all resources 444 | let response: Vec = test::call_and_read_body_json( 445 | &app_server, 446 | TestRequest::get().uri("/api/resources").to_request(), 447 | ) 448 | .await; 449 | 450 | // THEN the response should contain only the second resource 451 | assert_that!(response).contains_exactly(vec![utils::md5(test_image_2.as_str())]); 452 | 453 | // cleanup 454 | cleanup(&base_test_dir).await; 455 | } 456 | 457 | #[actix_web::test] 458 | async fn get_hidden_resources() { 459 | // GIVEN is a folder structure with one assets 460 | let base_test_dir = create_temp_folder().await; 461 | let test_image_1_id = utils::md5( 462 | create_test_image( 463 | &base_test_dir, 464 | "sub1", 465 | "test_image_1.jpg", 466 | TEST_JPEG_EXIF_URL, 467 | ) 468 | .await 469 | .as_str(), 470 | ); 471 | 472 | // AND a running this-week-in-past instance 473 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 474 | 475 | // AND this image is hidden 476 | let _ = test::call_and_read_body( 477 | &app_server, 478 | TestRequest::post() 479 | .uri(format!("/api/resources/hide/{test_image_1_id}").as_str()) 480 | .to_request(), 481 | ) 482 | .await; 483 | 484 | // WHEN receiving all hidden resources 485 | let response: Vec = test::call_and_read_body_json( 486 | &app_server, 487 | TestRequest::get().uri("/api/resources/hide").to_request(), 488 | ) 489 | .await; 490 | 491 | // THEN then one image should be hidden 492 | assert_that!(response).contains_exactly(vec![test_image_1_id]); 493 | 494 | // cleanup 495 | cleanup(&base_test_dir).await; 496 | } 497 | 498 | #[actix_web::test] 499 | async fn get_hidden_resources_when_set_visible_again() { 500 | // GIVEN is a folder structure with one assets and another file type 501 | let base_test_dir = create_temp_folder().await; 502 | let test_image_1_id = utils::md5( 503 | create_test_image( 504 | &base_test_dir, 505 | "sub1", 506 | "test_image_1.jpg", 507 | TEST_JPEG_EXIF_URL, 508 | ) 509 | .await 510 | .as_str(), 511 | ); 512 | 513 | // AND a running this-week-in-past instance 514 | let app_server = test::init_service(build_app(base_test_dir.to_str().unwrap())).await; 515 | 516 | // AND this image is hidden 517 | let _ = test::call_and_read_body( 518 | &app_server, 519 | TestRequest::post() 520 | .uri(format!("/api/resources/hide/{test_image_1_id}").as_str()) 521 | .to_request(), 522 | ) 523 | .await; 524 | 525 | // AND this image is set to visible again 526 | let _ = test::call_and_read_body( 527 | &app_server, 528 | TestRequest::delete() 529 | .uri(format!("/api/resources/hide/{test_image_1_id}").as_str()) 530 | .to_request(), 531 | ) 532 | .await; 533 | 534 | // WHEN receiving all hidden resources 535 | let response: Vec = test::call_and_read_body_json( 536 | &app_server, 537 | TestRequest::get().uri("/api/resources/hide").to_request(), 538 | ) 539 | .await; 540 | 541 | // THEN then no image should be hidden 542 | assert_that!(response).contains_exactly(vec![]); 543 | 544 | // cleanup 545 | cleanup(&base_test_dir).await; 546 | } 547 | 548 | fn build_app( 549 | base_test_dir: &str, 550 | ) -> App< 551 | impl ServiceFactory< 552 | ServiceRequest, 553 | Config = (), 554 | Response = ServiceResponse, 555 | Error = Error, 556 | InitError = (), 557 | >, 558 | > { 559 | let resource_reader = resource_reader::new(base_test_dir); 560 | let resource_store = resource_store::initialize(base_test_dir); 561 | scheduler::index_resources(resource_reader.clone(), resource_store.clone()); 562 | App::new() 563 | .app_data(web::Data::new(resource_store)) 564 | .app_data(web::Data::new(resource_reader)) 565 | .service( 566 | web::scope("/api/resources") 567 | .service(resource_endpoint::get_all_resources) 568 | .service(resource_endpoint::get_this_week_resources_count) 569 | .service(resource_endpoint::get_this_week_resources) 570 | .service(resource_endpoint::random_resources) 571 | .service(resource_endpoint::get_resource_by_id_and_resolution) 572 | .service(resource_endpoint::get_resource_metadata_by_id) 573 | .service(resource_endpoint::get_resource_metadata_description_by_id) 574 | .service(resource_endpoint::get_all_hidden_resources) 575 | .service(resource_endpoint::set_resource_hidden) 576 | .service(resource_endpoint::delete_resource_hidden), 577 | ) 578 | } 579 | 580 | /// Creates a test image withing a folder 581 | async fn create_test_image( 582 | base_dir: &Path, 583 | sub_dir: &str, 584 | file_name: &str, 585 | image_url: &str, 586 | ) -> String { 587 | let target_dir = base_dir.join(sub_dir); 588 | 589 | if !target_dir.exists() { 590 | fs::create_dir_all(&target_dir).unwrap(); 591 | } 592 | 593 | let test_image_path = target_dir.join(file_name); 594 | 595 | let mut response = ureq::get(image_url).call().unwrap(); 596 | 597 | let content_length = response.headers().get("Content-Length").unwrap(); 598 | let len: usize = content_length.to_str().unwrap().parse().unwrap(); 599 | 600 | let mut data: Vec = Vec::with_capacity(len); 601 | response 602 | .body_mut() 603 | .as_reader() 604 | .read_to_end(&mut data) 605 | .unwrap(); 606 | 607 | fs::write(&test_image_path, data).unwrap_or_else(|_| { 608 | panic!( 609 | "error while writing test image {}", 610 | test_image_path.to_str().unwrap() 611 | ) 612 | }); 613 | 614 | file_name.to_string() 615 | } 616 | 617 | /// Removes the test folder after test run 618 | async fn cleanup(test_dir: &PathBuf) { 619 | let _ = fs::remove_dir_all(test_dir); 620 | } 621 | 622 | /// Creates a temp folder with the given name and returns its full path 623 | async fn create_temp_folder() -> PathBuf { 624 | let random_string = rand::rng().random::().to_string(); 625 | let test_dir: PathBuf = env::temp_dir().join(TEST_FOLDER_NAME).join(random_string); 626 | 627 | if test_dir.exists() { 628 | fs::remove_dir_all(&test_dir).expect("Failed to remove test dir"); 629 | } 630 | 631 | fs::create_dir_all(&test_dir).unwrap(); 632 | 633 | // add data folder to test dir 634 | let data_dir = test_dir.join("data"); 635 | env::set_var("DATA_FOLDER", data_dir.as_path().to_str().unwrap()); 636 | fs::create_dir_all(&data_dir).unwrap(); 637 | 638 | test_dir 639 | } 640 | --------------------------------------------------------------------------------