├── docs
├── chapter_1.md
├── SUMMARY.md
├── introduction.md
├── installation.md
├── index.md
├── usage.md
├── configuration.md
└── installation
│ └── helm-chart.md
├── src
├── utils
│ ├── mod.rs
│ └── file.rs
├── models.rs
├── routes
│ ├── system.rs
│ ├── mod.rs
│ ├── export.rs
│ ├── event.rs
│ ├── location.rs
│ ├── tag.rs
│ ├── person.rs
│ ├── shoebox.rs
│ ├── media.rs
│ └── scan.rs
├── models
│ ├── tag.rs
│ ├── person.rs
│ ├── shoebox.rs
│ └── video.rs
├── services
│ ├── mod.rs
│ ├── thumbnail.rs
│ ├── event.rs
│ ├── location.rs
│ ├── export.rs
│ ├── tag.rs
│ ├── person.rs
│ └── shoebox.rs
├── error.rs
├── main.rs
└── db.rs
├── rustfmt.toml
├── rust-analyzer.toml
├── frontend
├── src
│ ├── assets
│ │ ├── logo_large.png
│ │ └── shoebox_logo_small.png
│ ├── config.ts
│ ├── main.tsx
│ ├── theme.ts
│ ├── App.tsx
│ ├── contexts
│ │ └── ScanContext.tsx
│ ├── components
│ │ ├── VideoCard.tsx
│ │ └── Layout.tsx
│ └── pages
│ │ ├── SystemInfoPage.tsx
│ │ └── HomePage.tsx
├── tsconfig.node.json
├── index.html
├── vite.config.ts
├── tsconfig.json
└── package.json
├── .idea
├── vcs.xml
├── .gitignore
├── jsLibraryMappings.xml
├── modules.xml
├── sqldialects.xml
├── shoebox.iml
├── dataSources.xml
├── tailwindcss.xml
└── inspectionProfiles
│ └── Project_Default.xml
├── migrations
├── 20240601000000_add_exif_data.sql
├── 20250522000000_add_duration_column.sql
├── 20250523000000_add_original_file_path_column.sql
├── 20240610000000_add_unique_constraint_to_file_path.sql
├── 20250524000000_add_location_and_event_columns.sql
├── 20240609000000_remove_duplicate_videos.sql
├── 20250525000000_add_shoeboxes.sql
└── 20240101000000_initial_schema.sql
├── charts
└── shoebox
│ ├── Chart.yaml
│ ├── templates
│ ├── service.yaml
│ ├── hpa.yaml
│ ├── ingress.yaml
│ ├── _helpers.tpl
│ ├── pvc.yaml
│ └── statefulset.yaml
│ ├── README.md
│ └── values.yaml
├── book.toml
├── .github
├── dependabot.yml
└── workflows
│ ├── rust-clippy.yml
│ ├── build-and-publish.yml
│ ├── mdbook.yml
│ ├── helm-release.yml
│ └── release.yml
├── .gitignore
├── LICENSE
├── Cargo.toml
├── Dockerfile
├── docker-compose.yml
└── README.md
/docs/chapter_1.md:
--------------------------------------------------------------------------------
1 | # Chapter 1
2 |
--------------------------------------------------------------------------------
/src/utils/mod.rs:
--------------------------------------------------------------------------------
1 | mod file;
2 |
3 | pub use file::*;
4 |
--------------------------------------------------------------------------------
/rustfmt.toml:
--------------------------------------------------------------------------------
1 | edition = "2021"
2 | # (optional) other config...
3 |
--------------------------------------------------------------------------------
/rust-analyzer.toml:
--------------------------------------------------------------------------------
1 | [rustfmt]
2 | overrideCommand = ["leptosfmt", "--stdin", "--rustfmt"]
3 |
--------------------------------------------------------------------------------
/frontend/src/assets/logo_large.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/slackspace-io/shoebox/HEAD/frontend/src/assets/logo_large.png
--------------------------------------------------------------------------------
/frontend/src/assets/shoebox_logo_small.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/slackspace-io/shoebox/HEAD/frontend/src/assets/shoebox_logo_small.png
--------------------------------------------------------------------------------
/.idea/vcs.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/.idea/.gitignore:
--------------------------------------------------------------------------------
1 | # Default ignored files
2 | /shelf/
3 | /workspace.xml
4 | # Editor-based HTTP Client requests
5 | /httpRequests/
6 | # Datasource local storage ignored files
7 | /dataSources/
8 | /dataSources.local.xml
9 |
--------------------------------------------------------------------------------
/docs/SUMMARY.md:
--------------------------------------------------------------------------------
1 | # Summary
2 |
3 | - [Introduction](./introduction.md)
4 | - [Installation](./installation.md)
5 | - [Helm Chart](./installation/helm-chart.md)
6 | - [Configuration](./configuration.md)
7 | - [Usage](./usage.md)
8 |
--------------------------------------------------------------------------------
/src/models.rs:
--------------------------------------------------------------------------------
1 | mod video;
2 | mod tag;
3 | mod person;
4 | mod shoebox;
5 |
6 | pub use video::*;
7 | pub use tag::*;
8 | pub use person::*;
9 | pub use shoebox::*;
10 |
11 | // Re-export all models for convenience
12 |
--------------------------------------------------------------------------------
/.idea/jsLibraryMappings.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/migrations/20240601000000_add_exif_data.sql:
--------------------------------------------------------------------------------
1 | -- Add exif_data column to videos table
2 | -- Up migration
3 | ALTER TABLE videos ADD COLUMN exif_data JSONB;
4 |
5 | -- Down migration
6 | -- ALTER TABLE videos DROP COLUMN exif_data;
7 |
--------------------------------------------------------------------------------
/migrations/20250522000000_add_duration_column.sql:
--------------------------------------------------------------------------------
1 | -- Add duration column to videos table
2 | -- Up migration
3 |
4 | ALTER TABLE videos ADD COLUMN duration BIGINT;
5 |
6 | -- Down migration
7 | -- ALTER TABLE videos DROP COLUMN duration;
8 |
--------------------------------------------------------------------------------
/frontend/tsconfig.node.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "composite": true,
4 | "skipLibCheck": true,
5 | "module": "ESNext",
6 | "moduleResolution": "bundler",
7 | "allowSyntheticDefaultImports": true
8 | },
9 | "include": ["vite.config.ts"]
10 | }
11 |
--------------------------------------------------------------------------------
/migrations/20250523000000_add_original_file_path_column.sql:
--------------------------------------------------------------------------------
1 | -- Add original_file_path column to videos table
2 | -- Up migration
3 |
4 | ALTER TABLE videos ADD COLUMN original_file_path VARCHAR(255);
5 |
6 | -- Down migration
7 | -- ALTER TABLE videos DROP COLUMN original_file_path;
8 |
--------------------------------------------------------------------------------
/.idea/modules.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
--------------------------------------------------------------------------------
/charts/shoebox/Chart.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v2
2 | name: shoebox-helm
3 | description: A Helm chart for the Shoebox application - a digital shoebox for your videos
4 | type: application
5 | version: 0.0.21
6 | appVersion: "latest"
7 | keywords:
8 | - video
9 | - organizer
10 | - shoebox
11 | - archive
12 |
--------------------------------------------------------------------------------
/.idea/sqldialects.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/migrations/20240610000000_add_unique_constraint_to_file_path.sql:
--------------------------------------------------------------------------------
1 | -- Add unique constraint to file_path column in videos table
2 | -- Up migration
3 | CREATE UNIQUE INDEX IF NOT EXISTS idx_videos_file_path_unique ON videos (file_path);
4 |
5 | -- Down migration
6 | -- DROP INDEX IF EXISTS idx_videos_file_path_unique;
7 |
--------------------------------------------------------------------------------
/migrations/20250524000000_add_location_and_event_columns.sql:
--------------------------------------------------------------------------------
1 | -- Add location and event columns to videos table
2 | -- Up migration
3 |
4 | ALTER TABLE videos ADD COLUMN location VARCHAR(255);
5 | ALTER TABLE videos ADD COLUMN event VARCHAR(255);
6 |
7 | -- Down migration
8 | -- ALTER TABLE videos DROP COLUMN location;
9 | -- ALTER TABLE videos DROP COLUMN event;
10 |
--------------------------------------------------------------------------------
/book.toml:
--------------------------------------------------------------------------------
1 | [book]
2 | authors = ["Jacob Morgan"]
3 | language = "en"
4 | multilingual = false
5 | src = "docs/"
6 | title = "Shoebox Docs"
7 |
8 | [rust]
9 | edition = "2021"
10 |
11 | [build]
12 | build-dir = "site"
13 |
14 |
15 | [output.html]
16 | default-theme = "ayu"
17 | git-repository-url = "https://github.com/slackspace-io/shoebox"
18 | highlight-code = true
19 |
--------------------------------------------------------------------------------
/frontend/src/config.ts:
--------------------------------------------------------------------------------
1 | // Application configuration settings
2 |
3 | export const config = {
4 | // Unreviewed page settings
5 | unreviewed: {
6 | // Default number of videos to show at once
7 | defaultVideosToShow: 3,
8 | // Maximum number of videos that can be shown when expanded
9 | maxVideosToShow: 10
10 | }
11 | };
12 |
13 | export default config;
14 |
--------------------------------------------------------------------------------
/charts/shoebox/templates/service.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: Service
3 | metadata:
4 | name: {{ include "shoebox.fullname" . }}
5 | labels:
6 | {{- include "shoebox.labels" . | nindent 4 }}
7 | spec:
8 | type: {{ .Values.service.type }}
9 | ports:
10 | - port: {{ .Values.service.port }}
11 | targetPort: http
12 | protocol: TCP
13 | name: http
14 | selector:
15 | {{- include "shoebox.selectorLabels" . | nindent 4 }}
16 |
--------------------------------------------------------------------------------
/.idea/shoebox.iml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
--------------------------------------------------------------------------------
/src/routes/system.rs:
--------------------------------------------------------------------------------
1 | use axum::{
2 | extract::State,
3 | routing::get,
4 | Json, Router,
5 | };
6 |
7 | use crate::services::AppState;
8 |
9 | pub fn router(app_state: AppState) -> Router {
10 | Router::new()
11 | .route("/", get(get_system_info))
12 | .with_state(app_state)
13 | }
14 |
15 | async fn get_system_info(
16 |     State(state): State<AppState>,
17 | ) -> Json<Config> {
18 | // Simply return the configuration from the app state
19 | Json(state.config)
20 | }
21 |
--------------------------------------------------------------------------------
/frontend/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Shoebox
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | # To get started with Dependabot version updates, you'll need to specify which
2 | # package ecosystems to update and where the package manifests are located.
3 | # Please see the documentation for all configuration options:
4 | # https://docs.github.com/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
5 |
6 | version: 2
7 | updates:
8 | - package-ecosystem: "" # See documentation for possible values
9 | directory: "/" # Location of package manifests
10 | schedule:
11 | interval: "weekly"
12 |
--------------------------------------------------------------------------------
/frontend/vite.config.ts:
--------------------------------------------------------------------------------
1 | import { defineConfig } from 'vite';
2 | import react from '@vitejs/plugin-react';
3 | import path from 'path';
4 |
5 | // https://vitejs.dev/config/
6 | export default defineConfig({
7 | plugins: [react()],
8 | resolve: {
9 | alias: {
10 | '@': path.resolve(__dirname, './src'),
11 | },
12 | },
13 | server: {
14 | proxy: {
15 | '/api': {
16 | target: 'http://localhost:3000',
17 | changeOrigin: true,
18 | },
19 | },
20 | },
21 | build: {
22 | outDir: 'dist',
23 | emptyOutDir: true,
24 | },
25 | });
26 |
--------------------------------------------------------------------------------
/frontend/src/main.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import ReactDOM from 'react-dom/client';
3 | import { ChakraProvider } from '@chakra-ui/react';
4 | import { BrowserRouter as Router } from 'react-router-dom';
5 | import App from './App';
6 | import theme from './theme';
7 | import { ScanProvider } from './contexts/ScanContext';
8 |
9 | ReactDOM.createRoot(document.getElementById('root') as HTMLElement).render(
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 | );
20 |
--------------------------------------------------------------------------------
/frontend/src/theme.ts:
--------------------------------------------------------------------------------
1 | import { extendTheme } from '@chakra-ui/react';
2 |
3 | const theme = extendTheme({
4 | colors: {
5 | brand: {
6 | 50: '#e6f7ff',
7 | 100: '#b3e0ff',
8 | 200: '#80caff',
9 | 300: '#4db3ff',
10 | 400: '#1a9dff',
11 | 500: '#0080ff',
12 | 600: '#0066cc',
13 | 700: '#004d99',
14 | 800: '#003366',
15 | 900: '#001a33',
16 | },
17 | },
18 | fonts: {
19 | heading: 'Inter, system-ui, sans-serif',
20 | body: 'Inter, system-ui, sans-serif',
21 | },
22 | config: {
23 | initialColorMode: 'dark',
24 | useSystemColorMode: true,
25 | },
26 | });
27 |
28 | export default theme;
29 |
--------------------------------------------------------------------------------
/src/models/tag.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 | use sqlx::FromRow;
3 | use uuid::Uuid;
4 |
5 | #[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
6 | pub struct Tag {
7 | pub id: String,
8 | pub name: String,
9 | pub created_at: chrono::NaiveDateTime,
10 | }
11 |
12 | #[derive(Debug, Clone, Serialize, Deserialize)]
13 | pub struct CreateTagDto {
14 | pub name: String,
15 | }
16 |
17 | #[derive(Debug, Clone, Serialize, Deserialize)]
18 | pub struct TagUsage {
19 | pub id: String,
20 | pub name: String,
21 | pub video_count: i64,
22 | }
23 |
24 | impl Tag {
25 | pub fn new(name: String) -> Self {
26 | Self {
27 | id: Uuid::new_v4().to_string(),
28 | name,
29 | created_at: chrono::Utc::now().naive_utc(),
30 | }
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/frontend/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ES2020",
4 | "useDefineForClassFields": true,
5 | "lib": ["ES2020", "DOM", "DOM.Iterable"],
6 | "module": "ESNext",
7 | "skipLibCheck": true,
8 |
9 | /* Bundler mode */
10 | "moduleResolution": "bundler",
11 | "allowImportingTsExtensions": true,
12 | "resolveJsonModule": true,
13 | "isolatedModules": true,
14 | "noEmit": true,
15 | "jsx": "react-jsx",
16 |
17 | /* Linting */
18 | "strict": true,
19 | "noUnusedLocals": true,
20 | "noUnusedParameters": true,
21 | "noFallthroughCasesInSwitch": true,
22 |
23 | /* Paths */
24 | "baseUrl": ".",
25 | "paths": {
26 | "@/*": ["src/*"]
27 | }
28 | },
29 | "include": ["src"],
30 | "references": [{ "path": "./tsconfig.node.json" }]
31 | }
32 |
--------------------------------------------------------------------------------
/src/models/person.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 | use sqlx::FromRow;
3 | use uuid::Uuid;
4 |
5 | #[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
6 | pub struct Person {
7 | pub id: String,
8 | pub name: String,
9 | pub created_at: chrono::NaiveDateTime,
10 | }
11 |
12 | #[derive(Debug, Clone, Serialize, Deserialize)]
13 | pub struct CreatePersonDto {
14 | pub name: String,
15 | }
16 |
17 | #[derive(Debug, Clone, Serialize, Deserialize)]
18 | pub struct PersonUsage {
19 | pub id: String,
20 | pub name: String,
21 | pub video_count: i64,
22 | }
23 |
24 | impl Person {
25 | pub fn new(name: String) -> Self {
26 | Self {
27 | id: Uuid::new_v4().to_string(),
28 | name,
29 | created_at: chrono::Utc::now().naive_utc(),
30 | }
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/migrations/20240609000000_remove_duplicate_videos.sql:
--------------------------------------------------------------------------------
1 | -- Remove duplicate videos based on file_path
2 | -- Up migration
3 |
4 | -- First, create a temporary table to store the IDs of duplicate videos to keep
5 | -- We'll keep the most recently updated video for each file_path
6 | CREATE TEMPORARY TABLE IF NOT EXISTS videos_to_keep AS
7 | WITH ranked_videos AS (
8 | SELECT
9 | id,
10 | file_path,
11 | ROW_NUMBER() OVER (PARTITION BY file_path ORDER BY updated_at DESC) as rn
12 | FROM videos
13 | )
14 | SELECT id FROM ranked_videos WHERE rn = 1;
15 |
16 | -- Delete videos that are not in the videos_to_keep table
17 | DELETE FROM videos
18 | WHERE id NOT IN (SELECT id FROM videos_to_keep);
19 |
20 | -- Drop the temporary table
21 | DROP TABLE IF EXISTS videos_to_keep;
22 |
23 | -- Down migration
24 | -- No down migration as we can't restore deleted data
25 |
--------------------------------------------------------------------------------
/frontend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "shoebox-frontend",
3 | "version": "0.1.0",
4 | "private": true,
5 | "dependencies": {
6 | "@chakra-ui/react": "^2.8.2",
7 | "@emotion/react": "^11.11.1",
8 | "@emotion/styled": "^11.11.0",
9 | "axios": "^1.12.0",
10 | "framer-motion": "^10.16.16",
11 | "react": "^18.2.0",
12 | "react-dom": "^18.2.0",
13 | "react-icons": "^4.12.0",
14 | "react-player": "^2.13.0",
15 | "react-router-dom": "^6.21.0",
16 | "react-select": "^5.8.0",
17 | "tsc": "^2.0.4"
18 | },
19 | "devDependencies": {
20 | "@types/node": "^20.10.4",
21 | "@types/react": "^18.2.45",
22 | "@types/react-dom": "^18.2.17",
23 | "@vitejs/plugin-react": "^4.2.1",
24 | "typescript": "^5.8.3",
25 | "vite": "^6.4.1"
26 | },
27 | "scripts": {
28 | "dev": "vite",
29 | "build": "tsc && vite build",
30 | "preview": "vite preview"
31 | }
32 | }
33 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | ### Rust template
2 | # Generated by Cargo
3 | # will have compiled files and executables
4 | debug/
5 | target/
6 |
7 | # Remove Cargo.lock from gitignore if creating an executable, leave it for libraries
8 | # More information here https://doc.rust-lang.org/cargo/guide/cargo-toml-vs-cargo-lock.html
9 | Cargo.lock
10 |
11 | # These are backup files generated by rustfmt
12 | **/*.rs.bk
13 |
14 | # MSVC Windows builds of rustc generate these, which store debugging information
15 | *.pdb
16 |
17 | ### react template
18 | .DS_*
19 | *.log
20 | logs
21 | **/*.backup.*
22 | **/*.back.*
23 |
24 | node_modules
25 | bower_components
26 |
27 | *.sublime*
28 |
29 | psd
30 | thumb
31 | sketch
32 |
33 | ### rust-analyzer template
34 | # Can be generated by other build systems other than cargo (ex: bazelbuild/rust_rules)
35 | rust-project.json
36 |
37 |
38 | /charts/shoebox/.myvalues.yaml
39 | /thumbnails/
40 | /media/
41 | /data.db
42 | /data.db-shm
43 | /data.db-wal
44 | /charts/shoebox/charts/postgresql-16.7.4.tgz
45 | /site/
46 |
--------------------------------------------------------------------------------
/migrations/20250525000000_add_shoeboxes.sql:
--------------------------------------------------------------------------------
1 | -- Add shoebox functionality
2 | -- Up migration
3 |
4 | -- Shoeboxes table
5 | CREATE TABLE IF NOT EXISTS shoeboxes (
6 | id VARCHAR(36) PRIMARY KEY NOT NULL,
7 | name VARCHAR(100) NOT NULL UNIQUE,
8 | description TEXT,
9 | created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
10 | updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
11 | );
12 |
13 | -- Video-Shoebox relationship table
14 | CREATE TABLE IF NOT EXISTS video_shoeboxes (
15 | video_id VARCHAR(36) NOT NULL,
16 | shoebox_id VARCHAR(36) NOT NULL,
17 | created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
18 | PRIMARY KEY (video_id, shoebox_id),
19 | FOREIGN KEY (video_id) REFERENCES videos (id) ON DELETE CASCADE,
20 | FOREIGN KEY (shoebox_id) REFERENCES shoeboxes (id) ON DELETE CASCADE
21 | );
22 |
23 | -- Create indexes
24 | CREATE INDEX IF NOT EXISTS idx_shoeboxes_name ON shoeboxes (name);
25 |
26 | -- Down migration
27 | -- DROP TABLE IF EXISTS video_shoeboxes;
28 | -- DROP TABLE IF EXISTS shoeboxes;
29 |
--------------------------------------------------------------------------------
/src/models/shoebox.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 | use sqlx::FromRow;
3 | use uuid::Uuid;
4 |
5 | #[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
6 | pub struct Shoebox {
7 | pub id: String,
8 | pub name: String,
9 | pub description: Option<String>,
10 | pub created_at: chrono::NaiveDateTime,
11 | pub updated_at: chrono::NaiveDateTime,
12 | }
13 |
14 | #[derive(Debug, Clone, Serialize, Deserialize)]
15 | pub struct CreateShoeboxDto {
16 | pub name: String,
17 | pub description: Option<String>,
18 | }
19 |
20 | #[derive(Debug, Clone, Serialize, Deserialize)]
21 | pub struct ShoeboxUsage {
22 | pub id: String,
23 | pub name: String,
24 | pub description: Option<String>,
25 | pub video_count: i64,
26 | }
27 |
28 | impl Shoebox {
29 | pub fn new(name: String, description: Option<String>) -> Self {
30 | let now = chrono::Utc::now().naive_utc();
31 | Self {
32 | id: Uuid::new_v4().to_string(),
33 | name,
34 | description,
35 | created_at: now.clone(),
36 | updated_at: now,
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/charts/shoebox/templates/hpa.yaml:
--------------------------------------------------------------------------------
1 | {{- if .Values.autoscaling.enabled }}
2 | apiVersion: autoscaling/v2
3 | kind: HorizontalPodAutoscaler
4 | metadata:
5 | name: {{ include "shoebox.fullname" . }}
6 | labels:
7 | {{- include "shoebox.labels" . | nindent 4 }}
8 | spec:
9 | scaleTargetRef:
10 | apiVersion: apps/v1
11 | kind: Deployment
12 | name: {{ include "shoebox.fullname" . }}
13 | minReplicas: {{ .Values.autoscaling.minReplicas }}
14 | maxReplicas: {{ .Values.autoscaling.maxReplicas }}
15 | metrics:
16 | {{- if .Values.autoscaling.targetCPUUtilizationPercentage }}
17 | - type: Resource
18 | resource:
19 | name: cpu
20 | target:
21 | type: Utilization
22 | averageUtilization: {{ .Values.autoscaling.targetCPUUtilizationPercentage }}
23 | {{- end }}
24 | {{- if .Values.autoscaling.targetMemoryUtilizationPercentage }}
25 | - type: Resource
26 | resource:
27 | name: memory
28 | target:
29 | type: Utilization
30 | averageUtilization: {{ .Values.autoscaling.targetMemoryUtilizationPercentage }}
31 | {{- end }}
32 | {{- end }}
33 |
--------------------------------------------------------------------------------
/src/services/mod.rs:
--------------------------------------------------------------------------------
1 | mod scanner;
2 | mod thumbnail;
3 | mod video;
4 | mod tag;
5 | mod person;
6 | mod export;
7 | mod location;
8 | mod event;
9 | mod shoebox;
10 |
11 | pub use scanner::*;
12 | pub use thumbnail::*;
13 | pub use video::*;
14 | pub use tag::*;
15 | pub use person::*;
16 | pub use export::*;
17 | pub use location::*;
18 | pub use event::*;
19 | pub use shoebox::*;
20 |
21 | use sqlx::{Pool, Postgres};
22 | use crate::config::Config;
23 | use std::sync::Arc;
24 | use tokio::sync::RwLock;
25 |
26 | /// Represents the current status of a scan operation
27 | #[derive(Clone, Debug)]
28 | pub struct ScanStatus {
29 | pub in_progress: bool,
30 | pub new_videos_count: usize,
31 | pub updated_videos_count: usize,
32 | }
33 |
34 | impl Default for ScanStatus {
35 | fn default() -> Self {
36 | Self {
37 | in_progress: false,
38 | new_videos_count: 0,
39 | updated_videos_count: 0,
40 | }
41 | }
42 | }
43 |
44 | #[derive(Clone)]
45 | pub struct AppState {
46 | pub db: Pool<Postgres>,
47 | pub config: Config,
48 | pub scan_status: Arc<RwLock<ScanStatus>>,
49 | }
50 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Shoebox Contributors
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/src/routes/mod.rs:
--------------------------------------------------------------------------------
1 | mod video;
2 | mod tag;
3 | mod person;
4 | mod scan;
5 | mod export;
6 | mod system;
7 | mod location;
8 | mod event;
9 | mod shoebox;
10 | pub mod media;
11 |
12 | use axum::Router;
13 |
14 | use crate::services::AppState;
15 |
16 | pub fn api_router(app_state: AppState) -> Router {
17 | Router::new()
18 | // Video routes
19 | .nest("/videos", video::router(app_state.clone()))
20 | // Tag routes
21 | .nest("/tags", tag::router(app_state.clone()))
22 | // Person routes
23 | .nest("/people", person::router(app_state.clone()))
24 | // Location routes
25 | .nest("/locations", location::router(app_state.clone()))
26 | // Event routes
27 | .nest("/events", event::router(app_state.clone()))
28 | // Shoebox routes
29 | .nest("/shoeboxes", shoebox::router(app_state.clone()))
30 | // Scan routes
31 | .nest("/scan", scan::router(app_state.clone()))
32 | // Export routes
33 | .nest("/export", export::router(app_state.clone()))
34 | // System info routes
35 | .nest("/system", system::router(app_state))
36 | }
37 |
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "shoebox"
3 | version = "0.0.4"
4 | edition = "2021"
5 | description = "A digital shoebox for organizing and preserving your videos"
6 | authors = ["Developer"]
7 |
8 | [dependencies]
9 | # Web framework
10 | axum = "0.8.4"
11 |
12 | # Async runtime
13 | tokio = { version = "1.47.1", features = ["full"] }
14 |
15 | # Database
16 | sqlx = { version = "0.8.6", features = ["runtime-tokio-rustls", "sqlite", "postgres", "macros", "time", "json", "chrono"] }
17 |
18 | # Serialization/Deserialization
19 | serde = { version = "1.0.219", features = ["derive"] }
20 | serde_json = "1.0.142"
21 |
22 | # Error handling
23 | anyhow = "1.0.98"
24 | thiserror = "2.0.12"
25 |
26 | # Logging
27 | tracing = "0.1.41"
28 | tracing-subscriber = { version = "0.3.20", features = ["env-filter"] }
29 |
30 | # Environment variables
31 | dotenv = "0.15.0"
32 |
33 | # FFmpeg
34 | # The application uses the system's FFmpeg binary directly via std::process::Command
35 | # No FFmpeg wrapper library is needed
36 |
37 | # File operations
38 | walkdir = "2.5.0"
39 | tokio-util = { version = "0.7.16", features = ["io"] }
40 | bytes = "1.10.1"
41 | mime_guess = "2.0.5"
42 | chrono = { version = "0.4.41", features = ["serde"] }
43 | uuid = { version = "1.17.0", features = ["v4", "serde"] }
44 |
45 | # Static file serving
46 | tower-http = { version = "0.6.6", features = ["fs"] }
47 |
--------------------------------------------------------------------------------
/frontend/src/App.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { Routes, Route } from 'react-router-dom';
3 | import { Box } from '@chakra-ui/react';
4 | import Layout from './components/Layout';
5 | import HomePage from './pages/HomePage';
6 | import VideoDetailPage from './pages/VideoDetailPage';
7 | import ExportPage from './pages/ExportPage';
8 | import ManagementPage from './pages/ManagementPage';
9 | import UnreviewedPage from './pages/UnreviewedPage';
10 | import SystemInfoPage from './pages/SystemInfoPage';
11 | import RatedVideosTimelinePage from './pages/RatedVideosTimelinePage';
12 | import BulkEditPage from './pages/BulkEditPage';
13 |
14 | const App: React.FC = () => {
15 | return (
16 |
17 |
18 |
19 | } />
20 | } />
21 | } />
22 | } />
23 | } />
24 | } />
25 | } />
26 | } />
27 |
28 |
29 |
30 | );
31 | };
32 |
33 | export default App;
34 |
--------------------------------------------------------------------------------
/src/routes/export.rs:
--------------------------------------------------------------------------------
1 | use axum::{extract::State, routing::post, Json, Router};
2 | use serde::Serialize;
3 |
4 | use crate::error::Result;
5 | use crate::models::ExportRequest;
6 | use crate::services::AppState;
7 | use crate::services::{ExportService, VideoService, TagService, PersonService, ThumbnailService};
8 |
9 | pub fn router(app_state: AppState) -> Router {
10 | Router::new()
11 | .route("/", post(export_videos))
12 | .with_state(app_state)
13 | }
14 |
15 | #[derive(Debug, Serialize)]
16 | struct ExportResponse {
17 | export_path: String,
18 | video_count: usize,
19 | }
20 |
21 | async fn export_videos(
22 | State(state): State<AppState>,
23 | Json(request): Json<ExportRequest>,
24 | ) -> Result<Json<ExportResponse>> {
25 | let video_service = VideoService::new(
26 | state.db.clone(),
27 | TagService::new(state.db.clone()),
28 | PersonService::new(state.db.clone()),
29 | ThumbnailService::new(&state.config),
30 | crate::services::ShoeboxService::new(state.db.clone()),
31 | );
32 |
33 | let export_service = ExportService::new(
34 | state.config.clone(),
35 | video_service,
36 | );
37 |
38 | let export_path = export_service.export_videos(request.clone()).await?;
39 |
40 | let response = ExportResponse {
41 | export_path,
42 | video_count: request.video_ids.len(),
43 | };
44 |
45 | Ok(Json(response))
46 | }
47 |
--------------------------------------------------------------------------------
/charts/shoebox/templates/ingress.yaml:
--------------------------------------------------------------------------------
{{- /*
Ingress for the Shoebox service; rendered only when .Values.ingress.enabled.
On clusters that predate ingressClassName support (< 1.18), the configured
className is injected as the legacy "kubernetes.io/ingress.class" annotation.
*/ -}}
{{- if .Values.ingress.enabled -}}
{{- $fullName := include "shoebox.fullname" . -}}
{{- $svcPort := .Values.service.port -}}
{{- if and .Values.ingress.className (not (semverCompare ">=1.18-0" .Capabilities.KubeVersion.GitVersion)) }}
{{- if not (hasKey .Values.ingress.annotations "kubernetes.io/ingress.class") }}
{{- $_ := set .Values.ingress.annotations "kubernetes.io/ingress.class" .Values.ingress.className}}
{{- end }}
{{- end }}
apiVersion: networking.k8s.io/v1
kind: Ingress
metadata:
  name: {{ $fullName }}
  labels:
    {{- include "shoebox.labels" . | nindent 4 }}
  {{- with .Values.ingress.annotations }}
  annotations:
    {{- toYaml . | nindent 4 }}
  {{- end }}
spec:
  {{- if .Values.ingress.className }}
  ingressClassName: {{ .Values.ingress.className }}
  {{- end }}
  {{- if .Values.ingress.tls }}
  tls:
    {{- range .Values.ingress.tls }}
    - hosts:
        {{- range .hosts }}
        - {{ . | quote }}
        {{- end }}
      secretName: {{ .secretName }}
    {{- end }}
  {{- end }}
  rules:
    {{- range .Values.ingress.hosts }}
    - host: {{ .host | quote }}
      http:
        paths:
          {{- range .paths }}
          - path: {{ .path }}
            pathType: {{ .pathType }}
            backend:
              service:
                name: {{ $fullName }}
                port:
                  number: {{ $svcPort }}
          {{- end }}
    {{- end }}
{{- end }}
49 |
--------------------------------------------------------------------------------
/docs/introduction.md:
--------------------------------------------------------------------------------
1 | # Introduction to Shoebox
2 |
3 | Shoebox is a digital solution for organizing and preserving your videos over a lifetime.
4 |
5 | ## The Digital Shoebox Concept
6 |
7 | Remember how previous generations kept their memories in physical shoeboxes at their parents' homes? Those boxes filled with photographs, negatives, and mementos that captured life's precious moments.
8 |
9 | Shoebox aims to recreate that experience for the digital age. Instead of photos getting lost in the endless stream of cloud services or social media platforms, Shoebox provides a dedicated space for your videos - a digital equivalent of that cherished box in your closet.
10 |
11 | ## What Makes Shoebox Different
12 |
13 | **Shoebox is not trying to compete with immich, Google Photos, or other photo management services.**
14 |
15 | The main purpose of Shoebox is to help you:
16 |
17 | - **Find original videos** and export (copy) them to a defined location, allowing you to easily import them into a video editor of your choice. Create highlights, collages, etc.
18 | - **Organize your videos** over a lifetime for easy recall and future use. Have a coffee, review new videos cataloguing your memories as your kids grow.
19 | - **Preserve video memories** in a way that makes them accessible and workable
20 |
21 | While other services focus on viewing and sharing, Shoebox focuses on organization and preservation with the specific goal of making your video content useful for future creative projects.
22 |
23 | ## Tech Stack
24 |
25 | - **Backend**: Rust with Axum web framework
26 | - **Frontend**: React with TypeScript
27 | - **Database**: SQLite/PostgreSQL via SQLx
28 | - **Media Processing**: FFmpeg
29 | - **Deployment**: Docker/Kubernetes support
30 |
--------------------------------------------------------------------------------
/.idea/dataSources.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | sqlite.xerial
6 | true
7 | org.sqlite.JDBC
8 | jdbc:sqlite:$PROJECT_DIR$/data.db
9 |
10 |
11 |
12 | $ProjectFileDir$
13 |
14 |
15 | file://$APPLICATION_CONFIG_DIR$/jdbc-drivers/Xerial SQLiteJDBC/3.45.1/org/xerial/sqlite-jdbc/3.45.1.0/sqlite-jdbc-3.45.1.0.jar
16 |
17 |
18 | file://$APPLICATION_CONFIG_DIR$/jdbc-drivers/Xerial SQLiteJDBC/3.45.1/org/slf4j/slf4j-api/1.7.36/slf4j-api-1.7.36.jar
19 |
20 |
21 |
22 |
23 | postgresql
24 | true
25 | org.postgresql.Driver
26 | jdbc:postgresql://10.21.20.229:5432/shoebox-dev
27 |
28 |
29 |
30 |
31 |
32 | $ProjectFileDir$
33 |
34 |
35 |
--------------------------------------------------------------------------------
/src/routes/event.rs:
--------------------------------------------------------------------------------
1 | use axum::{
2 | extract::{Path, State},
3 | routing::{get, post, delete},
4 | Json, Router,
5 | };
6 | use serde::{Deserialize, Serialize};
7 |
8 | use crate::error::Result;
9 | use crate::services::{AppState, EventService};
10 |
// NOTE(review): generic type parameters throughout this file (e.g.
// `State<AppState>`, `Json<UpdateEventRequest>`, `Result<Json<...>>`)
// were lost to angle-bracket stripping during extraction; the bare
// `State`, `Json` and `Result` in the signatures below are not valid Rust
// as-is. Restore the originals from version control before editing.

/// Build the `/events` sub-router.
pub fn router(app_state: AppState) -> Router {
    Router::new()
        .route("/", get(get_all_events))
        .route("/usage", get(get_event_usage))
        .route("/update", post(update_event))
        .route("/{event}", delete(delete_event))
        .with_state(app_state)
}

/// GET `/`: list all known events.
async fn get_all_events(
    State(state): State,
) -> Result>> {
    let event_service = EventService::new(state.db.clone());
    let events = event_service.get_all_events().await?;
    Ok(Json(events))
}

/// GET `/usage`: per-event usage data (shape defined by EventService).
async fn get_event_usage(
    State(state): State,
) -> Result>> {
    let event_service = EventService::new(state.db.clone());
    let usage = event_service.get_event_usage().await?;
    Ok(Json(usage))
}

/// Body for POST `/update`: rename `old_event` to `new_event`.
#[derive(Debug, Deserialize, Serialize)]
struct UpdateEventRequest {
    old_event: String,
    new_event: String,
}

/// POST `/update`: rename an event; returns a count (presumably affected
/// rows — confirm in EventService).
async fn update_event(
    State(state): State,
    Json(request): Json,
) -> Result> {
    let event_service = EventService::new(state.db.clone());
    let count = event_service.update_event(&request.old_event, &request.new_event).await?;
    Ok(Json(count))
}

/// DELETE `/{event}`: remove an event by name; returns a count (presumably
/// affected rows — confirm in EventService).
async fn delete_event(
    State(state): State,
    Path(event): Path,
) -> Result> {
    let event_service = EventService::new(state.db.clone());
    let count = event_service.delete_event(&event).await?;
    Ok(Json(count))
}
59 |
--------------------------------------------------------------------------------
/docs/installation.md:
--------------------------------------------------------------------------------
1 | # Installation
2 |
3 | Shoebox can be installed in several ways, depending on your environment and preferences.
4 |
5 | ## Prerequisites
6 |
7 | Before installing Shoebox, ensure you have the following prerequisites:
8 |
9 | - [FFmpeg](https://ffmpeg.org/download.html) (for video processing)
10 | - Access to storage for your videos, thumbnails, and exports
11 |
12 | ## Installation Methods
13 |
14 | ### Docker
15 |
16 | The simplest way to run Shoebox is using Docker:
17 |
18 | ```bash
19 | # Pull the latest image
20 | docker pull ghcr.io/slackspace-io/shoebox:latest
21 |
22 | # Run the container
23 | docker run -d \
24 | -p 3000:3000 \
25 | -v /path/to/your/videos:/mnt/videos:ro \
26 | -v /path/to/your/exports:/app/exports \
27 | -v /path/to/thumbnails:/app/thumbnails \
28 | -v /path/to/data:/app/data \
29 | --name shoebox \
30 | ghcr.io/slackspace-io/shoebox:latest
31 | ```
32 |
33 | ### Docker Compose
34 |
35 | For a more complete setup, you can use Docker Compose:
36 |
37 | ```bash
38 | # Clone the repository
39 | git clone https://github.com/slackspace-io/shoebox.git
40 | cd shoebox
41 |
42 | # Edit the docker-compose.yml file to configure your media source paths
43 | # Start the application
44 | docker-compose up -d
45 | ```
46 |
47 | ### Kubernetes with Helm
48 |
49 | For Kubernetes deployments, Shoebox provides a Helm chart. See the [Helm Chart](./installation/helm-chart.md) page for detailed instructions.
50 |
51 | ### Development Setup
52 |
53 | If you want to run Shoebox for development:
54 |
55 | ```bash
56 | # Clone the repository
57 | git clone https://github.com/slackspace-io/shoebox.git
58 | cd shoebox
59 |
60 | # Run the backend
61 | cargo run
62 |
63 | # In a separate terminal, run the frontend
64 | cd frontend
65 | yarn install
66 | yarn dev
67 | ```
68 |
69 | The frontend development server will be available at http://localhost:5173, and the backend server will be available at http://localhost:3000.
70 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Shoebox Documentation
2 |
3 | Welcome to the Shoebox documentation! This guide will help you install, configure, and use Shoebox to organize and preserve your videos.
4 |
5 | ## What is Shoebox?
6 |
7 | Shoebox is a digital solution for organizing and preserving your videos over a lifetime. It provides a dedicated space for your videos - a digital equivalent of that cherished shoebox in your closet.
8 |
9 | ## Getting Started
10 |
11 | - [Introduction](./introduction.md) - Learn about the Shoebox concept and what makes it different
12 | - [Installation](./installation.md) - Install Shoebox using Docker, Docker Compose, or Kubernetes
13 | - [Configuration](./configuration.md) - Configure Shoebox to suit your needs
14 | - [Usage](./usage.md) - Learn how to use Shoebox to organize and preserve your videos
15 |
16 | ## Key Features
17 |
18 | - **Video organization and cataloging** - Keep your videos organized and easily searchable
19 | - **Thumbnail generation** - Quickly identify videos with automatically generated thumbnails
20 | - **Video metadata extraction** - Extract and use metadata from your videos
21 | - **Export capabilities** - Export videos for use in external editing tools
22 | - **Unreviewed videos workflow** - Efficiently process new videos
23 | - **System information and management** - Monitor and manage your Shoebox installation
24 |
25 | ## Recent Enhancements
26 |
27 | ### Original Location Specification
28 |
29 | A recent enhancement allows you to specify the original location of videos, which is useful when the path in your container or server differs from the original path where the videos were created or stored.
30 |
31 | This feature is particularly useful for:
32 | - Preserving metadata about the original location of videos
33 | - Maintaining compatibility with external video editing tools
34 | - Migrating videos between systems
35 |
36 | See the [Configuration](./configuration.md) page for more details on how to use this feature.
37 |
--------------------------------------------------------------------------------
/src/routes/location.rs:
--------------------------------------------------------------------------------
1 | use axum::{
2 | extract::{Path, State},
3 | routing::{get, post, delete},
4 | Json, Router,
5 | };
6 | use serde::{Deserialize, Serialize};
7 |
8 | use crate::error::Result;
9 | use crate::services::{AppState, LocationService};
10 |
// NOTE(review): generic type parameters throughout this file (e.g.
// `State<AppState>`, `Json<UpdateLocationRequest>`, `Result<Json<...>>`)
// were lost to angle-bracket stripping during extraction; the bare
// `State`, `Json` and `Result` in the signatures below are not valid Rust
// as-is. Restore the originals from version control before editing.

/// Build the `/locations` sub-router (mirrors the events router).
pub fn router(app_state: AppState) -> Router {
    Router::new()
        .route("/", get(get_all_locations))
        .route("/usage", get(get_location_usage))
        .route("/update", post(update_location))
        .route("/{location}", delete(delete_location))
        .with_state(app_state)
}

/// GET `/`: list all known locations.
async fn get_all_locations(
    State(state): State,
) -> Result>> {
    let location_service = LocationService::new(state.db.clone());
    let locations = location_service.get_all_locations().await?;
    Ok(Json(locations))
}

/// GET `/usage`: per-location usage data (shape defined by LocationService).
async fn get_location_usage(
    State(state): State,
) -> Result>> {
    let location_service = LocationService::new(state.db.clone());
    let usage = location_service.get_location_usage().await?;
    Ok(Json(usage))
}

/// Body for POST `/update`: rename `old_location` to `new_location`.
#[derive(Debug, Deserialize, Serialize)]
struct UpdateLocationRequest {
    old_location: String,
    new_location: String,
}

/// POST `/update`: rename a location; returns a count (presumably affected
/// rows — confirm in LocationService).
async fn update_location(
    State(state): State,
    Json(request): Json,
) -> Result> {
    let location_service = LocationService::new(state.db.clone());
    let count = location_service.update_location(&request.old_location, &request.new_location).await?;
    Ok(Json(count))
}

/// DELETE `/{location}`: remove a location by name; returns a count
/// (presumably affected rows — confirm in LocationService).
async fn delete_location(
    State(state): State,
    Path(location): Path,
) -> Result> {
    let location_service = LocationService::new(state.db.clone());
    let count = location_service.delete_location(&location).await?;
    Ok(Json(count))
}
59 |
--------------------------------------------------------------------------------
/.github/workflows/rust-clippy.yml:
--------------------------------------------------------------------------------
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
# rust-clippy is a tool that runs a bunch of lints to catch common
# mistakes in your Rust code and help improve your Rust code.
# More details at https://github.com/rust-lang/rust-clippy
# and https://rust-lang.github.io/rust-clippy/

name: rust-clippy analyze

on:
  push:
    branches: [ "main" ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ "main" ]
  schedule:
    - cron: '42 19 * * 1'

concurrency:
  group: "shoebox-clippy"
  cancel-in-progress: true

jobs:
  rust-clippy-analyze:
    name: Run rust-clippy analyzing
    runs-on: ubuntu-latest
    permissions:
      contents: read
      security-events: write
      actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
    steps:
      - name: Checkout code
        uses: actions/checkout@v4

      # NOTE(review): the actions-rs organisation is archived/unmaintained;
      # consider migrating to dtolnay/rust-toolchain. Pinning by commit SHA
      # (as done here) is good practice either way.
      - name: Install Rust toolchain
        uses: actions-rs/toolchain@16499b5e05bf2e26879000db0c1d13f7e13fa3af #@v1
        with:
          profile: minimal
          toolchain: stable
          components: clippy
          override: true

      - name: Install required cargo
        run: cargo install clippy-sarif sarif-fmt

      # continue-on-error lets the SARIF upload step run even when clippy
      # reports findings (non-zero exit status).
      - name: Run rust-clippy
        run:
          cargo clippy
          --all-features
          --message-format=json | clippy-sarif | tee rust-clippy-results.sarif | sarif-fmt
        continue-on-error: true

      - name: Upload analysis results to GitHub
        uses: github/codeql-action/upload-sarif@v3
        with:
          sarif_file: rust-clippy-results.sarif
          wait-for-processing: true
60 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
# Multi-stage build for Shoebox

# Stage 1: Build the frontend
FROM node:18-alpine AS frontend-builder
WORKDIR /app/frontend

# Copy manifest + lockfiles first so dependency install is layer-cached.
# FIX(review): the build uses yarn, but only package-lock.json (npm's
# lockfile) was copied; also copy yarn.lock so installs are reproducible.
# Both globs are optional (`*`) so the build still works if either is absent.
COPY frontend/package.json frontend/yarn.lock* frontend/package-lock.json* ./
RUN yarn install

# Copy frontend source code
COPY frontend/ ./

# Build the frontend
RUN yarn run build

# Stage 2: Build the Rust backend
FROM rust:latest AS backend-builder
WORKDIR /app
# Native build dependencies (ffmpeg libraries for media processing,
# sqlite, and TLS headers)
RUN apt-get update && apt-get install -y \
    pkg-config \
    libssl-dev \
    libsqlite3-dev \
    libavformat-dev \
    libavcodec-dev \
    libavutil-dev \
    libavfilter-dev \
    libswscale-dev \
    ffmpeg \
    && rm -rf /var/lib/apt/lists/*

# Copy Cargo.toml and Cargo.lock
COPY Cargo.toml Cargo.lock ./

# Copy actual source code
COPY src/ src/
COPY migrations/ migrations/

# Build the application
RUN cargo build --release

# Stage 3: Create the final image
FROM debian:bookworm-slim
WORKDIR /app

# Runtime dependencies: ffmpeg + exiftool for media probing, sqlite,
# CA certificates for outbound TLS
RUN apt-get update && apt-get install -y \
    libsqlite3-0 \
    ffmpeg \
    ca-certificates \
    exiftool \
    && rm -rf /var/lib/apt/lists/*

# Copy the built frontend from stage 1
COPY --from=frontend-builder /app/frontend/dist /app/frontend/dist

# Copy the built backend from stage 2
COPY --from=backend-builder /app/target/release/shoebox /app/shoebox

# Create directories for data
RUN mkdir -p /app/data /app/thumbnails /app/exports

# Set environment variables
ENV SERVER_HOST=0.0.0.0
ENV SERVER_PORT=3000
#ENV DATABASE_URL=sqlite:/app/data/videos.db
ENV THUMBNAIL_PATH=/app/thumbnails
ENV EXPORT_BASE_PATH=/app/exports
ENV FRONTEND_PATH=/app/frontend/dist

# Expose the port
EXPOSE 3000

# Run the application
CMD ["/app/shoebox"]
78 |
--------------------------------------------------------------------------------
/.idea/tailwindcss.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/frontend/src/contexts/ScanContext.tsx:
--------------------------------------------------------------------------------
1 | import React, { createContext, useContext, useState, useEffect, ReactNode } from 'react';
2 |
// Scan status exposed by the provider (mapped from the snake_case JSON of
// GET /api/scan/status).
interface ScanStatus {
  inProgress: boolean;
  newVideosCount: number;
  updatedVideosCount: number;
}

// NOTE(review): generic type arguments in this file (e.g. `Promise<void>`,
// `createContext<ScanContextType | undefined>`) were lost to angle-bracket
// stripping during extraction; restore from version control before editing.
interface ScanContextType {
  scanStatus: ScanStatus;
  checkScanStatus: () => Promise;
}

// Initial status shown before the first poll completes.
const defaultScanStatus: ScanStatus = {
  inProgress: false,
  newVideosCount: 0,
  updatedVideosCount: 0,
};

// Defaults to undefined so useScanContext can detect use outside a provider.
const ScanContext = createContext(undefined);
21 |
22 | export const useScanContext = () => {
23 | const context = useContext(ScanContext);
24 | if (context === undefined) {
25 | throw new Error('useScanContext must be used within a ScanProvider');
26 | }
27 | return context;
28 | };
29 |
// Props for ScanProvider: just the subtree to wrap.
interface ScanProviderProps {
  children: ReactNode;
}

export const ScanProvider: React.FC = ({ children }) => {
  const [scanStatus, setScanStatus] = useState(defaultScanStatus);

  // Poll GET /api/scan/status once and map the snake_case payload into
  // camelCase state. Network/parse failures are logged and swallowed so a
  // transient error doesn't break the UI.
  const checkScanStatus = async () => {
    try {
      const response = await fetch('/api/scan/status');
      if (!response.ok) {
        throw new Error('Failed to fetch scan status');
      }
      const data = await response.json();
      setScanStatus({
        inProgress: data.in_progress,
        newVideosCount: data.new_videos_count,
        updatedVideosCount: data.updated_videos_count,
      });
    } catch (error) {
      console.error('Error checking scan status:', error);
    }
  };

  // Check scan status on mount and every 5 seconds if a scan is in progress.
  // The effect re-runs whenever inProgress flips, restarting the interval.
  useEffect(() => {
    checkScanStatus();

    const intervalId = setInterval(() => {
      if (scanStatus.inProgress) {
        checkScanStatus();
      }
    }, 5000);

    return () => clearInterval(intervalId);
  }, [scanStatus.inProgress]);

  // NOTE(review): the JSX below lost its <ScanContext.Provider value={...}>
  // wrapper tags to angle-bracket stripping during extraction.
  return (

      {children}

  );
};
73 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
# NOTE(review): the top-level `version` key is obsolete in the Compose
# specification (Compose v2 ignores it); harmless to keep for older tooling.
version: '3.8'

services:
  app:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: shoebox
    ports:
      - "3000:3000"
    environment:
      - SERVER_HOST=0.0.0.0
      - SERVER_PORT=3000
      - DATABASE_URL=sqlite:/app/data/videos.db
      - MEDIA_SOURCE_PATHS=/mnt/videos
      - THUMBNAIL_PATH=/app/thumbnails
      - EXPORT_BASE_PATH=/app/exports
      - RUST_LOG=info
    volumes:
      # Mount media source directories (read-only)
      - /path/to/your/videos:/mnt/videos:ro

      # Mount export directory (read-write)
      - /path/to/your/exports:/app/exports

      # Mount data directory for persistence
      - ./data:/app/data

      # Mount thumbnails directory for persistence
      - ./thumbnails:/app/thumbnails
    restart: unless-stopped

# Example with PostgreSQL instead of SQLite
#
# services:
#   app:
#     build:
#       context: .
#       dockerfile: Dockerfile
#     container_name: shoebox
#     ports:
#       - "3000:3000"
#     environment:
#       - SERVER_HOST=0.0.0.0
#       - SERVER_PORT=3000
#       - DATABASE_URL=postgres://postgres:postgres@db:5432/videos
#       - MEDIA_SOURCE_PATHS=/mnt/videos
#       - THUMBNAIL_PATH=/app/thumbnails
#       - EXPORT_BASE_PATH=/app/exports
#       - RUST_LOG=info
#     volumes:
#       # Mount media source directories (read-only)
#       - /path/to/your/videos:/mnt/videos:ro
#
#       # Mount export directory (read-write)
#       - /path/to/your/exports:/app/exports
#
#       # Mount thumbnails directory for persistence
#       - ./thumbnails:/app/thumbnails
#     depends_on:
#       - db
#     restart: unless-stopped
#
#   db:
#     image: postgres:15-alpine
#     container_name: shoebox-db
#     environment:
#       - POSTGRES_USER=postgres
#       - POSTGRES_PASSWORD=postgres
#       - POSTGRES_DB=videos
#     volumes:
#       - postgres_data:/var/lib/postgresql/data
#     restart: unless-stopped
#
# volumes:
#   postgres_data:
77 |
--------------------------------------------------------------------------------
/src/error.rs:
--------------------------------------------------------------------------------
1 | use axum::{
2 | http::StatusCode,
3 | response::{IntoResponse, Response},
4 | Json,
5 | };
6 | use serde_json::json;
7 | use thiserror::Error;
8 |
/// Application-wide error type; each variant maps to an HTTP status in the
/// `IntoResponse` impl below.
#[derive(Error, Debug)]
pub enum AppError {
    #[error("Database error: {0}")]
    Database(#[from] sqlx::Error),

    #[error("IO error: {0}")]
    Io(#[from] std::io::Error),

    /// Errors reported by ffmpeg invocations (message built by callers).
    #[error("FFmpeg error: {0}")]
    FFmpeg(String),

    #[error("Not found: {0}")]
    NotFound(String),

    #[error("Bad request: {0}")]
    BadRequest(String),

    #[error("Unauthorized: {0}")]
    Unauthorized(String),

    #[error("Internal server error: {0}")]
    InternalServerError(String),

    #[error("Configuration error: {0}")]
    ConfigError(String),

    /// Catch-all for anyhow-propagated errors.
    #[error(transparent)]
    Other(#[from] anyhow::Error),
}
38 |
39 | impl IntoResponse for AppError {
40 | fn into_response(self) -> Response {
41 | let (status, error_message) = match self {
42 | AppError::Database(ref e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()),
43 | AppError::Io(ref e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()),
44 | AppError::FFmpeg(ref e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()),
45 | AppError::NotFound(ref e) => (StatusCode::NOT_FOUND, e.to_string()),
46 | AppError::BadRequest(ref e) => (StatusCode::BAD_REQUEST, e.to_string()),
47 | AppError::Unauthorized(ref e) => (StatusCode::UNAUTHORIZED, e.to_string()),
48 | AppError::InternalServerError(ref e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()),
49 | AppError::ConfigError(ref e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()),
50 | AppError::Other(ref e) => (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()),
51 | };
52 |
53 | tracing::error!("Error: {}", error_message);
54 |
55 | let body = Json(json!({
56 | "error": {
57 | "message": error_message,
58 | "code": status.as_u16(),
59 | }
60 | }));
61 |
62 | (status, body).into_response()
63 | }
64 | }
65 |
// NOTE(review): generic parameters were stripped by extraction — this was
// most likely `pub type Result<T> = std::result::Result<T, AppError>;`.
pub type Result = std::result::Result;
67 |
--------------------------------------------------------------------------------
/.github/workflows/build-and-publish.yml:
--------------------------------------------------------------------------------
name: Build and Publish Container Image

on:
  push:
    branches: [ main ]
    tags: [ 'v*' ]
    paths-ignore:
      - 'charts/**'
      - '**.md'
      - 'docs/**'
      - 'book.toml'
      - '.github/dependabot.yml'
      - '.github/workflows/helm-release.yml'
      - '.github/workflows/mdbook.yml'
      - '.github/workflows/release.yml'
      - '.github/workflows/rust-clippy.yml'
  pull_request:
    branches: [ main ]
  workflow_call:
    secrets:
      GH_PAT:
        required: true

concurrency:
  group: "shoebox"
  cancel-in-progress: true

env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build-and-push:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write

    steps:
      # NOTE(review): checkout@v3 and the v2/v4 docker actions below are
      # older majors than the checkout@v4 used elsewhere in this repo —
      # consider aligning action versions.
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v2

      # Skip registry login for PRs, where push is disabled below anyway.
      - name: Log in to the Container registry
        if: github.event_name != 'pull_request'
        uses: docker/login-action@v2
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v4
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
          tags: |
            type=raw,value=latest,enable={{is_default_branch}}
            type=semver,pattern={{version}}
            type=semver,pattern={{major}}.{{minor}}
            type=ref,event=branch
            type=ref,event=pr
            type=sha,format=long

      - name: Build and push Docker image
        uses: docker/build-push-action@v4
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max
76 |
--------------------------------------------------------------------------------
/.idea/inspectionProfiles/Project_Default.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 |
--------------------------------------------------------------------------------
/.github/workflows/mdbook.yml:
--------------------------------------------------------------------------------
# Sample workflow for building and deploying a mdBook site to GitHub Pages
#
# To get started with mdBook see: https://rust-lang.github.io/mdBook/index.html
#
name: Deploy mdBook site to Pages

on:
  # Runs on pushes targeting the default branch
  push:
    branches: [ "main" ]

    paths:
      - "docs/**"
      - "book.toml"
      - ".github/workflows/mdbook.yml"
      - "index.html"

  workflow_run:
    workflows: [ 'Release Charts' ]
    types: [ completed ]

  workflow_call:
    secrets:
      GH_PAT:
        required: true

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

# Allow only one concurrent deployment, skipping runs queued between the run in-progress and latest queued.
# However, do NOT cancel in-progress runs as we want to allow these production deployments to complete.
concurrency:
  group: "pages"
  cancel-in-progress: false

jobs:
  # Build job
  build:
    runs-on: ubuntu-latest
    env:
      MDBOOK_VERSION: 0.4.36
    steps:
      - uses: actions/checkout@v4
      - name: Install mdBook
        # FIX(review): `-y` was previously passed to curl (where it means
        # --speed-time and consumes the next argument) instead of the rustup
        # installer; forward it to the installer via `sh -s -- -y`.
        run: |
          curl --proto '=https' --tlsv1.2 https://sh.rustup.rs -sSf | sh -s -- -y
          rustup update
          cargo install --version ${MDBOOK_VERSION} mdbook
      - name: Setup Pages
        id: pages
        uses: actions/configure-pages@v5
      - name: Build with mdBook
        run: mdbook build
      - name: Publish Helm Index
        # Assumes ./index.yaml exists in the repo root and book.toml builds
        # into ./site — TODO confirm against book.toml.
        run: |
          mkdir -p ./site/helm
          cp ./index.yaml ./site/helm/index.yaml
      - name: Upload artifact
        uses: actions/upload-pages-artifact@v3
        with:
          path: ./site

  # Deployment job
  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-latest
    needs: build
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v4
80 |
--------------------------------------------------------------------------------
/src/utils/file.rs:
--------------------------------------------------------------------------------
1 | use std::path::{Path, PathBuf};
2 | use tokio::fs;
3 | use tracing::error;
4 |
5 | use crate::error::{AppError, Result};
6 |
7 | /// Check if a file exists
8 | pub async fn file_exists(path: &Path) -> bool {
9 | fs::metadata(path).await.is_ok()
10 | }
11 |
/// Get file extension as lowercase string.
/// NOTE(review): the return type parameter was stripped by extraction —
/// originally most likely `Option<String>`.
pub fn get_file_extension(path: &Path) -> Option {
    path.extension()
        .map(|ext| ext.to_string_lossy().to_lowercase())
}
17 |
18 | /// Check if a file is a video file
19 | pub fn is_video_file(path: &Path) -> bool {
20 | if let Some(ext) = get_file_extension(path) {
21 | return ["mp4", "mov", "mkv"].contains(&ext.as_str());
22 | }
23 | false
24 | }
25 |
26 | /// Create directory if it doesn't exist
27 | pub async fn ensure_dir_exists(path: &Path) -> Result<()> {
28 | if !path.exists() {
29 | fs::create_dir_all(path).await.map_err(|e| {
30 | AppError::Io(std::io::Error::new(
31 | std::io::ErrorKind::Other,
32 | format!("Failed to create directory {}: {e}", path.display()),
33 | ))
34 | })?;
35 | }
36 | Ok(())
37 | }
38 |
/// Find a filename in `dir` that doesn't collide with an existing file:
/// tries `base_name.ext`, then `base_name_1.ext`, `base_name_2.ext`, ...
/// NOTE(review): the return type parameter was stripped by extraction —
/// originally most likely `Result<PathBuf>`.
pub async fn get_unique_filename(dir: &Path, base_name: &str, extension: &str) -> Result {
    let mut counter = 0;
    let mut file_name = format!("{}.{}", base_name, extension);
    let mut file_path = dir.join(&file_name);

    // Probe until a free name is found. Not atomic: a concurrent writer
    // could claim the name between this check and the caller's use.
    while file_exists(&file_path).await {
        counter += 1;
        file_name = format!("{}_{}.{}", base_name, counter, extension);
        file_path = dir.join(&file_name);
    }

    Ok(file_path)
}
53 |
54 | /// Copy a file with error handling
55 | pub async fn copy_file(source: &Path, dest: &Path) -> Result<()> {
56 | if !source.exists() {
57 | return Err(AppError::NotFound(format!(
58 | "Source file not found: {}",
59 | source.display()
60 | )));
61 | }
62 |
63 | // Ensure parent directory exists
64 | if let Some(parent) = dest.parent() {
65 | ensure_dir_exists(parent).await?;
66 | }
67 |
68 | fs::copy(source, dest).await.map_err(|e| {
69 | error!(
70 | "Failed to copy {} to {}: {}",
71 | source.display(),
72 | dest.display(),
73 | e
74 | );
75 | AppError::Io(e)
76 | })?;
77 |
78 | Ok(())
79 | }
80 |
--------------------------------------------------------------------------------
/migrations/20240101000000_initial_schema.sql:
--------------------------------------------------------------------------------
-- Initial schema for Shoebox - a digital shoebox for your videos
-- Up migration

-- Videos table
-- Primary keys are 36-character strings (presumably UUIDs generated by the
-- application — confirm in src/models).
CREATE TABLE IF NOT EXISTS videos (
    id VARCHAR(36) PRIMARY KEY NOT NULL,
    file_path VARCHAR(255) NOT NULL,
    file_name VARCHAR(255) NOT NULL,
    title VARCHAR(255),
    description TEXT,
    created_date VARCHAR(50),
    file_size BIGINT,
    thumbnail_path VARCHAR(255),
    -- Rating is optional; when present it must be a 1-5 star value.
    rating INTEGER CHECK (rating BETWEEN 1 AND 5 OR rating IS NULL),
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- Tags table
CREATE TABLE IF NOT EXISTS tags (
    id VARCHAR(36) PRIMARY KEY NOT NULL,
    name VARCHAR(100) NOT NULL UNIQUE,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- People table
CREATE TABLE IF NOT EXISTS people (
    id VARCHAR(36) PRIMARY KEY NOT NULL,
    name VARCHAR(100) NOT NULL UNIQUE,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- Video-Tag relationship table (many-to-many; rows cascade away with
-- either side).
CREATE TABLE IF NOT EXISTS video_tags (
    video_id VARCHAR(36) NOT NULL,
    tag_id VARCHAR(36) NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY (video_id, tag_id),
    FOREIGN KEY (video_id) REFERENCES videos (id) ON DELETE CASCADE,
    FOREIGN KEY (tag_id) REFERENCES tags (id) ON DELETE CASCADE
);

-- Video-People relationship table (many-to-many; rows cascade away with
-- either side).
CREATE TABLE IF NOT EXISTS video_people (
    video_id VARCHAR(36) NOT NULL,
    person_id VARCHAR(36) NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY (video_id, person_id),
    FOREIGN KEY (video_id) REFERENCES videos (id) ON DELETE CASCADE,
    FOREIGN KEY (person_id) REFERENCES people (id) ON DELETE CASCADE
);

-- Create indexes
CREATE INDEX IF NOT EXISTS idx_videos_file_path ON videos (file_path);
CREATE INDEX IF NOT EXISTS idx_videos_created_date ON videos (created_date);
CREATE INDEX IF NOT EXISTS idx_tags_name ON tags (name);
CREATE INDEX IF NOT EXISTS idx_people_name ON people (name);

-- Down migration
-- DROP TABLE IF EXISTS video_people;
-- DROP TABLE IF EXISTS video_tags;
-- DROP TABLE IF EXISTS people;
-- DROP TABLE IF EXISTS tags;
-- DROP TABLE IF EXISTS videos;
65 |
--------------------------------------------------------------------------------
/src/main.rs:
--------------------------------------------------------------------------------
1 | mod config;
2 | mod error;
3 | mod models;
4 | mod routes;
5 | mod services;
6 | mod utils;
7 | mod db;
8 |
9 | use axum::{
10 | routing::get,
11 | Router,
12 | };
13 | use std::net::SocketAddr;
14 | use std::path::PathBuf;
15 | use tokio::net::TcpListener;
16 | use tower_http::services::{ServeDir, ServeFile};
17 | use tracing::info;
18 | use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
19 |
20 | #[tokio::main]
21 | async fn main() -> anyhow::Result<()> {
22 | // Load environment variables
23 | dotenv::dotenv().ok();
24 |
25 | // Initialize tracing
26 | tracing_subscriber::registry()
27 | .with(tracing_subscriber::EnvFilter::new(
28 | std::env::var("RUST_LOG").unwrap_or_else(|_| "info".into()),
29 | ))
30 | .with(tracing_subscriber::fmt::layer())
31 | .init();
32 |
33 | // Load configuration
34 | let config = config::Config::load()?;
35 |
36 | //log that db init will happen
37 | info!("DB Init");
38 | // Initialize database
39 | let db_pool = db::init_db(&config).await?;
40 |
41 | // Create application state
42 | let app_state = services::AppState {
43 | db: db_pool,
44 | config: config.clone(),
45 | scan_status: std::sync::Arc::new(tokio::sync::RwLock::new(services::ScanStatus::default())),
46 | };
47 |
48 | // Determine the path to the frontend dist directory
49 | let frontend_path = std::env::var("FRONTEND_PATH").unwrap_or_else(|_| {
50 | if std::path::Path::new("/app/frontend/dist").exists() {
51 | "/app/frontend/dist".to_string()
52 | } else {
53 | "frontend/dist".to_string()
54 | }
55 | });
56 |
57 | info!("Serving frontend from: {}", frontend_path);
58 |
59 | // Build our application with routes
60 | let app = Router::new()
61 | // API routes
62 | .nest("/api", routes::api_router(app_state.clone()))
63 | // Serve thumbnails from the thumbnails directory
64 | .nest_service("/app/thumbnails", ServeDir::new(&config.media.thumbnail_path))
65 | // Serve media files from the media directory with custom handler
66 | .nest("/media", routes::media::router(app_state))
67 | // Serve static files from the frontend directory
68 | .fallback_service(ServeDir::new(&frontend_path).fallback(ServeFile::new(format!("{frontend_path}/index.html"))));
69 |
70 | // Run the server
71 | let addr = SocketAddr::from(([0, 0, 0, 0], config.server.port));
72 | tracing::info!("Listening on {}", addr);
73 |
74 | // Start the server
75 | let listener = TcpListener::bind(addr).await?;
76 | axum::serve(listener, app).await?;
77 |
78 | Ok(())
79 | }
80 |
--------------------------------------------------------------------------------
/src/routes/tag.rs:
--------------------------------------------------------------------------------
use axum::{
    extract::{Path, State},
    routing::{delete, get, post, put},
    Json, Router,
};

use crate::error::Result;
use crate::models::{CreateTagDto, Tag};
use crate::services::{AppState, TagService, TagUsage};
11 |
12 | pub fn router(app_state: AppState) -> Router {
13 | Router::new()
14 | .route("/", get(list_tags))
15 | .route("/", post(create_tag))
16 | .route("/usage", get(get_tag_usage))
17 | .route("/cleanup", post(cleanup_unused_tags))
18 | .route("/{id}", get(get_tag))
19 | .route("/{id}", put(update_tag))
20 | .route("/{id}", delete(delete_tag))
21 | .with_state(app_state)
22 | }
23 |
24 | async fn list_tags(State(state): State) -> Result>> {
25 | let tag_service = TagService::new(state.db.clone());
26 | let tags = tag_service.find_all().await?;
27 | Ok(Json(tags))
28 | }
29 |
30 | async fn get_tag(
31 | State(state): State,
32 | Path(id): Path,
33 | ) -> Result> {
34 | let tag_service = TagService::new(state.db.clone());
35 | let tag = tag_service.find_by_id(&id).await?;
36 | Ok(Json(tag))
37 | }
38 |
39 | async fn create_tag(
40 | State(state): State,
41 | Json(create_dto): Json,
42 | ) -> Result> {
43 | let tag_service = TagService::new(state.db.clone());
44 | let tag = tag_service.create(create_dto).await?;
45 | Ok(Json(tag))
46 | }
47 |
48 | async fn update_tag(
49 | State(state): State,
50 | Path(id): Path,
51 | Json(new_name): Json,
52 | ) -> Result> {
53 | let tag_service = TagService::new(state.db.clone());
54 | let tag = tag_service.update(&id, &new_name).await?;
55 | Ok(Json(tag))
56 | }
57 |
58 | async fn delete_tag(
59 | State(state): State,
60 | Path(id): Path,
61 | ) -> Result> {
62 | let tag_service = TagService::new(state.db.clone());
63 | tag_service.delete(&id).await?;
64 | Ok(Json(()))
65 | }
66 |
67 | async fn get_tag_usage(
68 | State(state): State,
69 | ) -> Result>> {
70 | let tag_service = TagService::new(state.db.clone());
71 | let usage = tag_service.get_usage().await?;
72 | Ok(Json(usage))
73 | }
74 |
75 | #[derive(serde::Serialize)]
76 | struct CleanupResponse {
77 | count: usize,
78 | }
79 |
80 | async fn cleanup_unused_tags(State(state): State) -> Result> {
81 | let tag_service = TagService::new(state.db.clone());
82 | let count = tag_service.cleanup_unused().await?;
83 | Ok(Json(CleanupResponse { count }))
84 | }
85 |
--------------------------------------------------------------------------------
/charts/shoebox/templates/_helpers.tpl:
--------------------------------------------------------------------------------
1 | {{/*
2 | Expand the name of the chart.
3 | */}}
4 | {{- define "shoebox.name" -}}
5 | {{- default .Chart.Name .Values.nameOverride | trunc 63 | trimSuffix "-" }}
6 | {{- end }}
7 |
8 | {{/*
9 | Create a default fully qualified app name.
10 | We truncate at 63 chars because some Kubernetes name fields are limited to this (by the DNS naming spec).
11 | If release name contains chart name it will be used as a full name.
12 | */}}
13 | {{- define "shoebox.fullname" -}}
14 | {{- if .Values.fullnameOverride }}
15 | {{- .Values.fullnameOverride | trunc 63 | trimSuffix "-" }}
16 | {{- else }}
17 | {{- $name := default .Chart.Name .Values.nameOverride }}
18 | {{- if contains $name .Release.Name }}
19 | {{- .Release.Name | trunc 63 | trimSuffix "-" }}
20 | {{- else }}
21 | {{- printf "%s-%s" .Release.Name $name | trunc 63 | trimSuffix "-" }}
22 | {{- end }}
23 | {{- end }}
24 | {{- end }}
25 |
26 | {{/*
27 | Create chart name and version as used by the chart label.
28 | */}}
29 | {{- define "shoebox.chart" -}}
30 | {{- printf "%s-%s" .Chart.Name .Chart.Version | replace "+" "_" | trunc 63 | trimSuffix "-" }}
31 | {{- end }}
32 |
33 | {{/*
34 | Common labels
35 | */}}
36 | {{- define "shoebox.labels" -}}
37 | helm.sh/chart: {{ include "shoebox.chart" . }}
38 | {{ include "shoebox.selectorLabels" . }}
39 | {{- if .Chart.AppVersion }}
40 | app.kubernetes.io/version: {{ .Chart.AppVersion | quote }}
41 | {{- end }}
42 | app.kubernetes.io/managed-by: {{ .Release.Service }}
43 | {{- end }}
44 |
45 | {{/*
46 | Selector labels
47 | */}}
48 | {{- define "shoebox.selectorLabels" -}}
49 | app.kubernetes.io/name: {{ include "shoebox.name" . }}
50 | app.kubernetes.io/instance: {{ .Release.Name }}
51 | {{- end }}
52 |
53 | {{/*
54 | Create the name of the service account to use
55 | */}}
56 | {{- define "shoebox.serviceAccountName" -}}
57 | {{- if .Values.serviceAccount.create }}
58 | {{- default (include "shoebox.fullname" .) .Values.serviceAccount.name }}
59 | {{- else }}
60 | {{- default "default" .Values.serviceAccount.name }}
61 | {{- end }}
62 | {{- end }}
63 |
64 | {{/*
65 | Convert structured mediaSourcePaths to string format
66 | */}}
67 | {{- define "shoebox.mediaSourcePathsString" -}}
68 | {{- if .Values.config.mediaSourcePaths.enabled -}}
69 | {{- $paths := list -}}
70 | {{- range .Values.config.mediaSourcePaths.sources -}}
71 | {{- $path := printf "%s:%s" .name .path -}}
72 | {{- if .originalPath -}}
73 | {{- $path = printf "%s;%s" $path .originalPath -}}
74 | {{- if .originalExtension -}}
75 | {{- $path = printf "%s;%s" $path .originalExtension -}}
76 | {{- end -}}
77 | {{- end -}}
78 | {{- if .defaultShoebox -}}
79 | {{- $path = printf "%s;%s" $path .defaultShoebox -}}
80 | {{- end -}}
81 | {{- $paths = append $paths $path -}}
82 | {{- end -}}
83 | {{- join "," $paths -}}
84 | {{- else -}}
85 | {{- "" -}}
86 | {{- end -}}
87 | {{- end }}
88 |
--------------------------------------------------------------------------------
/src/routes/person.rs:
--------------------------------------------------------------------------------
use axum::{
    extract::{Path, State},
    routing::{delete, get, post, put},
    Json, Router,
};

use crate::error::Result;
use crate::models::{CreatePersonDto, Person};
use crate::services::{AppState, PersonService, PersonUsage};
11 |
12 | pub fn router(app_state: AppState) -> Router {
13 | Router::new()
14 | .route("/", get(list_people))
15 | .route("/", post(create_person))
16 | .route("/usage", get(get_person_usage))
17 | .route("/cleanup", post(cleanup_unused_people))
18 | .route("/{id}", get(get_person))
19 | .route("/{id}", put(update_person))
20 | .route("/{id}", delete(delete_person))
21 | .with_state(app_state)
22 | }
23 |
24 | async fn list_people(State(state): State) -> Result>> {
25 | let person_service = PersonService::new(state.db.clone());
26 | let people = person_service.find_all().await?;
27 | Ok(Json(people))
28 | }
29 |
30 | async fn get_person(
31 | State(state): State,
32 | Path(id): Path,
33 | ) -> Result> {
34 | let person_service = PersonService::new(state.db.clone());
35 | let person = person_service.find_by_id(&id).await?;
36 | Ok(Json(person))
37 | }
38 |
39 | async fn create_person(
40 | State(state): State,
41 | Json(create_dto): Json,
42 | ) -> Result> {
43 | let person_service = PersonService::new(state.db.clone());
44 | let person = person_service.create(create_dto).await?;
45 | Ok(Json(person))
46 | }
47 |
48 | async fn update_person(
49 | State(state): State,
50 | Path(id): Path,
51 | Json(new_name): Json,
52 | ) -> Result> {
53 | let person_service = PersonService::new(state.db.clone());
54 | let person = person_service.update(&id, &new_name).await?;
55 | Ok(Json(person))
56 | }
57 |
58 | async fn delete_person(
59 | State(state): State,
60 | Path(id): Path,
61 | ) -> Result> {
62 | let person_service = PersonService::new(state.db.clone());
63 | person_service.delete(&id).await?;
64 | Ok(Json(()))
65 | }
66 |
67 | async fn get_person_usage(
68 | State(state): State,
69 | ) -> Result>> {
70 | let person_service = PersonService::new(state.db.clone());
71 | let usage = person_service.get_usage().await?;
72 | Ok(Json(usage))
73 | }
74 |
75 | #[derive(serde::Serialize)]
76 | struct CleanupResponse {
77 | count: usize,
78 | }
79 |
80 | async fn cleanup_unused_people(State(state): State) -> Result> {
81 | let person_service = PersonService::new(state.db.clone());
82 | let count = person_service.cleanup_unused().await?;
83 | Ok(Json(CleanupResponse { count }))
84 | }
85 |
--------------------------------------------------------------------------------
/src/services/thumbnail.rs:
--------------------------------------------------------------------------------
1 | use std::path::{Path, PathBuf};
2 | use std::process::Command;
3 | use tokio::fs;
4 | use uuid::Uuid;
5 | use tracing::{info, error};
6 |
7 | use crate::error::{AppError, Result};
8 | use crate::config::Config;
9 |
/// Generates and deletes video thumbnails via FFmpeg.
pub struct ThumbnailService {
    // Directory on disk where generated thumbnail JPEGs are written.
    thumbnail_dir: PathBuf,
    // URL prefix thumbnails are served under; matches the "/app/thumbnails"
    // nest_service in main.rs. Not read by the methods visible here.
    web_path: String,
}
14 |
15 | impl ThumbnailService {
16 | pub fn new(config: &Config) -> Self {
17 | let thumbnail_dir = PathBuf::from(&config.media.thumbnail_path);
18 | Self {
19 | thumbnail_dir,
20 | web_path: "/app/thumbnails".to_string()
21 | }
22 | }
23 |
24 | pub async fn generate_thumbnail(&self, video_path: &str) -> Result {
25 | // Ensure thumbnail directory exists
26 | if !self.thumbnail_dir.exists() {
27 | fs::create_dir_all(&self.thumbnail_dir).await.map_err(|e| {
28 | AppError::Io(std::io::Error::new(
29 | std::io::ErrorKind::Other,
30 | format!("Failed to create thumbnail directory: {e}"),
31 | ))
32 | })?;
33 | }
34 |
35 | // Generate a unique filename for the thumbnail
36 | let thumbnail_filename = format!("{0}.jpg", Uuid::new_v4());
37 | let thumbnail_path = self.thumbnail_dir.join(&thumbnail_filename);
38 | let thumbnail_path_str = thumbnail_path.to_string_lossy().to_string();
39 |
40 | info!("Generating thumbnail for {video_path} at {thumbnail_path_str}");
41 |
42 | // Use FFmpeg to extract the first keyframe
43 | let output = Command::new("ffmpeg")
44 | .arg("-i")
45 | .arg(video_path)
46 | .arg("-vf")
47 | .arg("select=eq(n\\,0)")
48 | .arg("-vframes")
49 | .arg("1")
50 | .arg("-y") // Overwrite output file if it exists
51 | .arg(&thumbnail_path_str)
52 | .output()
53 | .map_err(|e| {
54 | error!("FFmpeg command failed: {e}");
55 | AppError::FFmpeg(format!("Failed to execute FFmpeg: {e}"))
56 | })?;
57 |
58 | if !output.status.success() {
59 | let stderr = String::from_utf8_lossy(&output.stderr);
60 | error!("FFmpeg error: {stderr}");
61 | return Err(AppError::FFmpeg(format!("FFmpeg error: {stderr}")));
62 | }
63 |
64 | // Check if thumbnail was created
65 | if !thumbnail_path.exists() {
66 | return Err(AppError::FFmpeg("Thumbnail was not created".to_string()));
67 | }
68 |
69 | Ok(thumbnail_path_str)
70 | }
71 |
72 | pub async fn delete_thumbnail(&self, thumbnail_path: &str) -> Result<()> {
73 | let path = Path::new(thumbnail_path);
74 |
75 | // Only delete if the file is in our thumbnail directory
76 | if path.starts_with(&self.thumbnail_dir) && path.exists() {
77 | fs::remove_file(path).await.map_err(AppError::Io)?;
78 | info!("Deleted thumbnail: {thumbnail_path}");
79 | }
80 |
81 | Ok(())
82 | }
83 | }
84 |
--------------------------------------------------------------------------------
/src/services/event.rs:
--------------------------------------------------------------------------------
1 | use sqlx::{Pool, Postgres, Row};
2 | use tracing::info;
3 | use uuid::Uuid;
4 |
5 | use crate::error::{AppError, Result};
6 |
7 | pub struct EventService {
8 | db: Pool,
9 | }
10 |
11 | impl EventService {
12 | pub fn new(db: Pool) -> Self {
13 | Self { db }
14 | }
15 |
16 | // Get all unique events from videos
17 | pub async fn get_all_events(&self) -> Result> {
18 | let rows = sqlx::query(
19 | "SELECT DISTINCT event FROM videos
20 | WHERE event IS NOT NULL AND event != ''
21 | ORDER BY event"
22 | )
23 | .fetch_all(&self.db)
24 | .await
25 | .map_err(AppError::Database)?;
26 |
27 | let mut events = Vec::new();
28 | for row in rows {
29 | let event: String = row.get("event");
30 | events.push(event);
31 | }
32 |
33 | Ok(events)
34 | }
35 |
36 | // Get event usage statistics
37 | pub async fn get_event_usage(&self) -> Result> {
38 | let rows = sqlx::query(
39 | "SELECT event, COUNT(*) as video_count
40 | FROM videos
41 | WHERE event IS NOT NULL AND event != ''
42 | GROUP BY event
43 | ORDER BY event"
44 | )
45 | .fetch_all(&self.db)
46 | .await
47 | .map_err(AppError::Database)?;
48 |
49 | let mut results = Vec::new();
50 | for row in rows {
51 | results.push(EventUsage {
52 | name: row.get("event"),
53 | video_count: row.get("video_count"),
54 | });
55 | }
56 |
57 | Ok(results)
58 | }
59 |
60 | // Update event across multiple videos
61 | pub async fn update_event(&self, old_event: &str, new_event: &str) -> Result {
62 | let result = sqlx::query(
63 | "UPDATE videos SET event = $1 WHERE event = $2"
64 | )
65 | .bind(new_event)
66 | .bind(old_event)
67 | .execute(&self.db)
68 | .await
69 | .map_err(AppError::Database)?;
70 |
71 | let count = result.rows_affected() as usize;
72 | if count > 0 {
73 | info!("Updated event '{}' to '{}' in {} videos", old_event, new_event, count);
74 | }
75 |
76 | Ok(count)
77 | }
78 |
79 | // Delete (set to NULL) event across multiple videos
80 | pub async fn delete_event(&self, event: &str) -> Result {
81 | let result = sqlx::query(
82 | "UPDATE videos SET event = NULL WHERE event = $1"
83 | )
84 | .bind(event)
85 | .execute(&self.db)
86 | .await
87 | .map_err(AppError::Database)?;
88 |
89 | let count = result.rows_affected() as usize;
90 | if count > 0 {
91 | info!("Removed event '{}' from {} videos", event, count);
92 | }
93 |
94 | Ok(count)
95 | }
96 | }
97 |
/// Event usage statistics: one event name and the number of videos
/// carrying it (as returned by `EventService::get_event_usage`).
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct EventUsage {
    pub name: String,
    pub video_count: i64,
}
104 |
--------------------------------------------------------------------------------
/charts/shoebox/templates/pvc.yaml:
--------------------------------------------------------------------------------
{{- /*
PersistentVolumeClaims for Shoebox. Each claim is created only when its
feature is enabled and no existingClaim is supplied. Media-source claims are
created per entry in .Values.config.mediaSourcePaths.sources; their size is
now overridable per source via `size` (defaults to the previous hard-coded
100Gi, so existing values files render identically).
*/ -}}
{{- if and .Values.persistence.data.enabled (not .Values.persistence.data.existingClaim) }}
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: {{ include "shoebox.fullname" . }}-data
  labels:
    {{- include "shoebox.labels" . | nindent 4 }}
spec:
  accessModes:
    - {{ .Values.persistence.data.accessMode }}
  resources:
    requests:
      storage: {{ .Values.persistence.data.size }}
  {{- if .Values.persistence.data.storageClass }}
  storageClassName: {{ .Values.persistence.data.storageClass }}
  {{- end }}
---
{{- end }}
{{- if and .Values.persistence.thumbnails.enabled (not .Values.persistence.thumbnails.existingClaim) }}
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: {{ include "shoebox.fullname" . }}-thumbnails
  labels:
    {{- include "shoebox.labels" . | nindent 4 }}
spec:
  accessModes:
    - {{ .Values.persistence.thumbnails.accessMode }}
  resources:
    requests:
      storage: {{ .Values.persistence.thumbnails.size }}
  {{- if .Values.persistence.thumbnails.storageClass }}
  storageClassName: {{ .Values.persistence.thumbnails.storageClass }}
  {{- end }}
---
{{- end }}
{{- if and .Values.persistence.exports.enabled (not .Values.persistence.exports.existingClaim) }}
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: {{ include "shoebox.fullname" . }}-exports
  labels:
    {{- include "shoebox.labels" . | nindent 4 }}
spec:
  accessModes:
    - {{ .Values.persistence.exports.accessMode }}
  resources:
    requests:
      storage: {{ .Values.persistence.exports.size }}
  {{- if .Values.persistence.exports.storageClass }}
  storageClassName: {{ .Values.persistence.exports.storageClass }}
  {{- end }}
---
{{- end }}
{{- if .Values.config.mediaSourcePaths.enabled }}
{{- range .Values.config.mediaSourcePaths.sources }}
{{- if not .pathExistingClaim }}
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: {{ include "shoebox.fullname" $ }}-media-{{ .name | lower }}
  labels:
    {{- include "shoebox.labels" $ | nindent 4 }}
spec:
  accessModes:
    - ReadOnlyMany
  resources:
    requests:
      storage: {{ .size | default "100Gi" }}
  {{- if .storageClass }}
  storageClassName: {{ .storageClass }}
  {{- end }}
{{- end }}
{{- end }}
{{- end }}

{{- if .Values.config.mediaSourcePaths.enabled }}
{{- range .Values.config.mediaSourcePaths.sources }}
{{- if and .originalPath (not .originalExistingClaim) }}
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
  name: {{ include "shoebox.fullname" $ }}-original-media-{{ .name | lower }}
  labels:
    {{- include "shoebox.labels" $ | nindent 4 }}
spec:
  accessModes:
    - ReadOnlyMany
  resources:
    requests:
      storage: {{ .size | default "100Gi" }}
  {{- if .storageClass }}
  storageClassName: {{ .storageClass }}
  {{- end }}
{{- end }}
{{- end }}
{{- end }}
100 |
--------------------------------------------------------------------------------
/src/services/location.rs:
--------------------------------------------------------------------------------
1 | use sqlx::{Pool, Postgres, Row};
2 | use tracing::info;
3 | use uuid::Uuid;
4 |
5 | use crate::error::{AppError, Result};
6 |
7 | pub struct LocationService {
8 | db: Pool,
9 | }
10 |
11 | impl LocationService {
12 | pub fn new(db: Pool) -> Self {
13 | Self { db }
14 | }
15 |
16 | // Get all unique locations from videos
17 | pub async fn get_all_locations(&self) -> Result> {
18 | let rows = sqlx::query(
19 | "SELECT DISTINCT location FROM videos
20 | WHERE location IS NOT NULL AND location != ''
21 | ORDER BY location"
22 | )
23 | .fetch_all(&self.db)
24 | .await
25 | .map_err(AppError::Database)?;
26 |
27 | let mut locations = Vec::new();
28 | for row in rows {
29 | let location: String = row.get("location");
30 | locations.push(location);
31 | }
32 |
33 | Ok(locations)
34 | }
35 |
36 | // Get location usage statistics
37 | pub async fn get_location_usage(&self) -> Result> {
38 | let rows = sqlx::query(
39 | "SELECT location, COUNT(*) as video_count
40 | FROM videos
41 | WHERE location IS NOT NULL AND location != ''
42 | GROUP BY location
43 | ORDER BY location"
44 | )
45 | .fetch_all(&self.db)
46 | .await
47 | .map_err(AppError::Database)?;
48 |
49 | let mut results = Vec::new();
50 | for row in rows {
51 | results.push(LocationUsage {
52 | name: row.get("location"),
53 | video_count: row.get("video_count"),
54 | });
55 | }
56 |
57 | Ok(results)
58 | }
59 |
60 | // Update location across multiple videos
61 | pub async fn update_location(&self, old_location: &str, new_location: &str) -> Result {
62 | let result = sqlx::query(
63 | "UPDATE videos SET location = $1 WHERE location = $2"
64 | )
65 | .bind(new_location)
66 | .bind(old_location)
67 | .execute(&self.db)
68 | .await
69 | .map_err(AppError::Database)?;
70 |
71 | let count = result.rows_affected() as usize;
72 | if count > 0 {
73 | info!("Updated location '{}' to '{}' in {} videos", old_location, new_location, count);
74 | }
75 |
76 | Ok(count)
77 | }
78 |
79 | // Delete (set to NULL) location across multiple videos
80 | pub async fn delete_location(&self, location: &str) -> Result {
81 | let result = sqlx::query(
82 | "UPDATE videos SET location = NULL WHERE location = $1"
83 | )
84 | .bind(location)
85 | .execute(&self.db)
86 | .await
87 | .map_err(AppError::Database)?;
88 |
89 | let count = result.rows_affected() as usize;
90 | if count > 0 {
91 | info!("Removed location '{}' from {} videos", location, count);
92 | }
93 |
94 | Ok(count)
95 | }
96 | }
97 |
/// Location usage statistics: one location name and the number of videos
/// carrying it (as returned by `LocationService::get_location_usage`).
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct LocationUsage {
    pub name: String,
    pub video_count: i64,
}
104 |
--------------------------------------------------------------------------------
/docs/usage.md:
--------------------------------------------------------------------------------
1 | # Using Shoebox
2 |
3 | This guide covers the basic usage of Shoebox for organizing and preserving your videos.
4 |
5 | ## Accessing the Web Interface
6 |
7 | After installing Shoebox, you can access the web interface by navigating to:
8 |
9 | ```
http://<your-server-ip>:3000
11 | ```
12 |
13 | If you're running Shoebox locally, this would be:
14 |
15 | ```
16 | http://localhost:3000
17 | ```
18 |
19 | ## Organizing Videos
20 |
21 | Shoebox helps you organize your videos by providing a structured way to catalog and tag them.
22 |
23 | ### Reviewing New Videos
24 |
25 | When you first start Shoebox, it will scan your configured media source paths for videos. New videos will appear in the "Unreviewed" section.
26 |
27 | 1. Navigate to the "Unreviewed" page
28 | 2. For each video:
29 | - Watch the preview
30 | - Add tags and descriptions
31 | - Mark as reviewed
32 |
33 | ### Tagging Videos
34 |
35 | Tags help you categorize your videos for easier searching and filtering:
36 |
37 | 1. Select a video
38 | 2. Add relevant tags (e.g., "birthday", "vacation", "family")
39 | 3. Save your changes
40 |
41 | ### Searching and Filtering
42 |
43 | You can search for videos based on various criteria:
44 |
45 | 1. Use the search bar to find videos by name, tag, or description
46 | 2. Use filters to narrow down results by date, duration, or other metadata
47 | 3. Save your favorite searches for quick access
48 |
49 | ## Exporting Videos
50 |
51 | One of the key features of Shoebox is the ability to export videos for use in external editing tools.
52 |
53 | ### Basic Export
54 |
55 | To export a video:
56 |
57 | 1. Select the video you want to export
58 | 2. Click the "Export" button
59 | 3. Choose the export location
60 | 4. Wait for the export to complete
61 |
62 | The exported video will be copied to the specified location, preserving its original quality.
63 |
64 | ### Export with Original Path Information
65 |
66 | If you've configured Shoebox with original path information (see [Configuration](./configuration.md)), the export will include metadata about the video's original location. This is particularly useful when:
67 |
68 | 1. You're exporting videos for use in a video editing project
69 | 2. You need to maintain references to the original file locations
70 | 3. You're migrating videos between systems
71 |
72 | ## System Information and Management
73 |
74 | Shoebox provides system information and management tools to help you maintain your video collection.
75 |
76 | ### Viewing System Information
77 |
78 | To view system information:
79 |
80 | 1. Navigate to the "System" page
81 | 2. Here you can see:
82 | - Storage usage
83 | - Number of videos
84 | - Database status
85 | - Application version
86 |
87 | ### Managing Storage
88 |
89 | To manage storage:
90 |
91 | 1. Regularly check the storage usage on the System page
92 | 2. Consider archiving older videos if storage is running low
93 | 3. Ensure your export and thumbnail directories have sufficient space
94 |
95 | ## Best Practices
96 |
97 | Here are some best practices for using Shoebox effectively:
98 |
99 | 1. **Regular Reviews**: Set aside time to review new videos regularly
100 | 2. **Consistent Tagging**: Develop a consistent tagging system
101 | 3. **Backup**: Regularly backup your Shoebox database and configuration
102 | 4. **Storage Planning**: Plan your storage needs based on your video collection size
103 | 5. **Original Paths**: When possible, configure Shoebox with information about the original location of your videos
104 |
--------------------------------------------------------------------------------
/docs/configuration.md:
--------------------------------------------------------------------------------
1 | # Configuration
2 |
3 | Shoebox offers various configuration options to customize its behavior according to your needs.
4 |
5 | ## Environment Variables
6 |
7 | Shoebox can be configured using environment variables. Here are the main configuration options:
8 |
9 | ### Server Configuration
10 |
11 | | Environment Variable | Description | Default |
12 | |---------------------|-------------|---------|
13 | | `SERVER_HOST` | Host to bind the server | `127.0.0.1` |
14 | | `SERVER_PORT` | Port to bind the server | `3000` |
15 |
16 | ### Database Configuration
17 |
18 | | Environment Variable | Description | Default |
19 | |---------------------|-------------|---------|
20 | | `DATABASE_URL` | Database connection URL | `sqlite:data.db` |
21 | | `DATABASE_MAX_CONNECTIONS` | Maximum number of database connections | `5` |
22 |
23 | ### Media Configuration
24 |
25 | | Environment Variable | Description | Default |
26 | |---------------------|-------------|---------|
27 | | `MEDIA_SOURCE_PATHS` | Paths to scan for videos | `./media` |
28 | | `EXPORT_BASE_PATH` | Path for exported files | `./exports` |
29 | | `THUMBNAIL_PATH` | Path to store thumbnails | `./thumbnails` |
30 |
31 | ## Media Source Paths Configuration
32 |
33 | The `MEDIA_SOURCE_PATHS` environment variable is particularly important as it defines where Shoebox looks for videos. This variable accepts a comma-separated list of paths.
34 |
35 | ### Basic Usage
36 |
37 | For basic usage, you can specify one or more paths:
38 |
39 | ```
40 | MEDIA_SOURCE_PATHS=/path/to/videos,/path/to/more/videos
41 | ```
42 |
43 | ### Advanced Configuration with Original Locations
44 |
45 | A recent enhancement allows you to specify the original location of videos, which is useful when the path in your container or server differs from the original path where the videos were created or stored.
46 |
47 | Each path can include additional configuration options using the following format:
48 |
49 | ```
50 | /path/to/videos;/original/path;original_extension
51 | ```
52 |
53 | Where:
54 | - `/path/to/videos` is the path where the videos are mounted in the container or server
55 | - `/original/path` (optional) is the original location of the videos on the source system
56 | - `original_extension` (optional) is the original file extension of the videos
57 |
58 | For example:
59 |
60 | ```
61 | MEDIA_SOURCE_PATHS=/mnt/videos;/home/user/videos;mp4,/mnt/other-videos;/media/external/videos
62 | ```
63 |
64 | This configuration specifies two media source paths:
65 | 1. `/mnt/videos` with original path `/home/user/videos` and original extension `mp4`
66 | 2. `/mnt/other-videos` with original path `/media/external/videos` and no specific extension
67 |
68 | ### Why Specify Original Locations?
69 |
70 | Specifying the original location of videos is useful for several reasons:
71 |
72 | 1. **Preserving metadata**: When exporting videos, Shoebox can include information about their original location, which helps with organization and traceability.
73 | 2. **Compatibility with external tools**: Some video editing tools may use absolute paths in project files. By knowing the original path, Shoebox can help maintain compatibility.
74 | 3. **Migration between systems**: If you move your videos from one system to another, specifying the original location helps maintain consistency in your workflow.
75 |
76 | ## Configuration Files
77 |
78 | Currently, Shoebox does not support configuration files directly. All configuration is done through environment variables or command-line arguments.
79 |
80 | For Kubernetes deployments using the Helm chart, configuration is done through the `values.yaml` file or by setting values with the `--set` flag. See the [Helm Chart](./installation/helm-chart.md) page for more details.
81 |
--------------------------------------------------------------------------------
/.github/workflows/helm-release.yml:
--------------------------------------------------------------------------------
1 | name: Release Charts
2 |
3 | on:
4 | push:
5 | branches: [ main ]
6 | paths:
7 | - 'charts/**'
8 | pull_request:
9 | branches: [ main ]
10 | paths:
11 | - 'charts/**'
12 | workflow_call:
13 |
14 | concurrency:
15 | group: "shoebox-helm"
16 | cancel-in-progress: true
17 |
18 | jobs:
19 | lint-test:
20 | runs-on: ubuntu-latest
21 | #services:
22 | # postgres:
23 | # image: postgres
24 | # env:
25 | # POSTGRES_USER: postgres
26 | # POSTGRES_PASSWORD: postgres
27 | # POSTGRES_DB: videos
28 | # ports:
29 | # - 5432:5432
30 | # options: >-
31 | # --health-cmd pg_isready
32 | # --health-interval 10s
33 | # --health-timeout 5s
34 | # --health-retries 5
35 | steps:
36 | - name: Checkout
37 | uses: actions/checkout@v3
38 | with:
39 | fetch-depth: 0
40 |
41 | #- name: Add Bitnami repository
42 | # run: helm repo add bitnami https://charts.bitnami.com/bitnami
43 |
44 | #- name: Update Helm repositories
45 | # run: helm repo update
46 |
47 | - name: Set up chart-testing
48 | uses: helm/chart-testing-action@v2.3.1
49 |
50 | - name: Run chart-testing (lint)
51 | run: ct lint --target-branch ${{ github.event.repository.default_branch }} --charts charts/shoebox --validate-maintainers=false
52 |
53 | #- name: Add Helm repositories
54 | # run: |
55 | # helm repo add bitnami https://charts.bitnami.com/bitnami
56 | # helm repo update
57 |
58 | - name: Create kind cluster
59 | uses: helm/kind-action@v1.5.0
60 | with:
61 | wait: 120s
62 | - name: Install PostgreSQL in Kind
63 | run: |
64 | helm repo add bitnami https://charts.bitnami.com/bitnami
65 | helm install postgres bitnami/postgresql \
66 | --set primary.persistence.enabled=false \
67 | --set auth.database=videos \
68 | --set auth.username=postgres \
69 | --set auth.password=postgres
70 | kubectl wait --for=condition=ready pod -l app.kubernetes.io/name=postgresql --timeout=120s
71 |
72 | - name: Run chart-testing (install)
73 | run: ct install --target-branch ${{ github.event.repository.default_branch }} --charts charts/shoebox --helm-extra-set-args "--set persistence.data.enabled=false --set persistence.thumbnails.enabled=false --set persistence.exports.enabled=false --set config.mediaSourcePaths.enabled=false --set config.databaseUrl=postgres://postgres:postgres@postgres-postgresql.default.svc.cluster.local:5432/videos"
74 |
75 | release:
76 | needs: lint-test
77 | if: github.event_name != 'pull_request'
78 | runs-on: ubuntu-latest
79 | permissions:
80 | contents: write
81 | steps:
82 | - name: Checkout
83 | uses: actions/checkout@v3
84 | with:
85 | fetch-depth: 0
86 |
87 | - name: Configure Git
88 | run: |
89 | git config user.name "$GITHUB_ACTOR"
90 | git config user.email "$GITHUB_ACTOR@users.noreply.github.com"
91 |
92 | - name: Install Helm
93 | uses: azure/setup-helm@v3
94 | with:
95 | version: v3.10.0
96 |
97 | - name: Add Bitnami repository
98 | run: helm repo add bitnami https://charts.bitnami.com/bitnami
99 |
100 | - name: Update Helm repositories
101 | run: helm repo update
102 |
103 | - name: Run chart-releaser
104 | uses: helm/chart-releaser-action@v1.6.0
105 | with:
106 | pages_branch: main
107 | env:
108 | CR_TOKEN: "${{ github.token }}"
109 |
--------------------------------------------------------------------------------
/src/models/video.rs:
--------------------------------------------------------------------------------
1 | use serde::{Deserialize, Serialize};
2 | use sqlx::FromRow;
3 | use uuid::Uuid;
4 | use chrono::Utc;
5 |
/// A video row as persisted in the `videos` table (see the initial-schema
/// migration embedded in `db.rs`, plus the later ALTER migrations for
/// duration / original_file_path / exif_data / location / event).
///
// NOTE(review): every `Option` field below lost its type parameter during
// extraction (e.g. `Option<String>`) — this will not compile as-is. The
// schema suggests: title/description/created_date/thumbnail_path/location/
// event are strings, file_size is BIGINT (i64), rating is INTEGER 1..=5.
// Confirm against the live schema before restoring the parameters.
#[derive(Debug, Clone, Serialize, Deserialize, FromRow)]
pub struct Video {
    pub id: String,        // UUID v4 stored as text — see `Video::new`
    pub file_path: String, // managed media file path; unique per migration 20240610
    pub file_name: String,
    pub title: Option,
    pub description: Option,
    pub created_date: Option,
    pub file_size: Option,
    pub thumbnail_path: Option,
    pub rating: Option, // 1..=5 per the CHECK constraint in the initial schema
    pub duration: Option, // added by migration 20250522
    pub original_file_path: Option, // added by migration 20250523
    pub exif_data: Option, // added by migration 20240601
    pub location: Option, // added by migration 20250524
    pub event: Option,    // added by migration 20250524
    pub created_at: chrono::NaiveDateTime,
    pub updated_at: chrono::NaiveDateTime,
}
25 |
// NOTE(review): as elsewhere in this file, the `Vec`/`Option` type
// parameters were lost during extraction (likely `Vec<String>` and
// `Option<Vec<String>>`); restore them before compiling.

/// A `Video` together with its many-to-many associations. `#[serde(flatten)]`
/// serializes the video's own fields at the top level, next to the
/// `tags`/`people`/`shoeboxes` lists.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VideoWithMetadata {
    #[serde(flatten)]
    pub video: Video,
    pub tags: Vec,      // presumably tag names, not ids — confirm in service layer
    pub people: Vec,
    pub shoeboxes: Vec,
}

/// Payload for creating a video record: mirrors `Video`'s metadata fields
/// plus the association lists to attach on creation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CreateVideoDto {
    pub file_path: String,
    pub file_name: String,
    pub title: Option,
    pub description: Option,
    pub created_date: Option,
    pub file_size: Option,
    pub thumbnail_path: Option,
    pub rating: Option,
    pub duration: Option,
    pub original_file_path: Option,
    pub exif_data: Option,
    pub location: Option,
    pub event: Option,
    pub tags: Vec,
    pub people: Vec,
    pub shoeboxes: Vec,
}

/// Partial-update payload for a video.
// NOTE(review): presumably `None` fields are skipped by the update service
// ("leave unchanged" semantics) — confirm against the service implementation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UpdateVideoDto {
    pub title: Option,
    pub description: Option,
    pub rating: Option,
    pub location: Option,
    pub event: Option,
    pub tags: Option>,
    pub people: Option>,
    pub shoeboxes: Option>,
}

/// Applies a single `UpdateVideoDto` to many videos at once.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BulkUpdateVideoDto {
    pub video_ids: Vec, // presumably Vec<String> of video ids
    pub update: UpdateVideoDto,
}
72 |
/// Query parameters accepted by the video search endpoint.
///
// NOTE(review): type parameters stripped during extraction. Likely
// `Option<String>` for the text filters, `Option<Vec<String>>` for the list
// filters, numeric types for rating/limit/offset/durations, and
// `Option<bool>` for `unreviewed` — confirm against the route handler.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct VideoSearchParams {
    pub query: Option,     // free-text search term
    pub tags: Option>,
    pub people: Option>,
    pub shoeboxes: Option>,
    pub location: Option,
    pub event: Option,
    pub rating: Option,
    pub limit: Option,     // pagination page size
    pub offset: Option,    // pagination start
    pub unreviewed: Option, // restrict to the unreviewed-videos workflow
    pub sort_by: Option,
    pub sort_order: Option,
    pub start_date: Option, // created_date range filter
    pub end_date: Option,
    pub min_duration: Option, // duration range filter — units presumably seconds, confirm
    pub max_duration: Option,
}
92 |
/// Body of an export request: copy the selected videos into a named project
/// folder (presumably under the configured export base path — confirm in the
/// export service).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExportRequest {
    pub video_ids: Vec, // NOTE(review): type parameter stripped; presumably Vec<String>
    pub project_name: String,
    // Defaults to false when the field is omitted from the JSON payload.
    #[serde(default)]
    pub use_original_files: bool,
}
100 |
101 | impl Video {
102 | pub fn new(file_path: String, file_name: String) -> Self {
103 | let now = Utc::now().naive_utc();
104 | Self {
105 | id: Uuid::new_v4().to_string(),
106 | file_path,
107 | file_name,
108 | title: None,
109 | description: None,
110 | created_date: None,
111 | file_size: None,
112 | thumbnail_path: None,
113 | rating: None,
114 | duration: None,
115 | original_file_path: None,
116 | exif_data: None,
117 | location: None,
118 | event: None,
119 | created_at: now.clone(),
120 | updated_at: now,
121 | }
122 | }
123 | }
124 |
--------------------------------------------------------------------------------
/src/routes/shoebox.rs:
--------------------------------------------------------------------------------
1 | use axum::{
2 | extract::{Path, State},
3 | routing::{get, post, delete, put},
4 | Json, Router,
5 | };
6 |
7 | use crate::error::Result;
8 | use crate::models::{CreateShoeboxDto, Shoebox};
9 | use crate::services::AppState;
10 | use crate::services::ShoeboxService;
11 |
12 | pub fn router(app_state: AppState) -> Router {
13 | Router::new()
14 | .route("/", get(list_shoeboxes))
15 | .route("/", post(create_shoebox))
16 | .route("/usage", get(get_shoebox_usage))
17 | .route("/cleanup", post(cleanup_unused_shoeboxes))
18 | .route("/{id}", get(get_shoebox))
19 | .route("/{id}", put(update_shoebox))
20 | .route("/{id}", delete(delete_shoebox))
21 | .route("/{id}/videos", get(get_videos_in_shoebox))
22 | .route("/{id}/videos/{video_id}", put(add_video_to_shoebox))
23 | .route("/{id}/videos/{video_id}", delete(remove_video_from_shoebox))
24 | .with_state(app_state)
25 | }
26 |
// NOTE(review): the handler signatures below lost their angle-bracketed type
// parameters during extraction (likely `State<AppState>`, `Path<String>`,
// `Json<CreateShoeboxDto>`, `Result<Json<…>>`); restore before compiling.

/// GET `/` — list every shoebox.
async fn list_shoeboxes(State(state): State) -> Result>> {
    let shoebox_service = ShoeboxService::new(state.db.clone());
    let shoeboxes = shoebox_service.find_all().await?;
    Ok(Json(shoeboxes))
}

/// GET `/{id}` — fetch a single shoebox by id.
async fn get_shoebox(
    State(state): State,
    Path(id): Path,
) -> Result> {
    let shoebox_service = ShoeboxService::new(state.db.clone());
    let shoebox = shoebox_service.find_by_id(&id).await?;
    Ok(Json(shoebox))
}

/// POST `/` — create a shoebox from the request body.
async fn create_shoebox(
    State(state): State,
    Json(create_dto): Json,
) -> Result> {
    let shoebox_service = ShoeboxService::new(state.db.clone());
    let shoebox = shoebox_service.create(create_dto).await?;
    Ok(Json(shoebox))
}
50 |
/// Request body for `update_shoebox`; kept route-local rather than in
/// `models` since no other module uses it.
// NOTE(review): `description: Option` was presumably `Option<String>` —
// the type parameter was lost in extraction.
#[derive(serde::Deserialize)]
struct UpdateShoeboxDto {
    name: String,
    description: Option,
}

/// PUT `/{id}` — rename a shoebox and/or replace its description.
async fn update_shoebox(
    State(state): State,
    Path(id): Path,
    Json(update_dto): Json,
) -> Result> {
    let shoebox_service = ShoeboxService::new(state.db.clone());
    // `as_deref` passes the description as Option<&str> without cloning.
    let shoebox = shoebox_service.update(&id, &update_dto.name, update_dto.description.as_deref()).await?;
    Ok(Json(shoebox))
}

/// DELETE `/{id}` — remove a shoebox; responds with an empty JSON body.
async fn delete_shoebox(
    State(state): State,
    Path(id): Path,
) -> Result> {
    let shoebox_service = ShoeboxService::new(state.db.clone());
    shoebox_service.delete(&id).await?;
    Ok(Json(()))
}
75 |
/// GET `/usage` — per-shoebox usage statistics (the element type lives in
/// the service layer; the `Json<Vec<…>>` parameter was lost in extraction).
async fn get_shoebox_usage(
    State(state): State,
) -> Result>> {
    let shoebox_service = ShoeboxService::new(state.db.clone());
    let usage = shoebox_service.get_usage().await?;
    Ok(Json(usage))
}

/// Response body for `cleanup_unused_shoeboxes`.
#[derive(serde::Serialize)]
struct CleanupResponse {
    count: usize, // how many shoeboxes were cleaned up
}

/// POST `/cleanup` — presumably deletes shoeboxes that are no longer
/// referenced by any video (confirm in `ShoeboxService::cleanup_unused`)
/// and reports how many were removed.
async fn cleanup_unused_shoeboxes(State(state): State) -> Result> {
    let shoebox_service = ShoeboxService::new(state.db.clone());
    let count = shoebox_service.cleanup_unused().await?;
    Ok(Json(CleanupResponse { count }))
}
94 |
/// GET `/{id}/videos` — ids of the videos contained in shoebox `id`.
async fn get_videos_in_shoebox(
    State(state): State,
    Path(id): Path,
) -> Result>> {
    let shoebox_service = ShoeboxService::new(state.db.clone());
    let video_ids = shoebox_service.get_videos_in_shoebox(&id).await?;
    Ok(Json(video_ids))
}

/// PUT `/{id}/videos/{video_id}` — add a video to a shoebox.
/// Note the argument-order flip: the route captures (shoebox id, video id)
/// but the service takes (video_id, shoebox_id).
async fn add_video_to_shoebox(
    State(state): State,
    Path((id, video_id)): Path<(String, String)>,
) -> Result> {
    let shoebox_service = ShoeboxService::new(state.db.clone());
    shoebox_service.add_video_to_shoebox(&video_id, &id).await?;
    Ok(Json(()))
}

/// DELETE `/{id}/videos/{video_id}` — remove a video from a shoebox.
/// Same (video_id, shoebox_id) argument order as `add_video_to_shoebox`.
async fn remove_video_from_shoebox(
    State(state): State,
    Path((id, video_id)): Path<(String, String)>,
) -> Result> {
    let shoebox_service = ShoeboxService::new(state.db.clone());
    shoebox_service.remove_video_from_shoebox(&video_id, &id).await?;
    Ok(Json(()))
}
121 |
--------------------------------------------------------------------------------
/frontend/src/components/VideoCard.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import {
3 | Box,
4 | Image,
5 | Text,
6 | Heading,
7 | Badge,
8 | Flex,
9 | useColorModeValue,
10 | HStack,
11 | Icon
12 | } from '@chakra-ui/react';
13 | import { Link as RouterLink } from 'react-router-dom';
14 | import { FaStar, FaRegStar } from 'react-icons/fa';
15 | import { VideoWithMetadata } from '../api/client';
16 |
/** Props for `VideoCard`: the video record (with its tags, people and
 *  shoeboxes) that the card renders. */
interface VideoCardProps {
  video: VideoWithMetadata;
}
20 |
21 | const VideoCard: React.FC = ({ video }) => {
22 | const cardBg = useColorModeValue('white', 'gray.800');
23 | const cardBorder = useColorModeValue('gray.200', 'gray.700');
24 |
25 |
26 | // Format date
27 | const formatDate = (dateString?: string): string => {
28 | if (!dateString) return 'Unknown date';
29 | try {
30 | const date = new Date(dateString);
31 | if (isNaN(date.getTime())) return 'Unknown date';
32 | return date.toLocaleDateString();
33 | } catch (e) {
34 | return 'Unknown date';
35 | }
36 | };
37 |
38 | // Format duration
39 | const formatDuration = (seconds?: number): string => {
40 | if (!seconds) return '';
41 |
42 | const hours = Math.floor(seconds / 3600);
43 | const minutes = Math.floor((seconds % 3600) / 60);
44 | const remainingSeconds = seconds % 60;
45 |
46 | if (hours > 0) {
47 | return `${hours}:${minutes.toString().padStart(2, '0')}:${remainingSeconds.toString().padStart(2, '0')}`;
48 | } else {
49 | return `${minutes}:${remainingSeconds.toString().padStart(2, '0')}`;
50 | }
51 | };
52 |
53 | // Render rating stars
54 | const renderRating = (rating?: number) => {
55 | if (!rating) return null;
56 |
57 | const stars = [];
58 | for (let i = 1; i <= 5; i++) {
59 | stars.push(
60 |
66 | );
67 | }
68 |
69 | return (
70 |
71 | {stars}
72 |
73 | );
74 | };
75 |
76 | return (
77 |
88 |
96 |
97 |
98 |
99 | {video.title || video.file_name}
100 |
101 |
102 | {renderRating(video.rating)}
103 |
104 |
105 | {formatDate(video.created_date)}
106 |
107 |
108 |
109 | {video.duration ? formatDuration(video.duration) : 'Unknown duration'}
110 |
111 |
112 | {video.tags.length > 0 && (
113 |
114 | {video.tags.slice(0, 3).map((tag) => (
115 |
116 | {tag}
117 |
118 | ))}
119 | {video.tags.length > 3 && (
120 |
121 | +{video.tags.length - 3} more
122 |
123 | )}
124 |
125 | )}
126 |
127 | {video.people.length > 0 && (
128 |
129 | {video.people.slice(0, 2).map((person) => (
130 |
131 | {person}
132 |
133 | ))}
134 | {video.people.length > 2 && (
135 |
136 | +{video.people.length - 2} more
137 |
138 | )}
139 |
140 | )}
141 |
142 |
143 | );
144 | };
145 |
146 | export default VideoCard;
147 |
--------------------------------------------------------------------------------
/src/db.rs:
--------------------------------------------------------------------------------
1 | use sqlx::{Pool, Postgres, PgPool};
2 | use std::fs;
3 | use std::path::Path;
4 | use tracing::info;
5 |
6 | use crate::config::Config;
7 | use crate::error::{AppError, Result};
8 |
9 | pub async fn init_db(config: &Config) -> Result> {
10 | let db_url = &config.database.url;
11 | info!("Database URL: {}", db_url);
12 |
13 | // Check if this is a PostgreSQL URL
14 | if db_url.starts_with("postgres:") || db_url.starts_with("postgresql:") {
15 | // Initialize PostgreSQL
16 | init_postgres(db_url).await
17 | } else {
18 | // If not a PostgreSQL URL, return an error
19 | Err(AppError::ConfigError(
20 | "Only PostgreSQL is supported. Please provide a valid PostgreSQL connection URL.".to_string()
21 | ))
22 | }
23 | }
24 |
25 | async fn init_postgres(db_url: &str) -> Result> {
26 | // Create migrations directory if it doesn't exist
27 | ensure_migrations_dir()?;
28 |
29 | // Connect to the PostgreSQL database
30 | info!("Connecting to PostgreSQL database at {}", db_url);
31 | let pool = PgPool::connect(db_url)
32 | .await
33 | .map_err(AppError::Database)?;
34 |
35 | // Run migrations
36 | info!("Running migrations");
37 | sqlx::migrate!("./migrations")
38 | .run(&pool)
39 | .await
40 | .map_err(|e| AppError::Database(sqlx::Error::Migrate(Box::new(e))))?;
41 |
42 | info!("Database initialized successfully");
43 | Ok(pool)
44 | }
45 |
46 | fn ensure_migrations_dir() -> Result<()> {
47 | let migrations_dir = Path::new("./migrations");
48 | if !migrations_dir.exists() {
49 | info!("Creating migrations dir");
50 | fs::create_dir_all(migrations_dir).map_err(|e| {
51 | AppError::Io(std::io::Error::new(
52 | std::io::ErrorKind::Other,
53 | format!("Failed to create migrations directory: {e}"),
54 | ))
55 | })?;
56 |
57 | // Create initial migration file
58 | info!("create initial migration");
59 | create_initial_migration(migrations_dir)?;
60 | }
61 | Ok(())
62 | }
63 |
/// Writes the baseline schema file (`20240101000000_initial_schema.sql`)
/// into `migrations_dir`, overwriting any existing file of that name.
///
/// The embedded SQL creates the `videos`, `tags` and `people` tables, the
/// `video_tags` / `video_people` join tables (with cascading deletes), and
/// the supporting indexes. The "Down migration" section is intentionally
/// commented out.
// NOTE(review): keep this literal byte-for-byte stable — migration runners
// such as sqlx typically checksum applied migrations, so edits here can
// break already-initialized databases.
fn create_initial_migration(migrations_dir: &Path) -> Result<()> {
    let migration_file = migrations_dir.join("20240101000000_initial_schema.sql");
    let migration_content = r#"-- Initial schema for Shoebox - a digital shoebox for your videos
-- Up migration

-- Videos table
CREATE TABLE IF NOT EXISTS videos (
    id VARCHAR(36) PRIMARY KEY NOT NULL,
    file_path VARCHAR(255) NOT NULL,
    file_name VARCHAR(255) NOT NULL,
    title VARCHAR(255),
    description TEXT,
    created_date VARCHAR(50),
    file_size BIGINT,
    thumbnail_path VARCHAR(255),
    rating INTEGER CHECK (rating BETWEEN 1 AND 5 OR rating IS NULL),
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- Tags table
CREATE TABLE IF NOT EXISTS tags (
    id VARCHAR(36) PRIMARY KEY NOT NULL,
    name VARCHAR(100) NOT NULL UNIQUE,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- People table
CREATE TABLE IF NOT EXISTS people (
    id VARCHAR(36) PRIMARY KEY NOT NULL,
    name VARCHAR(100) NOT NULL UNIQUE,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP
);

-- Video-Tag relationship table
CREATE TABLE IF NOT EXISTS video_tags (
    video_id VARCHAR(36) NOT NULL,
    tag_id VARCHAR(36) NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY (video_id, tag_id),
    FOREIGN KEY (video_id) REFERENCES videos (id) ON DELETE CASCADE,
    FOREIGN KEY (tag_id) REFERENCES tags (id) ON DELETE CASCADE
);

-- Video-People relationship table
CREATE TABLE IF NOT EXISTS video_people (
    video_id VARCHAR(36) NOT NULL,
    person_id VARCHAR(36) NOT NULL,
    created_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,
    PRIMARY KEY (video_id, person_id),
    FOREIGN KEY (video_id) REFERENCES videos (id) ON DELETE CASCADE,
    FOREIGN KEY (person_id) REFERENCES people (id) ON DELETE CASCADE
);

-- Create indexes
CREATE INDEX IF NOT EXISTS idx_videos_file_path ON videos (file_path);
CREATE INDEX IF NOT EXISTS idx_videos_created_date ON videos (created_date);
CREATE INDEX IF NOT EXISTS idx_tags_name ON tags (name);
CREATE INDEX IF NOT EXISTS idx_people_name ON people (name);

-- Down migration
-- DROP TABLE IF EXISTS video_people;
-- DROP TABLE IF EXISTS video_tags;
-- DROP TABLE IF EXISTS people;
-- DROP TABLE IF EXISTS tags;
-- DROP TABLE IF EXISTS videos;
"#;

    fs::write(migration_file, migration_content).map_err(AppError::Io)?;
    info!("Created initial migration file");
    Ok(())
}
136 |
--------------------------------------------------------------------------------
/charts/shoebox/README.md:
--------------------------------------------------------------------------------
1 | # Shoebox Helm Chart
2 |
3 | This Helm chart deploys the Shoebox application on a Kubernetes cluster.
4 |
5 | ## Prerequisites
6 |
7 | - Kubernetes 1.19+
8 | - Helm 3.2.0+
9 | - PV provisioner support in the underlying infrastructure (if persistence is enabled)
10 |
11 | ## Getting Started
12 |
13 | ### Installing the Chart
14 |
15 | To install the chart with the release name `shoebox`:
16 |
17 | ```bash
18 | helm install shoebox .
19 | ```
20 |
21 | ### Using a Specific Image Version
22 |
23 | By default, the chart uses the `latest` tag for the Shoebox image. For production environments, it's recommended to use a specific version:
24 |
25 | ```bash
26 | helm install shoebox . --set image.tag=v1.0.0
27 | ```
28 |
29 | ### Using a Private Registry
30 |
31 | If you're using a private registry for the Shoebox image, you'll need to create a secret with your registry credentials:
32 |
33 | ```bash
34 | kubectl create secret docker-registry regcred \
35 | --docker-server=ghcr.io \
36 | --docker-username= \
37 | --docker-password= \
38 | --docker-email=
39 | ```
40 |
41 | Then, specify the secret in your Helm install command:
42 |
43 | ```bash
44 | helm install shoebox . --set imagePullSecrets[0].name=regcred
45 | ```
46 |
47 | ## Configuration
48 |
49 | The following table lists the configurable parameters of the Shoebox chart and their default values.
50 |
51 | ### Image Configuration
52 |
53 | | Parameter | Description | Default |
54 | |-----------|-------------|---------|
55 | | `image.repository` | Image repository | `ghcr.io/slackspace-io/shoebox` |
56 | | `image.tag` | Image tag | `latest` |
57 | | `image.pullPolicy` | Image pull policy | `IfNotPresent` |
58 | | `imagePullSecrets` | Image pull secrets | `[]` |
59 |
60 | ### Application Configuration
61 |
62 | | Parameter | Description | Default |
63 | |-----------|-------------|---------|
64 | | `config.serverHost` | Host to bind the server | `0.0.0.0` |
65 | | `config.serverPort` | Port to bind the server | `3000` |
| `config.databaseUrl` | Database URL (PostgreSQL) | `postgres://postgres:postgres@postgres:5432/videos` |
67 | | `config.mediaSourcePaths.enabled` | Enable media source paths | `true` |
68 | | `config.mediaSourcePaths.sources` | List of media source paths to scan for videos | See values.yaml |
69 | | `config.thumbnailPath` | Path to store thumbnails | `/app/thumbnails` |
70 | | `config.exportBasePath` | Path for exported files | `/app/exports` |
71 | | `config.rustLog` | Rust log level | `info` |
72 |
73 | ### Persistence Configuration
74 |
75 | | Parameter | Description | Default |
76 | |-----------|-------------|---------|
77 | | `persistence.data.enabled` | Enable persistence for data | `true` |
78 | | `persistence.data.size` | Size of data PVC | `1Gi` |
79 | | `persistence.thumbnails.enabled` | Enable persistence for thumbnails | `true` |
80 | | `persistence.thumbnails.size` | Size of thumbnails PVC | `5Gi` |
81 | | `persistence.exports.enabled` | Enable persistence for exports | `true` |
82 | | `persistence.exports.size` | Size of exports PVC | `10Gi` |
83 | | `config.mediaSourcePaths.enabled` | Enable media source paths | `true` |
84 | | `config.mediaSourcePaths.sources[].pathExistingClaim` | Use existing PVC for media source path | `""` |
85 | | `config.mediaSourcePaths.sources[].originalExistingClaim` | Use existing PVC for original media source path | `""` |
86 |
87 | ### PostgreSQL Configuration
88 |
89 | | Parameter | Description | Default |
90 | |-----------|-------------|---------|
91 | | `postgresql.enabled` | Enable PostgreSQL | `false` |
92 | | `postgresql.postgresqlUsername` | PostgreSQL username | `postgres` |
93 | | `postgresql.postgresqlPassword` | PostgreSQL password | `postgres` |
94 | | `postgresql.postgresqlDatabase` | PostgreSQL database | `videos` |
95 | | `postgresql.persistence.enabled` | Enable PostgreSQL persistence | `true` |
96 | | `postgresql.persistence.size` | Size of PostgreSQL PVC | `8Gi` |
97 |
98 | ## Examples
99 |
100 | ### Using SQLite with Persistence
101 |
102 | ```bash
103 | helm install shoebox . \
104 | --set persistence.data.enabled=true \
105 | --set persistence.thumbnails.enabled=true \
106 | --set persistence.exports.enabled=true \
107 | --set persistence.media.existingClaim=media-pvc
108 | ```
109 |
110 | ### Using PostgreSQL
111 |
112 | ```bash
113 | helm install shoebox . \
114 | --set postgresql.enabled=true \
115 | --set postgresql.postgresqlPassword=mypassword \
116 | --set persistence.thumbnails.enabled=true \
117 | --set persistence.exports.enabled=true \
118 | --set persistence.media.existingClaim=media-pvc
119 | ```
120 |
121 | ### Disabling Persistence (for testing)
122 |
123 | ```bash
124 | helm install shoebox . \
125 | --set persistence.data.enabled=false \
126 | --set persistence.thumbnails.enabled=false \
127 | --set persistence.exports.enabled=false \
128 | --set config.mediaSourcePaths.enabled=false
129 | ```
130 |
131 | ## Upgrading
132 |
133 | ### To 1.0.0
134 |
135 | This is the first stable release of the Shoebox chart.
136 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Shoebox
2 |
3 | A digital shoebox for organizing and preserving your videos over a lifetime.
4 |
5 | ## ⚠️ Active Development Notice
6 |
7 | **This project is in active development and is not yet safe for production use.**
8 |
9 | Features may change, data models might be restructured, and there could be bugs that affect your media files. Use at your own risk and always maintain backups of your original media.
10 |
11 | ## The Digital Shoebox Concept
12 |
13 | Remember how previous generations kept their memories in physical shoeboxes at their parents' homes? Those boxes filled with photographs, negatives, and mementos that captured life's precious moments.
14 |
15 | Shoebox aims to recreate that experience for the digital age. Instead of photos getting lost in the endless stream of cloud services or social media platforms, Shoebox provides a dedicated space for your videos - a digital equivalent of that cherished box in your closet.
16 |
17 | ## What Makes Shoebox Different
18 |
19 | **Shoebox is not trying to compete with immich, Google Photos, or other photo management services.**
20 |
21 | The main purpose of Shoebox is to help you:
22 |
- **Find original videos** and export (copy) them to a defined location, allowing you to easily import them into a video editor of your choice. Create highlights, collages, etc.
24 | - **Organize your videos** over a lifetime for easy recall and future use. Have a coffee, review new videos cataloguing your memories as your kids grow.
25 | - **Preserve video memories** in a way that makes them accessible and workable
26 |
27 | While other services focus on viewing and sharing, Shoebox focuses on organization and preservation with the specific goal of making your video content useful for future creative projects.
28 |
29 | ### Video Demo
30 |
31 | Here's a video demo of the application:
32 |
33 | [Watch the demo video on YouTube](https://www.youtube.com/watch?v=xfPMCLWnUz8)
34 |
35 |
36 | ## Tech Stack
37 |
38 | - **Backend**: Rust with Axum web framework
39 | - **Frontend**: React with TypeScript
40 | - **Database**: SQLite/PostgreSQL via SQLx
41 | - **Media Processing**: FFmpeg
42 | - **Deployment**: Docker/Kubernetes support
43 |
44 | ## Features
45 |
46 | - Video organization and cataloging
47 | - Thumbnail generation
48 | - Video metadata extraction
49 | - Export capabilities
50 | - Unreviewed videos workflow
51 | - System information and management
52 |
53 | ## Getting Started
54 |
55 | ### Prerequisites
56 |
57 | - [Rust](https://www.rust-lang.org/tools/install) (for backend development)
58 | - [Node.js](https://nodejs.org/) and [Yarn](https://yarnpkg.com/) (for frontend development)
59 | - [FFmpeg](https://ffmpeg.org/download.html) (for video processing)
60 | - [Docker](https://docs.docker.com/get-docker/) (optional, for containerized deployment)
61 |
62 | ### Running the Frontend (Development)
63 |
64 | ```bash
65 | # Navigate to the frontend directory
66 | cd frontend
67 |
68 | # Install dependencies
69 | yarn install
70 |
71 | # Start the development server
72 | yarn dev
73 | ```
74 |
75 | The frontend development server will be available at http://localhost:5173.
76 |
77 | ### Running the Backend (Development)
78 |
79 | ```bash
80 | # Run the backend server
81 | cargo run
82 | ```
83 |
84 | The backend server will be available at http://localhost:3000.
85 |
86 | ### Running with Docker
87 |
88 | 1. Edit the `docker-compose.yml` file to configure your media source paths:
89 |
90 | ```yaml
91 | volumes:
92 | # Mount media source directories (read-only)
93 | - /path/to/your/videos:/mnt/videos:ro
94 |
95 | # Mount export directory (read-write)
96 | - /path/to/your/exports:/app/exports
97 | ```
98 |
99 | 2. Start the application:
100 |
101 | ```bash
102 | docker-compose up -d
103 | ```
104 |
105 | The application will be available at http://localhost:3000.
106 |
107 | ## Contributing
108 |
109 | As this project is in active development, contributions are welcome but the codebase may change rapidly.
110 |
111 | ## Releasing
112 |
113 | Shoebox uses GitHub Actions for automated releases. To create a new release:
114 |
115 | 1. Go to the Actions tab in the GitHub repository
116 | 2. Select the "Release" workflow
117 | 3. Click "Run workflow"
118 | 4. Enter the version number (e.g., 0.1.0) following semantic versioning
119 | 5. Select the release type (patch, minor, or major)
120 | 6. Indicate whether this is a prerelease
121 | 7. Choose whether to update the Helm chart version (default: false)
122 | 8. Click "Run workflow"
123 |
124 | The workflow will:
125 | - Validate the version format and run tests
126 | - Update version number in Cargo.toml
127 | - Build and publish Docker images
128 | - Create a GitHub release with auto-generated changelog
129 | - Update documentation
130 |
131 | If you choose to update the Helm chart version, the workflow will also:
132 | - Update version numbers in Helm charts
133 | - Release the updated Helm chart
134 | - Include Helm chart installation instructions in the release notes
135 |
136 | ## License
137 |
138 | This project is licensed under the MIT License - see the [LICENSE](LICENSE) file for details.
139 |
--------------------------------------------------------------------------------
/charts/shoebox/values.yaml:
--------------------------------------------------------------------------------
1 | # Default values for shoebox.
2 | # This is a YAML-formatted file.
3 |
4 | replicaCount: 1
5 |
6 | image:
7 | repository: ghcr.io/slackspace-io/shoebox
8 | pullPolicy: IfNotPresent
9 | # Overrides the image tag whose default is the chart appVersion.
10 | # For production, use a specific version tag (e.g., v1.0.0)
11 | # For development, you can use main, latest, or a specific commit SHA
12 | tag: "latest"
13 |
14 | imagePullSecrets: []
15 | # If the image is in a private repository, you need to create a secret with your registry credentials
16 | # and specify it here. For example:
17 | # imagePullSecrets:
18 | # - name: regcred
19 | nameOverride: ""
20 | fullnameOverride: ""
21 |
22 | serviceAccount:
23 | # Specifies whether a service account should be created
24 | create: false
25 | # Annotations to add to the service account
26 | annotations: {}
27 | # The name of the service account to use.
28 | # If not set and create is true, a name is generated using the fullname template
29 | name: ""
30 |
31 | podAnnotations: {}
32 |
33 | podSecurityContext: {}
34 | # fsGroup: 2000
35 |
36 | securityContext: {}
37 | # capabilities:
38 | # drop:
39 | # - ALL
40 | # readOnlyRootFilesystem: true
41 | # runAsNonRoot: true
42 | # runAsUser: 1000
43 |
44 | service:
45 | type: ClusterIP
46 | port: 3000
47 |
48 | ingress:
49 | enabled: false
50 | className: ""
51 | annotations: {}
52 | # kubernetes.io/ingress.class: nginx
53 | # kubernetes.io/tls-acme: "true"
54 | hosts:
55 | - host: chart-example.local
56 | paths:
57 | - path: /
58 | pathType: ImplementationSpecific
59 | tls: []
60 | # - secretName: chart-example-tls
61 | # hosts:
62 | # - chart-example.local
63 |
64 | resources: {}
65 | # We usually recommend not to specify default resources and to leave this as a conscious
66 | # choice for the user. This also increases chances charts run on environments with little
67 | # resources, such as Minikube. If you do want to specify resources, uncomment the following
68 | # lines, adjust them as necessary, and remove the curly braces after 'resources:'.
69 | # limits:
70 | # cpu: 100m
71 | # memory: 128Mi
72 | # requests:
73 | # cpu: 100m
74 | # memory: 128Mi
75 |
76 | nodeSelector: {}
77 |
78 | tolerations: []
79 |
80 | affinity: {}
81 |
82 | # Autoscaling configuration
83 | autoscaling:
84 | enabled: false
85 | minReplicas: 1
86 | maxReplicas: 3
87 | targetCPUUtilizationPercentage: 80
88 | # targetMemoryUtilizationPercentage: 80
89 |
90 | # Application specific configuration
91 | config:
92 | serverHost: "0.0.0.0"
93 | serverPort: 3000
  # Database connection URL. If provided, this takes precedence over PostgreSQL settings.
  # Example for PostgreSQL: "postgres://username:password@hostname:5432/dbname"
  databaseUrl: "postgres://postgres:postgres@postgres:5432/videos"
  # Media source paths configuration
  # You can configure multiple media sources with different settings
  mediaSourcePaths:
100 | # Set to false to disable all media source paths
101 | enabled: true
102 | sources:
103 | - name: bmpcc
104 | path: /mnt/videos
105 | # Optional: specify an existing claim for this source's path
106 | # pathExistingClaim: "existing-claim-name"
107 | # Optional: specify if this path should be mounted as read-only
108 | # readOnly: true
109 | originalPath: /home/user/videos
110 | originalExtension: mp4
111 | # Optional: specify a default shoebox for this source
112 | # defaultShoebox: "my-shoebox"
113 | # Optional: specify an existing claim for this source's originalPath
114 | # originalExistingClaim: "existing-claim-name"
115 | - name: gopro
116 | path: /mnt/other-videos
117 | # Optional: specify an existing claim for this source's path
118 | # pathExistingClaim: "existing-claim-name"
119 | # Optional: specify if this path should be mounted as read-only
120 | # readOnly: true
121 | originalPath: /media/external/videos
122 | # Optional: specify a default shoebox for this source
123 | # defaultShoebox: "gopro-shoebox"
124 | # Optional: specify an existing claim for this source's originalPath
125 | # originalExistingClaim: "existing-claim-name"
126 | thumbnailPath: "/app/thumbnails"
127 | exportBasePath: "/app/exports"
128 | rustLog: "info"
129 | # Maximum number of concurrent tasks for scanning
130 | maxConcurrentTasks: 4
131 |
132 | # Persistence configuration
133 | persistence:
134 |   # Data directory for the application database (used for SQLite when no external databaseUrl is configured)
135 | data:
136 | enabled: true
137 | size: 1Gi
138 | storageClass: ""
139 | accessMode: ReadWriteOnce
140 | readOnly: false
141 |
142 | # Thumbnails directory
143 | thumbnails:
144 | enabled: true
145 | size: 5Gi
146 | storageClass: ""
147 | accessMode: ReadWriteOnce
148 | readOnly: false
149 |
150 | # Exports directory
151 | exports:
152 | enabled: true
153 | size: 10Gi
154 | storageClass: ""
155 | accessMode: ReadWriteOnce
156 | existingClaim: ""
157 | readOnly: false
158 |
159 | # Optional PostgreSQL dependency
160 | # postgresql:
161 | # enabled: false
162 | #   # If enabled and no databaseUrl is provided, the application will automatically construct the databaseUrl from these settings
163 | # postgresqlUsername: postgres
164 | # postgresqlPassword: postgres
165 | # postgresqlDatabase: videos
166 | # persistence:
167 | # enabled: true
168 | # size: 8Gi
169 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
name: Release

on:
  workflow_dispatch:
    inputs:
      version:
        description: 'Version to release (e.g., 0.1.0)'
        required: true
      release_type:
        description: 'Release type'
        required: true
        default: 'minor'
        type: choice
        options:
          - patch
          - minor
          - major
      prerelease:
        description: 'Is this a prerelease?'
        required: true
        default: false
        type: boolean
      update_helm:
        description: 'Update Helm chart version?'
        required: true
        default: false
        type: boolean

jobs:
  validate:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4

      # NOTE(review): actions-rs/toolchain is archived and unmaintained;
      # consider migrating to dtolnay/rust-toolchain in a follow-up.
      - name: Set up Rust
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true

      # Accept plain semver (0.1.0) and, since this workflow exposes a
      # `prerelease` input, an optional prerelease suffix (e.g. 0.1.0-rc.1).
      # The previous pattern rejected every prerelease version outright.
      - name: Validate version format
        run: |
          if ! [[ "${{ github.event.inputs.version }}" =~ ^[0-9]+\.[0-9]+\.[0-9]+(-[0-9A-Za-z.]+)?$ ]]; then
            echo "Invalid version format. Please use semantic versioning (e.g., 0.1.0 or 0.1.0-rc.1)"
            exit 1
          fi

      - name: Run tests
        run: cargo test --all-features

      # Clippy runs with warnings allowed (-A warnings) so the release is not
      # blocked by lints; the dedicated rust-clippy workflow enforces them.
      - name: Run clippy
        run: |
          rustup component add clippy
          cargo clippy --all-features -- -A warnings

  update-version:
    needs: validate
    runs-on: ubuntu-latest
    outputs:
      new_version: ${{ steps.set-version.outputs.new_version }}
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          token: ${{ secrets.GITHUB_TOKEN }}

      - name: Configure Git
        run: |
          git config user.name "GitHub Actions"
          git config user.email "actions@github.com"

      - name: Update version in Cargo.toml
        id: set-version
        run: |
          VERSION="${{ github.event.inputs.version }}"
          echo "new_version=$VERSION" >> $GITHUB_OUTPUT
          sed -i "s/^version = \".*\"/version = \"$VERSION\"/" Cargo.toml

          # Only update the Helm chart if requested
          if [[ "${{ github.event.inputs.update_helm }}" == "true" ]]; then
            echo "Updating Helm chart version to $VERSION"
            sed -i "s/^version: .*/version: $VERSION/" charts/shoebox/Chart.yaml
            sed -i "s/^appVersion: .*/appVersion: \"$VERSION\"/" charts/shoebox/Chart.yaml
            git add Cargo.toml charts/shoebox/Chart.yaml
          else
            echo "Skipping Helm chart update as requested"
            git add Cargo.toml
          fi

          git commit -m "chore: bump version to $VERSION"
          git tag -a "v$VERSION" -m "Release v$VERSION"
          git push origin main
          git push origin "v$VERSION"

  build-and-publish:
    needs: update-version
    uses: ./.github/workflows/build-and-publish.yml
    secrets:
      GH_PAT: ${{ secrets.GITHUB_TOKEN }}

  create-release:
    needs: [update-version, build-and-publish]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout repository
        uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Generate changelog
        id: changelog
        uses: metcalfc/changelog-generator@v4.1.0
        with:
          myToken: ${{ secrets.GITHUB_TOKEN }}

      - name: Create GitHub Release
        uses: softprops/action-gh-release@v1
        with:
          tag_name: v${{ needs.update-version.outputs.new_version }}
          name: Release v${{ needs.update-version.outputs.new_version }}
          body: |
            ## Shoebox v${{ needs.update-version.outputs.new_version }}

            ${{ steps.changelog.outputs.changelog }}

            ### Docker Image
            ```
            docker pull ghcr.io/${{ github.repository }}:${{ needs.update-version.outputs.new_version }}
            ```

            ${{ github.event.inputs.update_helm == 'true' && '### Helm Chart
            ```
            helm repo add shoebox https://${{ github.repository_owner }}.github.io/${{ github.event.repository.name }}/helm/
            helm repo update
            helm install shoebox shoebox/shoebox --version ${{ needs.update-version.outputs.new_version }}
            ```' || '' }}
          draft: false
          prerelease: ${{ github.event.inputs.prerelease }}

  release-helm-chart:
    needs: [update-version, create-release]
    if: ${{ github.event.inputs.update_helm == 'true' }}
    uses: ./.github/workflows/helm-release.yml

  update-docs-with-helm:
    needs: [update-version, create-release, release-helm-chart]
    if: ${{ github.event.inputs.update_helm == 'true' }}
    uses: ./.github/workflows/mdbook.yml
    secrets:
      GH_PAT: ${{ secrets.GITHUB_TOKEN }}

  update-docs-without-helm:
    needs: [update-version, create-release]
    if: ${{ github.event.inputs.update_helm != 'true' }}
    uses: ./.github/workflows/mdbook.yml
    secrets:
      GH_PAT: ${{ secrets.GITHUB_TOKEN }}
160 |
--------------------------------------------------------------------------------
/src/routes/media.rs:
--------------------------------------------------------------------------------
1 | use axum::{
2 | extract::{Path, State},
3 | http::{header, HeaderMap, StatusCode},
4 | response::Response,
5 | routing::get,
6 | Router,
7 | };
8 | use std::path::PathBuf;
9 | use tokio::fs::File;
10 | use tokio_util::io::ReaderStream;
11 | use axum::body::Body;
12 |
13 | use crate::error::{AppError, Result};
14 | use crate::services::AppState;
15 |
16 | pub fn router(app_state: AppState) -> Router {
17 | Router::new()
18 | .route("/{*path}", get(serve_media))
19 | .with_state(app_state)
20 | }
21 |
22 | async fn serve_media(
23 | State(state): State,
24 | Path(path): Path,
25 | headers: HeaderMap,
26 | ) -> Result {
27 | // Construct the full path to the media file
28 | let base_path = PathBuf::from(&state.config.media.source_paths[0].path);
29 | let file_path = base_path.join(&path);
30 |
31 | // Check if the file exists
32 | if !file_path.exists() {
33 | return Err(AppError::NotFound(format!("Media file not found: {path}")));
34 | }
35 |
36 | // Get the file size
37 | let metadata = match tokio::fs::metadata(&file_path).await {
38 | Ok(metadata) => metadata,
39 | Err(err) => {
40 | return Err(AppError::InternalServerError(format!("Failed to read file metadata: {err}")));
41 | }
42 | };
43 |
44 | let file_size = metadata.len();
45 |
46 | // Determine content type based on file extension
47 | let content_type = match file_path.extension().and_then(|ext| ext.to_str()) {
48 | Some("mov") => "video/quicktime".to_string(),
49 | Some("mp4") => "video/mp4".to_string(),
50 | Some("mkv") => "video/x-matroska".to_string(),
51 | _ => mime_guess::from_path(&file_path)
52 | .first_or_octet_stream()
53 | .to_string()
54 | };
55 |
56 | // Get range header if it exists
57 | let range_header = headers.get(header::RANGE);
58 |
59 | // Handle range request if present
60 | if let Some(range) = range_header {
61 | // Parse the range header
62 | let range_str = range.to_str().map_err(|_| {
63 | AppError::BadRequest("Invalid range header".to_string())
64 | })?;
65 |
66 | // Parse range values (format: "bytes=start-end")
67 | if let Some(range_values) = range_str.strip_prefix("bytes=") {
68 | let ranges: Vec<&str> = range_values.split('-').collect();
69 | if ranges.len() == 2 {
70 | let start = ranges[0].parse::().unwrap_or(0);
71 | let end = ranges[1].parse::().unwrap_or(file_size - 1).min(file_size - 1);
72 |
73 | // Ensure start is less than end and within file bounds
74 | if start <= end && start < file_size {
75 | let length = end - start + 1;
76 |
77 | // Open the file
78 | let mut file = match File::open(&file_path).await {
79 | Ok(file) => file,
80 | Err(err) => {
81 | return Err(AppError::InternalServerError(format!("Failed to open media file: {err}")));
82 | }
83 | };
84 |
85 | // Seek to the start position
86 | use tokio::io::AsyncSeekExt;
87 | if let Err(err) = file.seek(std::io::SeekFrom::Start(start)).await {
88 | return Err(AppError::InternalServerError(format!("Failed to seek in file: {err}")));
89 | }
90 |
91 | // Create a limited stream from the file
92 | use tokio::io::AsyncReadExt;
93 | let stream = ReaderStream::new(file.take(length));
94 | let body = Body::from_stream(stream);
95 |
96 | // Build the response with partial content status
97 | let response = Response::builder()
98 | .status(StatusCode::PARTIAL_CONTENT)
99 | .header(header::CONTENT_TYPE, content_type)
100 | .header(header::CONTENT_LENGTH, length)
101 | .header(header::CONTENT_RANGE, format!("bytes {start}-{end}/{file_size}"))
102 | .header(header::ACCEPT_RANGES, "bytes")
103 | .header(header::CACHE_CONTROL, "public, max-age=31536000")
104 | .header("X-Content-Type-Options", "nosniff")
105 | .body(body)
106 | .unwrap();
107 |
108 | return Ok(response);
109 | }
110 | }
111 | }
112 |
113 | // If we get here, the range was invalid
114 | return Err(AppError::BadRequest("Invalid range format".to_string()));
115 | }
116 |
117 | // If no range header or parsing failed, serve the entire file
118 | // Open the file
119 | let file = match File::open(&file_path).await {
120 | Ok(file) => file,
121 | Err(err) => {
122 | return Err(AppError::InternalServerError(format!("Failed to open media file: {err}")));
123 | }
124 | };
125 |
126 | // Create a stream from the file
127 | let stream = ReaderStream::new(file);
128 | let body = Body::from_stream(stream);
129 |
130 | // Build the response with proper headers for media streaming
131 | let response = Response::builder()
132 | .status(StatusCode::OK)
133 | .header(header::CONTENT_TYPE, content_type)
134 | .header(header::CONTENT_LENGTH, file_size)
135 | .header(header::ACCEPT_RANGES, "bytes")
136 | .header(header::CACHE_CONTROL, "public, max-age=31536000")
137 | .header("X-Content-Type-Options", "nosniff")
138 | .body(body)
139 | .unwrap();
140 |
141 | Ok(response)
142 | }
143 |
--------------------------------------------------------------------------------
/src/services/export.rs:
--------------------------------------------------------------------------------
1 | use std::path::{Path, PathBuf};
2 | use tokio::fs;
3 | use chrono::Utc;
4 | use tracing::{info, error};
5 | use serde_json::json;
6 |
7 | use crate::error::{AppError, Result};
8 | use crate::config::Config;
9 | use crate::models::{ExportRequest, VideoWithMetadata};
10 | use crate::services::video::VideoService;
11 |
/// Copies selected videos (plus a `metadata.json` manifest) into a
/// timestamped project directory under the configured export base path.
pub struct ExportService {
    config: Config, // full app config; NOTE(review): not read in this file after construction — confirm needed
    video_service: VideoService, // resolves videos and their tag/person metadata
    export_base_path: PathBuf, // cached from config.media.export_base_path
}
17 |
18 | impl ExportService {
19 | pub fn new(config: Config, video_service: VideoService) -> Self {
20 | let export_base_path = PathBuf::from(&config.media.export_base_path);
21 | Self {
22 | config,
23 | video_service,
24 | export_base_path,
25 | }
26 | }
27 |
28 | pub async fn export_videos(&self, request: ExportRequest) -> Result {
29 | // Create export directory with timestamp and project name
30 | let date = Utc::now().format("%Y-%m-%d_%H-%M-%S").to_string();
31 | let project_dir_name = format!("{date}_{}", request.project_name.replace(" ", "_"));
32 | let project_dir = self.export_base_path.join(&project_dir_name);
33 |
34 | // Ensure export base directory exists
35 | if !self.export_base_path.exists() {
36 | fs::create_dir_all(&self.export_base_path).await.map_err(|e| {
37 | AppError::Io(std::io::Error::new(
38 | std::io::ErrorKind::Other,
39 | format!("Failed to create export directory: {e}"),
40 | ))
41 | })?;
42 | }
43 |
44 | // Create project directory
45 | fs::create_dir_all(&project_dir).await.map_err(|e| {
46 | AppError::Io(std::io::Error::new(
47 | std::io::ErrorKind::Other,
48 | format!("Failed to create project directory: {e}"),
49 | ))
50 | })?;
51 |
52 | info!("Exporting videos to {0}", project_dir.display());
53 |
54 | // Collect videos with metadata
55 | let mut videos_with_metadata = Vec::new();
56 | for video_id in &request.video_ids {
57 | match self.video_service.find_with_metadata(video_id).await {
58 | Ok(video_metadata) => {
59 | videos_with_metadata.push(video_metadata);
60 | }
61 | Err(e) => {
62 | error!("Error fetching video {video_id}: {e}");
63 | return Err(e);
64 | }
65 | }
66 | }
67 |
68 | // Copy videos to export directory
69 | for video_metadata in &videos_with_metadata {
70 | // Determine source path based on configuration
71 | let source_path = if request.use_original_files && video_metadata.video.original_file_path.is_some() {
72 | Path::new(video_metadata.video.original_file_path.as_ref().unwrap())
73 | } else {
74 | Path::new(&video_metadata.video.file_path)
75 | };
76 |
77 | // Determine destination file name
78 | let dest_file_name = if request.use_original_files && video_metadata.video.original_file_path.is_some() {
79 | // Extract the file name from the original file path
80 | Path::new(video_metadata.video.original_file_path.as_ref().unwrap())
81 | .file_name()
82 | .unwrap_or_else(|| std::ffi::OsStr::new(&video_metadata.video.file_name))
83 | .to_string_lossy()
84 | .to_string()
85 | } else {
86 | video_metadata.video.file_name.clone()
87 | };
88 |
89 | let dest_path = project_dir.join(&dest_file_name);
90 |
91 | // Copy the file
92 | match fs::copy(source_path, &dest_path).await {
93 | Ok(_) => {
94 | info!(
95 | "Copied {0} to {1}",
96 | source_path.display(),
97 | dest_path.display()
98 | );
99 | }
100 | Err(e) => {
101 | error!(
102 | "Failed to copy {0} to {1}: {e}",
103 | source_path.display(),
104 | dest_path.display()
105 | );
106 | return Err(AppError::Io(e));
107 | }
108 | }
109 | }
110 |
111 | // Create metadata.json
112 | let metadata = json!({
113 | "project_name": request.project_name,
114 | "export_date": Utc::now().to_rfc3339(),
115 | "videos": videos_with_metadata.iter().map(|v| {
116 | json!({
117 | "id": v.video.id,
118 | "file_name": v.video.file_name,
119 | "title": v.video.title,
120 | "description": v.video.description,
121 | "created_date": v.video.created_date,
122 | "file_size": v.video.file_size,
123 | "rating": v.video.rating,
124 | "tags": v.tags,
125 | "people": v.people,
126 | })
127 | }).collect::>(),
128 | });
129 |
130 | let metadata_path = project_dir.join("metadata.json");
131 | let metadata_json = serde_json::to_string_pretty(&metadata).map_err(|e| {
132 | AppError::InternalServerError(format!("Failed to serialize metadata: {e}"))
133 | })?;
134 |
135 | fs::write(&metadata_path, metadata_json).await.map_err(|e| {
136 | AppError::Io(std::io::Error::new(
137 | std::io::ErrorKind::Other,
138 | format!("Failed to write metadata file: {e}"),
139 | ))
140 | })?;
141 |
142 | info!("Created metadata file at {}", metadata_path.display());
143 |
144 | // Return the path to the export directory
145 | Ok(project_dir.to_string_lossy().to_string())
146 | }
147 | }
148 |
--------------------------------------------------------------------------------
/frontend/src/components/Layout.tsx:
--------------------------------------------------------------------------------
1 | import React from 'react';
2 | import { Box, Flex, Heading, Link, Spacer, Button, useColorMode, useColorModeValue, Alert, AlertIcon, AlertTitle, AlertDescription, Spinner, Image } from '@chakra-ui/react';
3 | import { Link as RouterLink, useLocation } from 'react-router-dom';
4 | import { FaSun, FaMoon, FaVideo, FaFileExport, FaTags, FaClipboardCheck, FaCog, FaChartLine, FaEdit } from 'react-icons/fa';
5 | import { useScanContext } from '../contexts/ScanContext';
6 | // @ts-ignore
7 | import logo from '../assets/logo_large.png';
8 |
9 | interface LayoutProps {
10 | children: React.ReactNode;
11 | }
12 |
13 | const Layout: React.FC = ({ children }) => {
14 | const { colorMode, toggleColorMode } = useColorMode();
15 | const location = useLocation();
16 | const bgColor = useColorModeValue('white', 'gray.800');
17 | const borderColor = useColorModeValue('gray.200', 'gray.700');
18 | const { scanStatus } = useScanContext();
19 |
20 | return (
21 |
22 |
35 |
36 |
37 |
38 |
39 |
40 | Shoebox
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
58 |
59 | Videos
60 |
61 |
70 |
71 | Ratings Timeline
72 |
73 |
82 |
83 | Unreviewed
84 |
85 |
94 |
95 | Export
96 |
97 |
106 |
107 | Bulk Edit
108 |
109 |
118 |
119 | Manage Data
120 |
121 |
130 |
131 | System Info
132 |
133 |
136 |
137 |
138 |
139 | {scanStatus.inProgress && (
140 |
141 |
142 |
143 |
144 | Scan in progress
145 |
146 | {scanStatus.newVideosCount > 0 || scanStatus.updatedVideosCount > 0 ?
147 | `Found ${scanStatus.newVideosCount} new videos and updated ${scanStatus.updatedVideosCount} videos so far.` :
148 | 'Scanning for videos...'}
149 |
150 |
151 |
152 | )}
153 |
154 |
155 | {children}
156 |
157 |
158 | );
159 | };
160 |
161 | export default Layout;
162 |
--------------------------------------------------------------------------------
/src/routes/scan.rs:
--------------------------------------------------------------------------------
1 | use axum::{extract::State, routing::{post, get}, Json, Router};
2 | use serde::Serialize;
3 |
4 | use crate::error::Result;
5 | use crate::services::AppState;
6 | use crate::services::{ScannerService, VideoService, ThumbnailService, TagService, PersonService};
7 |
8 | pub fn router(app_state: AppState) -> Router {
9 | Router::new()
10 | .route("/", post(start_scan))
11 | .route("/status", get(get_scan_status))
12 | .with_state(app_state)
13 | }
14 |
/// Response for POST `/`: whether a new scan was actually started.
#[derive(Debug, Serialize)]
struct ScanResponse {
    message: String,     // human-readable outcome description
    scan_started: bool,  // false when a scan was already in progress
}
20 |
/// Response for GET `/status`: a snapshot of the current scan progress.
#[derive(Debug, Serialize)]
struct ScanStatusResponse {
    in_progress: bool,            // true while a background scan is running
    new_videos_count: usize,      // videos discovered so far in this scan
    updated_videos_count: usize,  // existing videos updated so far in this scan
}
27 |
28 | async fn start_scan(State(state): State) -> Result> {
29 | // Check if a scan is already in progress
30 | let is_scan_in_progress = {
31 | let status = state.scan_status.read().await;
32 | status.in_progress
33 | };
34 |
35 | if is_scan_in_progress {
36 | return Ok(Json(ScanResponse {
37 | message: "A scan is already in progress".to_string(),
38 | scan_started: false,
39 | }));
40 | }
41 |
42 | // Mark scan as in progress
43 | {
44 | let mut status = state.scan_status.write().await;
45 | status.in_progress = true;
46 | status.new_videos_count = 0;
47 | status.updated_videos_count = 0;
48 | }
49 |
50 | // Clone what we need for the background task
51 | let db = state.db.clone();
52 | let config = state.config.clone();
53 | let scan_status = state.scan_status.clone();
54 | let source_paths = config.media.source_paths.clone();
55 |
56 | // Spawn a background task to perform the scan
57 | tokio::spawn(async move {
58 | let video_service = VideoService::new(
59 | db.clone(),
60 | TagService::new(db.clone()),
61 | PersonService::new(db.clone()),
62 | ThumbnailService::new(&config),
63 | crate::services::ShoeboxService::new(db.clone()),
64 | );
65 |
66 | let thumbnail_service = ThumbnailService::new(&config);
67 |
68 | // Start the scan but don't wait for it to complete
69 | match ScannerService::scan_directories(
70 | &source_paths,
71 | video_service,
72 | thumbnail_service,
73 | &config,
74 | ).await {
75 | Ok((new_videos_arc, updated_videos_arc, tasks)) => {
76 | // Spawn another task to wait for all processing tasks to complete
77 | // This ensures the main scan task returns quickly
78 | tokio::spawn(async move {
79 | // Periodically update the scan status with progress
80 | let update_interval = tokio::time::Duration::from_secs(2);
81 | let mut interval = tokio::time::interval(update_interval);
82 |
83 | // Track tasks that are still running
84 | let mut remaining_tasks = tasks;
85 |
86 | while !remaining_tasks.is_empty() {
87 | interval.tick().await;
88 |
89 | // Update status with current progress
90 | {
91 | let new_count = {
92 | let guard = new_videos_arc.lock().await;
93 | guard.len()
94 | };
95 |
96 | let updated_count = {
97 | let guard = updated_videos_arc.lock().await;
98 | guard.len()
99 | };
100 |
101 | let mut status = scan_status.write().await;
102 | status.new_videos_count = new_count;
103 | status.updated_videos_count = updated_count;
104 | }
105 |
106 | // Check which tasks have completed
107 | remaining_tasks.retain(|task| !task.is_finished());
108 | }
109 |
110 | // All tasks completed, collect final results
111 | match ScannerService::collect_scan_results(
112 | new_videos_arc,
113 | updated_videos_arc,
114 | Vec::new() // Empty vec since we've already waited for tasks
115 | ).await {
116 | Ok((new_videos, updated_videos)) => {
117 | // Update scan status with final results
118 | let mut status = scan_status.write().await;
119 | status.in_progress = false;
120 | status.new_videos_count = new_videos.len();
121 | status.updated_videos_count = updated_videos.len();
122 | },
123 | Err(e) => {
124 | tracing::error!("Error collecting scan results: {}", e);
125 | // Mark scan as not in progress even if it failed
126 | let mut status = scan_status.write().await;
127 | status.in_progress = false;
128 | }
129 | }
130 | });
131 | },
132 | Err(e) => {
133 | tracing::error!("Error starting scan: {}", e);
134 | // Mark scan as not in progress if it failed to start
135 | let mut status = scan_status.write().await;
136 | status.in_progress = false;
137 | }
138 | }
139 | });
140 |
141 | Ok(Json(ScanResponse {
142 | message: "Scan started successfully".to_string(),
143 | scan_started: true,
144 | }))
145 | }
146 |
147 | async fn get_scan_status(State(state): State) -> Result> {
148 | let status = state.scan_status.read().await;
149 |
150 | Ok(Json(ScanStatusResponse {
151 | in_progress: status.in_progress,
152 | new_videos_count: status.new_videos_count,
153 | updated_videos_count: status.updated_videos_count,
154 | }))
155 | }
156 |
--------------------------------------------------------------------------------
/src/services/tag.rs:
--------------------------------------------------------------------------------
1 | use sqlx::{Pool, Postgres, Transaction, Row};
2 | use tracing::{info, warn};
3 | use uuid::Uuid;
4 |
5 | use crate::error::{AppError, Result};
6 | use crate::models::{Tag, CreateTagDto, TagUsage};
7 |
8 | pub struct TagService {
9 | db: Pool,
10 | }
11 |
12 | impl TagService {
13 | pub fn new(db: Pool) -> Self {
14 | Self { db }
15 | }
16 |
17 | pub async fn find_all(&self) -> Result> {
18 | let tags = sqlx::query_as::<_, Tag>("SELECT * FROM tags ORDER BY name")
19 | .fetch_all(&self.db)
20 | .await
21 | .map_err(AppError::Database)?;
22 |
23 | Ok(tags)
24 | }
25 |
26 | pub async fn find_by_id(&self, id: &str) -> Result {
27 | let tag = sqlx::query_as::<_, Tag>("SELECT * FROM tags WHERE id = $1")
28 | .bind(id)
29 | .fetch_one(&self.db)
30 | .await
31 | .map_err(|e| match e {
32 | sqlx::Error::RowNotFound => AppError::NotFound(format!("Tag not found: {id}")),
33 | _ => AppError::Database(e),
34 | })?;
35 |
36 | Ok(tag)
37 | }
38 |
39 | pub async fn find_by_name(&self, name: &str) -> Result {
40 | let tag = sqlx::query_as::<_, Tag>("SELECT * FROM tags WHERE name = $1")
41 | .bind(name)
42 | .fetch_one(&self.db)
43 | .await
44 | .map_err(|e| match e {
45 | sqlx::Error::RowNotFound => {
46 | AppError::NotFound(format!("Tag not found: {name}"))
47 | }
48 | _ => AppError::Database(e),
49 | })?;
50 |
51 | Ok(tag)
52 | }
53 |
54 | pub async fn find_or_create_by_name(
55 | &self,
56 | name: &str,
57 | tx: &mut Transaction<'_, Postgres>,
58 | ) -> Result {
59 | // Try to find existing tag
60 | let tag_result = sqlx::query_as::<_, Tag>("SELECT * FROM tags WHERE name = $1")
61 | .bind(name)
62 | .fetch_optional(&mut **tx)
63 | .await
64 | .map_err(AppError::Database)?;
65 |
66 | if let Some(tag) = tag_result {
67 | return Ok(tag.id);
68 | }
69 |
70 | // Create new tag
71 | let id = Uuid::new_v4().to_string();
72 | let now = chrono::Utc::now().naive_utc();
73 |
74 | sqlx::query("INSERT INTO tags (id, name, created_at) VALUES ($1, $2, $3)")
75 | .bind(&id)
76 | .bind(name)
77 | .bind(&now)
78 | .execute(&mut **tx)
79 | .await
80 | .map_err(AppError::Database)?;
81 |
82 | info!("Created new tag: {name} ({id})");
83 | Ok(id)
84 | }
85 |
86 | pub async fn create(&self, dto: CreateTagDto) -> Result {
87 | // Check if tag already exists
88 | let existing = sqlx::query_as::<_, Tag>("SELECT * FROM tags WHERE name = $1")
89 | .bind(&dto.name)
90 | .fetch_optional(&self.db)
91 | .await
92 | .map_err(AppError::Database)?;
93 |
94 | if let Some(tag) = existing {
95 | return Ok(tag);
96 | }
97 |
98 | let tag = Tag::new(dto.name);
99 |
100 | sqlx::query("INSERT INTO tags (id, name, created_at) VALUES ($1, $2, $3)")
101 | .bind(&tag.id)
102 | .bind(&tag.name)
103 | .bind(&tag.created_at)
104 | .execute(&self.db)
105 | .await
106 | .map_err(AppError::Database)?;
107 |
108 | info!("Created new tag: {0} ({1})", tag.name, tag.id);
109 | Ok(tag)
110 | }
111 |
112 | pub async fn update(&self, id: &str, new_name: &str) -> Result {
113 | // Check if tag exists
114 | let tag = self.find_by_id(id).await?;
115 |
116 | // Check if the new name already exists
117 | let existing = sqlx::query_as::<_, Tag>("SELECT * FROM tags WHERE name = $1 AND id != $2")
118 | .bind(new_name)
119 | .bind(id)
120 | .fetch_optional(&self.db)
121 | .await
122 | .map_err(AppError::Database)?;
123 |
124 | if existing.is_some() {
125 | return Err(AppError::BadRequest(format!(
126 | "Tag with name '{new_name}' already exists"
127 | )));
128 | }
129 |
130 | // Update tag
131 | sqlx::query("UPDATE tags SET name = $1 WHERE id = $2")
132 | .bind(new_name)
133 | .bind(id)
134 | .execute(&self.db)
135 | .await
136 | .map_err(AppError::Database)?;
137 |
138 | info!("Updated tag: {0} -> {new_name} ({id})", tag.name);
139 |
140 | // Return updated tag
141 | let updated_tag = self.find_by_id(id).await?;
142 | Ok(updated_tag)
143 | }
144 |
145 | pub async fn delete(&self, id: &str) -> Result<()> {
146 | // Check if tag exists
147 | let tag = self.find_by_id(id).await?;
148 |
149 | // Check if tag is in use
150 | let count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM video_tags WHERE tag_id = $1")
151 | .bind(id)
152 | .fetch_one(&self.db)
153 | .await
154 | .map_err(AppError::Database)?;
155 |
156 | if count > 0 {
157 | return Err(AppError::BadRequest(format!(
158 | "Cannot delete tag '{}' because it is used by {} videos",
159 | tag.name, count
160 | )));
161 | }
162 |
163 | // Delete tag
164 | sqlx::query("DELETE FROM tags WHERE id = $1")
165 | .bind(id)
166 | .execute(&self.db)
167 | .await
168 | .map_err(AppError::Database)?;
169 |
170 | info!("Deleted tag: {} ({})", tag.name, id);
171 | Ok(())
172 | }
173 |
174 | pub async fn get_usage(&self) -> Result> {
175 | let rows = sqlx::query(
176 | "SELECT t.id, t.name, COUNT(vt.video_id) as video_count
177 | FROM tags t
178 | LEFT JOIN video_tags vt ON t.id = vt.tag_id
179 | GROUP BY t.id
180 | ORDER BY t.name",
181 | )
182 | .fetch_all(&self.db)
183 | .await
184 | .map_err(AppError::Database)?;
185 |
186 | let mut results = Vec::new();
187 | for row in rows {
188 | results.push(TagUsage {
189 | id: row.get("id"),
190 | name: row.get("name"),
191 | video_count: row.get("video_count"),
192 | });
193 | }
194 |
195 | Ok(results)
196 | }
197 |
198 | pub async fn cleanup_unused(&self) -> Result {
199 | let result = sqlx::query(
200 | "DELETE FROM tags
201 | WHERE id NOT IN (SELECT DISTINCT tag_id FROM video_tags)",
202 | )
203 | .execute(&self.db)
204 | .await
205 | .map_err(AppError::Database)?;
206 |
207 | let count = result.rows_affected() as usize;
208 | if count > 0 {
209 | info!("Cleaned up {} unused tags", count);
210 | }
211 |
212 | Ok(count)
213 | }
214 | }
215 |
--------------------------------------------------------------------------------
/charts/shoebox/templates/statefulset.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: apps/v1
2 | kind: StatefulSet
3 | metadata:
4 | name: {{ include "shoebox.fullname" . }}
5 | labels:
6 | {{- include "shoebox.labels" . | nindent 4 }}
7 | spec:
8 | serviceName: {{ include "shoebox.fullname" . }}
9 | {{- if not .Values.autoscaling.enabled }}
10 | replicas: {{ .Values.replicaCount }}
11 | {{- end }}
12 | selector:
13 | matchLabels:
14 | {{- include "shoebox.selectorLabels" . | nindent 6 }}
15 | template:
16 | metadata:
17 | {{- with .Values.podAnnotations }}
18 | annotations:
19 | {{- toYaml . | nindent 8 }}
20 | {{- end }}
21 | labels:
22 | {{- include "shoebox.selectorLabels" . | nindent 8 }}
23 | spec:
24 | {{- with .Values.imagePullSecrets }}
25 | imagePullSecrets:
26 | {{- toYaml . | nindent 8 }}
27 | {{- end }}
28 | serviceAccountName: {{ include "shoebox.serviceAccountName" . }}
29 | securityContext:
30 | {{- toYaml .Values.podSecurityContext | nindent 8 }}
31 | containers:
32 | - name: {{ .Chart.Name }}
33 | securityContext:
34 | {{- toYaml .Values.securityContext | nindent 12 }}
35 | image: "{{ .Values.image.repository }}:{{ .Values.image.tag | default .Chart.AppVersion }}"
36 | imagePullPolicy: {{ .Values.image.pullPolicy }}
37 | env:
38 | - name: SERVER_HOST
39 | value: {{ .Values.config.serverHost | quote }}
40 | - name: SERVER_PORT
41 | value: {{ .Values.config.serverPort | quote }}
42 | - name: DATABASE_MAX_CONNECTIONS
43 | value: {{ .Values.config.databaseMaxConnections | default "10" | quote }}
44 | - name: DATABASE_URL
45 | value: {{ .Values.config.databaseUrl | default "none" | quote }}
46 | - name: MEDIA_SOURCE_PATHS
47 | value: {{ if kindIs "string" .Values.config.mediaSourcePaths }}{{ .Values.config.mediaSourcePaths | quote }}{{ else }}{{ include "shoebox.mediaSourcePathsString" . }}{{ end }}
48 | - name: THUMBNAIL_PATH
49 | value: {{ .Values.config.thumbnailPath | quote }}
50 | - name: EXPORT_BASE_PATH
51 | value: {{ .Values.config.exportBasePath | quote }}
52 | - name: RUST_LOG
53 | value: {{ .Values.config.rustLog | quote }}
54 | - name: MAX_CONCURRENT_TASKS
55 | value: {{ .Values.config.maxConcurrentTasks | default "4" | quote }}
56 | ports:
57 | - name: http
58 | containerPort: {{ .Values.service.port }}
59 | protocol: TCP
60 | livenessProbe:
61 | httpGet:
62 | path: /
63 | port: http
64 | readinessProbe:
65 | httpGet:
66 | path: /
67 | port: http
68 | resources:
69 | {{- toYaml .Values.resources | nindent 12 }}
70 | volumeMounts:
71 | {{- if .Values.persistence.data.enabled }}
72 | - name: data
73 | mountPath: /app/data
74 | {{- if .Values.persistence.data.readOnly }}
75 | readOnly: true
76 | {{- end }}
77 | {{- end }}
78 | {{- if .Values.persistence.thumbnails.enabled }}
79 | - name: thumbnails
80 | mountPath: /app/thumbnails
81 | {{- if .Values.persistence.thumbnails.readOnly }}
82 | readOnly: true
83 | {{- end }}
84 | {{- end }}
85 | {{- if .Values.persistence.exports.enabled }}
86 | - name: exports
87 | mountPath: /app/exports
88 | {{- if .Values.persistence.exports.readOnly }}
89 | readOnly: true
90 | {{- end }}
91 | {{- end }}
92 | {{- if .Values.config.mediaSourcePaths.enabled }}
93 | {{- range .Values.config.mediaSourcePaths.sources }}
94 | - name: media-{{ .name | lower }}
95 | mountPath: {{ .path }}
96 | {{- if or (not (hasKey . "readOnly")) .readOnly }}
97 | readOnly: true
98 | {{- end }}
99 | {{- end }}
100 | {{- range .Values.config.mediaSourcePaths.sources }}
101 | {{- if .originalPath }}
102 | - name: original-media-{{ .name | lower }}
103 | mountPath: {{ .originalPath }}
104 | {{- if or (not (hasKey . "readOnly")) .readOnly }}
105 | readOnly: true
106 | {{- end }}
107 | {{- end }}
108 | {{- end }}
109 | {{- end }}
110 | {{- with .Values.nodeSelector }}
111 | nodeSelector:
112 | {{- toYaml . | nindent 8 }}
113 | {{- end }}
114 | {{- with .Values.affinity }}
115 | affinity:
116 | {{- toYaml . | nindent 8 }}
117 | {{- end }}
118 | {{- with .Values.tolerations }}
119 | tolerations:
120 | {{- toYaml . | nindent 8 }}
121 | {{- end }}
122 | volumes:
123 | {{- if .Values.persistence.data.enabled }}
124 | - name: data
125 | persistentVolumeClaim:
126 | {{- if .Values.persistence.data.existingClaim }}
127 | claimName: {{ .Values.persistence.data.existingClaim }}
128 | {{- else }}
129 | claimName: {{ include "shoebox.fullname" . }}-data
130 | {{- end }}
131 | {{- end }}
132 | {{- if .Values.persistence.thumbnails.enabled }}
133 | - name: thumbnails
134 | persistentVolumeClaim:
135 | {{- if .Values.persistence.thumbnails.existingClaim }}
136 | claimName: {{ .Values.persistence.thumbnails.existingClaim }}
137 | {{- else }}
138 | claimName: {{ include "shoebox.fullname" . }}-thumbnails
139 | {{- end }}
140 | {{- end }}
141 | {{- if .Values.persistence.exports.enabled }}
142 | - name: exports
143 | persistentVolumeClaim:
144 | {{- if .Values.persistence.exports.existingClaim }}
145 | claimName: {{ .Values.persistence.exports.existingClaim }}
146 | {{- else }}
147 | claimName: {{ include "shoebox.fullname" . }}-exports
148 | {{- end }}
149 | {{- end }}
150 | {{- if .Values.config.mediaSourcePaths.enabled }}
151 | {{- range .Values.config.mediaSourcePaths.sources }}
152 | - name: media-{{ .name | lower }}
153 | persistentVolumeClaim:
154 | {{- if .pathExistingClaim }}
155 | claimName: {{ .pathExistingClaim }}
156 | {{- else }}
157 | claimName: {{ include "shoebox.fullname" $ }}-media-{{ .name | lower }}
158 | {{- end }}
159 | {{- end }}
160 | {{- range .Values.config.mediaSourcePaths.sources }}
161 | {{- if .originalPath }}
162 | - name: original-media-{{ .name | lower }}
163 | persistentVolumeClaim:
164 | {{- if .originalExistingClaim }}
165 | claimName: {{ .originalExistingClaim }}
166 | {{- else }}
167 | claimName: {{ include "shoebox.fullname" $ }}-original-media-{{ .name | lower }}
168 | {{- end }}
169 | {{- end }}
170 | {{- end }}
171 | {{- end }}
172 |
--------------------------------------------------------------------------------
/src/services/person.rs:
--------------------------------------------------------------------------------
1 | use sqlx::{Pool, Postgres, Transaction, Row};
2 | use tracing::{info, warn};
3 | use uuid::Uuid;
4 |
5 | use crate::error::{AppError, Result};
6 | use crate::models::{Person, CreatePersonDto, PersonUsage};
7 |
8 | pub struct PersonService {
9 | db: Pool,
10 | }
11 |
12 | impl PersonService {
13 | pub fn new(db: Pool) -> Self {
14 | Self { db }
15 | }
16 |
17 | pub async fn find_all(&self) -> Result> {
18 | let people = sqlx::query_as::<_, Person>("SELECT * FROM people ORDER BY name")
19 | .fetch_all(&self.db)
20 | .await
21 | .map_err(AppError::Database)?;
22 |
23 | Ok(people)
24 | }
25 |
26 | pub async fn find_by_id(&self, id: &str) -> Result {
27 | let person = sqlx::query_as::<_, Person>("SELECT * FROM people WHERE id = $1")
28 | .bind(id)
29 | .fetch_one(&self.db)
30 | .await
31 | .map_err(|e| match e {
32 | sqlx::Error::RowNotFound => AppError::NotFound(format!("Person not found: {id}")),
33 | _ => AppError::Database(e),
34 | })?;
35 |
36 | Ok(person)
37 | }
38 |
39 | pub async fn find_by_name(&self, name: &str) -> Result {
40 | let person = sqlx::query_as::<_, Person>("SELECT * FROM people WHERE name = $1")
41 | .bind(name)
42 | .fetch_one(&self.db)
43 | .await
44 | .map_err(|e| match e {
45 | sqlx::Error::RowNotFound => {
46 | AppError::NotFound(format!("Person not found: {name}"))
47 | }
48 | _ => AppError::Database(e),
49 | })?;
50 |
51 | Ok(person)
52 | }
53 |
54 | pub async fn find_or_create_by_name(
55 | &self,
56 | name: &str,
57 | tx: &mut Transaction<'_, Postgres>,
58 | ) -> Result {
59 | // Try to find existing person
60 | let person_result = sqlx::query_as::<_, Person>("SELECT * FROM people WHERE name = $1")
61 | .bind(name)
62 | .fetch_optional(&mut **tx)
63 | .await
64 | .map_err(AppError::Database)?;
65 |
66 | if let Some(person) = person_result {
67 | return Ok(person.id);
68 | }
69 |
70 | // Create new person
71 | let id = Uuid::new_v4().to_string();
72 | let now = chrono::Utc::now().naive_utc();
73 |
74 | sqlx::query("INSERT INTO people (id, name, created_at) VALUES ($1, $2, $3)")
75 | .bind(&id)
76 | .bind(name)
77 | .bind(&now)
78 | .execute(&mut **tx)
79 | .await
80 | .map_err(AppError::Database)?;
81 |
82 | info!("Created new person: {name} ({id})");
83 | Ok(id)
84 | }
85 |
86 | pub async fn create(&self, dto: CreatePersonDto) -> Result {
87 | // Check if person already exists
88 | let existing = sqlx::query_as::<_, Person>("SELECT * FROM people WHERE name = $1")
89 | .bind(&dto.name)
90 | .fetch_optional(&self.db)
91 | .await
92 | .map_err(AppError::Database)?;
93 |
94 | if let Some(person) = existing {
95 | return Ok(person);
96 | }
97 |
98 | let person = Person::new(dto.name);
99 |
100 | sqlx::query("INSERT INTO people (id, name, created_at) VALUES ($1, $2, $3)")
101 | .bind(&person.id)
102 | .bind(&person.name)
103 | .bind(&person.created_at)
104 | .execute(&self.db)
105 | .await
106 | .map_err(AppError::Database)?;
107 |
108 | info!("Created new person: {} ({})", person.name, person.id);
109 | Ok(person)
110 | }
111 |
112 | pub async fn update(&self, id: &str, new_name: &str) -> Result {
113 | // Check if person exists
114 | let person = self.find_by_id(id).await?;
115 |
116 | // Check if the new name already exists
117 | let existing = sqlx::query_as::<_, Person>("SELECT * FROM people WHERE name = $1 AND id != $2")
118 | .bind(new_name)
119 | .bind(id)
120 | .fetch_optional(&self.db)
121 | .await
122 | .map_err(AppError::Database)?;
123 |
124 | if existing.is_some() {
125 | return Err(AppError::BadRequest(format!(
126 | "Person with name '{new_name}' already exists"
127 | )));
128 | }
129 |
130 | // Update person
131 | sqlx::query("UPDATE people SET name = $1 WHERE id = $2")
132 | .bind(new_name)
133 | .bind(id)
134 | .execute(&self.db)
135 | .await
136 | .map_err(AppError::Database)?;
137 |
138 | info!("Updated person: {0} -> {new_name} ({id})", person.name);
139 |
140 | // Return updated person
141 | let updated_person = self.find_by_id(id).await?;
142 | Ok(updated_person)
143 | }
144 |
145 | pub async fn delete(&self, id: &str) -> Result<()> {
146 | // Check if person exists
147 | let person = self.find_by_id(id).await?;
148 |
149 | // Check if person is in use
150 | let count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM video_people WHERE person_id = $1")
151 | .bind(id)
152 | .fetch_one(&self.db)
153 | .await
154 | .map_err(AppError::Database)?;
155 |
156 | if count > 0 {
157 | return Err(AppError::BadRequest(format!(
158 | "Cannot delete person '{}' because they appear in {} videos",
159 | person.name, count
160 | )));
161 | }
162 |
163 | // Delete person
164 | sqlx::query("DELETE FROM people WHERE id = $1")
165 | .bind(id)
166 | .execute(&self.db)
167 | .await
168 | .map_err(AppError::Database)?;
169 |
170 | info!("Deleted person: {} ({})", person.name, id);
171 | Ok(())
172 | }
173 |
174 | pub async fn get_usage(&self) -> Result> {
175 | let rows = sqlx::query(
176 | "SELECT p.id, p.name, COUNT(vp.video_id) as video_count
177 | FROM people p
178 | LEFT JOIN video_people vp ON p.id = vp.person_id
179 | GROUP BY p.id
180 | ORDER BY p.name",
181 | )
182 | .fetch_all(&self.db)
183 | .await
184 | .map_err(AppError::Database)?;
185 |
186 | let mut results = Vec::new();
187 | for row in rows {
188 | results.push(PersonUsage {
189 | id: row.get("id"),
190 | name: row.get("name"),
191 | video_count: row.get("video_count"),
192 | });
193 | }
194 |
195 | Ok(results)
196 | }
197 |
198 | pub async fn cleanup_unused(&self) -> Result {
199 | let result = sqlx::query(
200 | "DELETE FROM people
201 | WHERE id NOT IN (SELECT DISTINCT person_id FROM video_people)",
202 | )
203 | .execute(&self.db)
204 | .await
205 | .map_err(AppError::Database)?;
206 |
207 | let count = result.rows_affected() as usize;
208 | if count > 0 {
209 | info!("Cleaned up {} unused people", count);
210 | }
211 |
212 | Ok(count)
213 | }
214 | }
215 |
--------------------------------------------------------------------------------
/frontend/src/pages/SystemInfoPage.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState, useEffect } from 'react';
2 | import {
3 | Box,
4 | Heading,
5 | Text,
6 | VStack,
7 | HStack,
8 | Divider,
9 | Card,
10 | CardHeader,
11 | CardBody,
12 | SimpleGrid,
13 | Spinner,
14 | Alert,
15 | AlertIcon,
16 | AlertTitle,
17 | AlertDescription,
18 | Button,
19 | useToast,
20 | } from '@chakra-ui/react';
21 | import { FaSync } from 'react-icons/fa';
22 |
// One media source path entry as returned by GET /api/system.
interface MediaPathConfig {
  // Path scanned for videos inside the container.
  path: string;
  // Original location of the footage on the source system, if configured.
  original_path?: string;
  // Original file extension of the footage, if it differs from the scan path.
  original_extension?: string;
}
28 |
// Full system configuration payload returned by GET /api/system.
interface SystemConfig {
  server: {
    host: string;
    port: number;
  };
  database: {
    // Connection string; rendered verbatim on the page — assumes the backend
    // already redacts credentials. TODO confirm.
    url: string;
    max_connections: number;
  };
  media: {
    source_paths: MediaPathConfig[];
    export_base_path: string;
    thumbnail_path: string;
  };
}
44 |
// Read-only page showing the backend configuration (server, database, media
// paths) fetched from /api/system, plus a "Rescan Library" action.
//
// NOTE(review): the JSX markup and generic type arguments in this file appear
// to have been stripped by an extraction step (angle-bracket content lost,
// e.g. useState(null) was presumably useState<SystemConfig | null>(null) and
// the render tree below is missing its tags). Restore from version control
// rather than reconstructing by hand.
const SystemInfoPage: React.FC = () => {
// Fetched configuration; null until the /api/system call resolves.
const [config, setConfig] = useState(null);
const [loading, setLoading] = useState(true);
const [error, setError] = useState(null);
// Tracks the in-flight state of the rescan POST so the button can show a spinner.
const [rescanLoading, setRescanLoading] = useState(false);
const toast = useToast();

// Fetch the system configuration once on mount.
useEffect(() => {
const fetchSystemInfo = async () => {
try {
const response = await fetch('/api/system');
if (!response.ok) {
throw new Error(`Error fetching system info: ${response.statusText}`);
}
const data = await response.json();
setConfig(data);
} catch (err) {
setError(err instanceof Error ? err.message : 'An unknown error occurred');
} finally {
// Clear the loading flag on both success and failure.
setLoading(false);
}
};

fetchSystemInfo();
}, []);

// Triggers a synchronous library rescan via POST /api/scan and reports the
// result (new/updated video counts) through a toast.
const handleRescan = async () => {
setRescanLoading(true);
try {
const response = await fetch('/api/scan', {
method: 'POST',
});

if (!response.ok) {
throw new Error(`Error rescanning library: ${response.statusText}`);
}

const data = await response.json();

toast({
title: 'Library Rescanned',
description: `Successfully rescanned library. Found ${data.new_videos_count} new videos and updated ${data.updated_videos_count} existing videos.`,
status: 'success',
duration: 5000,
isClosable: true,
});
} catch (err) {
toast({
title: 'Rescan Failed',
description: err instanceof Error ? err.message : 'An unknown error occurred',
status: 'error',
duration: 5000,
isClosable: true,
});
} finally {
setRescanLoading(false);
}
};

// Loading state: spinner + message. (Markup stripped — see note above.)
if (loading) {
return (


Loading system information...

);
}

// Error state: alert box with the fetch error message.
if (error) {
return (



Error Loading System Information

{error}

);
}

// Main render: cards for server, database and media configuration, followed
// by the rescan button. (Markup stripped — see note above.)
return (


System Information



{/* Server Configuration */}


Server Configuration




Host:
{config?.server.host}


Port:
{config?.server.port}





{/* Database Configuration */}


Database Configuration




URL:

{config?.database.url}



Max Connections:
{config?.database.max_connections}





{/* Media Configuration */}


Media Configuration




Source Paths:
{config?.media.source_paths.map((pathConfig, index) => (

{pathConfig.path}

))}



Export Base Path:

{config?.media.export_base_path}




Thumbnail Path:

{config?.media.thumbnail_path}








}
>
Rescan Library


Rescans existing library to capture any new metadata or fields that have been added



);
};
224 |
225 | export default SystemInfoPage;
226 |
--------------------------------------------------------------------------------
/frontend/src/pages/HomePage.tsx:
--------------------------------------------------------------------------------
1 | import React, { useState, useEffect } from 'react';
2 | import { useLocation } from 'react-router-dom';
3 | import {
4 | Box,
5 | Heading,
6 | Button,
7 | Flex,
8 | Input,
9 | InputGroup,
10 | InputLeftElement,
11 | useToast,
12 | Text,
13 | Spinner,
14 | SimpleGrid
15 | } from '@chakra-ui/react';
16 | import { FaSearch, FaSync } from 'react-icons/fa';
17 | import { videoApi, scanApi, VideoWithMetadata, VideoSearchParams } from '../api/client';
18 | import VideoCard from '../components/VideoCard';
19 | import SearchFilters from '../components/SearchFilters';
20 | import { useScanContext } from '../contexts/ScanContext';
21 |
// Main video-browsing page: search box, filter panel, "Scan Directories"
// action, and a grid of VideoCard results.
//
// NOTE(review): generic type arguments and the JSX tree near the bottom look
// stripped by an extraction step (angle-bracket content lost); restore the
// markup from version control rather than reconstructing it.
const HomePage: React.FC = () => {
// Presumably useState<VideoWithMetadata[]>([]) before the generics were stripped.
const [videos, setVideos] = useState([]);
const [loading, setLoading] = useState(true);
// In-flight state of the "Scan Directories" request (button spinner only).
const [scanning, setScanning] = useState(false);
const [searchQuery, setSearchQuery] = useState('');
// Server-side search parameters; changing this object triggers a refetch.
const [searchParams, setSearchParams] = useState({
limit: 100,
offset: 0
});
const toast = useToast();
const { scanStatus, checkScanStatus } = useScanContext();
const location = useLocation();

// Function to fetch videos
const fetchVideos = async () => {
setLoading(true);
try {
const results = await videoApi.searchVideos(searchParams);
setVideos(results);
} catch (error) {
console.error('Error fetching videos:', error);
toast({
title: 'Error fetching videos',
status: 'error',
duration: 3000,
isClosable: true,
});
} finally {
setLoading(false);
}
};

// Parse URL parameters and update search params
// (shadows the state variable `searchParams` with a local URLSearchParams —
// intentional but easy to misread).
useEffect(() => {
const searchParams = new URLSearchParams(location.search);
const start_date = searchParams.get('start_date');
const end_date = searchParams.get('end_date');

if (start_date || end_date) {
setSearchParams(prevParams => ({
...prevParams,
start_date: start_date || undefined,
end_date: end_date || undefined
}));
}
}, [location]);

// Load videos on component mount and when search params change
// NOTE(review): `toast` in the dependency array is unusual; fetchVideos is
// not listed — confirm this matches the intended refetch behavior.
useEffect(() => {
fetchVideos();
}, [searchParams, toast]);

// Refresh videos when scan completes
// NOTE(review): deliberately depends only on scanStatus.inProgress so the
// toast fires once per completed scan — confirm this is intentional.
useEffect(() => {
// If scan was in progress but is now complete, refresh the videos
if (!scanStatus.inProgress && scanStatus.newVideosCount > 0) {
fetchVideos();

// Show a toast notification about the completed scan
toast({
title: 'Scan complete',
description: `Found ${scanStatus.newVideosCount} new videos and updated ${scanStatus.updatedVideosCount} videos.`,
status: 'success',
duration: 5000,
isClosable: true,
});
}
}, [scanStatus.inProgress]);

// Handle search input change
const handleSearchChange = (e: React.ChangeEvent) => {
setSearchQuery(e.target.value);
};

// Handle search submit
const handleSearch = () => {
// Preserve start_date and end_date from URL if they exist
const urlParams = new URLSearchParams(location.search);
const start_date = urlParams.get('start_date');
const end_date = urlParams.get('end_date');

setSearchParams({
...searchParams,
query: searchQuery.trim() || undefined,
start_date: start_date || searchParams.start_date,
end_date: end_date || searchParams.end_date,
offset: 0, // Reset pagination when searching
});
};

// Handle search on Enter key
const handleKeyDown = (e: React.KeyboardEvent) => {
if (e.key === 'Enter') {
handleSearch();
}
};

// Handle filter changes
const handleFilterChange = (newFilters: Partial) => {
// Preserve start_date and end_date from URL if they exist
const urlParams = new URLSearchParams(location.search);
const start_date = urlParams.get('start_date');
const end_date = urlParams.get('end_date');

setSearchParams({
...searchParams,
...newFilters,
start_date: start_date || newFilters.start_date || searchParams.start_date,
end_date: end_date || newFilters.end_date || searchParams.end_date,
offset: 0, // Reset pagination when filters change
});
};

// Handle scan directories
// Kicks off a background scan; completion is picked up by the scan-status
// polling effect above rather than awaited here.
const handleScan = async () => {
setScanning(true);
try {
await scanApi.scanDirectories();

// Check scan status immediately after starting the scan
await checkScanStatus();

toast({
title: 'Scan started',
description: 'The scan has been started in the background. You can continue using the application.',
status: 'info',
duration: 5000,
isClosable: true,
});

// We'll refresh the videos list automatically when the scan completes
// via the scan status polling mechanism
} catch (error) {
console.error('Error starting scan:', error);
toast({
title: 'Error starting scan',
status: 'error',
duration: 3000,
isClosable: true,
});
} finally {
setScanning(false);
}
};

// Render: header + scan button, search input, filters, then either a
// spinner, an empty-state message, or the video grid.
// (Markup stripped — see note above.)
return (


Videos
}
colorScheme="blue"
onClick={handleScan}
isLoading={scanning}
loadingText="Scanning"
>
Scan Directories
















{loading ? (



) : videos.length === 0 ? (

No videos found

Try adjusting your search or scan for new videos


) : (

{videos.map((video) => (

))}

)}

);
};
222 |
223 | export default HomePage;
224 |
--------------------------------------------------------------------------------
/docs/installation/helm-chart.md:
--------------------------------------------------------------------------------
1 | # Helm Chart Installation
2 |
3 | This page provides detailed instructions for deploying Shoebox on Kubernetes using the Helm chart.
4 |
5 | ## Prerequisites
6 |
7 | - Kubernetes 1.19+
8 | - Helm 3.2.0+
9 | - PV provisioner support in the underlying infrastructure (if persistence is enabled)
10 |
11 | ## Getting Started
12 |
13 | ### Adding the Helm Repository
14 |
15 | ```bash
16 | # Add the Shoebox Helm repository
17 | helm repo add shoebox https://slackspace-io.github.io/shoebox
18 | helm repo update
19 | ```
20 |
21 | ### Installing the Chart
22 |
23 | To install the chart with the release name `shoebox`:
24 |
25 | ```bash
26 | helm install shoebox shoebox/shoebox
27 | ```
28 |
29 | ### Using a Specific Image Version
30 |
31 | By default, the chart uses the `preview` tag for the Shoebox image. For production environments, it's recommended to use a specific version:
32 |
33 | ```bash
34 | helm install shoebox shoebox/shoebox --set image.tag=v1.0.0
35 | ```
36 |
37 | ### Using a Private Registry
38 |
39 | If you're using a private registry for the Shoebox image, you'll need to create a secret with your registry credentials:
40 |
41 | ```bash
kubectl create secret docker-registry regcred \
  --docker-server=ghcr.io \
  --docker-username=<your-github-username> \
  --docker-password=<your-github-token> \
  --docker-email=<your-email>
47 | ```
48 |
49 | Then, specify the secret in your Helm install command:
50 |
51 | ```bash
52 | helm install shoebox shoebox/shoebox --set imagePullSecrets[0].name=regcred
53 | ```
54 |
55 | ## Configuration
56 |
57 | The Shoebox Helm chart offers extensive configuration options through its `values.yaml` file. You can override these values using the `--set` flag or by providing your own values file.
58 |
59 | ### Media Source Paths Configuration
60 |
61 | One of the key features of Shoebox is the ability to specify the original location of videos. This is configured through the `config.mediaSourcePaths` parameter.
62 |
63 | The `mediaSourcePaths` parameter accepts a comma-separated list of paths. Each path can be configured in two formats:
64 |
65 | #### Named Section Format (Recommended)
66 |
67 | ```
68 | name:/path/to/videos;/original/path;original_extension
69 | ```
70 |
71 | Where:
72 | - `name` is a descriptive name for the media source (e.g., "bmpcc", "gopro", etc.)
73 | - `/path/to/videos` is the path where the videos are mounted in the container (required)
74 | - `/original/path` (optional) is the original location of the videos on the source system
75 | - `original_extension` (optional) is the original file extension of the videos. If not provided but `original_path` is, it will use the same extension as the scan path.
76 |
77 | For example:
78 |
79 | ```yaml
80 | config:
81 | mediaSourcePaths: "bmpcc:/mnt/videos;/home/user/videos;mp4,gopro:/mnt/other-videos;/media/external/videos"
82 | ```
83 |
84 | For better readability, you can also use YAML's multi-line string syntax:
85 |
86 | ```yaml
87 | config:
88 | mediaSourcePaths: >-
89 | bmpcc:/mnt/videos;/home/user/videos;mp4,
90 | gopro:/mnt/other-videos;/media/external/videos
91 | ```
92 |
93 | Both configurations specify two named media source paths:
94 | 1. `bmpcc` with scan path `/mnt/videos`, original path `/home/user/videos`, and original extension `mp4`
95 | 2. `gopro` with scan path `/mnt/other-videos`, original path `/media/external/videos`, and using the same extension as the scan path
96 |
97 | #### Legacy Format (Backward Compatible)
98 |
99 | The older format without named sections is still supported:
100 |
101 | ```
102 | /path/to/videos;/original/path;original_extension
103 | ```
104 |
105 | For example:
106 |
107 | ```yaml
108 | config:
109 | mediaSourcePaths: "/mnt/videos;/home/user/videos;mp4,/mnt/other-videos;/media/external/videos"
110 | ```
111 |
112 | You can set this configuration when installing the chart:
113 |
114 | ```bash
115 | helm install shoebox shoebox/shoebox \
116 | --set config.mediaSourcePaths="/mnt/videos;/home/user/videos;mp4,/mnt/other-videos;/media/external/videos"
117 | ```
118 |
119 | ### Other Configuration Parameters
120 |
121 | #### Image Configuration
122 |
123 | | Parameter | Description | Default |
124 | |-----------|-------------|---------|
125 | | `image.repository` | Image repository | `ghcr.io/slackspace-io/shoebox` |
126 | | `image.tag` | Image tag | `preview` |
127 | | `image.pullPolicy` | Image pull policy | `IfNotPresent` |
128 | | `imagePullSecrets` | Image pull secrets | `[]` |
129 |
130 | #### Application Configuration
131 |
132 | | Parameter | Description | Default |
133 | |-----------|-------------|---------|
134 | | `config.serverHost` | Host to bind the server | `0.0.0.0` |
135 | | `config.serverPort` | Port to bind the server | `3000` |
136 | | `config.databaseUrl` | Database URL (SQLite) | `sqlite:/app/data/videos.db` |
137 | | `config.mediaSourcePaths` | Paths to scan for videos | `/mnt/videos` |
138 | | `config.thumbnailPath` | Path to store thumbnails | `/app/thumbnails` |
139 | | `config.exportBasePath` | Path for exported files | `/app/exports` |
140 | | `config.rustLog` | Rust log level | `info` |
141 |
142 | #### Persistence Configuration
143 |
144 | | Parameter | Description | Default |
145 | |-----------|-------------|---------|
146 | | `persistence.data.enabled` | Enable persistence for data | `true` |
147 | | `persistence.data.size` | Size of data PVC | `1Gi` |
148 | | `persistence.thumbnails.enabled` | Enable persistence for thumbnails | `true` |
149 | | `persistence.thumbnails.size` | Size of thumbnails PVC | `5Gi` |
150 | | `persistence.exports.enabled` | Enable persistence for exports | `true` |
151 | | `persistence.exports.size` | Size of exports PVC | `10Gi` |
152 | | `persistence.media.enabled` | Enable persistence for media | `true` |
153 | | `persistence.media.existingClaim` | Use existing PVC for media | `""` |
154 | | `persistence.media.size` | Size of media PVC | `100Gi` |
155 |
156 | #### PostgreSQL Configuration
157 |
158 | | Parameter | Description | Default |
159 | |-----------|-------------|---------|
160 | | `postgresql.enabled` | Enable PostgreSQL | `false` |
161 | | `postgresql.postgresqlUsername` | PostgreSQL username | `postgres` |
162 | | `postgresql.postgresqlPassword` | PostgreSQL password | `postgres` |
163 | | `postgresql.postgresqlDatabase` | PostgreSQL database | `videos` |
164 | | `postgresql.persistence.enabled` | Enable PostgreSQL persistence | `true` |
165 | | `postgresql.persistence.size` | Size of PostgreSQL PVC | `8Gi` |
166 |
167 | ## Examples
168 |
169 | ### Using SQLite with Persistence
170 |
171 | ```bash
172 | helm install shoebox shoebox/shoebox \
173 | --set persistence.data.enabled=true \
174 | --set persistence.thumbnails.enabled=true \
175 | --set persistence.exports.enabled=true \
176 | --set persistence.media.existingClaim=media-pvc
177 | ```
178 |
179 | ### Using PostgreSQL
180 |
181 | ```bash
182 | helm install shoebox shoebox/shoebox \
183 | --set postgresql.enabled=true \
184 | --set postgresql.postgresqlPassword=mypassword \
185 | --set persistence.thumbnails.enabled=true \
186 | --set persistence.exports.enabled=true \
187 | --set persistence.media.existingClaim=media-pvc
188 | ```
189 |
190 | ### Configuring Multiple Media Source Paths with Original Locations
191 |
192 | ```bash
193 | # Using a single line
194 | helm install shoebox shoebox/shoebox \
195 | --set config.mediaSourcePaths="bmpcc:/mnt/videos;/home/user/videos;mp4,gopro:/mnt/other-videos;/media/external/videos" \
196 | --set persistence.thumbnails.enabled=true \
197 | --set persistence.exports.enabled=true \
198 | --set persistence.media.existingClaim=media-pvc
199 |
200 | # Or using a values file with the multi-line syntax for better readability
201 | cat > values-custom.yaml << EOF
202 | config:
203 | mediaSourcePaths: >-
204 | bmpcc:/mnt/videos;/home/user/videos;mp4,
205 | gopro:/mnt/other-videos;/media/external/videos
206 | persistence:
207 | thumbnails:
208 | enabled: true
209 | exports:
210 | enabled: true
211 | media:
212 | existingClaim: media-pvc
213 | EOF
214 |
215 | helm install shoebox shoebox/shoebox -f values-custom.yaml
216 | ```
217 |
218 | ### Disabling Persistence (for testing)
219 |
220 | ```bash
221 | helm install shoebox shoebox/shoebox \
222 | --set persistence.data.enabled=false \
223 | --set persistence.thumbnails.enabled=false \
224 | --set persistence.exports.enabled=false \
225 | --set persistence.media.enabled=false
226 | ```
227 |
228 | ## Upgrading
229 |
230 | ### To 1.0.0
231 |
232 | This is the first stable release of the Shoebox chart.
233 |
--------------------------------------------------------------------------------
/src/services/shoebox.rs:
--------------------------------------------------------------------------------
1 | use sqlx::{Pool, Postgres, Transaction, Row};
2 | use tracing::{info, warn};
3 | use uuid::Uuid;
4 |
5 | use crate::error::{AppError, Result};
6 | use crate::models::{Shoebox, CreateShoeboxDto, ShoeboxUsage};
7 |
8 | pub struct ShoeboxService {
9 | db: Pool,
10 | }
11 |
12 | impl ShoeboxService {
13 | pub fn new(db: Pool) -> Self {
14 | Self { db }
15 | }
16 |
17 | pub async fn find_all(&self) -> Result> {
18 | let shoeboxes = sqlx::query_as::<_, Shoebox>("SELECT * FROM shoeboxes ORDER BY name")
19 | .fetch_all(&self.db)
20 | .await
21 | .map_err(AppError::Database)?;
22 |
23 | Ok(shoeboxes)
24 | }
25 |
26 | pub async fn find_by_id(&self, id: &str) -> Result {
27 | let shoebox = sqlx::query_as::<_, Shoebox>("SELECT * FROM shoeboxes WHERE id = $1")
28 | .bind(id)
29 | .fetch_one(&self.db)
30 | .await
31 | .map_err(|e| match e {
32 | sqlx::Error::RowNotFound => AppError::NotFound(format!("Shoebox not found: {id}")),
33 | _ => AppError::Database(e),
34 | })?;
35 |
36 | Ok(shoebox)
37 | }
38 |
39 | pub async fn find_by_name(&self, name: &str) -> Result {
40 | let shoebox = sqlx::query_as::<_, Shoebox>("SELECT * FROM shoeboxes WHERE name = $1")
41 | .bind(name)
42 | .fetch_one(&self.db)
43 | .await
44 | .map_err(|e| match e {
45 | sqlx::Error::RowNotFound => {
46 | AppError::NotFound(format!("Shoebox not found: {name}"))
47 | }
48 | _ => AppError::Database(e),
49 | })?;
50 |
51 | Ok(shoebox)
52 | }
53 |
54 | pub async fn find_or_create_by_name(
55 | &self,
56 | name: &str,
57 | description: Option<&str>,
58 | tx: &mut Transaction<'_, Postgres>,
59 | ) -> Result {
60 | // Try to find existing shoebox
61 | let shoebox_result = sqlx::query_as::<_, Shoebox>("SELECT * FROM shoeboxes WHERE name = $1")
62 | .bind(name)
63 | .fetch_optional(&mut **tx)
64 | .await
65 | .map_err(AppError::Database)?;
66 |
67 | if let Some(shoebox) = shoebox_result {
68 | return Ok(shoebox.id);
69 | }
70 |
71 | // Create new shoebox
72 | let id = Uuid::new_v4().to_string();
73 | let now = chrono::Utc::now().naive_utc();
74 |
75 | sqlx::query("INSERT INTO shoeboxes (id, name, description, created_at, updated_at) VALUES ($1, $2, $3, $4, $5)")
76 | .bind(&id)
77 | .bind(name)
78 | .bind(description)
79 | .bind(&now)
80 | .bind(&now)
81 | .execute(&mut **tx)
82 | .await
83 | .map_err(AppError::Database)?;
84 |
85 | info!("Created new shoebox: {name} ({id})");
86 | Ok(id)
87 | }
88 |
89 | pub async fn create(&self, dto: CreateShoeboxDto) -> Result {
90 | // Check if shoebox already exists
91 | let existing = sqlx::query_as::<_, Shoebox>("SELECT * FROM shoeboxes WHERE name = $1")
92 | .bind(&dto.name)
93 | .fetch_optional(&self.db)
94 | .await
95 | .map_err(AppError::Database)?;
96 |
97 | if let Some(shoebox) = existing {
98 | return Ok(shoebox);
99 | }
100 |
101 | let shoebox = Shoebox::new(dto.name, dto.description);
102 |
103 | sqlx::query("INSERT INTO shoeboxes (id, name, description, created_at, updated_at) VALUES ($1, $2, $3, $4, $5)")
104 | .bind(&shoebox.id)
105 | .bind(&shoebox.name)
106 | .bind(&shoebox.description)
107 | .bind(&shoebox.created_at)
108 | .bind(&shoebox.updated_at)
109 | .execute(&self.db)
110 | .await
111 | .map_err(AppError::Database)?;
112 |
113 | info!("Created new shoebox: {0} ({1})", shoebox.name, shoebox.id);
114 | Ok(shoebox)
115 | }
116 |
117 | pub async fn update(&self, id: &str, name: &str, description: Option<&str>) -> Result {
118 | // Check if shoebox exists
119 | let shoebox = self.find_by_id(id).await?;
120 |
121 | // Check if the new name already exists
122 | let existing = sqlx::query_as::<_, Shoebox>("SELECT * FROM shoeboxes WHERE name = $1 AND id != $2")
123 | .bind(name)
124 | .bind(id)
125 | .fetch_optional(&self.db)
126 | .await
127 | .map_err(AppError::Database)?;
128 |
129 | if existing.is_some() {
130 | return Err(AppError::BadRequest(format!(
131 | "Shoebox with name '{name}' already exists"
132 | )));
133 | }
134 |
135 | // Update shoebox
136 | let now = chrono::Utc::now().naive_utc();
137 | sqlx::query("UPDATE shoeboxes SET name = $1, description = $2, updated_at = $3 WHERE id = $4")
138 | .bind(name)
139 | .bind(description)
140 | .bind(&now)
141 | .bind(id)
142 | .execute(&self.db)
143 | .await
144 | .map_err(AppError::Database)?;
145 |
146 | info!("Updated shoebox: {0} -> {name} ({id})", shoebox.name);
147 |
148 | // Return updated shoebox
149 | let updated_shoebox = self.find_by_id(id).await?;
150 | Ok(updated_shoebox)
151 | }
152 |
153 | pub async fn delete(&self, id: &str) -> Result<()> {
154 | // Check if shoebox exists
155 | let shoebox = self.find_by_id(id).await?;
156 |
157 | // Check if shoebox is in use
158 | let count: i64 = sqlx::query_scalar("SELECT COUNT(*) FROM video_shoeboxes WHERE shoebox_id = $1")
159 | .bind(id)
160 | .fetch_one(&self.db)
161 | .await
162 | .map_err(AppError::Database)?;
163 |
164 | if count > 0 {
165 | return Err(AppError::BadRequest(format!(
166 | "Cannot delete shoebox '{}' because it contains {} videos",
167 | shoebox.name, count
168 | )));
169 | }
170 |
171 | // Delete shoebox
172 | sqlx::query("DELETE FROM shoeboxes WHERE id = $1")
173 | .bind(id)
174 | .execute(&self.db)
175 | .await
176 | .map_err(AppError::Database)?;
177 |
178 | info!("Deleted shoebox: {} ({})", shoebox.name, id);
179 | Ok(())
180 | }
181 |
182 | pub async fn get_usage(&self) -> Result> {
183 | let rows = sqlx::query(
184 | "SELECT s.id, s.name, s.description, COUNT(vs.video_id) as video_count
185 | FROM shoeboxes s
186 | LEFT JOIN video_shoeboxes vs ON s.id = vs.shoebox_id
187 | GROUP BY s.id
188 | ORDER BY s.name",
189 | )
190 | .fetch_all(&self.db)
191 | .await
192 | .map_err(AppError::Database)?;
193 |
194 | let mut results = Vec::new();
195 | for row in rows {
196 | results.push(ShoeboxUsage {
197 | id: row.get("id"),
198 | name: row.get("name"),
199 | description: row.get("description"),
200 | video_count: row.get("video_count"),
201 | });
202 | }
203 |
204 | Ok(results)
205 | }
206 |
207 | pub async fn cleanup_unused(&self) -> Result {
208 | let result = sqlx::query(
209 | "DELETE FROM shoeboxes
210 | WHERE id NOT IN (SELECT DISTINCT shoebox_id FROM video_shoeboxes)",
211 | )
212 | .execute(&self.db)
213 | .await
214 | .map_err(AppError::Database)?;
215 |
216 | let count = result.rows_affected() as usize;
217 | if count > 0 {
218 | info!("Cleaned up {} unused shoeboxes", count);
219 | }
220 |
221 | Ok(count)
222 | }
223 |
224 | pub async fn add_video_to_shoebox(&self, video_id: &str, shoebox_id: &str) -> Result<()> {
225 | // Check if the relationship already exists
226 | let exists = sqlx::query_scalar::<_, i64>(
227 | "SELECT COUNT(*) FROM video_shoeboxes WHERE video_id = $1 AND shoebox_id = $2",
228 | )
229 | .bind(video_id)
230 | .bind(shoebox_id)
231 | .fetch_one(&self.db)
232 | .await
233 | .map_err(AppError::Database)?;
234 |
235 | if exists > 0 {
236 | return Ok(());
237 | }
238 |
239 | // Add the relationship
240 | sqlx::query(
241 | "INSERT INTO video_shoeboxes (video_id, shoebox_id, created_at) VALUES ($1, $2, $3)",
242 | )
243 | .bind(video_id)
244 | .bind(shoebox_id)
245 | .bind(chrono::Utc::now().naive_utc())
246 | .execute(&self.db)
247 | .await
248 | .map_err(AppError::Database)?;
249 |
250 | info!("Added video {video_id} to shoebox {shoebox_id}");
251 | Ok(())
252 | }
253 |
254 | pub async fn remove_video_from_shoebox(&self, video_id: &str, shoebox_id: &str) -> Result<()> {
255 | sqlx::query(
256 | "DELETE FROM video_shoeboxes WHERE video_id = $1 AND shoebox_id = $2",
257 | )
258 | .bind(video_id)
259 | .bind(shoebox_id)
260 | .execute(&self.db)
261 | .await
262 | .map_err(AppError::Database)?;
263 |
264 | info!("Removed video {video_id} from shoebox {shoebox_id}");
265 | Ok(())
266 | }
267 |
268 | pub async fn get_videos_in_shoebox(&self, shoebox_id: &str) -> Result> {
269 | let video_ids = sqlx::query_scalar::<_, String>(
270 | "SELECT video_id FROM video_shoeboxes WHERE shoebox_id = $1",
271 | )
272 | .bind(shoebox_id)
273 | .fetch_all(&self.db)
274 | .await
275 | .map_err(AppError::Database)?;
276 |
277 | Ok(video_ids)
278 | }
279 |
280 | pub async fn get_shoeboxes_for_video(&self, video_id: &str) -> Result> {
281 | let shoeboxes = sqlx::query_as::<_, Shoebox>(
282 | "SELECT s.* FROM shoeboxes s
283 | JOIN video_shoeboxes vs ON s.id = vs.shoebox_id
284 | WHERE vs.video_id = $1
285 | ORDER BY s.name",
286 | )
287 | .bind(video_id)
288 | .fetch_all(&self.db)
289 | .await
290 | .map_err(AppError::Database)?;
291 |
292 | Ok(shoeboxes)
293 | }
294 | }
295 |
--------------------------------------------------------------------------------