├── web ├── src │ ├── style.css │ ├── features │ │ ├── projects │ │ │ ├── components │ │ │ │ ├── index.ts │ │ │ │ └── ProjectCard.vue │ │ │ ├── types.ts │ │ │ ├── useProjectsCache.ts │ │ │ ├── views │ │ │ │ └── ProjectsView.vue │ │ │ └── projects.ts │ │ ├── tools │ │ │ ├── components │ │ │ │ ├── index.ts │ │ │ │ ├── ToolWarning.vue │ │ │ │ └── ToolCard.vue │ │ │ ├── types.ts │ │ │ ├── views │ │ │ │ ├── ToolsView.vue │ │ │ │ └── SelectionView.vue │ │ │ ├── listenDateUpdater.ts │ │ │ ├── forceMetadata.ts │ │ │ ├── renameSeries.ts │ │ │ ├── pathTagUpdater.ts │ │ │ ├── deleteListeningSessions.ts │ │ │ ├── removeEmptyAuthors.ts │ │ │ ├── splitGenres.ts │ │ │ ├── quickMatchChapters.ts │ │ │ └── migrateServer.ts │ │ ├── clients │ │ │ ├── components │ │ │ │ ├── index.ts │ │ │ │ ├── ApiScoreLegend.vue │ │ │ │ ├── ClientFeatures.vue │ │ │ │ └── ClientCard.vue │ │ │ ├── types.ts │ │ │ └── views │ │ │ │ └── ClientsView.vue │ │ ├── stats │ │ │ ├── components │ │ │ │ ├── index.ts │ │ │ │ ├── StatCard.vue │ │ │ │ ├── DailyAverages.vue │ │ │ │ ├── DeviceList.vue │ │ │ │ ├── TopList.vue │ │ │ │ ├── DayOfWeekChart.vue │ │ │ │ ├── TopBooks.vue │ │ │ │ ├── TimeOfDayChart.vue │ │ │ │ └── StatsConfig.vue │ │ │ └── types.ts │ │ ├── home │ │ │ └── views │ │ │ │ └── HomeView.vue │ │ └── settings │ │ │ └── views │ │ │ └── SettingsView.vue │ ├── main.ts │ ├── shared │ │ ├── types │ │ │ └── filterBar.ts │ │ ├── components │ │ │ ├── EmptyState.vue │ │ │ ├── PageHeader.vue │ │ │ ├── BaseCard.vue │ │ │ ├── index.ts │ │ │ ├── BaseBadge.vue │ │ │ ├── InfoBox.vue │ │ │ ├── LoadingSpinner.vue │ │ │ ├── BaseInput.vue │ │ │ ├── Footer.vue │ │ │ ├── BaseButton.vue │ │ │ ├── FeatureIndicator.vue │ │ │ ├── BaseSelect.vue │ │ │ └── FilterBar.vue │ │ ├── settings.ts │ │ └── composables │ │ │ └── useApi.ts │ ├── common │ │ └── libraryIds.ts │ ├── router │ │ └── index.ts │ └── App.vue ├── env.d.ts ├── public │ ├── logos │ │ ├── ABS.png │ │ ├── buchable.png │ │ ├── lissen.png │ │ ├── AudioBooth.png │ │ ├── github-mark.svg │ │ └── lissen-LICENSE │ ├── icon-32x32.png │ ├── images │ │ ├── cors.png │ │ ├── insecure1.png │ │ ├── insecure2.png │ │ └── metadata.png │ ├── icon-192x192.png │ └── icon-512x512.png ├── tsconfig.json ├── tsconfig.app.json ├── .gitignore ├── tsconfig.node.json ├── vite.config.ts ├── index.html └── package.json ├── .gitattributes ├── README.md ├── tools ├── remove_empty_authors.py ├── cleanup_listening_sessions.py ├── split_genres.py ├── update_descriptions.py ├── plexToAbs.py ├── correct_listening_sessions.py ├── migrate_backup_to_new_server.py └── quick_match_chapters.py └── .gitignore /web/src/style.css: -------------------------------------------------------------------------------- 1 | @import "tailwindcss"; 2 | -------------------------------------------------------------------------------- /web/env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /web/public/logos/ABS.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Vito0912/absToolbox/HEAD/web/public/logos/ABS.png -------------------------------------------------------------------------------- 
/web/public/icon-32x32.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Vito0912/absToolbox/HEAD/web/public/icon-32x32.png -------------------------------------------------------------------------------- /web/public/images/cors.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Vito0912/absToolbox/HEAD/web/public/images/cors.png -------------------------------------------------------------------------------- /web/public/icon-192x192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Vito0912/absToolbox/HEAD/web/public/icon-192x192.png -------------------------------------------------------------------------------- /web/public/icon-512x512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Vito0912/absToolbox/HEAD/web/public/icon-512x512.png -------------------------------------------------------------------------------- /web/public/logos/buchable.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Vito0912/absToolbox/HEAD/web/public/logos/buchable.png -------------------------------------------------------------------------------- /web/public/logos/lissen.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Vito0912/absToolbox/HEAD/web/public/logos/lissen.png -------------------------------------------------------------------------------- /web/src/features/projects/components/index.ts: -------------------------------------------------------------------------------- 1 | export { default as ProjectCard } from "./ProjectCard.vue"; 2 | -------------------------------------------------------------------------------- /web/public/images/insecure1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Vito0912/absToolbox/HEAD/web/public/images/insecure1.png -------------------------------------------------------------------------------- /web/public/images/insecure2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Vito0912/absToolbox/HEAD/web/public/images/insecure2.png -------------------------------------------------------------------------------- /web/public/images/metadata.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Vito0912/absToolbox/HEAD/web/public/images/metadata.png -------------------------------------------------------------------------------- /web/public/logos/AudioBooth.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Vito0912/absToolbox/HEAD/web/public/logos/AudioBooth.png -------------------------------------------------------------------------------- /web/src/features/tools/components/index.ts: -------------------------------------------------------------------------------- 1 | export { default as ToolCard } from "./ToolCard.vue"; 2 | export { default as ToolWarning } from "./ToolWarning.vue"; 3 | export { default as DynamicForm } from "./DynamicForm.vue"; 4 | -------------------------------------------------------------------------------- /web/src/features/clients/components/index.ts: 
-------------------------------------------------------------------------------- 1 | export { default as ClientCard } from "./ClientCard.vue"; 2 | export { default as ClientFeatures } from "./ClientFeatures.vue"; 3 | export { default as ApiScoreLegend } from "./ApiScoreLegend.vue"; 4 | -------------------------------------------------------------------------------- /web/src/main.ts: -------------------------------------------------------------------------------- 1 | import { createApp } from "vue"; 2 | import { createPinia } from "pinia"; 3 | import App from "./App.vue"; 4 | import router from "./router/index"; 5 | 6 | const app = createApp(App); 7 | app.use(createPinia()); 8 | app.use(router); 9 | app.mount("#app"); 10 | -------------------------------------------------------------------------------- /web/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "files": [], 3 | "references": [ 4 | { 5 | "path": "./tsconfig.node.json" 6 | }, 7 | { 8 | "path": "./tsconfig.app.json" 9 | } 10 | ], 11 | "compilerOptions": { 12 | "baseUrl": ".", 13 | "paths": { 14 | "@/*": ["./src/*"] 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /web/tsconfig.app.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@vue/tsconfig/tsconfig.dom.json", 3 | "include": ["env.d.ts", "src/**/*", "src/**/*.vue"], 4 | "exclude": ["src/**/__tests__/*"], 5 | "compilerOptions": { 6 | "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo", 7 | 8 | "paths": { 9 | "@/*": ["./src/*"] 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /web/src/shared/types/filterBar.ts: -------------------------------------------------------------------------------- 1 | export interface FilterOption { 2 | value: string; 3 | label: string; 4 | description?: string; 5 | } 6 | 7 | export interface SortOption { 8 | value: string; 9 | label: string; 10 | } 11 | 12 | export interface FilterBarModelValue { 13 | search: string; 14 | filters: string[]; 15 | sortBy: string; 16 | sortDirection: "asc" | "desc"; 17 | } 18 | -------------------------------------------------------------------------------- /web/.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | pnpm-debug.log* 8 | lerna-debug.log* 9 | 10 | node_modules 11 | .DS_Store 12 | dist 13 | dist-ssr 14 | coverage 15 | *.local 16 | 17 | /cypress/videos/ 18 | /cypress/screenshots/ 19 | 20 | # Editor directories and files 21 | .vscode/* 22 | !.vscode/extensions.json 23 | .idea 24 | *.suo 25 | *.ntvs* 26 | *.njsproj 27 | *.sln 28 | *.sw? 
29 | 30 | *.tsbuildinfo 31 | -------------------------------------------------------------------------------- /web/tsconfig.node.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@tsconfig/node22/tsconfig.json", 3 | "include": [ 4 | "vite.config.*", 5 | "vitest.config.*", 6 | "cypress.config.*", 7 | "nightwatch.conf.*", 8 | "playwright.config.*", 9 | "eslint.config.*" 10 | ], 11 | "compilerOptions": { 12 | "noEmit": true, 13 | "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo", 14 | 15 | "module": "ESNext", 16 | "moduleResolution": "Bundler", 17 | "types": ["node"] 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /web/src/features/stats/components/index.ts: -------------------------------------------------------------------------------- 1 | export { default as StatCard } from "./StatCard.vue"; 2 | export { default as TopList } from "./TopList.vue"; 3 | export { default as TopBooks } from "./TopBooks.vue"; 4 | export { default as DailyAverages } from "./DailyAverages.vue"; 5 | export { default as TimeOfDayChart } from "./TimeOfDayChart.vue"; 6 | export { default as DayOfWeekChart } from "./DayOfWeekChart.vue"; 7 | export { default as DeviceList } from "./DeviceList.vue"; 8 | export { default as StatsConfig } from "./StatsConfig.vue"; 9 | -------------------------------------------------------------------------------- /web/src/shared/components/EmptyState.vue: -------------------------------------------------------------------------------- 1 | 12 | 13 | 20 | -------------------------------------------------------------------------------- /web/src/common/libraryIds.ts: -------------------------------------------------------------------------------- 1 | import { useApi } from "@/shared/composables/useApi"; 2 | 3 | const { get } = useApi(); 4 | 5 | export async function fetchLibraryIds(libraryIds: string[]): Promise { 6 | const response = await get("/api/libraries"); 7 | 8 | let allLibraries = response.data.libraries || []; 9 | 10 | if (libraryIds.length !== 0) { 11 | allLibraries = allLibraries.filter((library: { id: string }) => 12 | libraryIds.includes(library.id), 13 | ); 14 | } 15 | return allLibraries.map((library: { id: string }) => library.id); 16 | } 17 | -------------------------------------------------------------------------------- /web/src/shared/components/PageHeader.vue: -------------------------------------------------------------------------------- 1 | 14 | 15 | 21 | -------------------------------------------------------------------------------- /web/vite.config.ts: -------------------------------------------------------------------------------- 1 | import { fileURLToPath, URL } from "node:url"; 2 | import { resolve } from "path"; 3 | import { defineConfig } from "vite"; 4 | import vue from "@vitejs/plugin-vue"; 5 | import vueDevTools from "vite-plugin-vue-devtools"; 6 | import tailwindcss from "@tailwindcss/vite"; 7 | 8 | // https://vite.dev/config/ 9 | export default defineConfig({ 10 | plugins: [vue(), vueDevTools(), tailwindcss()], 11 | resolve: { 12 | alias: { 13 | "@": resolve(__dirname, "src"), 14 | }, 15 | }, 16 | build: { 17 | outDir: "dist", 18 | rollupOptions: { 19 | input: resolve(__dirname, "index.html"), 20 | }, 21 | }, 22 | }); 23 | -------------------------------------------------------------------------------- /web/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 
| 9 | 10 | ABS Toolbox 11 | 12 | 13 | 14 |
15 | 16 | 17 | 18 | -------------------------------------------------------------------------------- /web/src/shared/components/BaseCard.vue: -------------------------------------------------------------------------------- 1 | 13 | 14 | 28 | -------------------------------------------------------------------------------- /web/src/shared/components/index.ts: -------------------------------------------------------------------------------- 1 | export { default as PageHeader } from "./PageHeader.vue"; 2 | export { default as BaseButton } from "./BaseButton.vue"; 3 | export { default as BaseCard } from "./BaseCard.vue"; 4 | export { default as EmptyState } from "./EmptyState.vue"; 5 | export { default as BaseBadge } from "./BaseBadge.vue"; 6 | export { default as InfoBox } from "./InfoBox.vue"; 7 | export { default as BaseInput } from "./BaseInput.vue"; 8 | export { default as BaseSelect } from "./BaseSelect.vue"; 9 | export { default as LoadingSpinner } from "./LoadingSpinner.vue"; 10 | export { default as FeatureIndicator } from "./FeatureIndicator.vue"; 11 | export { default as FilterBar } from "./FilterBar.vue"; 12 | -------------------------------------------------------------------------------- /web/src/shared/settings.ts: -------------------------------------------------------------------------------- 1 | import { defineStore } from "pinia"; 2 | import { ref } from "vue"; 3 | import type { Settings } from "@/features/tools/types"; 4 | 5 | export const useSettingsStore = defineStore("settings", () => { 6 | const settings = ref({ 7 | serverUrl: "", 8 | authMethod: "token", 9 | apiToken: "", 10 | username: "", 11 | password: "", 12 | }); 13 | 14 | const saveSettings = () => { 15 | localStorage.setItem( 16 | "abs-toolbox-settings", 17 | JSON.stringify(settings.value), 18 | ); 19 | }; 20 | 21 | const loadSettings = () => { 22 | const saved = localStorage.getItem("abs-toolbox-settings"); 23 | if (saved) { 24 | Object.assign(settings.value, JSON.parse(saved)); 25 | } 26 | }; 27 | 28 | loadSettings(); 29 | 30 | return { 31 | settings, 32 | saveSettings, 33 | loadSettings, 34 | }; 35 | }); 36 | -------------------------------------------------------------------------------- /web/src/features/projects/types.ts: -------------------------------------------------------------------------------- 1 | export interface GitHubStats { 2 | stars: number; 3 | forks: number; 4 | issues: number; 5 | language: string; 6 | lastUpdated: string; 7 | description: string; 8 | topics: string[]; 9 | } 10 | 11 | export interface Project { 12 | id: string; 13 | name: string; 14 | link: string; 15 | description?: string; 16 | authors: string[]; 17 | authorLinks: string[]; 18 | tags: string[]; 19 | githubStats?: GitHubStats; 20 | lastFetched?: string; 21 | testedVersion?: string; 22 | } 23 | 24 | export interface ProjectsCache { 25 | projects: Project[]; 26 | lastUpdated: string; 27 | } 28 | 29 | export interface ProjectFilter { 30 | search: string; 31 | tags: string[]; 32 | sortBy: "name" | "stars" | "lastUpdated" | "forks"; 33 | sortDirection: "asc" | "desc"; 34 | } 35 | 36 | export interface TagDefinition { 37 | name: string; 38 | description: string; 39 | } 40 | -------------------------------------------------------------------------------- /web/src/shared/components/BaseBadge.vue: -------------------------------------------------------------------------------- 1 | 9 | 10 | 34 | -------------------------------------------------------------------------------- /web/public/logos/github-mark.svg: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /web/src/shared/components/InfoBox.vue: -------------------------------------------------------------------------------- 1 | 9 | 10 | 33 | -------------------------------------------------------------------------------- /web/src/features/stats/components/StatCard.vue: -------------------------------------------------------------------------------- 1 | 25 | 26 | 33 | -------------------------------------------------------------------------------- /web/src/shared/components/LoadingSpinner.vue: -------------------------------------------------------------------------------- 1 | 13 | 14 | 42 | -------------------------------------------------------------------------------- /web/src/features/tools/types.ts: -------------------------------------------------------------------------------- 1 | export interface ToolField { 2 | name: string; 3 | type: 4 | | "string" 5 | | "boolean" 6 | | "stringArray" 7 | | "select" 8 | | "librarySelector" 9 | | "singleLibrarySelector" 10 | | "libraryItemsSelector" 11 | | "date"; 12 | label: string; 13 | description?: string; 14 | required?: boolean; 15 | options?: string[]; 16 | placeholder?: string; 17 | default?: string | boolean | string[]; 18 | } 19 | 20 | export interface ToolResult { 21 | success: boolean; 22 | message: string; 23 | error?: string; 24 | timestamp: string; 25 | } 26 | 27 | export interface ToolDefinition { 28 | id: string; 29 | title: string; 30 | description: string; 31 | longDescription?: string; 32 | fields: ToolField[]; 33 | enabled?: boolean; 34 | execute: (formData: Record) => Promise; 35 | } 36 | 37 | export interface Settings { 38 | serverUrl: string; 39 | authMethod: "token" | "credentials"; 40 | apiToken: string; 41 | username: string; 42 | password: string; 43 | } 44 | -------------------------------------------------------------------------------- /web/src/features/tools/views/ToolsView.vue: -------------------------------------------------------------------------------- 1 | 21 | 22 | 36 | -------------------------------------------------------------------------------- /web/src/features/tools/components/ToolWarning.vue: -------------------------------------------------------------------------------- 1 | 20 | 21 | 32 | -------------------------------------------------------------------------------- /web/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "web", 3 | "version": "0.0.0", 4 | "private": true, 5 | "type": "module", 6 | "engines": { 7 | "node": "^20.19.0 || >=22.12.0" 8 | }, 9 | "scripts": { 10 | "dev": "vite --host 0.0.0.0", 11 | "build": "run-p type-check \"build-only {@}\" --", 12 | "preview": "vite preview", 13 | "build-only": "vite build", 14 | "type-check": "vue-tsc --build" 15 | }, 16 | "dependencies": { 17 | "@tailwindcss/cli": "^4.1.11", 18 | "@tailwindcss/vite": "^4.1.11", 19 | "axios": "^1.11.0", 20 | "lucide-vue-next": "^0.555.0", 21 | "pinia": "^3.0.3", 22 | "vue": "^3.5.18", 23 | "vue-router": "^4.5.1" 24 | }, 25 | "devDependencies": { 26 | "@tsconfig/node22": "^22.0.2", 27 | "@types/node": "^22.16.5", 28 | "@vitejs/plugin-vue": "^6.0.1", 29 | "@vue/tsconfig": "^0.7.0", 30 | "autoprefixer": "^10.4.21", 31 | "npm-run-all2": "^8.0.4", 32 | "postcss": "^8.5.6", 33 | "tailwindcss": "^4.1.11", 34 | "typescript": "~5.8.0", 35 | "vite": "^7.0.6", 36 | "vite-plugin-vue-devtools": "^8.0.0", 37 
| "vue-tsc": "^3.0.4" 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /web/public/logos/lissen-LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Max Grakov 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /web/src/features/tools/listenDateUpdater.ts: -------------------------------------------------------------------------------- 1 | import { useApi } from "@/shared/composables/useApi"; 2 | import type { ToolResult } from "./types"; 3 | 4 | const { get, patch, addLog } = useApi(); 5 | 6 | export async function executeListenDateUpdater( 7 | formData: Record, 8 | ): Promise { 9 | try { 10 | const { libraryItemIds, dateToSet } = formData; 11 | 12 | const newDate: Date = new Date(dateToSet); 13 | if (isNaN(newDate.getTime())) { 14 | throw new Error("Invalid date format"); 15 | } 16 | 17 | addLog( 18 | `Setting listen date to ${newDate.toISOString()} for ${libraryItemIds.length} items`, 19 | ); 20 | 21 | await patch( 22 | "/api/me/progress/batch/update", 23 | libraryItemIds.map((id: string) => ({ 24 | libraryItemId: id, 25 | finishedAt: newDate.getTime(), 26 | })), 27 | ); 28 | 29 | addLog("Listen dates updated successfully"); 30 | 31 | return { 32 | success: true, 33 | message: "Listening state updated successfully", 34 | timestamp: new Date().toISOString(), 35 | }; 36 | } catch (error: any) { 37 | return { 38 | success: false, 39 | message: "Failed to update listening state", 40 | error: error.message || "Unknown error", 41 | timestamp: new Date().toISOString(), 42 | }; 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /web/src/features/clients/types.ts: -------------------------------------------------------------------------------- 1 | export interface ClientInfo { 2 | name: string; 3 | link: string; 4 | iconLink?: string; 5 | language: string; 6 | OSes: ("Android" | "iOS" | "Windows" | "macOS" | "Linux" | "Web")[]; 7 | cost: "Free" | "Freemium" | "Paid"; 8 | heavyAiUsage?: boolean; 9 | openSource: boolean; 10 | notes?: ClientNote[]; 11 | features: ClientFeatures; 12 | } 13 | 14 | export interface ClientNote { 15 | color: string; 16 | text: string; 17 | } 18 | 19 | export interface OIDCInfo { 20 | available: boolean; 21 | mobileRedirectURI?: string; 22 | } 23 | 24 | export interface 
ClientFeatures { 25 | tested: { 26 | wasTested: boolean; 27 | date?: string; 28 | usedApiCorrectly?: 0 | 1 | 2 | 3 | 4 | 5; 29 | comments?: string[]; 30 | }; 31 | audiobooks?: boolean; 32 | ebooks: { 33 | available: boolean; 34 | ePubSupport?: boolean; 35 | pdfSupport?: boolean; 36 | annotations?: boolean; 37 | }; 38 | podcasts?: boolean; 39 | queue?: boolean; 40 | autoqueue?: boolean; 41 | sleepTimer?: boolean; 42 | chapters?: boolean; 43 | playHistory?: boolean; 44 | shakeToRewind?: boolean; 45 | carSupport?: boolean; 46 | downloads?: boolean; 47 | customHeaders?: boolean; 48 | caching?: boolean; 49 | biggerScreens?: boolean; 50 | oidc?: OIDCInfo; 51 | widgets?: boolean; 52 | bookmarks?: boolean; 53 | } 54 | -------------------------------------------------------------------------------- /web/src/shared/components/BaseInput.vue: -------------------------------------------------------------------------------- 1 | 21 | 22 | 44 | -------------------------------------------------------------------------------- /web/src/router/index.ts: -------------------------------------------------------------------------------- 1 | import { createRouter, createWebHistory } from "vue-router"; 2 | 3 | const router = createRouter({ 4 | history: createWebHistory(import.meta.env.BASE_URL), 5 | scrollBehavior(_to, _from, savedPosition) { 6 | if (savedPosition) { 7 | return savedPosition; 8 | } 9 | return { top: 0 }; 10 | }, 11 | routes: [ 12 | { 13 | path: "/", 14 | name: "home", 15 | component: () => import("@/features/home/views/HomeView.vue"), 16 | }, 17 | { 18 | path: "/tools", 19 | name: "tools", 20 | component: () => import("@/features/tools/views/SelectionView.vue"), 21 | }, 22 | { 23 | path: "/tool/:id", 24 | name: "tool", 25 | component: () => import("@/features/tools/views/ToolsView.vue"), 26 | }, 27 | { 28 | path: "/projects", 29 | name: "projects", 30 | component: () => import("@/features/projects/views/ProjectsView.vue"), 31 | }, 32 | { 33 | path: "/clients", 34 | name: "clients", 35 | component: () => import("@/features/clients/views/ClientsView.vue"), 36 | }, 37 | { 38 | path: "/settings", 39 | name: "settings", 40 | component: () => import("@/features/settings/views/SettingsView.vue"), 41 | }, 42 | { 43 | path: "/stats", 44 | name: "stats", 45 | component: () => import("@/features/stats/views/StatsView.vue"), 46 | }, 47 | ], 48 | }); 49 | 50 | export default router; 51 | -------------------------------------------------------------------------------- /web/src/features/tools/views/SelectionView.vue: -------------------------------------------------------------------------------- 1 | 25 | 26 | 46 | -------------------------------------------------------------------------------- /web/src/features/tools/components/ToolCard.vue: -------------------------------------------------------------------------------- 1 | 29 | 30 | 42 | 43 | 52 | -------------------------------------------------------------------------------- /web/src/features/tools/forceMetadata.ts: -------------------------------------------------------------------------------- 1 | import { useApi } from "@/shared/composables/useApi"; 2 | import { fetchLibraryIds } from "@/common/libraryIds"; 3 | import type { ToolResult } from "./types"; 4 | 5 | const { get, post, baseDomain, addLog } = useApi(); 6 | 7 | export async function executeForceMetadata( 8 | formData: Record, 9 | ): Promise { 10 | const { libraryIds } = formData; 11 | 12 | const validLibraryIds = await fetchLibraryIds(libraryIds); 13 | 14 | let payload = []; 15 | 16 | for (const 
libraryId of validLibraryIds) { 17 | const items = 18 | (await get(`/api/libraries/${libraryId}/items`)).data.results || []; 19 | 20 | for (const item of items) { 21 | const push = { 22 | id: item.id, 23 | mediaPayload: { 24 | tags: [...new Set([...(item.media?.tags || []), "force-metadata"])], 25 | }, 26 | }; 27 | 28 | payload.push(push); 29 | 30 | addLog( 31 | `Adding "force-metadata" tag to book: ${item.media.metadata.title} (${item.id}) in library ${libraryId}`, 32 | ); 33 | } 34 | } 35 | 36 | addLog( 37 | `Adding "force-metadata" tag to ${payload.length} books in ${validLibraryIds.length} libraries.`, 38 | ); 39 | 40 | await post("/api/items/batch/update", [...payload]); 41 | 42 | return { 43 | success: true, 44 | message: `Metadata force tag added successfully for ${payload.length} books in ${validLibraryIds.length} libraries. Go to ${baseDomain.value}/config/item-metadata-utils/tags to remove the tag \'force-metadata\'.`, 45 | timestamp: new Date().toISOString(), 46 | }; 47 | } 48 | -------------------------------------------------------------------------------- /web/src/shared/components/Footer.vue: -------------------------------------------------------------------------------- 1 | 43 | -------------------------------------------------------------------------------- /web/src/features/clients/components/ApiScoreLegend.vue: -------------------------------------------------------------------------------- 1 | 35 | 36 | 44 | -------------------------------------------------------------------------------- /web/src/shared/components/BaseButton.vue: -------------------------------------------------------------------------------- 1 | 15 | 16 | 57 | -------------------------------------------------------------------------------- /web/src/features/stats/components/DailyAverages.vue: -------------------------------------------------------------------------------- 1 | 37 | 38 | 56 | -------------------------------------------------------------------------------- /web/src/shared/components/FeatureIndicator.vue: -------------------------------------------------------------------------------- 1 | 7 | 8 | 49 | -------------------------------------------------------------------------------- /web/src/shared/components/BaseSelect.vue: -------------------------------------------------------------------------------- 1 | 28 | 29 | 60 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # absToolbox 2 | 3 | # NEW 4 | 5 | Now available at https://abstoolbox.vito0912.de/ with easy access. Run these tools in seconds without any other program needed. Also works with local ABS servers. 6 | 7 | --- 8 | 9 | A collection of tools designed to assist with Audiobookshelf-related tasks. 10 | 11 | > [!CAUTION] 12 | > All scripts may have unforeseen side effects or might even crash the server completely. Therefore, before starting any script, you should make a backup via the settings! Do not use the Python scripts anymore. These are legacy! 13 | 14 | > [!NOTE] 15 | > Old, but still valid note:\ 16 | > Currently, these are ~~only~~ Python scripts. However, there are plans to implement plugin support: 17 | > [Plugin Support Pull Request](https://github.com/advplyr/audiobookshelf/pull/3745) 18 | > 19 | > Should this happen, I will try to integrate all scripts into ABS and create additional ones.
Depending on how access to the database/UI is provided, possible developments include: 20 | > - An issue tracker with an interface for management (if it is possible to register a settings page) 21 | > - Ratings (if it is possible to access the database and/or create a custom table, and if the item view is customizable) 22 | > - Shared listening states (if there is access to the database) 23 | 24 | 25 | ## Available Tools 26 | 27 | Visit https://abstoolbox.vito0912.de 28 | 29 | ## Legacy Installation & Usage 30 | 31 | Follow these steps to set up and use absToolbox (old Python scripts): 32 | 33 | 1. Clone this repository to your local machine: 34 | ```bash 35 | git clone https://github.com/vito0912/absToolbox 36 | cd absToolbox/tools 37 | ``` 38 | 39 | 2. Ensure Python is installed on your system. 40 | 41 | 3. Open the script you wish to use and configure the variables at the top of the file according to your requirements. 42 | 43 | 4. Run the script by executing the following command in your terminal: 44 | ```bash 45 | python <script_name.py> 46 | ``` 47 | 48 | Replace `<script_name.py>` with the name of the script you want to run. 49 | -------------------------------------------------------------------------------- /web/src/features/home/views/HomeView.vue: -------------------------------------------------------------------------------- 1 | 34 | 35 | 62 | -------------------------------------------------------------------------------- /web/src/features/stats/components/DeviceList.vue: -------------------------------------------------------------------------------- 1 | 52 | 53 | 60 | -------------------------------------------------------------------------------- /web/src/features/tools/renameSeries.ts: -------------------------------------------------------------------------------- 1 | import { useApi } from "@/shared/composables/useApi"; 2 | import type { ToolResult } from "./types"; 3 | 4 | const { get, post, addLog } = useApi(); 5 | 6 | async function getAllBooksForSeries(seriesId: string, libraryId: string) { 7 | try { 8 | const base64SeriesId = btoa(unescape(encodeURIComponent(seriesId))) 9 | .replace(/\+/g, "-") 10 | .replace(/\//g, "_") 11 | .replace(/=+$/, ""); 12 | 13 | const response = await get( 14 | `/api/libraries/${libraryId}/items?filter=series.${base64SeriesId}`, 15 | ); 16 | console.log( 17 | `Fetched ${response.data.results.length} books for series ${seriesId} in library ${libraryId}`, 18 | ); 19 | return response.data.results || []; 20 | } catch (error) { 21 | console.error( 22 | `Error fetching books for series ${seriesId} in library ${libraryId}:`, 23 | error, 24 | ); 25 | return []; 26 | } 27 | } 28 | 29 | export async function executeRenameSeries( 30 | formData: Record, 31 | ): Promise { 32 | const { libraryId, seriesId, newName } = formData; 33 | 34 | const seriesBooks = await getAllBooksForSeries(seriesId, libraryId); 35 | const books = ( 36 | await post(`/api/items/batch/get`, { 37 | libraryItemIds: seriesBooks.map((book: any) => book.id), 38 | }) 39 | ).data.libraryItems; 40 | 41 | const payload = []; 42 | 43 | for (const book of books) { 44 | const oldSeries = book.media.metadata.series; 45 | 46 | for (const series of oldSeries) { 47 | if (series.id === seriesId) { 48 | series.name = newName; 49 | } 50 | } 51 | 52 | const push = { 53 | id: book.id, 54 | mediaPayload: { 55 | metadata: { 56 | series: oldSeries, 57 | }, 58 | }, 59 | }; 60 | payload.push(push); 61 | } 62 | 63 | await post("/api/items/batch/update", [...payload]); 64 | 65 | addLog(`Renamed series to "${newName}" for 
${payload.length} books`); 66 | 67 | return { 68 | success: true, 69 | message: `Series renamed successfully.`, 70 | timestamp: new Date().toISOString(), 71 | }; 72 | } 73 | -------------------------------------------------------------------------------- /tools/remove_empty_authors.py: -------------------------------------------------------------------------------- 1 | # This scripts deletes all authors that have no books associated with them. 2 | import requests 3 | 4 | # Configuration constants 5 | ABS_HOST = "" # AudiobookShelf Host URL 6 | LIBRARY_ID = None # Leave None to process all libraries you have 7 | API_KEY = "" # API Key from user settings 8 | DELETE_WITHOUT_CONFIRMATION = False # Set to True to delete authors without confirmation 9 | 10 | 11 | library_ids = [] 12 | if LIBRARY_ID is None: 13 | all_libraries_response = requests.get(f"{ABS_HOST}/api/libraries?token={API_KEY}") 14 | for library in all_libraries_response.json()['libraries']: 15 | library_ids.append(library['id']) 16 | else: 17 | library_ids.append(LIBRARY_ID) 18 | 19 | print(f"Processing {len(library_ids)} libraries") 20 | 21 | for library_id in library_ids: 22 | print(f"Processing library {library_id}") 23 | authors_response = requests.get(f"{ABS_HOST}/api/libraries/{library_id}/authors?token={API_KEY}") 24 | authors_to_delete = [] 25 | for author in authors_response.json()['authors']: 26 | author_id = author['id'] 27 | author_name = author['name'] 28 | author_num_books = author['numBooks'] 29 | if author_num_books == 0: 30 | authors_to_delete.append((author_id, author_name, author_num_books)) 31 | print(f"\n\nFound {len(authors_to_delete)} authors without books") 32 | print("The following authors will be deleted:") 33 | for author in authors_to_delete: 34 | print(f"{author[1]} ({author[2]} books) - ID: {author[0]}") 35 | if not DELETE_WITHOUT_CONFIRMATION: 36 | user_input = input("Do you want to delete these authors? (y/n): ") 37 | if user_input.lower() != "y": 38 | print("Skipping deletion") 39 | continue 40 | print("\n\n----------------------\n\n") 41 | for author in authors_to_delete: 42 | author_id = author[0] 43 | delete_response = requests.delete(f"{ABS_HOST}/api/authors/{author_id}?token={API_KEY}") 44 | if delete_response.status_code == 200: 45 | print(f"Deleted author {author[1]}") 46 | else: 47 | print(f"Error deleting author {author[1]}: {delete_response.status_code}") 48 | print("\n\n----------------------\n\n") 49 | 50 | print("Done") -------------------------------------------------------------------------------- /tools/cleanup_listening_sessions.py: -------------------------------------------------------------------------------- 1 | # MAKE A BACKUP BEFORE USE! DATA IS NOT RECOVERABLE IF THERE IS A PROBLEM WITH RETURNED DATA! 2 | # This script automatically deletes all listening sessions that are larger than a given threshold. 3 | # Depending on the size of the database, this script might take a while to run. 4 | import sys 5 | import time 6 | import requests 7 | 8 | # Configuration constants 9 | ABS_HOST = "" # AudiobookShelf Host URL 10 | USER_IDS = [] # The users that should be processed to delete. Keep empty to process all users. 11 | API_KEY = "" 12 | LISTENING_SESSION_THRESHOLD = 16 # Threshold in hours to delete listening sessions. Everything larger than this will be deleted. 13 | SESSIONS_TO_FETCH = 2000000 # Number of sessions to fetch per user. The script does not use pagination as it is a one-time script. Just set to a high number to fetch all sessions. 
14 | 15 | 16 | ######### Code ######### 17 | 18 | if len(USER_IDS) == 0: 19 | print(f"{ABS_HOST}/api/users?token={API_KEY}") 20 | 21 | # Use Bearer token to get user data 22 | user_response = requests.get(f"{ABS_HOST}/api/users?token={API_KEY}") 23 | print(user_response.reason) 24 | for user in user_response.json()['users']: 25 | USER_IDS.append(user['id']) 26 | 27 | print(f"Processing {len(USER_IDS)} users") 28 | 29 | for user_id in USER_IDS: 30 | sessions_response = requests.get(f"{ABS_HOST}/api/users/{user_id}/listening-sessions?itemsPerPage={SESSIONS_TO_FETCH}&token={API_KEY}") 31 | sessions_to_delete = [] 32 | session_time_not_deleted = 0 33 | session_time_deleted = 0 34 | for session in sessions_response.json()['sessions']: 35 | session_id = session['id'] 36 | if session['timeListening'] is None: 37 | continue 38 | session_duration = session['timeListening'] / 3600 39 | if session_duration > LISTENING_SESSION_THRESHOLD: 40 | session_time_deleted += session_duration 41 | print("Session greater than threshold:", session_id, session_duration, "hours") 42 | sessions_to_delete.append((session_id, session_duration)) 43 | else: 44 | session_time_not_deleted += session_duration 45 | print(f"User {user_id} has {len(sessions_to_delete)} sessions to delete with a total duration of {session_time_deleted} hours. ({session_time_not_deleted} hours not deleted)") 46 | 47 | for session in sessions_to_delete: 48 | session_id = session[0] 49 | delete_response = requests.delete(f"{ABS_HOST}/api/sessions/{session_id}?token={API_KEY}") 50 | if delete_response.status_code == 200: 51 | print(f"Deleted session {session_id}") 52 | else: 53 | print(f"Error deleting session {session_id}: {delete_response.status_code}") 54 | print("\n----------------------\n") 55 | 56 | print("Done") -------------------------------------------------------------------------------- /web/src/features/tools/pathTagUpdater.ts: -------------------------------------------------------------------------------- 1 | import { useApi } from "@/shared/composables/useApi"; 2 | import type { ToolResult } from "./types"; 3 | 4 | const { get, post, addLog } = useApi(); 5 | 6 | export async function executePathTagUpdater( 7 | formData: Record, 8 | ): Promise { 9 | const { libraryId, ruleSets, dryRun, type } = formData; 10 | 11 | const books = (await get(`/api/libraries/${libraryId}/items`)).data.results; 12 | 13 | for (const ruleSet of ruleSets) { 14 | const payload = []; 15 | const lastColonIndex = ruleSet.lastIndexOf(":"); 16 | let pathRegex, tag; 17 | 18 | for (const book of books) { 19 | if (lastColonIndex !== -1) { 20 | pathRegex = ruleSet.substring(0, lastColonIndex); 21 | tag = ruleSet.substring(lastColonIndex + 1); 22 | } else { 23 | throw new Error(`Invalid rule set format: ${ruleSet}`); 24 | } 25 | 26 | const regex = new RegExp(pathRegex); 27 | 28 | console.log( 29 | `Checking book ${book.media.metadata.title} with path ${book.path} against regex ${regex}`, 30 | ); 31 | 32 | if (book.path.match(regex)) { 33 | type == "tags" 34 | ? 
payload.push({ 35 | id: book.id, 36 | mediaPayload: { 37 | tags: [...new Set([...(book.media?.tags || []), tag])], 38 | }, 39 | }) 40 | : payload.push({ 41 | id: book.id, 42 | mediaPayload: { 43 | metadata: { 44 | genres: [ 45 | ...new Set([...(book.media?.metadata?.genres || []), tag]), 46 | ], 47 | }, 48 | }, 49 | }); 50 | } 51 | } 52 | 53 | if (payload.length > 0 && !dryRun) { 54 | try { 55 | await post("/api/items/batch/update", [...payload]); 56 | addLog(`Updated ${payload.length} books for rule "${ruleSet}"`); 57 | } catch (error: unknown) { 58 | throw new Error( 59 | `Failed to update books for rule "${ruleSet}": ${error instanceof Error ? error.message : String(error)}`, 60 | ); 61 | } 62 | } 63 | if (dryRun) { 64 | for (const book of payload) { 65 | const fullBook = books.find((b: any) => b.id === book.id); 66 | addLog( 67 | `Dry run: would update book ${fullBook.media.metadata.title} with tag "${tag}" based on path "${fullBook.path}"`, 68 | ); 69 | } 70 | } 71 | if (payload.length === 0) { 72 | addLog(`No books matched for rule "${ruleSet}"`); 73 | } 74 | } 75 | 76 | return { 77 | success: true, 78 | message: dryRun 79 | ? `Dry run completed. No changes applied.` 80 | : `Tags updated successfully.`, 81 | timestamp: new Date().toISOString(), 82 | }; 83 | } 84 | -------------------------------------------------------------------------------- /web/src/features/stats/components/TopList.vue: -------------------------------------------------------------------------------- 1 | 54 | 55 | 83 | -------------------------------------------------------------------------------- /web/src/features/projects/components/ProjectCard.vue: -------------------------------------------------------------------------------- 1 | 61 | 62 | 76 | 77 | 94 | -------------------------------------------------------------------------------- /web/src/App.vue: -------------------------------------------------------------------------------- 1 | 79 | 80 | 83 | -------------------------------------------------------------------------------- /web/src/features/stats/components/DayOfWeekChart.vue: -------------------------------------------------------------------------------- 1 | 52 | 53 | 108 | -------------------------------------------------------------------------------- /web/src/shared/composables/useApi.ts: -------------------------------------------------------------------------------- 1 | import axios from "axios"; 2 | import type { AxiosInstance, AxiosRequestConfig } from "axios"; 3 | import { computed, ref } from "vue"; 4 | import { useSettingsStore } from "@/shared/settings"; 5 | 6 | const executionLogs = ref([]); 7 | const executionStartTime = ref(null); 8 | const isExecuting = ref(false); 9 | 10 | export function useApi() { 11 | const settingsStore = useSettingsStore(); 12 | 13 | const apiClient = computed((): AxiosInstance => { 14 | const client = axios.create({ 15 | baseURL: settingsStore.settings.serverUrl, 16 | timeout: 600000, // 10 minutes for now 17 | headers: { 18 | "Content-Type": "application/json", 19 | }, 20 | }); 21 | 22 | client.interceptors.request.use((config) => { 23 | if (settingsStore.settings.authMethod === "token") { 24 | config.headers.Authorization = `Bearer ${settingsStore.settings.apiToken}`; 25 | } 26 | return config; 27 | }); 28 | 29 | client.interceptors.response.use( 30 | (response) => response, 31 | (error) => { 32 | console.error("API Error:", error); 33 | return Promise.reject(error); 34 | }, 35 | ); 36 | 37 | return client; 38 | }); 39 | 40 | const get = (url: string, config?: 
AxiosRequestConfig) => { 41 | return apiClient.value.get(url, config); 42 | }; 43 | 44 | const post = (url: string, data?: any, config?: AxiosRequestConfig) => { 45 | return apiClient.value.post(url, data, config); 46 | }; 47 | 48 | const put = (url: string, data?: any, config?: AxiosRequestConfig) => { 49 | return apiClient.value.put(url, data, config); 50 | }; 51 | 52 | const patch = (url: string, data?: any, config?: AxiosRequestConfig) => { 53 | return apiClient.value.patch(url, data, config); 54 | }; 55 | 56 | const del = (url: string, config?: AxiosRequestConfig) => { 57 | return apiClient.value.delete(url, config); 58 | }; 59 | 60 | const baseDomain = computed(() => { 61 | const url = new URL(settingsStore.settings.serverUrl); 62 | return url; 63 | }); 64 | 65 | const startExecution = () => { 66 | executionLogs.value = []; 67 | executionStartTime.value = Date.now(); 68 | isExecuting.value = true; 69 | }; 70 | 71 | const stopExecution = () => { 72 | isExecuting.value = false; 73 | executionStartTime.value = null; 74 | }; 75 | 76 | const addLog = (...messages: string[]) => { 77 | const timestamp = new Date().toLocaleTimeString(); 78 | const combinedMessage = messages.filter(Boolean).join(" "); 79 | executionLogs.value.push(`[${timestamp}] ${combinedMessage}`); 80 | }; 81 | 82 | const clearLogs = () => { 83 | executionLogs.value = []; 84 | }; 85 | 86 | const getElapsedTime = () => { 87 | if (!executionStartTime.value) return "0:00"; 88 | const elapsed = Math.floor((Date.now() - executionStartTime.value) / 1000); 89 | const minutes = Math.floor(elapsed / 60); 90 | const seconds = elapsed % 60; 91 | return `${minutes}:${seconds.toString().padStart(2, "0")}`; 92 | }; 93 | 94 | return { 95 | apiClient, 96 | get, 97 | post, 98 | put, 99 | patch, 100 | del, 101 | baseDomain, 102 | executionLogs, 103 | isExecuting, 104 | startExecution, 105 | stopExecution, 106 | addLog, 107 | clearLogs, 108 | getElapsedTime, 109 | }; 110 | } 111 | -------------------------------------------------------------------------------- /web/src/features/tools/deleteListeningSessions.ts: -------------------------------------------------------------------------------- 1 | import { useApi } from "@/shared/composables/useApi"; 2 | import type { ToolResult } from "./types"; 3 | 4 | const { get, apiClient, addLog } = useApi(); 5 | 6 | async function getAllUsers() { 7 | try { 8 | const response = await get("/api/users"); 9 | return response.data.users || []; 10 | } catch (error) { 11 | console.error("Error fetching users:", error); 12 | return []; 13 | } 14 | } 15 | 16 | async function getListeningSessions(userId: string, itemsPerPage: number) { 17 | try { 18 | const response = await get( 19 | `/api/users/${userId}/listening-sessions?itemsPerPage=${itemsPerPage}`, 20 | ); 21 | return response.data.sessions || []; 22 | } catch (error) { 23 | console.error(`Error fetching sessions for user ${userId}:`, error); 24 | return []; 25 | } 26 | } 27 | 28 | async function deleteSession(sessionId: string) { 29 | try { 30 | await apiClient.value.delete(`/api/sessions/${sessionId}`); 31 | return true; 32 | } catch (error) { 33 | console.error(`Error deleting session ${sessionId}:`, error); 34 | return false; 35 | } 36 | } 37 | 38 | export async function executeDeleteListeningSessions( 39 | formData: Record, 40 | ): Promise { 41 | try { 42 | const { userIds = [], threshold, sessionsToFetch } = formData; 43 | 44 | let processableUsers = userIds; 45 | 46 | if (processableUsers.length === 0) { 47 | const users = await getAllUsers(); 48 | 
processableUsers = users.map((user: { id: string }) => user.id); 49 | } 50 | 51 | addLog(`Processing ${processableUsers.length} users`); 52 | 53 | for (const userId of processableUsers) { 54 | const sessions = await getListeningSessions(userId, sessionsToFetch); 55 | const sessionsToDelete = []; 56 | let sessionTimeNotDeleted = 0; 57 | let sessionTimeDeleted = 0; 58 | 59 | for (const session of sessions) { 60 | if (!session.timeListening) continue; 61 | 62 | const sessionDuration = session.timeListening / 3600; 63 | if (sessionDuration > threshold) { 64 | sessionTimeDeleted += sessionDuration; 65 | addLog( 66 | `Session greater than threshold: ${session.id} ${sessionDuration} hours`, 67 | ); 68 | sessionsToDelete.push([session.id, sessionDuration]); 69 | } else { 70 | sessionTimeNotDeleted += sessionDuration; 71 | } 72 | } 73 | 74 | addLog( 75 | `User ${userId} has ${sessionsToDelete.length} sessions to delete with a total duration of ${sessionTimeDeleted.toFixed(2)} hours. (${sessionTimeNotDeleted.toFixed(2)} hours not deleted)`, 76 | ); 77 | 78 | for (const [sessionId, duration] of sessionsToDelete) { 79 | const success = await deleteSession(sessionId); 80 | if (success) { 81 | addLog(`Deleted session ${sessionId}`); 82 | } else { 83 | addLog(`Error deleting session ${sessionId}`); 84 | } 85 | } 86 | 87 | addLog(" "); 88 | } 89 | 90 | return { 91 | success: true, 92 | message: "Listening sessions deleted successfully", 93 | timestamp: new Date().toISOString(), 94 | }; 95 | } catch (error: any) { 96 | return { 97 | success: false, 98 | message: "Failed to delete listening sessions", 99 | error: error.message || "Unknown error", 100 | timestamp: new Date().toISOString(), 101 | }; 102 | } 103 | } 104 | -------------------------------------------------------------------------------- /web/src/features/stats/components/TopBooks.vue: -------------------------------------------------------------------------------- 1 | 65 | 66 | 102 | -------------------------------------------------------------------------------- /tools/split_genres.py: -------------------------------------------------------------------------------- 1 | # This script splits genres that contain ", " into multiple genres so you can more easily search for a single tag. 2 | import base64 3 | 4 | import requests 5 | 6 | # Configuration constants 7 | 8 | # PLEASE MAKE A BACKUP BEFORE RUNNING THIS SCRIPT! 
9 | 10 | ABS_HOST = "" # AudiobookShelf Host URL 11 | API_KEY = "" 12 | LIBRARY_IDS = [] # Leave empty to process all libraries 13 | SKIP_GENRES = ['Mystery, Thriller & Suspense'] # Genres to skip 14 | 15 | 16 | ###### Code ###### 17 | 18 | def get_all_genres(): 19 | response = requests.get( 20 | f"{ABS_HOST}/api/genres", 21 | params={"token": API_KEY} 22 | ) 23 | response.raise_for_status() 24 | return response.json()['genres'] 25 | 26 | 27 | # Returns all genres that include ", " 28 | def get_multi_genres(genres): 29 | multi_genres = [] 30 | for genre in genres: 31 | if ", " in genre: 32 | if genre not in SKIP_GENRES: 33 | multi_genres.append(genre) 34 | return multi_genres 35 | 36 | 37 | def get_all_books_for_genre(genre: str, id): 38 | base64_genre = genre.encode('utf-8') 39 | url_encoded_genre = base64.urlsafe_b64encode(base64_genre).decode('utf-8') 40 | 41 | response = requests.get( 42 | f"{ABS_HOST}/api/libraries/{id}/items", 43 | params={ 44 | "limit": 99999, 45 | "filter": f"genres.{url_encoded_genre}", 46 | "token": API_KEY 47 | } 48 | ) 49 | response.raise_for_status() 50 | return response.json()['results'] 51 | 52 | 53 | def append_genre_to_book(book, genre): 54 | genres = genre.split(", ") 55 | book_genres = book['media']['metadata']['genres'] 56 | 57 | # Join the two lists and remove duplicates 58 | book_genres = list(set(book_genres + genres)) 59 | # Remove the genre from the book 60 | book_genres.remove(genre) 61 | 62 | book_id = book['id'] 63 | 64 | patch_body = {"metadata": {"genres": book_genres}} 65 | 66 | response = requests.patch( 67 | f"{ABS_HOST}/api/items/{book_id}/media", 68 | params={"token": API_KEY}, 69 | json=patch_body 70 | ) 71 | response.raise_for_status() 72 | 73 | 74 | def get_all_libraries(): 75 | response = requests.get( 76 | f"{ABS_HOST}/api/libraries", 77 | params={"token": API_KEY} 78 | ) 79 | response.raise_for_status() 80 | return response.json()['libraries'] 81 | 82 | 83 | if __name__ == "__main__": 84 | 85 | if len(LIBRARY_IDS) == 0: 86 | libraries = get_all_libraries() 87 | for library in libraries: 88 | LIBRARY_IDS.append(library['id']) 89 | 90 | print(f"Processing {len(LIBRARY_IDS)} libraries") 91 | print(LIBRARY_IDS) 92 | print("----------------------") 93 | multi_genres = get_multi_genres(get_all_genres()) 94 | 95 | for genre in multi_genres: 96 | print(f"Processing genre {genre}", end="\n\n") 97 | book_titles_overall = [] 98 | for library_id in LIBRARY_IDS: 99 | books = get_all_books_for_genre(genre, library_id) 100 | book_titles = [book['media']['metadata']['title'] for book in books] 101 | book_titles_overall += book_titles 102 | for book in books: 103 | try: 104 | append_genre_to_book(book, genre) 105 | except Exception as e: 106 | print(f"Error processing book {book['id']}: {e}") 107 | print(f"Processed {len(book_titles_overall)} books for genre {genre}: {book_titles_overall}", end="\n\n") 108 | print("----------------------", end="\n") 109 | -------------------------------------------------------------------------------- /web/src/features/stats/components/TimeOfDayChart.vue: -------------------------------------------------------------------------------- 1 | 64 | 65 | 108 | -------------------------------------------------------------------------------- /web/src/features/clients/views/ClientsView.vue: -------------------------------------------------------------------------------- 1 | 33 | 34 | 122 | -------------------------------------------------------------------------------- /.gitignore: 
-------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | .idea/ 10 | .idea 11 | .idea/* 12 | 13 | # Distribution / packaging 14 | .Python 15 | build/ 16 | develop-eggs/ 17 | dist/ 18 | downloads/ 19 | eggs/ 20 | .eggs/ 21 | lib/ 22 | lib64/ 23 | parts/ 24 | sdist/ 25 | var/ 26 | wheels/ 27 | share/python-wheels/ 28 | *.egg-info/ 29 | .installed.cfg 30 | *.egg 31 | MANIFEST 32 | 33 | # PyInstaller 34 | # Usually these files are written by a python script from a template 35 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 36 | *.manifest 37 | *.spec 38 | 39 | # Installer logs 40 | pip-log.txt 41 | pip-delete-this-directory.txt 42 | 43 | # Unit test / coverage reports 44 | htmlcov/ 45 | .tox/ 46 | .nox/ 47 | .coverage 48 | .coverage.* 49 | .cache 50 | nosetests.xml 51 | coverage.xml 52 | *.cover 53 | *.py,cover 54 | .hypothesis/ 55 | .pytest_cache/ 56 | cover/ 57 | 58 | # Translations 59 | *.mo 60 | *.pot 61 | 62 | # Django stuff: 63 | *.log 64 | local_settings.py 65 | db.sqlite3 66 | db.sqlite3-journal 67 | 68 | # Flask stuff: 69 | instance/ 70 | .webassets-cache 71 | 72 | # Scrapy stuff: 73 | .scrapy 74 | 75 | # Sphinx documentation 76 | docs/_build/ 77 | 78 | # PyBuilder 79 | .pybuilder/ 80 | target/ 81 | 82 | # Jupyter Notebook 83 | .ipynb_checkpoints 84 | 85 | # IPython 86 | profile_default/ 87 | ipython_config.py 88 | 89 | # pyenv 90 | # For a library or package, you might want to ignore these files since the code is 91 | # intended to run in multiple environments; otherwise, check them in: 92 | # .python-version 93 | 94 | # pipenv 95 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 96 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 97 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 98 | # install all needed dependencies. 99 | #Pipfile.lock 100 | 101 | # poetry 102 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 103 | # This is especially recommended for binary packages to ensure reproducibility, and is more 104 | # commonly ignored for libraries. 105 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 106 | #poetry.lock 107 | 108 | # pdm 109 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. 110 | #pdm.lock 111 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it 112 | # in version control. 113 | # https://pdm.fming.dev/#use-with-ide 114 | .pdm.toml 115 | 116 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm 117 | __pypackages__/ 118 | 119 | # Celery stuff 120 | celerybeat-schedule 121 | celerybeat.pid 122 | 123 | # SageMath parsed files 124 | *.sage.py 125 | 126 | # Environments 127 | .env 128 | .venv 129 | env/ 130 | venv/ 131 | ENV/ 132 | env.bak/ 133 | venv.bak/ 134 | 135 | # Spyder project settings 136 | .spyderproject 137 | .spyproject 138 | 139 | # Rope project settings 140 | .ropeproject 141 | 142 | # mkdocs documentation 143 | /site 144 | 145 | # mypy 146 | .mypy_cache/ 147 | .dmypy.json 148 | dmypy.json 149 | 150 | # Pyre type checker 151 | .pyre/ 152 | 153 | # pytype static type analyzer 154 | .pytype/ 155 | 156 | # Cython debug symbols 157 | cython_debug/ 158 | 159 | # PyCharm 160 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can 161 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 162 | # and can be added to the global gitignore or merged into this file. For a more nuclear 163 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 164 | #.idea/ 165 | -------------------------------------------------------------------------------- /tools/update_descriptions.py: -------------------------------------------------------------------------------- 1 | # This script automatically updates the description for library items using the AudiobookShelf API. 2 | 3 | import time 4 | import requests 5 | from urllib import parse 6 | 7 | # Configuration constants 8 | ABS_HOST = "" # AudiobookShelf Host URL 9 | LIBRARY_ID = "" # Library UUID 10 | API_KEY = "" # API Key from user settings 11 | PROVIDER = "audible.com" # Metadata provider (See available providers in API documentation https://api.audiobookshelf.org/#metadata-providers) 12 | DISABLE_RATE_PROTECTION = True # Rate protection, disable to speed up but risk timeouts 13 | 14 | 15 | ############################################################################################################ 16 | 17 | book_info = {} 18 | 19 | # Fetch all library items 20 | library_url = f"{ABS_HOST}/api/libraries/{LIBRARY_ID}/items?token={API_KEY}" 21 | print(f"Fetching library items from: {library_url}") 22 | response = requests.get(library_url) 23 | 24 | if response.status_code != 200: 25 | print("Error fetching library items:", response.status_code) 26 | exit() 27 | 28 | items = response.json().get('results', []) 29 | print(f"Found {len(items)} items in the library.") 30 | 31 | # Process each item in the library 32 | for count, (item) in enumerate(items): 33 | book_id = item['id'] 34 | metadata = item['media']['metadata'] 35 | title = metadata.get('title', "Unknown Title") 36 | authors = metadata.get('authorName', "Unknown Author") 37 | book_info[book_id] = { 'id': book_id, 'title': title, 'status': 'ERROR', 'comment': 'Unknown Error' } 38 | 39 | print(f"\n--- Processing Book: {title} ---") 40 | 41 | if 'asin' not in metadata or metadata['asin'] is None: 42 | book_info[book_id]['status'] = 'NO_ASIN' 43 | match_url = f"{ABS_HOST}/api/search/books?title={parse.quote(title)}&author={parse.quote(authors)}&provider={PROVIDER}&token={API_KEY}" 44 | else: 45 | book_info[book_id]['status'] = 'ASIN_SEARCH' 46 | match_url = f"{ABS_HOST}/api/search/books?title={metadata['asin']}&provider={PROVIDER}&token={API_KEY}" 47 | 48 | new_item_response = requests.get(match_url) 49 | 50 | if new_item_response.status_code != 200: 51 | print(f" Error matching book '{title}':", new_item_response.json()) 52 | 
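For update_descriptions.py above, the per-book lookup is a single call to the ABS book search endpoint with the title and author URL-encoded (or the stored ASIN passed as the title). A minimal sketch; host, key and the example book are placeholders.

from urllib import parse
import requests

ABS_HOST = "<abs-host>"   # assumption
API_KEY = "<api-key>"     # assumption
PROVIDER = "audible.com"

title, author = "Example Title", "Example Author"   # placeholder book
match_url = (
    f"{ABS_HOST}/api/search/books"
    f"?title={parse.quote(title)}&author={parse.quote(author)}"
    f"&provider={PROVIDER}&token={API_KEY}"
)
results = requests.get(match_url).json()
# The script takes the first match and only reads its description field
description = results[0].get("description") if results else None
print(description or "no description found")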
book_info[book_id]['comment'] = 'Error matching book' 53 | continue 54 | 55 | if len(new_item_response.json()) == 0: 56 | print(f" Error matching book '{title}' (No results found).") 57 | book_info[book_id]['comment'] = 'No results found' 58 | continue 59 | 60 | best_match = new_item_response.json()[0] 61 | description = best_match.get('description', None) 62 | 63 | if description is None: 64 | print(f" Error matching book '{title}' (No description found).") 65 | book_info[book_id]['comment'] = 'Description retrieval failed - No description found' 66 | continue 67 | 68 | print(f" Description found") 69 | new_metadata = {} 70 | new_metadata['description'] = description 71 | 72 | # Update description for the book 73 | update_url = f"{ABS_HOST}/api/items/{book_id}/media?token={API_KEY}" 74 | update_data = {"metadata": new_metadata} 75 | update_response = requests.patch(update_url, json=update_data) 76 | 77 | if update_response.status_code == 200: 78 | print(f" Description updated successfully for '{title}'.") 79 | book_info[book_id]['status'] = 'FINISHED' 80 | book_info[book_id]['comment'] = 'Description updated' 81 | else: 82 | print(f" Error updating description for '{title}'. Response Code:", update_response.status_code) 83 | book_info[book_id]['comment'] = 'Description update failed' 84 | 85 | # Respect rate-limiting if enabled 86 | if not DISABLE_RATE_PROTECTION: 87 | time.sleep(2) 88 | 89 | 90 | print("\n--- Summary ---") 91 | for book_id, info in book_info.items(): 92 | print(f"{info['title']} ({info['status']}): {info['comment']}") 93 | 94 | print("\n--- Failed Books ---") 95 | for book_id, info in book_info.items(): 96 | if info['status'] != 'FINISHED': 97 | print(f"{info['title']} ({info['status']}): {info['comment']}") 98 | 99 | -------------------------------------------------------------------------------- /tools/plexToAbs.py: -------------------------------------------------------------------------------- 1 | import requests 2 | import json 3 | import os 4 | 5 | absHost = 'http://localhost:3333' 6 | absToken = '' 7 | plexHost = 'http://localhost:32400' 8 | plexToken = '' # Not tested. I don't needed one (I assume it has something to do when connecting with a plex account/claim) 9 | plexSectionId = 1 # Tbh no idea what this is for, for me it's one 10 | params = { 11 | 'X-Plex-Token': plexToken, 12 | 'type': 10, 13 | 'includeCollections': 1, 14 | 'includeExternalMedia': 1, 15 | 'includeAdvanced': 1, 16 | 'includeMeta': 1, 17 | } 18 | 19 | absHeaders = { 20 | 'Authorization': f'Bearer {absToken}', 21 | 'Accept': 'application/json', 22 | } 23 | 24 | absLibraries = requests.get(f"{absHost}/api/libraries?token={absToken}", timeout=30, headers=absHeaders) 25 | absLibraries.raise_for_status() 26 | 27 | allLibraryIds = [library['id'] for library in absLibraries.json()['libraries']] 28 | 29 | libraryItemsId = [] 30 | 31 | for libraryId in allLibraryIds: 32 | libraryItemsResponse = requests.get(f"{absHost}/api/libraries/{libraryId}/items?token={absToken}", timeout=30, headers=absHeaders) 33 | libraryItemsResponse.raise_for_status() 34 | libraryItemsId.extend([item['id'] for item in libraryItemsResponse.json()['results']]) 35 | 36 | print('Fetching items from AudiobookShelf... 
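The actual write in update_descriptions.py is one PATCH per item carrying only the changed metadata field. Sketch with placeholder host, key and item ID; the payload shape follows the script above.

import requests

ABS_HOST = "<abs-host>"          # assumption
API_KEY = "<api-key>"            # assumption
book_id = "<library-item-id>"    # assumption

resp = requests.patch(
    f"{ABS_HOST}/api/items/{book_id}/media",
    params={"token": API_KEY},
    json={"metadata": {"description": "Example description text"}},  # only the field being changed
)
print("updated" if resp.status_code == 200 else f"failed: {resp.status_code}")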
(this may take a while)') 37 | itemResponse = requests.post(f"{absHost}/api/items/batch/get?token={absToken}", json={ 38 | 'libraryItemIds': libraryItemsId}, timeout=6000, headers=absHeaders) 39 | itemResponse.raise_for_status() 40 | libraryItems = itemResponse.json()['libraryItems'] 41 | print(f"Found {len(libraryItemsId)} items in AudiobookShelf with IDs") 42 | 43 | 44 | plexItemResponse = requests.get(f"{plexHost}/library/sections/{plexSectionId}/all", params=params, timeout=30, headers={ 45 | 'Accept': 'application/json' 46 | }) 47 | plexItemResponse.raise_for_status() 48 | plexItems = plexItemResponse.json()["MediaContainer"]["Metadata"] 49 | 50 | print(f"Found {len(libraryItems)} items in AudiobookShelf") 51 | print(f"Found {len(plexItems)} items in Plex") 52 | 53 | progressUpdates = [] 54 | 55 | for plexItem in plexItems: 56 | plexTrack = plexItem["Media"][0]["Part"][0] 57 | print(json.dumps(plexItem, indent=2)) 58 | plexDuration = plexItem.get("duration", 0) 59 | plexOffset = plexItem.get("viewOffset") 60 | if plexOffset is None: 61 | view_count = plexItem.get("viewCount", 0) 62 | plexOffset = plexDuration if view_count and view_count > 0 else 0 63 | plexViewedAt = plexItem.get("lastViewedAt") 64 | plexPercent = 100 if plexDuration > 0 and (plexOffset / plexDuration * 100) >= 99 else ( 65 | (plexOffset / plexDuration * 100) if plexDuration > 0 else 0) 66 | 67 | if plexViewedAt is None: 68 | print(f"Skipping item {plexItem['title']} - no viewedAt timestamp") 69 | continue 70 | 71 | for libraryItem in libraryItems: 72 | libraryFiles = libraryItem["libraryFiles"] 73 | 74 | for libraryFile in libraryFiles: 75 | if os.path.basename(libraryFile["metadata"]["path"]) == os.path.basename(plexTrack["file"]): 76 | print(f"Found match: {libraryItem['media']['metadata']['title']} ({libraryItem['id']})") 77 | 78 | progressUpdates.append({ 79 | "libraryItemId": libraryItem['id'], 80 | "duration": plexDuration / 1000, 81 | "progress": plexPercent / 100, 82 | "currentTime": plexOffset / 1000, 83 | "isFinished": plexPercent >= 100, 84 | "finishedAt": None if plexPercent < 100 else plexViewedAt, 85 | "startedAt": plexViewedAt, 86 | }) 87 | 88 | break 89 | 90 | print(f"Found {len(progressUpdates)} items to update progress for") 91 | updateRequest = requests.patch(f"{absHost}/api/me/progress/batch/update", json=progressUpdates, timeout=6000, headers=absHeaders) 92 | if updateRequest.status_code != 200: 93 | print(f"Error updating progress: {updateRequest.status_code} - {updateRequest.text}") 94 | print("Progress updates sent to AudiobookShelf") 95 | print("Done") -------------------------------------------------------------------------------- /web/src/features/clients/components/ClientFeatures.vue: -------------------------------------------------------------------------------- 1 | 103 | 104 | 112 | -------------------------------------------------------------------------------- /web/src/features/tools/removeEmptyAuthors.ts: -------------------------------------------------------------------------------- 1 | import { fetchLibraryIds } from "@/common/libraryIds"; 2 | import { useApi } from "@/shared/composables/useApi"; 3 | import type { ToolResult } from "./types"; 4 | 5 | const { get, apiClient, addLog } = useApi(); 6 | 7 | async function getAllLibraries() { 8 | try { 9 | const response = await get("/api/libraries"); 10 | return response.data.libraries || []; 11 | } catch (error) { 12 | console.error("Error fetching libraries:", error); 13 | return []; 14 | } 15 | } 16 | 17 | async function 
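A worked example of the Plex-to-ABS progress conversion done in plexToAbs.py above: Plex reports milliseconds, ABS expects seconds and a 0-1 fraction, and anything at 99% or more is rounded up to finished. The numbers and IDs below are invented.

plex_duration_ms = 36_000_000       # 10 h audiobook as reported by Plex
plex_view_offset_ms = 35_700_000    # 9 h 55 min listened
plex_last_viewed_at = 1700000000    # placeholder Plex lastViewedAt timestamp

percent = plex_view_offset_ms / plex_duration_ms * 100
percent = 100 if percent >= 99 else percent      # >= 99% counts as finished

progress_update = {
    "libraryItemId": "<abs-item-id>",            # assumption
    "duration": plex_duration_ms / 1000,         # seconds
    "progress": percent / 100,                   # 0..1 fraction
    "currentTime": plex_view_offset_ms / 1000,   # seconds
    "isFinished": percent >= 100,
    "finishedAt": plex_last_viewed_at if percent >= 100 else None,
    "startedAt": plex_last_viewed_at,
}
print(progress_update)   # one entry of the /api/me/progress/batch/update payload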
getLibraryAuthors(libraryId: string) { 18 | try { 19 | const response = await get(`/api/libraries/${libraryId}/authors`); 20 | return response.data.authors || []; 21 | } catch (error) { 22 | console.error(`Error fetching authors for library ${libraryId}:`, error); 23 | return []; 24 | } 25 | } 26 | 27 | async function deleteAuthor(authorId: string) { 28 | try { 29 | await apiClient.value.delete(`/api/authors/${authorId}`); 30 | return true; 31 | } catch (error) { 32 | console.error(`Error deleting author ${authorId}:`, error); 33 | return false; 34 | } 35 | } 36 | 37 | export async function executeRemoveEmptyAuthors( 38 | formData: Record, 39 | ): Promise { 40 | try { 41 | const { libraryIds = [], deleteWithoutConfirmation = false } = formData; 42 | 43 | const processableLibraries = await fetchLibraryIds(libraryIds); 44 | 45 | addLog(`Processing ${processableLibraries.length} libraries`); 46 | 47 | let totalAuthorsToDelete = 0; 48 | let totalAuthorsDeleted = 0; 49 | let totalAuthorsDeletionFailed = 0; 50 | 51 | for (const libraryId of processableLibraries) { 52 | addLog(`\nProcessing library ${libraryId}`); 53 | 54 | const authors = await getLibraryAuthors(libraryId); 55 | const authorsToDelete: Array<{ 56 | id: string; 57 | name: string; 58 | numBooks: number; 59 | }> = []; 60 | 61 | // Find authors with 0 books 62 | for (const author of authors) { 63 | if (author.numBooks === 0) { 64 | authorsToDelete.push({ 65 | id: author.id, 66 | name: author.name, 67 | numBooks: author.numBooks, 68 | }); 69 | } 70 | } 71 | 72 | addLog(`Found ${authorsToDelete.length} authors without books`); 73 | 74 | if (authorsToDelete.length === 0) { 75 | addLog("No orphaned authors found in this library"); 76 | continue; 77 | } 78 | 79 | addLog("The following authors will be deleted:"); 80 | for (const author of authorsToDelete) { 81 | addLog( 82 | `- ${author.name} (${author.numBooks} books) - ID: ${author.id}`, 83 | ); 84 | } 85 | 86 | totalAuthorsToDelete += authorsToDelete.length; 87 | 88 | for (const author of authorsToDelete) { 89 | const success = await deleteAuthor(author.id); 90 | if (success) { 91 | addLog(`Deleted author: ${author.name}`); 92 | totalAuthorsDeleted++; 93 | } else { 94 | addLog(`Error deleting author: ${author.name}`); 95 | totalAuthorsDeletionFailed++; 96 | } 97 | } 98 | 99 | addLog("--- Deletion Complete ---\n"); 100 | } 101 | 102 | addLog("\n=== SUMMARY ==="); 103 | addLog(`Total authors found to delete: ${totalAuthorsToDelete}`); 104 | addLog(`Total authors successfully deleted: ${totalAuthorsDeleted}`); 105 | addLog(`Total authors failed to delete: ${totalAuthorsDeletionFailed}`); 106 | 107 | const message = deleteWithoutConfirmation 108 | ? `Successfully processed ${processableLibraries.length} libraries. Deleted ${totalAuthorsDeleted} orphaned authors.` 109 | : `Found ${totalAuthorsToDelete} orphaned authors across ${processableLibraries.length} libraries. 
Enable 'Delete Without Confirmation' to actually delete them.`; 110 | 111 | return { 112 | success: true, 113 | message: message, 114 | timestamp: new Date().toISOString(), 115 | }; 116 | } catch (error: any) { 117 | return { 118 | success: false, 119 | message: "Failed to delete orphaned authors", 120 | error: error.message || "Unknown error", 121 | timestamp: new Date().toISOString(), 122 | }; 123 | } 124 | } 125 | -------------------------------------------------------------------------------- /tools/correct_listening_sessions.py: -------------------------------------------------------------------------------- 1 | # This script can correct listening session that are to long/inaccurate. 2 | import os 3 | import sys 4 | import time 5 | import requests 6 | 7 | # Configuration constants 8 | ABS_HOST = "" # AudiobookShelf Host URL 9 | USER_ID = "" # The users that should be processed. 10 | API_KEY = "" 11 | 12 | 13 | ######### Code ######### 14 | 15 | def seconds_to_time_string(seconds): 16 | return time.strftime('%H:%M:%S', time.gmtime(seconds)) 17 | 18 | 19 | def fetch_sessions(page): 20 | response = requests.get( 21 | f"{ABS_HOST}/api/users/{USER_ID}/listening-sessions", 22 | params={"itemsPerPage": 10, "page": page, "token": API_KEY} 23 | ) 24 | response.raise_for_status() 25 | return response.json() 26 | 27 | 28 | def display_sessions(sessions): 29 | print("\n" * 100) # Clear console output 30 | for i, session in enumerate(sessions['sessions'], start=1): 31 | index = sessions['page'] * 10 + i 32 | title = session['mediaMetadata']['title'] 33 | listening_time = seconds_to_time_string(session['timeListening']) 34 | print(f"{index}. {title} - {listening_time}") 35 | 36 | 37 | def select_session(sessions, selection): 38 | session_index = selection - 1 - (sessions['page'] * 10) 39 | try: 40 | session = sessions['sessions'][session_index] 41 | return session 42 | except IndexError: 43 | print("Invalid selection. Please try again.") 44 | return None 45 | 46 | 47 | def adjust_session(session): 48 | title = session['mediaMetadata']['title'] 49 | listening_time = session['timeListening'] 50 | start_time = session['startTime'] 51 | end_time = session['currentTime'] 52 | session_id = session['id'] 53 | 54 | print("\nYou have selected the following session:") 55 | print(f"Title: {title}") 56 | print(f"Listening Time: {seconds_to_time_string(listening_time)}") 57 | print(f"Start Time: {seconds_to_time_string(start_time)}") 58 | print(f"End Time: {seconds_to_time_string(end_time)}") 59 | 60 | suggested_time = (end_time - start_time) / 3600 61 | user_input = input( 62 | f"Please enter the new listening time for the session (in hours, e.g., 2.5). The suggested value is {suggested_time:.3f}: ") 63 | 64 | try: 65 | if user_input == "": 66 | new_listening_time = suggested_time * 3600 67 | else: 68 | new_listening_time = float(user_input) * 3600 69 | session['timeListening'] = new_listening_time 70 | return session 71 | except ValueError: 72 | print("Invalid input. 
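A worked example of the suggested value computed by adjust_session() in correct_listening_sessions.py above: the gap between the session's start and current position, shown in hours and stored back as seconds. The values are invented.

start_time = 1_200.0       # seconds into the book when the session began
current_time = 10_200.0    # seconds into the book when it ended

suggested_hours = (current_time - start_time) / 3600     # 2.5 h suggested
new_time_listening = suggested_hours * 3600               # 9000 s written back
print(f"suggested {suggested_hours:.3f} h -> timeListening = {new_time_listening:.0f} s")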
Please enter a numeric value.") 73 | return None 74 | 75 | 76 | def update_session(session): 77 | session_id = session['id'] 78 | delete_response = requests.delete( 79 | f"{ABS_HOST}/api/sessions/{session_id}", 80 | params={"token": API_KEY} 81 | ) 82 | if delete_response.status_code == 200: 83 | print(f"Session {session_id} deleted.") 84 | create_response = requests.post( 85 | f"{ABS_HOST}/api/session/local", 86 | params={"token": API_KEY}, 87 | json=session 88 | ) 89 | if create_response.status_code == 200: 90 | print(f"Session {session_id} updated.") 91 | else: 92 | print(f"Error creating session {session_id}: {create_response.status_code}") 93 | else: 94 | print(f"Error deleting session {session_id}: {delete_response.status_code}") 95 | 96 | 97 | def main(): 98 | page_index = 0 99 | 100 | while True: 101 | sessions = fetch_sessions(page_index) 102 | display_sessions(sessions) 103 | 104 | user_input = input('\nType "next", "exit", or the number of the session you want to edit: ') 105 | 106 | if user_input.lower() == 'exit': 107 | break 108 | elif user_input.lower() == 'next': 109 | page_index += 1 110 | else: 111 | try: 112 | session_number = int(user_input) 113 | session = select_session(sessions, session_number) 114 | if session: 115 | adjusted_session = adjust_session(session) 116 | if adjusted_session: 117 | update_session(adjusted_session) 118 | except ValueError: 119 | print("Invalid input. Please try again.") 120 | 121 | 122 | if __name__ == "__main__": 123 | main() 124 | -------------------------------------------------------------------------------- /web/src/features/projects/useProjectsCache.ts: -------------------------------------------------------------------------------- 1 | import { ref } from "vue"; 2 | import type { Project, GitHubStats, ProjectsCache } from "./types"; 3 | 4 | const CACHE_KEY = "abs-toolbox-projects-cache"; 5 | const CACHE_DURATION = 24 * 60 * 60 * 1000; 6 | 7 | export const useProjectsCache = () => { 8 | const isLoading = ref(false); 9 | 10 | const getCachedProjects = (): ProjectsCache | null => { 11 | try { 12 | const cached = localStorage.getItem(CACHE_KEY); 13 | if (!cached) return null; 14 | 15 | const data = JSON.parse(cached) as ProjectsCache; 16 | const now = new Date().getTime(); 17 | const cacheTime = new Date(data.lastUpdated).getTime(); 18 | 19 | if (now - cacheTime < CACHE_DURATION) { 20 | return data; 21 | } 22 | return null; 23 | } catch (error) { 24 | console.error("Error reading projects cache:", error); 25 | return null; 26 | } 27 | }; 28 | 29 | const setCachedProjects = (projects: Project[]) => { 30 | try { 31 | const cache: ProjectsCache = { 32 | projects, 33 | lastUpdated: new Date().toISOString(), 34 | }; 35 | localStorage.setItem(CACHE_KEY, JSON.stringify(cache)); 36 | } catch (error) { 37 | console.error("Error saving projects cache:", error); 38 | } 39 | }; 40 | 41 | const extractGitHubRepoInfo = (url: string) => { 42 | const match = url.match(/github\.com\/([^\/]+)\/([^\/]+)/); 43 | if (!match) return null; 44 | return { owner: match[1], repo: match[2] }; 45 | }; 46 | 47 | const fetchGitHubStats = async (url: string): Promise => { 48 | const repoInfo = extractGitHubRepoInfo(url); 49 | if (!repoInfo) return null; 50 | 51 | try { 52 | const response = await fetch( 53 | `https://api.github.com/repos/${repoInfo.owner}/${repoInfo.repo}`, 54 | ); 55 | if (!response.ok) { 56 | console.warn( 57 | `Failed to fetch GitHub stats for ${url}: ${response.status}`, 58 | ); 59 | return null; 60 | } 61 | 62 | const data = await response.json(); 
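update_session() above applies the change by deleting the session and re-posting the adjusted copy to /api/session/local. Minimal sketch; host, key and the session object are placeholders, and the real object would contain every field returned by the API.

import requests

ABS_HOST = "<abs-host>"   # assumption
API_KEY = "<api-key>"     # assumption

# Adjusted copy of the full session object returned by the API (abridged here)
session = {"id": "<session-id>", "timeListening": 9000}

requests.delete(f"{ABS_HOST}/api/sessions/{session['id']}", params={"token": API_KEY}).raise_for_status()
requests.post(f"{ABS_HOST}/api/session/local", params={"token": API_KEY}, json=session).raise_for_status()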
63 | return { 64 | stars: data.stargazers_count || 0, 65 | forks: data.forks_count || 0, 66 | issues: data.open_issues_count || 0, 67 | language: data.language || "Unknown", 68 | lastUpdated: data.updated_at || "", 69 | description: data.description || "", 70 | topics: data.topics || [], 71 | }; 72 | } catch (error) { 73 | console.error(`Error fetching GitHub stats for ${url}:`, error); 74 | return null; 75 | } 76 | }; 77 | 78 | const fetchProjectsWithStats = async ( 79 | baseProjects: Project[], 80 | ): Promise => { 81 | isLoading.value = true; 82 | 83 | try { 84 | const cached = getCachedProjects(); 85 | if (cached) { 86 | isLoading.value = false; 87 | return cached.projects; 88 | } 89 | 90 | const projectsWithStats = await Promise.all( 91 | baseProjects.map(async (project) => { 92 | const githubStats = await fetchGitHubStats(project.link); 93 | return { 94 | ...project, 95 | githubStats: githubStats || undefined, 96 | lastFetched: new Date().toISOString(), 97 | }; 98 | }), 99 | ); 100 | 101 | setCachedProjects(projectsWithStats); 102 | isLoading.value = false; 103 | return projectsWithStats; 104 | } catch (error) { 105 | console.error("Error fetching projects with stats:", error); 106 | isLoading.value = false; 107 | return baseProjects; 108 | } 109 | }; 110 | 111 | const clearCache = () => { 112 | try { 113 | localStorage.removeItem(CACHE_KEY); 114 | } catch (error) { 115 | console.error("Error clearing projects cache:", error); 116 | } 117 | }; 118 | 119 | const getCacheAge = (): string | null => { 120 | const cached = getCachedProjects(); 121 | if (!cached) return null; 122 | 123 | const cacheTime = new Date(cached.lastUpdated); 124 | const now = new Date(); 125 | const diffMs = now.getTime() - cacheTime.getTime(); 126 | const diffHours = Math.floor(diffMs / (1000 * 60 * 60)); 127 | const diffMinutes = Math.floor((diffMs % (1000 * 60 * 60)) / (1000 * 60)); 128 | 129 | if (diffHours > 0) { 130 | return `${diffHours}h ${diffMinutes}m ago`; 131 | } else { 132 | return `${diffMinutes}m ago`; 133 | } 134 | }; 135 | 136 | return { 137 | isLoading, 138 | fetchProjectsWithStats, 139 | clearCache, 140 | getCacheAge, 141 | getCachedProjects, 142 | }; 143 | }; 144 | -------------------------------------------------------------------------------- /web/src/features/tools/splitGenres.ts: -------------------------------------------------------------------------------- 1 | import { useApi } from "@/shared/composables/useApi"; 2 | import type { ToolResult } from "./types"; 3 | 4 | async function getAllBooksForGenre( 5 | genre: string, 6 | libraryId: string, 7 | type: string, 8 | ) { 9 | const { get, addLog } = useApi(); 10 | try { 11 | const base64Genre = btoa(unescape(encodeURIComponent(genre))) 12 | .replace(/\+/g, "-") 13 | .replace(/\//g, "_") 14 | .replace(/=+$/, ""); 15 | 16 | const response = await get( 17 | `/api/libraries/${libraryId}/items?filter=${type}.${base64Genre}`, 18 | ); 19 | const bookCount = response.data.results.length; 20 | addLog( 21 | `Found ${bookCount} books for genre "${genre}" in library ${libraryId}`, 22 | ); 23 | return response.data.results || []; 24 | } catch (error) { 25 | addLog(`Error fetching books for genre "${genre}" in library ${libraryId}`); 26 | console.error( 27 | `Error fetching books for genre ${genre} in library ${libraryId}:`, 28 | error, 29 | ); 30 | return []; 31 | } 32 | } 33 | 34 | async function appendGenreToBook( 35 | book: any, 36 | genre: string, 37 | delimiter: string, 38 | type: "genres" | "tags", 39 | ) { 40 | const { patch, addLog } = 
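The repository statistics cached by useProjectsCache.ts above come from the public GitHub REST endpoint GET https://api.github.com/repos/{owner}/{repo}. The same lookup in Python for reference; the owner/repo pair is just an example from the projects list, and unauthenticated calls are rate-limited.

import requests

owner, repo = "Vito0912", "abs-opds"   # example repository
data = requests.get(f"https://api.github.com/repos/{owner}/{repo}").json()

stats = {
    "stars": data.get("stargazers_count", 0),
    "forks": data.get("forks_count", 0),
    "issues": data.get("open_issues_count", 0),
    "language": data.get("language") or "Unknown",
    "lastUpdated": data.get("updated_at", ""),
}
print(stats)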
useApi(); 41 | const bookId = book.id; 42 | const genres = genre.split(delimiter).map((g: string) => g.trim()); 43 | 44 | try { 45 | if (type === "genres") { 46 | const current = Array.isArray(book.media?.metadata?.genres) 47 | ? book.media.metadata.genres 48 | : []; 49 | const updated = [ 50 | ...new Set([...current.filter((g: string) => g !== genre), ...genres]), 51 | ]; 52 | await patch(`/api/items/${bookId}/media`, { 53 | metadata: { genres: updated }, 54 | }); 55 | } else { 56 | const current = Array.isArray(book.media?.tags) ? book.media.tags : []; 57 | const updated = [ 58 | ...new Set([...current.filter((t: string) => t !== genre), ...genres]), 59 | ]; 60 | await patch(`/api/items/${bookId}/media`, { 61 | tags: updated, 62 | }); 63 | } 64 | addLog(`Updated book: ${book.media.metadata.title}`); 65 | } catch (error) { 66 | addLog(`Error updating book: ${book.media.metadata.title}`); 67 | console.error(`Error appending genre ${genre} to book ${bookId}:`, error); 68 | } 69 | } 70 | 71 | export async function executeSplitGenres( 72 | formData: Record, 73 | ): Promise { 74 | const { get, post, addLog } = useApi(); 75 | 76 | try { 77 | let { type, libraryIds, delimiter, delimiterOverride } = formData; 78 | 79 | if (delimiterOverride) { 80 | delimiter = delimiterOverride; 81 | addLog(`Using delimiter override: ${delimiter}`); 82 | } 83 | 84 | addLog("Starting split genres operation..."); 85 | console.log("Executing split genres with formData:", formData); 86 | 87 | const libraryResponse = await get("/api/libraries"); 88 | 89 | const processableLibraries = []; 90 | 91 | for (const library of libraryResponse.data.libraries) { 92 | if (libraryIds.length === 0 || libraryIds.includes(library.id)) { 93 | processableLibraries.push(library.id); 94 | } 95 | } 96 | 97 | const libraryMessage = `Processing ${processableLibraries.length} libraries`; 98 | addLog(libraryMessage); 99 | 100 | let genres = (await get(`/api/${type}`)).data || []; 101 | genres = genres.genres || genres.tags || []; 102 | 103 | const multiGenres = []; 104 | 105 | for (const genre of genres) { 106 | addLog(`Checking genre: ${genre}`); 107 | if (genre.split(delimiter).length > 1 && !formData.skip.includes(genre)) { 108 | multiGenres.push(genre); 109 | } 110 | } 111 | 112 | addLog(`Found ${multiGenres.length} genres to split`); 113 | 114 | for (const genre of multiGenres) { 115 | const genreMessage = `Processing genre: ${genre}`; 116 | addLog(genreMessage); 117 | 118 | const bookTitlesOverall: string[] = []; 119 | for (const libraryId of processableLibraries) { 120 | const books = await getAllBooksForGenre(genre, libraryId, type); 121 | const bookTitles = books.map((book: any) => book.media.metadata.title); 122 | bookTitlesOverall.push(...bookTitles); 123 | for (const book of books) { 124 | try { 125 | await appendGenreToBook(book, genre, delimiter, type); 126 | } catch (e) { 127 | console.log(`Error processing book ${book.id}: ${e}`); 128 | } 129 | } 130 | } 131 | addLog( 132 | `Processed ${bookTitlesOverall.length} books for genre ${genre}: ${bookTitlesOverall.join(", ")}`, 133 | ); 134 | } 135 | 136 | const successMessage = `${type} split operation completed successfully`; 137 | addLog(successMessage); 138 | 139 | return { 140 | success: true, 141 | message: successMessage, 142 | timestamp: new Date().toISOString(), 143 | }; 144 | } catch (error: any) { 145 | const errorMessage = "Failed to split genres"; 146 | addLog(errorMessage); 147 | return { 148 | success: false, 149 | message: errorMessage, 150 | error: error.message || 
"Unknown error", 151 | timestamp: new Date().toISOString(), 152 | }; 153 | } 154 | } 155 | -------------------------------------------------------------------------------- /web/src/features/stats/components/StatsConfig.vue: -------------------------------------------------------------------------------- 1 | 90 | 91 | 144 | -------------------------------------------------------------------------------- /web/src/features/projects/views/ProjectsView.vue: -------------------------------------------------------------------------------- 1 | 49 | 50 | 183 | -------------------------------------------------------------------------------- /web/src/features/stats/types.ts: -------------------------------------------------------------------------------- 1 | export interface ListeningSession { 2 | id: string; 3 | userId?: string; 4 | libraryId?: string; 5 | libraryItemId: string; 6 | bookId?: string; 7 | episodeId?: string | null; 8 | mediaType?: "book" | "podcast"; 9 | mediaMetadata?: { 10 | title?: string; 11 | subtitle?: string | null; 12 | authors?: { id?: string; name?: string }[]; 13 | narrators?: string[]; 14 | series?: { id?: string; name?: string; sequence?: string | null }[]; 15 | genres?: string[]; 16 | tags?: string[]; 17 | publishedYear?: string | null; 18 | publishedDate?: string | null; 19 | publisher?: string | null; 20 | description?: string | null; 21 | isbn?: string | null; 22 | asin?: string | null; 23 | language?: string | null; 24 | explicit?: boolean; 25 | abridged?: boolean; 26 | } | null; 27 | displayTitle?: string; 28 | displayAuthor?: string; 29 | coverPath?: string | null; 30 | duration?: number; 31 | playMethod?: number; 32 | mediaPlayer?: string; 33 | deviceInfo?: { 34 | id?: string; 35 | userId?: string; 36 | deviceId?: string; 37 | ipAddress?: string; 38 | clientVersion?: string; 39 | manufacturer?: string; 40 | model?: string; 41 | sdkVersion?: string; 42 | clientName?: string; 43 | deviceName?: string; 44 | } | null; 45 | serverVersion?: string; 46 | date?: string; 47 | dayOfWeek?: string; 48 | timeListening: number; 49 | startTime?: number; 50 | currentTime?: number; 51 | startedAt: number; 52 | updatedAt?: number; 53 | } 54 | 55 | export interface LibraryItem { 56 | id: string; 57 | ino: string; 58 | libraryId: string; 59 | folderId: string; 60 | path: string; 61 | relPath: string; 62 | isFile: boolean; 63 | mediaType: "book" | "podcast"; 64 | media: { 65 | id?: string; 66 | metadata: { 67 | title?: string; 68 | subtitle?: string; 69 | authors?: { id?: string; name?: string }[]; 70 | narrators?: string[]; 71 | series?: { id?: string; name?: string; sequence?: string }[]; 72 | genres?: string[]; 73 | tags?: string[]; 74 | publishedYear?: string; 75 | publishedDate?: string; 76 | publisher?: string; 77 | description?: string; 78 | isbn?: string; 79 | asin?: string; 80 | language?: string; 81 | explicit?: boolean; 82 | }; 83 | coverPath?: string | null; 84 | duration?: number; 85 | }; 86 | } 87 | 88 | export interface MediaProgress { 89 | id: string; 90 | userId: string; 91 | libraryItemId: string; 92 | episodeId: string | null; 93 | mediaItemId: string; 94 | mediaItemType: "book" | "podcast"; 95 | duration: number; 96 | progress: number; 97 | currentTime: number; 98 | isFinished: boolean; 99 | hideFromContinueListening: boolean; 100 | ebookLocation: string | null; 101 | ebookProgress: number | null; 102 | lastUpdate: number; 103 | startedAt: number; 104 | finishedAt: number | null; 105 | } 106 | 107 | export interface Bookmark { 108 | libraryItemId: string; 109 | 
title: string; 110 | time: number; 111 | createdAt: number; 112 | } 113 | 114 | export interface UserData { 115 | id: string; 116 | username: string; 117 | mediaProgress: MediaProgress[]; 118 | bookmarks: Bookmark[]; 119 | } 120 | 121 | export interface StatsConfig { 122 | startDate: string; 123 | endDate: string; 124 | fetchBookDetails: boolean; 125 | visibleSections: { 126 | totalListeningTime: boolean; 127 | totalSessions: boolean; 128 | finishedBooks: boolean; 129 | finishedPodcasts: boolean; 130 | finishedTotal: boolean; 131 | bookmarksCreated: boolean; 132 | dailyAverage: boolean; 133 | topGenres: boolean; 134 | topAuthors: boolean; 135 | topNarrators: boolean; 136 | topSeries: boolean; 137 | topTags: boolean; 138 | topBooks: boolean; 139 | mostActiveDay: boolean; 140 | mostActiveTime: boolean; 141 | medianSessionDuration: boolean; 142 | topDevices: boolean; 143 | }; 144 | topItemsCount: number; 145 | } 146 | 147 | export interface ProcessedStats { 148 | totalListeningTime: number; 149 | totalSessions: number; 150 | finishedBooks: number; 151 | finishedPodcasts: number; 152 | finishedTotal: number; 153 | bookmarksCreated: number; 154 | dailyAverage: number; 155 | dailyAverageLast7: number; 156 | dailyAverageLast30: number; 157 | dailyAverageLast365: number; 158 | dailyAverageAllTime: number; 159 | topGenres: { name: string; time: number }[]; 160 | topAuthors: { name: string; time: number }[]; 161 | topNarrators: { name: string; time: number }[]; 162 | topSeries: { name: string; time: number }[]; 163 | topTags: { name: string; time: number }[]; 164 | topBooks: { 165 | title: string; 166 | author: string; 167 | time: number; 168 | coverPath: string | null; 169 | libraryItemId: string; 170 | }[]; 171 | mostActiveDay: { day: string; time: number }; 172 | mostActiveTime: { hour: number; time: number }; 173 | medianSessionDuration: number; 174 | topDevices: { name: string; os: string; count: number }[]; 175 | dateRange: { start: string; end: string }; 176 | hourData: Record; 177 | dayData: Record; 178 | } 179 | 180 | export const defaultStatsConfig: StatsConfig = { 181 | startDate: `${new Date().getFullYear()}-01-01`, 182 | endDate: new Date().toISOString().split("T")[0], 183 | fetchBookDetails: false, 184 | visibleSections: { 185 | totalListeningTime: true, 186 | totalSessions: true, 187 | finishedBooks: true, 188 | finishedPodcasts: true, 189 | finishedTotal: true, 190 | bookmarksCreated: true, 191 | dailyAverage: true, 192 | topGenres: true, 193 | topAuthors: true, 194 | topNarrators: true, 195 | topSeries: true, 196 | topTags: true, 197 | topBooks: true, 198 | mostActiveDay: true, 199 | mostActiveTime: true, 200 | medianSessionDuration: true, 201 | topDevices: true, 202 | }, 203 | topItemsCount: 5, 204 | }; 205 | -------------------------------------------------------------------------------- /web/src/shared/components/FilterBar.vue: -------------------------------------------------------------------------------- 1 | 97 | 98 | 174 | 175 | 180 | -------------------------------------------------------------------------------- /web/src/features/projects/projects.ts: -------------------------------------------------------------------------------- 1 | import type { Project, TagDefinition } from "./types"; 2 | 3 | export const projects: Project[] = [ 4 | { 5 | id: "complete-your-collection", 6 | name: "Complete Your Collection", 7 | link: "https://github.com/xFrieDSpuDx/completeseries", 8 | description: "Find audiobooks missing from a series you own.", 9 | authors: ["xFrieDSpuDx"], 10 | 
authorLinks: ["https://github.com/xFrieDSpuDx"], 11 | tags: ["utility", "abs-native"], 12 | }, 13 | { 14 | id: "abs-opds", 15 | name: "OPDS", 16 | link: "https://github.com/Vito0912/abs-opds", 17 | description: "OPDS-Server for Audiobookshelf", 18 | authors: ["Vito0912"], 19 | authorLinks: ["https://github.com/Vito0912"], 20 | tags: ["server", "integration"], 21 | }, 22 | { 23 | id: "abs-autoconverter", 24 | name: "Autoconverter", 25 | link: "https://github.com/Vito0912/abs-autoconverter", 26 | description: 27 | "This tool automatically converts (and queues) all current and upcoming items in your library to specified codecs using the built-in converter", 28 | authors: ["Vito0912"], 29 | authorLinks: ["https://github.com/Vito0912"], 30 | tags: ["utility", "automation", "abs-native"], 31 | }, 32 | { 33 | id: "audiobookshelf-sonos", 34 | name: "Sonos", 35 | link: "https://github.com/jmt-gh/audiobookshelf-sonos", 36 | description: 37 | "Play your audiobooks from Audiobookshelf on your Sonos speakers", 38 | authors: ["jmt-gh"], 39 | authorLinks: ["https://github.com/jmt-gh"], 40 | tags: ["integration", "abs-native"], 41 | }, 42 | { 43 | id: "abs-alexa", 44 | name: "Alexa", 45 | link: "https://github.com/sevenlayercookie/abs-alexa", 46 | description: "Alexa Skill for interfacing with Audiobookshelf", 47 | authors: ["sevenlayercookie"], 48 | authorLinks: ["https://github.com/sevenlayercookie"], 49 | tags: ["integration", "abs-native"], 50 | }, 51 | { 52 | id: "audiobook-organizer", 53 | name: "Audiobook Organizer", 54 | link: "https://github.com/jeeftor/audiobook-organizer", 55 | description: "Audiobookshelf Organizer ", 56 | authors: ["jeeftor"], 57 | authorLinks: ["https://github.com/jeeftor"], 58 | tags: ["utility", "automation"], 59 | }, 60 | { 61 | id: "ab_mover", 62 | name: "ab_mover", 63 | link: "https://github.com/austinsr1/ab_mover", 64 | description: 65 | "Command line utility to read an Audiobookshelf metadata.json file and create a directory structure based on it.", 66 | authors: ["austinsr1"], 67 | authorLinks: ["https://github.com/austinsr1"], 68 | tags: ["utility"], 69 | }, 70 | { 71 | id: "bookshelf-traveller", 72 | name: "Bookshelf Traveller", 73 | link: "https://github.com/donkevlar/Bookshelf-Traveller", 74 | description: "Fully featured self-hosted audiobookshelf discord bot. 
", 75 | authors: ["donkevlar"], 76 | authorLinks: ["https://github.com/donkevlar"], 77 | tags: ["utility", "abs-native"], 78 | }, 79 | { 80 | id: "hass-audiobookshelf", 81 | name: "Hass Audiobookshelf", 82 | link: "https://github.com/wolffshots/hass-audiobookshelf", 83 | description: 84 | "Adds sensors for an Audiobookshelf server to Home Assistant to show connection and active sessions.", 85 | authors: ["wolffshots"], 86 | authorLinks: ["https://github.com/wolffshots"], 87 | tags: ["integration", "abs-native"], 88 | }, 89 | { 90 | id: "audiobookshelf-hardcover-sync", 91 | name: "Hardcover Sync", 92 | link: "https://github.com/drallgood/audiobookshelf-hardcover-sync", 93 | description: "Syncs Audiobookshelf to Hardcover ", 94 | authors: ["drallgood"], 95 | authorLinks: ["https://github.com/drallgood"], 96 | tags: ["integration", "abs-native"], 97 | }, 98 | { 99 | id: "shelfbridge", 100 | name: "ShelfBridge", 101 | link: "https://github.com/rohit-purandare/ShelfBridge", 102 | description: 103 | "Sync your audiobook reading progress from Audiobookshelf to Hardcover automatically ", 104 | authors: ["rohit-purandare"], 105 | authorLinks: ["https://github.com/rohit-purandare"], 106 | tags: ["integration", "abs-native"], 107 | }, 108 | { 109 | id: "audiobookshelf-calibre-plugin", 110 | name: "Audiobookshelf Calibre Plugin", 111 | link: "https://github.com/jbhul/Audiobookshelf-calibre-plugin", 112 | description: 113 | "A calibre plugin to synchronize metadata from Audiobookshelf to calibre ", 114 | authors: ["jbhul"], 115 | authorLinks: ["https://github.com/jbhul"], 116 | tags: ["integration", "abs-native"], 117 | }, 118 | { 119 | id: "audiobookrequest", 120 | name: "Audiobook Request", 121 | link: "https://github.com/markbeep/AudioBookRequest", 122 | description: 123 | "Audiobook request management/wishlist for Plex/Audiobookshelf/Jellyfin ", 124 | authors: ["markbeep"], 125 | authorLinks: ["https://github.com/markbeep"], 126 | tags: ["abs-unspecific"], 127 | }, 128 | { 129 | id: "achew", 130 | name: "achew", 131 | link: "https://github.com/SirGibblets/achew", 132 | description: "Audiobook Chapter Extraction Wizard for Audiobookshelf", 133 | authors: ["SirGibblets"], 134 | authorLinks: ["https://github.com/SirGibblets"], 135 | tags: ["abs-unspecific"], 136 | }, 137 | { 138 | id: "epub-to-audiobook", 139 | name: "EPUB to Audiobook Converter", 140 | link: "https://github.com/p0n1/epub_to_audiobook", 141 | description: 142 | "EPUB to audiobook converter, optimized for Audiobookshelf, WebUI included", 143 | authors: ["p0n1"], 144 | authorLinks: ["https://github.com/p0n1"], 145 | tags: ["abs-unspecific"], 146 | }, 147 | { 148 | id: "bookbot", 149 | name: "BookBot", 150 | link: "https://github.com/itsbryanman/bookbot", 151 | description: 152 | "audiobook organizer/renamer + DRM removal - Editorial comment: DRM is against law in many countries even for personal use, so don't use", 153 | authors: ["itsbryanman"], 154 | authorLinks: ["https://github.com/itsbryanman"], 155 | tags: ["utility"], 156 | }, 157 | ]; 158 | 159 | export const tagDefinitions: Record = { 160 | "abs-native": { 161 | name: "ABS Native", 162 | description: 163 | "Apps designed specifically for Audiobookshelf with direct integration", 164 | }, 165 | "abs-unspecific": { 166 | name: "ABS Unspecific", 167 | description: 168 | "Apps that work with Audiobookshelf but are not specifically designed for it", 169 | }, 170 | utility: { 171 | name: "Utility", 172 | description: "Tools that help with library management and organization", 173 | }, 
174 | server: { 175 | name: "Server", 176 | description: "Server-side applications and services", 177 | }, 178 | integration: { 179 | name: "Integration", 180 | description: "Apps that connect ABS with external services and protocols", 181 | }, 182 | automation: { 183 | name: "Automation", 184 | description: "Tools that automate repetitive tasks and workflows", 185 | }, 186 | client: { 187 | name: "Client", 188 | description: "Alternative client applications for accessing your library", 189 | }, 190 | mobile: { 191 | name: "Mobile", 192 | description: "Mobile applications and mobile-specific features", 193 | }, 194 | web: { 195 | name: "Web", 196 | description: "Web-based applications and browser extensions", 197 | }, 198 | }; 199 | 200 | export const availableTags = Object.keys(tagDefinitions); 201 | -------------------------------------------------------------------------------- /web/src/features/settings/views/SettingsView.vue: -------------------------------------------------------------------------------- 1 | 164 | 165 | 219 | -------------------------------------------------------------------------------- /web/src/features/clients/components/ClientCard.vue: -------------------------------------------------------------------------------- 1 | 150 | 151 | 202 | -------------------------------------------------------------------------------- /tools/migrate_backup_to_new_server.py: -------------------------------------------------------------------------------- 1 | # This script helps you migrate to a new server when the paths have changed. 2 | # This is currently not supported by Audiobookshelf, so this script tries its best to migrate the data. 3 | # KEEP A BACKUP OF YOUR DATA BEFORE RUNNING THIS SCRIPT! IT WILL CREATE A BACKUP, BUT BETTER SAFE THAN SORRY! 4 | # MAKE A BACKUP OF YOUR SERVER NOW! 5 | 6 | import json 7 | import os 8 | import shutil 9 | import sqlite3 10 | 11 | # HOW TO USE: 12 | # 1. Make a backup :) - no, really, DO IT! - /config/backups - Then copy it somewhere not where this script runs! 13 | # 2. Download this backup (down arrow icon). 14 | # 3. Place the backup here (/tools) and rename it to "backup.audiobookshelf" (or change the BACKUP_FILE variable). 15 | # 4. Set the paths (see below for examples) - Make sure to use / and not \! READ THE PATHS SECTION CAREFULLY! 16 | # 5. Run the script - Your original file will now be replaced (a backup will be created, with the extension .bak). Check the last edited time to be sure. 17 | # 6. Once the script finishes, it will output "Migration complete." - When you see this, you can proceed with the next steps. Note: This could take time for large databases. 18 | # 7. Create your new server, create any user, and go to the backup section from step one. Then, upload the backup file. 19 | # 8. Now you can restore the backup, and your paths should be correct. 20 | # 9. IMPORTANT! Log out and log back in. Otherwise, the server will crash because the current user does not "exist" anymore. 21 | 22 | # PATHS: 23 | # 1. THIS SCRIPT CURRENTLY ONLY WORKS FOR SERVERS THAT SHARE THE SAME LOCATION FOR ALL LIBRARIES! 24 | # -> Example: If you have a library "books" in /media/libraries/books and another library "podcasts" in 25 | # /media/libraries/podcasts, the script will work as both share the path /media/libraries/. 26 | # -> If you have a library "books" in /media/libraries/books and another library "podcasts" in /media/podcasts, the 27 | # script will not work, as the paths are different. 
28 | 29 | # THE PATHS MUST BE INPUT VERY PRECISELY! ENSURE YOU KNOW YOUR CURRENT PATHS! 30 | # For Docker, the default metadata path is /metadata. 31 | # For libraries, if you have them in e.g. /books and /podcasts, you can use / as the path. 32 | # OLD is the current path on your server, and NEW is the path where it will be on your new server. 33 | 34 | # The metadata path is set as an ENV, so you cannot freely set the new one here. It MUST match the current one set as ENV. 35 | # Please inform yourself about the metadata path in the documentation of the version/os you are using. 36 | # Ensure you understand the paths and how they are set up on your new server. 37 | 38 | # As long as you have a backup, you should (in theory) be able to recover the old data. 39 | # But because you never should touch the current server (despite making and downloading a backup) 40 | # There should be no data loss. 41 | 42 | 43 | 44 | # ##### Configuration #### 45 | 46 | BACKUP_FILE = "backup.audiobookshelf" 47 | 48 | OLD_METADATA_DIR = r"/metadata" # Default for docker 49 | METADATA_DIR = r"C:\Users\\Documents\metadata" 50 | 51 | OLD_LIBRARIES_DIR = r"/" # Default for docker example 52 | LIBRARIES_DIR = r"C:\Users\\Documents\libraries" 53 | 54 | # ###### Code #### 55 | 56 | # Create backup 57 | print("Creating a backup of the original backup file...") 58 | shutil.copy(BACKUP_FILE, f"{BACKUP_FILE}.bak") 59 | 60 | # Unzip the backup 61 | print("Unzipping the backup...") 62 | shutil.unpack_archive(BACKUP_FILE, "backup", "zip") 63 | 64 | # Check if the database exists 65 | db_path = os.path.join("backup", "absdatabase.sqlite") 66 | if not os.path.exists(db_path): 67 | print("Error: No database found in backup") 68 | exit() 69 | 70 | # Connect to the database 71 | try: 72 | conn = sqlite3.connect(db_path) 73 | cur = conn.cursor() 74 | print("Connected to the database") 75 | except Exception as e: 76 | print("Error connecting to database:", e) 77 | exit() 78 | 79 | 80 | # Clean and normalize variables 81 | def clean_path(path): 82 | tmp_path = os.path.normpath(path).replace("\\", "/") 83 | if not tmp_path.endswith("/"): 84 | tmp_path += "/" 85 | return tmp_path 86 | 87 | 88 | METADATA_DIR = clean_path(METADATA_DIR) 89 | LIBRARIES_DIR = clean_path(LIBRARIES_DIR) 90 | OLD_METADATA_DIR = clean_path(OLD_METADATA_DIR) 91 | OLD_LIBRARIES_DIR = clean_path(OLD_LIBRARIES_DIR) 92 | 93 | 94 | # Function to replace paths in the database 95 | def replace_path_in_db(table, column, old_path, new_path): 96 | print(f"Replacing paths in {table}.{column} from {old_path} to {new_path}...") 97 | if table == "playbackSessions": 98 | print("This may take a long time with many users and/or started sessions.") 99 | 100 | # Select only the rows that contain the old_path to minimize data fetched 101 | select_sql = f"SELECT id, {column} FROM {table} WHERE {column} LIKE ?" 102 | cur.execute(select_sql, (f'%{old_path}%',)) 103 | rows = cur.fetchall() 104 | 105 | # Prepare updates 106 | updates = [] 107 | for row_id, row_column in rows: 108 | if row_column is not None: 109 | # Replace only the first occurrence 110 | new_column = row_column.replace(old_path, new_path, 1) 111 | if new_column != row_column: 112 | updates.append((new_column, row_id)) 113 | 114 | # Execute updates in a single batch 115 | update_sql = f"UPDATE {table} SET {column} = ? WHERE id = ?" 
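A worked example of clean_path() defined above: backslashes are normalized to forward slashes and a trailing slash is appended, so the later one-for-one string replacements line up. The example paths are invented.

import os

def clean_path(path):
    # same normalization as in the migration script above
    tmp = os.path.normpath(path).replace("\\", "/")
    return tmp if tmp.endswith("/") else tmp + "/"

print(clean_path(r"C:\Users\me\Documents\metadata"))   # C:/Users/me/Documents/metadata/
print(clean_path("/metadata"))                          # /metadata/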
116 | cur.executemany(update_sql, updates) 117 | conn.commit() 118 | 119 | 120 | # Replace paths in relevant tables and columns 121 | replace_path_in_db('authors', 'imagePath', OLD_METADATA_DIR, METADATA_DIR) 122 | replace_path_in_db('books', 'coverPath', OLD_METADATA_DIR, METADATA_DIR) 123 | replace_path_in_db('feeds', 'coverPath', OLD_METADATA_DIR, METADATA_DIR) 124 | replace_path_in_db('playbackSessions', 'coverPath', OLD_METADATA_DIR, METADATA_DIR) 125 | replace_path_in_db('podcasts', 'coverPath', OLD_METADATA_DIR, METADATA_DIR) 126 | 127 | replace_path_in_db('feedEpisodes', 'filePath', OLD_LIBRARIES_DIR, LIBRARIES_DIR) 128 | replace_path_in_db('libraryFolders', 'path', OLD_LIBRARIES_DIR, LIBRARIES_DIR) 129 | replace_path_in_db('libraryItems', 'path', OLD_LIBRARIES_DIR, LIBRARIES_DIR) 130 | 131 | print("Correcting files in db. This can take a while...") 132 | 133 | # Correct book files 134 | sql = "SELECT id, audioFiles, ebookFile FROM books" 135 | cur.execute(sql) 136 | book_rows = cur.fetchall() 137 | 138 | update_params = [] 139 | 140 | for book_id, audio_files, ebook_file in book_rows: 141 | new_audio_files = None 142 | new_ebook_file = None 143 | 144 | if audio_files is not None: 145 | audio_files = json.loads(audio_files) 146 | for audio_file in audio_files: 147 | audio_file['metadata']['path'] = audio_file['metadata']['path'].replace(OLD_LIBRARIES_DIR, LIBRARIES_DIR, 1) 148 | new_audio_files = json.dumps(audio_files) 149 | 150 | if ebook_file is not None: 151 | ebook_file = json.loads(ebook_file) 152 | ebook_file['metadata']['path'] = ebook_file['metadata']['path'].replace(OLD_LIBRARIES_DIR, LIBRARIES_DIR, 1) 153 | new_ebook_file = json.dumps(ebook_file) 154 | 155 | update_params.append((new_audio_files, new_ebook_file, book_id)) 156 | 157 | sql_update = "UPDATE books SET audioFiles = ?, ebookFile = ? WHERE id = ?" 158 | cur.executemany(sql_update, update_params) 159 | conn.commit() 160 | 161 | 162 | # Correct libraryItems 163 | sql = "SELECT id, libraryFiles FROM libraryItems" 164 | cur.execute(sql) 165 | lib_rows = cur.fetchall() 166 | 167 | update_params = [] 168 | 169 | for item_id, library_files in lib_rows: 170 | new_library_files = None 171 | 172 | if library_files is not None: 173 | library_files = json.loads(library_files) 174 | for library_file in library_files: 175 | library_file['metadata']['path'] = library_file['metadata']['path'].replace(OLD_LIBRARIES_DIR, LIBRARIES_DIR, 1) 176 | new_library_files = json.dumps(library_files) 177 | 178 | update_params.append((new_library_files, item_id)) 179 | 180 | sql_update = "UPDATE libraryItems SET libraryFiles = ? WHERE id = ?" 181 | cur.executemany(sql_update, update_params) 182 | conn.commit() 183 | 184 | 185 | # Correct podcast files 186 | sql = "SELECT id, audioFile FROM podcastEpisodes" 187 | cur.execute(sql) 188 | podcast_rows = cur.fetchall() 189 | 190 | update_params = [] 191 | 192 | for episode_id, audio_file in podcast_rows: 193 | new_audio_file = None 194 | 195 | if audio_file is not None: 196 | audio_file = json.loads(audio_file) 197 | audio_file['metadata']['path'] = audio_file['metadata']['path'].replace(OLD_LIBRARIES_DIR, LIBRARIES_DIR, 1) 198 | new_audio_file = json.dumps(audio_file) 199 | 200 | update_params.append((new_audio_file, episode_id)) 201 | 202 | sql_update = "UPDATE podcastEpisodes SET audioFile = ? WHERE id = ?" 
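The same first-occurrence replacement is applied inside JSON columns such as books.audioFiles above: each stored file object is parsed, the old prefix in metadata.path is swapped once, and the list is serialized back. A small worked example with invented paths.

import json

OLD_LIBRARIES_DIR = "/audiobooks/"
LIBRARIES_DIR = "D:/media/audiobooks/"   # placeholder new location

raw = json.dumps([{"metadata": {"path": "/audiobooks/Author/Book/part1.m4b"}}])
files = json.loads(raw)
for f in files:
    f["metadata"]["path"] = f["metadata"]["path"].replace(OLD_LIBRARIES_DIR, LIBRARIES_DIR, 1)
print(json.dumps(files))   # path becomes D:/media/audiobooks/Author/Book/part1.m4b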
203 | cur.executemany(sql_update, update_params) 204 | conn.commit() 205 | 206 | 207 | # Correct server settings 208 | sql = "SELECT \"key\", \"value\" FROM settings" 209 | cur.execute(sql) 210 | settings_rows = cur.fetchall() 211 | for key, value in settings_rows: 212 | if key == "server-settings": 213 | json_content = json.loads(value) 214 | json_content['backupPath'] = json_content['backupPath'].replace(OLD_METADATA_DIR, METADATA_DIR, 1) 215 | new_value = json.dumps(json_content) 216 | print(f"Updating server settings: {key}") 217 | sql = "UPDATE settings SET value = ? WHERE key = ?" 218 | cur.execute(sql, (new_value, key)) 219 | conn.commit() 220 | 221 | # Close the database connection 222 | conn.close() 223 | 224 | # Repack the backup 225 | print("Repacking the backup...") 226 | shutil.make_archive("backup", 'zip', "backup") 227 | 228 | # Rename the newly packed archive to the original backup file name 229 | os.replace("backup.zip", BACKUP_FILE) 230 | 231 | # Clean up the extracted backup folder 232 | shutil.rmtree("backup") 233 | 234 | print("Migration complete.") 235 | -------------------------------------------------------------------------------- /tools/quick_match_chapters.py: -------------------------------------------------------------------------------- 1 | # This script automatically matches chapters for audiobooks using AudiobookShelf API. 2 | # Handle with care! If chapters are manually added to a book, they might get overwritten. 3 | # Chapters are considered "missing" based on a threshold setting. 4 | # For example, if there are 123 chapters locally and 127 are found, chapters will be replaced. 5 | # However, if 120-126 chapters are found, they won’t be replaced. 6 | 7 | import time 8 | import requests 9 | from urllib import parse 10 | 11 | # Configuration constants 12 | CHAPTER_THRESHOLD = 3 # Threshold for determining missing chapters. Do disable overwriting existing chapters, set to 99999999 ;) 13 | ABS_HOST = "" # AudiobookShelf Host URL 14 | LIBRARY_ID = "" # Library UUID 15 | API_KEY = "" # API Key from user settings 16 | PROVIDER = "audible.de" # Metadata provider (See available providers in API documentation https://api.audiobookshelf.org/#metadata-providers) 17 | REGION = "DE" # Region code (e.g., US, DE) 18 | DISABLE_RATE_PROTECTION = False # Rate protection, disable to speed up but risk timeouts 19 | SEARCH_FOR_ASIN = True # Search for ASIN if not available. Disable this to use tracks as chapters if no ASIN is available. 
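A worked example of the CHAPTER_THRESHOLD rule described in the header comment above, with the default threshold of 3: chapters are only replaced when the local count is zero or differs from the provider count by more than the threshold.

current_chapters = 123
for found in (127, 126, 120, 119):
    replace = current_chapters == 0 or abs(current_chapters - found) > 3
    print(f"{found} found -> {'replace' if replace else 'keep existing'}")
# 127 and 119 trigger a replace; 120-126 keep the existing chapters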
20 | USE_TRACKS_AS_CHAPTERS = False # Use tracks as chapters if no asin available (Fallback) 21 | 22 | 23 | ############################################################################################################ 24 | 25 | book_info = {} 26 | 27 | # Fetch all library items 28 | library_url = f"{ABS_HOST}/api/libraries/{LIBRARY_ID}/items?token={API_KEY}" 29 | print(f"Fetching library items from: {library_url}") 30 | response = requests.get(library_url) 31 | 32 | if response.status_code != 200: 33 | print("Error fetching library items:", response.status_code) 34 | exit() 35 | 36 | items = response.json().get('results', []) 37 | print(f"Found {len(items)} items in the library.") 38 | 39 | # Process each item in the library 40 | for item in items: 41 | book_id = item['id'] 42 | metadata = item['media']['metadata'] 43 | title = metadata.get('title', "Unknown Title") 44 | subtitle = metadata.get('subtitle', "") 45 | authors = metadata.get('authorName', "Unknown Author") 46 | book_info[book_id] = { 'id': book_id, 'title': title, 'status': 'ERROR', 'comment': 'Unknown Error', 'asin': 'N/A' } 47 | 48 | print(f"\n--- Processing Book: {title} ---") 49 | 50 | # Check if book has ASIN (Amazon Standard Identification Number) 51 | if 'asin' not in metadata or metadata['asin'] is None: 52 | book_info[book_id]['status'] = 'NO_ASIN' 53 | book_info[book_id]['asin'] = 'N/A' 54 | 55 | if SEARCH_FOR_ASIN: 56 | match_url = f"{ABS_HOST}/api/search/books?title={parse.quote(title)}&author={parse.quote(authors)}&provider={PROVIDER}&token={API_KEY}" 57 | new_item_response = requests.get(match_url) 58 | 59 | if new_item_response.status_code != 200: 60 | print(f"Error matching book '{title}':", new_item_response.json()) 61 | continue 62 | 63 | if len(new_item_response.json()) == 0: 64 | print(f"Error matching book '{title}' (No results found).") 65 | book_info[book_id]['comment'] = 'Asin retrieval failed' 66 | continue 67 | 68 | best_match = new_item_response.json()[0] 69 | asin = best_match.get('asin', None) 70 | 71 | if asin is None: 72 | print(f"Error matching book '{title}' (No ASIN found).") 73 | book_info[book_id]['comment'] = 'Asin retrieval failed - No ASIN found' 74 | continue 75 | 76 | item['media']['metadata']['asin'] = asin 77 | metadata['asin'] = asin 78 | print(f"ASIN found: {asin}") 79 | 80 | # Check if asin is now available 81 | if 'asin' not in item['media']['metadata'] or item['media']['metadata']['asin'] is None: 82 | if USE_TRACKS_AS_CHAPTERS: 83 | # Get the tracks and use them as chapters 84 | book_response = requests.get(f"{ABS_HOST}/api/items/{book_id}?expanded=1&token={API_KEY}") 85 | book_info[book_id]['status'] = 'TRACKS' 86 | if book_response.status_code != 200: 87 | print(f"Error fetching book '{title}':", book_response.status_code) 88 | book_info[book_id]['comment'] = 'Tracks retrieval failed' 89 | continue 90 | if len(book_response.json()['media'].get('audioFiles', [])) > 1: 91 | print(f"Using tracks as chapters for '{title}'.") 92 | tracks = book_response.json()['media'].get('audioFiles', []) 93 | new_chapters = [] 94 | 95 | current_time = 0 96 | length_of_book = item['media']['duration'] 97 | 98 | current_chapters_num = item['media'].get('numChapters', 0) 99 | 100 | if current_chapters_num == 0 or abs(current_chapters_num - len(tracks)) > CHAPTER_THRESHOLD: 101 | print(f"Chapters are missing or incorrect for '{title}' (Current num: {current_chapters_num}, Tracks num: {len(tracks)}). 
Updating...") 102 | else: 103 | print(f"Chapters are fine for '{title}'.") 104 | book_info[book_id]['status'] = 'FINISHED' 105 | book_info[book_id]['comment'] = 'No chapters to update' 106 | continue 107 | 108 | for i, track in enumerate(tracks): 109 | duration = track['duration'] 110 | 111 | # Use track['title'] and remove the extension 112 | track_title = track['metadata']['filename'].split('.')[0] 113 | 114 | new_chapters.append({ 115 | "id": i, 116 | "start": current_time, 117 | "end": current_time + duration, 118 | "title": track_title, 119 | "error": None 120 | }) 121 | current_time += duration - 0.001 # Subtract a tiny amount to avoid rounding issues 122 | if length_of_book < current_time: 123 | break 124 | 125 | # Update chapters for the book 126 | update_url = f"{ABS_HOST}/api/items/{book_id}/chapters?token={API_KEY}" 127 | update_data = {"chapters": new_chapters} 128 | update_response = requests.post(update_url, json=update_data) 129 | 130 | if update_response.status_code == 200: 131 | print(f"Chapters updated successfully for '{title}' (Using tracks!).") 132 | book_info[book_id]['comment'] = 'Tracks used as chapters' 133 | book_info[book_id]['status'] = 'FINISHED' 134 | else: 135 | print(f"Error updating chapters for '{title}'. Response Code:", update_response.status_code) 136 | book_info[book_id]['comment'] = 'Chapters update failed' 137 | 138 | continue 139 | else: 140 | print(f"Error using tracks as chapters for '{title}' (No or 1 track found).") 141 | book_info[book_id]['comment'] = 'Tracks retrieval failed' 142 | else: 143 | print(f"Skipping book '{title}' (No ASIN found and Tracks not used as source).") 144 | book_info[book_id]['comment'] = 'Asin retrieval failed' 145 | continue 146 | else: 147 | book_info[book_id]['asin'] = item['media']['metadata']['asin'] 148 | 149 | # Fetch chapters using ASIN 150 | asin = item['media']['metadata']['asin'] 151 | chapter_url = f"{ABS_HOST}/api/search/chapters?asin={asin}®ion={REGION}&token={API_KEY}" 152 | chapter_response = requests.get(chapter_url) 153 | 154 | if chapter_response.status_code != 200 or chapter_response.json().get('error') is not None: 155 | book_info[book_id]['comment'] = 'Chapters retrieval failed' 156 | print(f"Error fetching chapters for '{title}'. Response Code:", chapter_response.status_code) 157 | continue 158 | 159 | chapters = chapter_response.json().get('chapters', []) 160 | if len(chapters) == 0: 161 | book_info[book_id]['comment'] = 'No chapters found' 162 | print(f"No chapters found for '{title}'.") 163 | continue 164 | print(f"Chapters found for '{title}': {len(chapters)}") 165 | 166 | # Compare current and found chapters 167 | current_chapters_num = item['media'].get('numChapters', 0) 168 | found_chapters_num = len(chapters) 169 | print(f"Current chapter count: {current_chapters_num}") 170 | 171 | if abs(current_chapters_num - found_chapters_num) > CHAPTER_THRESHOLD or current_chapters_num == 0: 172 | print(f"Chapters are missing or incorrect for '{title}'. 
Updating...") 173 | 174 | new_chapters = [] 175 | length_of_book = item['media']['duration'] # Book length in seconds 176 | 177 | for i, chapter in enumerate(chapters): 178 | start = chapter['startOffsetMs'] / 1000 179 | end = min((chapter['startOffsetMs'] + chapter['lengthMs']) / 1000, length_of_book) 180 | 181 | if start >= length_of_book: 182 | break 183 | 184 | new_chapters.append({ 185 | "id": i, 186 | "start": start, 187 | "end": end, 188 | "title": chapter['title'], 189 | "error": None 190 | }) 191 | 192 | # Update chapters for the book 193 | update_url = f"{ABS_HOST}/api/items/{book_id}/chapters?token={API_KEY}" 194 | update_data = {"chapters": new_chapters} 195 | update_response = requests.post(update_url, json=update_data) 196 | 197 | if update_response.status_code == 200: 198 | print(f"Chapters updated successfully for '{title}'.") 199 | book_info[book_id]['status'] = 'FINISHED' 200 | book_info[book_id]['comment'] = 'Chapters updated' 201 | else: 202 | print(f"Error updating chapters for '{title}'. Response Code:", update_response.status_code) 203 | book_info[book_id]['comment'] = 'Chapters update failed' 204 | else: 205 | book_info[book_id]['status'] = 'FINISHED' 206 | book_info[book_id]['comment'] = 'No chapters to update' 207 | print(f"Chapters are fine for '{title}'.") 208 | 209 | # Respect rate-limiting if enabled 210 | if not DISABLE_RATE_PROTECTION: 211 | time.sleep(2) 212 | 213 | print("\n--- Summary ---") 214 | for book_id, info in book_info.items(): 215 | print(f"{info['title']} ({info['status']}): {info['comment']}\nLink: {ABS_HOST}/item/{book_id}\n") 216 | print("-" * 50) 217 | 218 | print("\n--- Failed Books ---") 219 | for book_id, info in book_info.items(): 220 | if info['status'] != 'FINISHED': 221 | print(f"{info['title']} ({info['status']}): {info['comment']}\nLink: {ABS_HOST}/item/{book_id}\n") 222 | print("-" * 50) -------------------------------------------------------------------------------- /web/src/features/tools/quickMatchChapters.ts: -------------------------------------------------------------------------------- 1 | import { useApi } from "@/shared/composables/useApi"; 2 | import type { ToolResult } from "./types"; 3 | 4 | const { get, post, addLog } = useApi(); 5 | 6 | function delay(ms: number) { 7 | return new Promise((resolve) => setTimeout(resolve, ms)); 8 | } 9 | 10 | async function fetchLibraryItems(libraryId: string) { 11 | try { 12 | const response = await get(`/api/libraries/${libraryId}/items`); 13 | return response.data.results || []; 14 | } catch (error) { 15 | console.error("Error fetching library items:", error); 16 | return []; 17 | } 18 | } 19 | 20 | async function searchForBook(title: string, author: string, provider: string) { 21 | try { 22 | const encodedTitle = encodeURIComponent(title); 23 | const encodedAuthor = encodeURIComponent(author); 24 | const response = await get( 25 | `/api/search/books?title=${encodedTitle}&author=${encodedAuthor}&provider=${provider}`, 26 | ); 27 | return response.data || []; 28 | } catch (error) { 29 | console.error(`Error searching for book "${title}":`, error); 30 | return []; 31 | } 32 | } 33 | 34 | async function fetchBookDetails(bookId: string) { 35 | try { 36 | const response = await get(`/api/items/${bookId}?expanded=1`); 37 | return response.data || null; 38 | } catch (error) { 39 | console.error(`Error fetching book details for ${bookId}:`, error); 40 | return null; 41 | } 42 | } 43 | 44 | async function fetchChaptersByAsin(asin: string, region: string) { 45 | try { 46 | const response = 
47 | `/api/search/chapters?asin=${asin}&region=${region}`, 48 | ); 49 | if (response.data.error) { 50 | throw new Error(response.data.error); 51 | } 52 | return response.data.chapters || []; 53 | } catch (error) { 54 | console.error(`Error fetching chapters for ASIN ${asin}:`, error); 55 | return []; 56 | } 57 | } 58 | 59 | async function updateBookChapters(bookId: string, chapters: any[]) { 60 | try { 61 | await post(`/api/items/${bookId}/chapters`, { chapters }); 62 | return true; 63 | } catch (error) { 64 | console.error(`Error updating chapters for book ${bookId}:`, error); 65 | return false; 66 | } 67 | } 68 | 69 | function createChaptersFromTracks(tracks: any[], bookDuration: number) { 70 | const chapters = []; 71 | let currentTime = 0; 72 | 73 | for (let i = 0; i < tracks.length; i++) { 74 | const track = tracks[i]; 75 | const duration = track.duration; 76 | const trackTitle = track.metadata.filename.split(".")[0]; 77 | 78 | chapters.push({ 79 | id: i, 80 | start: currentTime, 81 | end: currentTime + duration, 82 | title: trackTitle, 83 | error: null, 84 | }); 85 | 86 | currentTime += duration - 0.001; // Subtract tiny amount to avoid rounding issues 87 | if (bookDuration < currentTime) { 88 | break; 89 | } 90 | } 91 | 92 | return chapters; 93 | } 94 | 95 | function createChaptersFromAsin(chapters: any[], bookDuration: number) { 96 | const newChapters = []; 97 | 98 | for (let i = 0; i < chapters.length; i++) { 99 | const chapter = chapters[i]; 100 | const start = chapter.startOffsetMs / 1000; 101 | const end = Math.min( 102 | (chapter.startOffsetMs + chapter.lengthMs) / 1000, 103 | bookDuration, 104 | ); 105 | 106 | if (start >= bookDuration) { 107 | break; 108 | } 109 | 110 | newChapters.push({ 111 | id: i, 112 | start: start, 113 | end: end, 114 | title: chapter.title, 115 | error: null, 116 | }); 117 | } 118 | 119 | return newChapters; 120 | } 121 | 122 | export async function executeMatchAudiobookChapters( 123 | formData: Record<string, any>, 124 | ): Promise<ToolResult> { 125 | try { 126 | const { 127 | chapterThreshold = 3, 128 | libraryId, 129 | provider = "audible.com", 130 | region = "US", 131 | disableRateProtection = false, 132 | searchForAsin = true, 133 | useTracksAsChapters = false, 134 | } = formData; 135 | 136 | const bookInfo: Record<string, any> = {}; 137 | 138 | addLog(`Fetching library items from library: ${libraryId}`); 139 | 140 | const items = await fetchLibraryItems(libraryId); 141 | addLog(`Found ${items.length} items in the library.`); 142 | 143 | // Process each item in the library 144 | for (const item of items) { 145 | const bookId = item.id; 146 | const metadata = item.media.metadata; 147 | const title = metadata.title || "Unknown Title"; 148 | const subtitle = metadata.subtitle || ""; 149 | const authors = metadata.authorName || "Unknown Author"; 150 | 151 | bookInfo[bookId] = { 152 | id: bookId, 153 | title: title, 154 | status: "ERROR", 155 | comment: "Unknown Error", 156 | asin: "N/A", 157 | }; 158 | 159 | addLog(`\n--- Processing Book: ${title} ---`); 160 | 161 | // Check if book has ASIN 162 | if (!metadata.asin) { 163 | bookInfo[bookId].status = "NO_ASIN"; 164 | bookInfo[bookId].asin = "N/A"; 165 | 166 | if (searchForAsin) { 167 | addLog(`Searching for ASIN for "${title}"...`); 168 | const searchResults = await searchForBook(title, authors, provider); 169 | 170 | if (searchResults.length === 0) { 171 | addLog(`Error matching book "${title}" (No results found).`); 172 | bookInfo[bookId].comment = "ASIN retrieval failed"; 173 | continue; 174 | } 175 | 176 | const bestMatch =
searchResults[0]; 177 | const asin = bestMatch.asin; 178 | 179 | if (!asin) { 180 | addLog(`Error matching book "${title}" (No ASIN found).`); 181 | bookInfo[bookId].comment = "ASIN retrieval failed - No ASIN found"; 182 | continue; 183 | } 184 | 185 | item.media.metadata.asin = asin; 186 | metadata.asin = asin; 187 | addLog(`ASIN found: ${asin}`); 188 | } 189 | } 190 | 191 | // Check if ASIN is now available 192 | if (!item.media.metadata.asin) { 193 | if (useTracksAsChapters) { 194 | addLog(`Using tracks as chapters for "${title}".`); 195 | bookInfo[bookId].status = "TRACKS"; 196 | 197 | const bookDetails = await fetchBookDetails(bookId); 198 | if (!bookDetails) { 199 | addLog( 200 | `Error fetching book "${title}": Failed to get book details`, 201 | ); 202 | bookInfo[bookId].comment = "Tracks retrieval failed"; 203 | continue; 204 | } 205 | 206 | const audioFiles = bookDetails.media.audioFiles || []; 207 | if (audioFiles.length <= 1) { 208 | addLog( 209 | `Error using tracks as chapters for "${title}" (No or 1 track found).`, 210 | ); 211 | bookInfo[bookId].comment = "Tracks retrieval failed"; 212 | continue; 213 | } 214 | 215 | const currentChaptersNum = item.media.numChapters || 0; 216 | const tracksNum = audioFiles.length; 217 | 218 | if ( 219 | currentChaptersNum === 0 || 220 | Math.abs(currentChaptersNum - tracksNum) > chapterThreshold 221 | ) { 222 | addLog( 223 | `Chapters are missing or incorrect for "${title}" (Current num: ${currentChaptersNum}, Tracks num: ${tracksNum}). Updating...`, 224 | ); 225 | 226 | const newChapters = createChaptersFromTracks( 227 | audioFiles, 228 | item.media.duration, 229 | ); 230 | const success = await updateBookChapters(bookId, newChapters); 231 | 232 | if (success) { 233 | addLog( 234 | `Chapters updated successfully for "${title}" (Using tracks!).`, 235 | ); 236 | bookInfo[bookId].comment = "Tracks used as chapters"; 237 | bookInfo[bookId].status = "FINISHED"; 238 | } else { 239 | addLog(`Error updating chapters for "${title}".`); 240 | bookInfo[bookId].comment = "Chapters update failed"; 241 | } 242 | } else { 243 | addLog(`Chapters are fine for "${title}".`); 244 | bookInfo[bookId].status = "FINISHED"; 245 | bookInfo[bookId].comment = "No chapters to update"; 246 | } 247 | continue; 248 | } else { 249 | addLog( 250 | `Skipping book "${title}" (No ASIN found and Tracks not used as source).`, 251 | ); 252 | bookInfo[bookId].comment = "ASIN retrieval failed"; 253 | continue; 254 | } 255 | } else { 256 | bookInfo[bookId].asin = item.media.metadata.asin; 257 | } 258 | 259 | // Fetch chapters using ASIN 260 | const asin = item.media.metadata.asin; 261 | addLog(`Fetching chapters for ASIN: ${asin}`); 262 | 263 | const chapters = await fetchChaptersByAsin(asin, region); 264 | 265 | if (chapters.length === 0) { 266 | bookInfo[bookId].comment = "No chapters found"; 267 | addLog(`No chapters found for "${title}".`); 268 | continue; 269 | } 270 | 271 | addLog(`Chapters found for "${title}": ${chapters.length}`); 272 | 273 | // Compare current and found chapters 274 | const currentChaptersNum = item.media.numChapters || 0; 275 | const foundChaptersNum = chapters.length; 276 | addLog(`Current chapter count: ${currentChaptersNum}`); 277 | 278 | if ( 279 | Math.abs(currentChaptersNum - foundChaptersNum) > chapterThreshold || 280 | currentChaptersNum === 0 281 | ) { 282 | addLog(`Chapters are missing or incorrect for "${title}". 
Updating...`); 283 | 284 | const newChapters = createChaptersFromAsin( 285 | chapters, 286 | item.media.duration, 287 | ); 288 | const success = await updateBookChapters(bookId, newChapters); 289 | 290 | if (success) { 291 | addLog(`Chapters updated successfully for "${title}".`); 292 | bookInfo[bookId].status = "FINISHED"; 293 | bookInfo[bookId].comment = "Chapters updated"; 294 | } else { 295 | addLog(`Error updating chapters for "${title}".`); 296 | bookInfo[bookId].comment = "Chapters update failed"; 297 | } 298 | } else { 299 | bookInfo[bookId].status = "FINISHED"; 300 | bookInfo[bookId].comment = "No chapters to update"; 301 | addLog(`Chapters are fine for "${title}".`); 302 | } 303 | 304 | if (!disableRateProtection) { 305 | await delay(2000); 306 | } 307 | } 308 | 309 | addLog("\n--- Summary ---"); 310 | for (const [bookId, info] of Object.entries(bookInfo)) { 311 | addLog(`${info.title} (${info.status}): ${info.comment}`); 312 | addLog("-".repeat(50)); 313 | } 314 | 315 | addLog("\n--- Failed Books ---"); 316 | for (const [bookId, info] of Object.entries(bookInfo)) { 317 | if (info.status !== "FINISHED") { 318 | addLog(`${info.title} (${info.status}): ${info.comment}`); 319 | addLog("-".repeat(50)); 320 | } 321 | } 322 | 323 | return { 324 | success: true, 325 | message: "Audiobook chapters matching completed", 326 | timestamp: new Date().toISOString(), 327 | }; 328 | } catch (error: any) { 329 | return { 330 | success: false, 331 | message: "Failed to match audiobook chapters", 332 | error: error.message || "Unknown error", 333 | timestamp: new Date().toISOString(), 334 | }; 335 | } 336 | } 337 | -------------------------------------------------------------------------------- /web/src/features/tools/migrateServer.ts: -------------------------------------------------------------------------------- 1 | import { useApi } from "@/shared/composables/useApi"; 2 | import type { ToolResult } from "./types"; 3 | import axios from "axios"; 4 | 5 | const { get, post, del, addLog, baseDomain } = useApi(); 6 | 7 | function expandMapping( 8 | mapping: Record<string, string>, 9 | ): Record<string, string> { 10 | const expanded: Record<string, string> = {}; 11 | 12 | for (const [key, value] of Object.entries(mapping)) { 13 | const keyParts = typeof key === "string" ? key.split(";") : []; 14 | const valueParts = 15 | typeof value === "string" 16 | ? value.split(";") 17 | : new Array(keyParts.length).fill(null); 18 | 19 | keyParts.forEach((k, i) => { 20 | const v = valueParts[i]; 21 | if (k && k !== "null" && v && v !== "null") { 22 | expanded[k] = v; 23 | } 24 | }); 25 | } 26 | 27 | return expanded; 28 | } 29 | 30 | function deepReplace(obj: any, mapping: Record<string, string>): any { 31 | if (Array.isArray(obj)) { 32 | return obj.map((item) => deepReplace(item, mapping)); 33 | } else if (obj && typeof obj === "object") { 34 | return Object.fromEntries( 35 | Object.entries(obj).map(([key, value]) => [ 36 | key, 37 | deepReplace(value, mapping), 38 | ]), 39 | ); 40 | } else if (typeof obj === "string") { 41 | if (obj === "null") return "null"; 42 | return mapping[obj] ? mapping[obj] : obj; 43 | } 44 | return obj; 45 | } 46 | 47 | export async function executeMigrateServer( 48 | formData: Record<string, any>, 49 | ): Promise<ToolResult> { 50 | const { serverUrl, serverToken, apiKeyExpiresIn } = formData; 51 | 52 | const expiresInMinutes = Number.parseInt(String(apiKeyExpiresIn).trim(), 10); 53 | if (Number.isNaN(expiresInMinutes)) { 54 | throw new Error("apiKeyExpiresIn must be a numeric string (minutes)."); 55 | } 56 | const expiresInSeconds = expiresInMinutes * 60; 57 | 58 | const newLibraries = (await get("/api/libraries")).data.libraries; 59 | const oldLibraries = ( 60 | await axios.get(`${serverUrl}/api/libraries`, { 61 | headers: { 62 | Authorization: `Bearer ${serverToken}`, 63 | }, 64 | }) 65 | ).data.libraries; 66 | 67 | const libraryMapping: Record<string, string> = {}; 68 | 69 | for (const oldLibrary of oldLibraries) { 70 | const newLibrary = newLibraries.find( 71 | (lib: any) => lib.name.toLowerCase() === oldLibrary.name.toLowerCase(), 72 | ); 73 | if (newLibrary) { 74 | libraryMapping[oldLibrary.id] = newLibrary.id; 75 | } 76 | } 77 | 78 | addLog("=== Library Mapping ==="); 79 | for (const oldLibrary of oldLibraries) { 80 | const newLibraryId = libraryMapping[oldLibrary.id]; 81 | if (newLibraryId) { 82 | const newLibrary = newLibraries.find( 83 | (lib: any) => lib.id === newLibraryId, 84 | ); 85 | if (newLibrary) { 86 | addLog( 87 | `${oldLibrary.name} -> ${newLibrary.name}`, 88 | ); 89 | } 90 | } 91 | } 92 | 93 | const newUsers = (await get("/api/users")).data.users; 94 | const oldUsers = ( 95 | await axios.get(`${serverUrl}/api/users`, { 96 | headers: { 97 | Authorization: `Bearer ${serverToken}`, 98 | }, 99 | }) 100 | ).data.users; 101 | 102 | const userMapping: Record<string, string> = {}; 103 | 104 | for (const oldUser of oldUsers) { 105 | const newUser = newUsers.find( 106 | (user: any) => 107 | user.username.toLowerCase() === oldUser.username.toLowerCase() || 108 | (user.email !== null && 109 | oldUser.email !== null && 110 | user.email.toLowerCase() === oldUser.email.toLowerCase()), 111 | ); 112 | if (newUser) { 113 | userMapping[oldUser.id] = newUser.id; 114 | } 115 | } 116 | 117 | addLog("=== User Mapping ==="); 118 | for (const oldUser of oldUsers) { 119 | const newUserId = userMapping[oldUser.id]; 120 | if (newUserId) { 121 | const newUser = newUsers.find((user: any) => user.id === newUserId); 122 | if (newUser) { 123 | addLog( 124 | `${oldUser.username} -> ${newUser.username}`, 125 | ); 126 | } 127 | } 128 | } 129 | 130 | const itemMapping: Record<string, string> = {}; 131 | const itemMediaMapping: Record<string, string> = {}; 132 | 133 | addLog("=== Item Mapping ==="); 134 | for (const [oldId, newId] of Object.entries(libraryMapping)) { 135 | addLog(`Processing library: ${oldId} -> ${newId}`); 136 | 137 | const newItems = (await get(`/api/libraries/${newId}/items`)).data.results; 138 | const oldItems = ( 139 | await axios.get(`${serverUrl}/api/libraries/${oldId}/items`, { 140 | headers: { 141 | Authorization: `Bearer ${serverToken}`, 142 | }, 143 | }) 144 | ).data.results; 145 | 146 | for (let oldItem of oldItems) { 147 | let item = newItems.find( 148 | (item: any) => 149 | item.media.metadata.asin !== null && 150 | oldItem.media.metadata.asin !== null && 151 | item.media.metadata.asin === oldItem.media.metadata.asin, 152 | ); 153 | if (!item) 154 | item = newItems.find( 155 | (item: any) => 156 | item.media.metadata.isbn !== null && 157 | oldItem.media.metadata.isbn !== null && 158 | item.media.metadata.isbn === oldItem.media.metadata.isbn, 159 | ); 160 | if (!item) { 161 | item = newItems.find((candidate: any) => {
162 | const cMeta = candidate.media?.metadata; 163 | const oMeta = oldItem.media?.metadata; 164 | if (!cMeta || !oMeta) return false; 165 | if (!cMeta.title || !oMeta.title) return false; 166 | const cTitle = cMeta.title.toLowerCase(); 167 | const oTitle = oMeta.title.toLowerCase(); 168 | const cSubtitle = (cMeta.subtitle ?? "").toLowerCase(); 169 | const oSubtitle = (oMeta.subtitle ?? "").toLowerCase(); 170 | return cTitle === oTitle && cSubtitle === oSubtitle; 171 | }); 172 | } 173 | 174 | if (item) { 175 | addLog( 176 | `${oldItem.media.metadata.title} -> ${item.media.metadata.title}`, 177 | ); 178 | if (oldItem.mediaType == "podcast") { 179 | oldItem = ( 180 | await axios.get(`${serverUrl}/api/items/${oldItem.id}`, { 181 | headers: { 182 | Authorization: `Bearer ${serverToken}`, 183 | }, 184 | }) 185 | ).data; 186 | item = (await get(`/api/items/${item.id}`)).data; 187 | 188 | for (let i: number = 0; i < oldItem.media.episodes.length; i++) { 189 | const oldEpisode = oldItem.media.episodes[i]; 190 | const newEpisode = item.media.episodes[i]; 191 | if (newEpisode) { 192 | itemMapping[`${oldItem.id};${oldEpisode.id}`] = 193 | `${item.id};${newEpisode.id}`; 194 | } else { 195 | addLog( 196 | `Warning: No mapping found for podcast ${oldItem.media.metadata.title} episode "${oldEpisode.title}"`, 197 | ); 198 | } 199 | } 200 | } else { 201 | itemMapping[`${oldItem.id};${null}`] = `${item.id};${null}`; 202 | } 203 | itemMediaMapping[oldItem.media.id] = item.media.id; 204 | } 205 | } 206 | } 207 | 208 | const progressMapping: Record<string, object[]> = {}; 209 | 210 | addLog("=== User Progress ==="); 211 | for (const [oldId, newId] of Object.entries(userMapping)) { 212 | addLog(`Processing user: ${oldId} -> ${newId}`); 213 | 214 | const oldProgresses = ( 215 | await axios.get(`${serverUrl}/api/users/${oldId}`, { 216 | headers: { 217 | Authorization: `Bearer ${serverToken}`, 218 | }, 219 | }) 220 | ).data.mediaProgress; 221 | 222 | const pushProgres: object[] = []; 223 | 224 | for (let oldProgress of oldProgresses) { 225 | const itemId = oldProgress.libraryItemId; 226 | const episodeId = oldProgress.episodeId; 227 | 228 | const newItemId = itemMapping[`${itemId ?? null};${episodeId ?? null}`]; 229 | if (newItemId) { 230 | const newProgress = deepReplace( 231 | oldProgress, 232 | expandMapping({ 233 | ...userMapping, 234 | ...libraryMapping, 235 | ...itemMapping, 236 | ...itemMediaMapping, 237 | }), 238 | ); 239 | pushProgres.push(newProgress); 240 | 241 | addLog(`User ${oldId} item ${itemId} mapped to ${newItemId}`); 242 | } else { 243 | addLog(`Warning: No mapping found for user ${oldId} item ${itemId}`); 244 | } 245 | } 246 | progressMapping[newId] = pushProgres; 247 | } 248 | 249 | const newSessions: object[] = []; 250 | 251 | const oldSessions = ( 252 | await axios.get(`${serverUrl}/api/sessions?itemsPerPage=999999`, { 253 | headers: { 254 | Authorization: `Bearer ${serverToken}`, 255 | }, 256 | }) 257 | ).data.sessions; 258 | 259 | const uuidMapping: Record<string, string> = { 260 | ...userMapping, 261 | ...libraryMapping, 262 | ...itemMapping, 263 | ...itemMediaMapping, 264 | }; 265 | 266 | let firstItem: string | null = null; 267 | 268 | for (const [key, value] of Object.entries(itemMapping)) { 269 | if (key.includes(";null")) { 270 | firstItem = value.split(";")[0] ?? null; 271 | break; 272 | } 273 | } 274 | 275 | if (!firstItem) { 276 | throw new Error( 277 | "No non-podcast item found. You need to have at least one non-podcast item.", 278 | ); 279 | } 280 | 281 | const sessionsToFix: Record<string, string> = {}; 282 | 283 | for (const oldSession of oldSessions) { 284 | const newSession = deepReplace(oldSession, expandMapping(uuidMapping)); 285 | 286 | if ( 287 | !itemMapping[ 288 | `${oldSession.libraryItemId};${oldSession.episodeId ?? null}` 289 | ] 290 | ) { 291 | addLog( 292 | `Warning: The library item for session ${oldSession.id} could not be found. Using ${firstItem} as a fallback.`, 293 | ); 294 | newSession.libraryItemId = firstItem; 295 | newSession.episodeId = null; 296 | sessionsToFix[newSession.id] = 297 | `${oldSession.libraryItemId};${oldSession.episodeId ?? null}`; 298 | } 299 | 300 | newSessions.push(newSession); 301 | //console.log(newSession.id, `
${JSON.stringify(newSession, null, 2)}
`); 302 | addLog(`Session ${newSession.id} updated`); 303 | } 304 | 305 | addLog("=== Adding Progress (This may take a while) ==="); 306 | 307 | for (const userId of Object.values(userMapping)) { 308 | const userProgress = progressMapping[userId]; 309 | 310 | const apiKeyResponse = ( 311 | await post("/api/api-keys", { 312 | name: "Migration Script", 313 | expiresIn: expiresInSeconds, 314 | isActive: true, 315 | userId: userId, 316 | }) 317 | ).data; 318 | 319 | const apiKey = apiKeyResponse.apiKey.apiKey; 320 | const apiKeyId = apiKeyResponse.apiKey.id; 321 | 322 | const sessions = newSessions.filter( 323 | (session: any) => session.userId === userId, 324 | ); 325 | 326 | await axios.patch( 327 | `${baseDomain.value}api/me/progress/batch/update`, 328 | userProgress, 329 | { 330 | headers: { 331 | Authorization: `Bearer ${apiKey}`, 332 | }, 333 | }, 334 | ); 335 | 336 | await axios.post( 337 | `${baseDomain.value}api/session/local-all`, 338 | { 339 | sessions: sessions, 340 | }, 341 | { 342 | headers: { 343 | Authorization: `Bearer ${apiKey}`, 344 | }, 345 | }, 346 | ); 347 | await del(`/api/api-keys/${apiKeyId}`); 348 | 349 | addLog(`Added sessions for user ${userId}`); 350 | } 351 | 352 | return { 353 | success: true, 354 | message: "Migration completed successfully. See logs for details.", 355 | timestamp: new Date().toISOString(), 356 | }; 357 | } 358 | --------------------------------------------------------------------------------