"bg-muted/50 border-t font-medium [&>tr]:last:border-b-0",
48 | className
49 | )}
50 | {...props}
51 | />
52 | )
53 | }
54 |
55 | function TableRow({ className, ...props }: React.ComponentProps<"tr">) {
56 | return (
57 | <tr
58 | data-slot="table-row"
59 | className={cn(
60 | "hover:bg-muted/50 data-[state=selected]:bg-muted border-b transition-colors",
61 | className
62 | )}
63 | {...props}
64 | />
65 | )
66 | }
67 |
68 | function TableHead({ className, ...props }: React.ComponentProps<"th">) {
69 | return (
70 | <th
71 | data-slot="table-head"
72 | className={cn(
73 | "text-foreground h-10 px-2 text-left align-middle font-medium whitespace-nowrap [&:has([role=checkbox])]:pr-0 [&>[role=checkbox]]:translate-y-[2px]",
74 | className
75 | )}
76 | {...props}
77 | />
78 | )
79 | }
80 |
81 | function TableCell({ className, ...props }: React.ComponentProps<"td">) {
82 | return (
83 | <td
84 | data-slot="table-cell"
85 | className={cn(
86 | "p-2 align-middle whitespace-nowrap [&:has([role=checkbox])]:pr-0 [&>[role=checkbox]]:translate-y-[2px]",
87 | className
88 | )}
89 | {...props}
90 | />
91 | )
92 | }
93 |
94 | function TableCaption({
95 | className,
96 | ...props
97 | }: React.ComponentProps<"caption">) {
98 | return (
99 | <caption
100 | data-slot="table-caption"
101 | className={cn("text-muted-foreground mt-4 text-sm", className)}
102 | {...props}
103 | />
104 | )
105 | }
106 |
107 | export {
108 | Table,
109 | TableHeader,
110 | TableBody,
111 | TableFooter,
112 | TableHead,
113 | TableRow,
114 | TableCell,
115 | TableCaption,
116 | }
117 |
--------------------------------------------------------------------------------
/components/ui/tooltip.tsx:
--------------------------------------------------------------------------------
1 | "use client"
2 |
3 | import * as React from "react"
4 | import * as TooltipPrimitive from "@radix-ui/react-tooltip"
5 |
6 | import { cn } from "@/lib/utils"
7 |
8 | function TooltipProvider({
9 | delayDuration = 0,
10 | ...props
11 | }: React.ComponentProps<typeof TooltipPrimitive.Provider>) {
12 | return (
13 | <TooltipPrimitive.Provider
14 | data-slot="tooltip-provider"
15 | delayDuration={delayDuration}
16 | {...props}
17 | />
18 | )
19 | }
20 |
21 | function Tooltip({
22 | ...props
23 | }: React.ComponentProps<typeof TooltipPrimitive.Root>) {
24 | return (
25 | <TooltipProvider>
26 | <TooltipPrimitive.Root data-slot="tooltip" {...props} />
27 | </TooltipProvider>
28 | )
29 | }
30 |
31 | function TooltipTrigger({
32 | ...props
33 | }: React.ComponentProps<typeof TooltipPrimitive.Trigger>) {
34 | return <TooltipPrimitive.Trigger data-slot="tooltip-trigger" {...props} />
35 | }
36 |
37 | function TooltipContent({
38 | className,
39 | sideOffset = 0,
40 | children,
41 | ...props
42 | }: React.ComponentProps<typeof TooltipPrimitive.Content>) {
43 | return (
44 | <TooltipPrimitive.Portal>
45 | <TooltipPrimitive.Content
46 | data-slot="tooltip-content"
47 | sideOffset={sideOffset}
48 | className={cn(
49 | "bg-primary text-primary-foreground animate-in fade-in-0 zoom-in-95 data-[state=closed]:animate-out data-[state=closed]:fade-out-0 data-[state=closed]:zoom-out-95 data-[side=bottom]:slide-in-from-top-2 data-[side=left]:slide-in-from-right-2 data-[side=right]:slide-in-from-left-2 data-[side=top]:slide-in-from-bottom-2 z-50 w-fit origin-(--radix-tooltip-content-transform-origin) rounded-md px-3 py-1.5 text-xs text-balance",
50 | className
51 | )}
52 | {...props}
53 | >
54 | {children}
55 | <TooltipPrimitive.Arrow className="bg-primary fill-primary z-50 size-2.5 translate-y-[calc(-50%_-_2px)] rotate-45 rounded-[2px]" />
56 | </TooltipPrimitive.Content>
57 | </TooltipPrimitive.Portal>
58 | )
59 | }
60 |
61 | export { Tooltip, TooltipTrigger, TooltipContent, TooltipProvider }
62 |
--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
1 | services:
2 | app:
3 | image: ghcr.io/declan-wade/sid:latest
4 | ports:
5 | - "3000:3000"
6 | environment:
7 | - SID_ALLOWED_HOSTS=localhost:3000 ## Required: the host and port where the app is accessible
8 | - REPO_URL=https://<token>@github.com/<owner>/<repo>.git ## Required: the URL of the repository root
9 | - REPO_NAME=compose-v2 ## Required: the name of the repository
10 | - WORKING_DIR=/home/user/sid/data ## Required if your containers use relative volume bindings
11 | - DB_URL=postgresql://admin:password@db:5432/sid ## Required: the database URL
12 | - GITHUB_WEBHOOK_SECRET=abc ## Used to verify the GitHub webhook (quotes here would become part of the value)
13 | volumes:
14 | - ./sid/app/data:/app/data
15 | - /var/run/docker.sock:/var/run/docker.sock
16 | db:
17 | image: postgres
18 | restart: always
19 | volumes:
20 | - ./sid/app/db:/var/lib/postgresql/data
21 | environment:
22 | POSTGRES_USER: admin
23 | POSTGRES_PASSWORD: password
24 | POSTGRES_DB: sid
25 |
--------------------------------------------------------------------------------
/lib/db.ts:
--------------------------------------------------------------------------------
1 | "use server";
2 |
3 | import { PrismaClient } from "@prisma/client";
4 | import { findAllDockerComposeFiles } from "./process";
5 | import { revalidatePath } from "next/cache";
6 |
7 | const prisma = new PrismaClient();
8 |
9 | export async function createStack(formData: { name: string; filePath: string }) {
10 | try {
11 | console.info(
12 | `[createStack] Creating stack: ${formData.name} at ${formData.filePath}`,
13 | );
14 | const response = await prisma.stack.create({
15 | data: {
16 | name: formData.name,
17 | status: "created",
18 | path: formData.filePath,
19 | },
20 | });
21 | console.info(`[createStack] Stack created: ${formData.name}`);
22 | await createEvent("Success", `Stack created: ${formData.name}`);
23 | revalidatePath("/");
24 | return response;
25 | } catch (err: any) {
26 | console.error(`[createStack] Error creating stack: ${err.message}`);
27 | throw err;
28 | }
29 | }
30 |
31 | export async function syncStacks() {
32 | console.info("[syncStacks] Finding all docker-compose files...");
33 | const composeFiles = await findAllDockerComposeFiles();
34 | if (!composeFiles || composeFiles.length === 0) {
35 | console.info("[syncStacks] No docker-compose files found.");
36 | return [];
37 | }
38 |
39 | const stacksCreated: any[] = [];
40 | for (const filePath of composeFiles) {
41 | const parts = filePath.split("/");
42 | const stackName = parts[parts.length - 2];
43 | try {
44 | console.info(`[syncStacks] Upserting stack: ${stackName} (${filePath})`);
45 | const stack = await prisma.stack.upsert({
46 | where: { name: stackName },
47 | update: {
48 | path: filePath,
49 | status: "synced",
50 | updatedAt: new Date(),
51 | },
52 | create: {
53 | name: stackName,
54 | path: filePath,
55 | status: "synced",
56 | },
57 | });
58 | stacksCreated.push(stack);
59 | console.info(`[syncStacks] Stack synced: ${stackName}`);
60 | await createEvent("Info", `Stack synced: ${stackName}`);
61 | } catch (err: any) {
62 | console.error(
63 | `[syncStacks] Failed to sync stack for ${filePath}: ${err.message}`,
64 | );
65 | await createEvent(
66 | "Error",
67 | `Failed to sync stack for ${filePath}: ${err.message}`,
68 | );
69 | }
70 | }
71 | console.info(
72 | `[syncStacks] Finished syncing stacks. Total: ${stacksCreated.length}`,
73 | );
74 | revalidatePath("/");
75 | return stacksCreated;
76 | }
77 |
78 | export async function getStacks() {
79 | try {
80 | console.info("[getStacks] Fetching all stacks with latest event...");
81 | const response = await prisma.stack.findMany({
82 | include: {
83 | events: {
84 | orderBy: { createdAt: "desc" },
85 | take: 1,
86 | },
87 | },
88 | });
89 | console.info(`[getStacks] Found ${response.length} stacks.`);
90 | return response;
91 | } catch (err: any) {
92 | console.error(`[getStacks] Error fetching stacks: ${err.message}`);
93 | throw err;
94 | }
95 | }
96 |
97 | export async function createEvent(
98 | type: string,
99 | message: any,
100 | stackName?: string,
101 | ) {
102 | try {
103 | console.info(
104 | `[createEvent] Creating event: type=${type}, stackName=${stackName}, message=${typeof message === "string" ? message : JSON.stringify(message)}`,
105 | );
106 | const response = await prisma.event.create({
107 | data: {
108 | type: type,
109 | message: message,
110 | stack: stackName
111 | ? { connect: { name: stackName } }
112 | : undefined,
113 | },
114 | });
115 | console.info(`[createEvent] Event created: ${response.id}`);
116 | return response;
117 | } catch (err: any) {
118 | console.error(`[createEvent] Error creating event: ${err.message}`);
119 | throw err;
120 | }
121 | }
122 |
123 | export async function getEvents(page: number, pageSize: number) {
124 | try {
125 | const skip = (page - 1) * pageSize;
126 | console.info(
127 | `[getEvents] Fetching events: page=${page}, pageSize=${pageSize}, skip=${skip}`,
128 | );
129 | const [events, total] = await Promise.all([
130 | prisma.event.findMany({
131 | orderBy: { createdAt: "desc" },
132 | skip,
133 | take: pageSize,
134 | include: { stack: true },
135 | }),
136 | prisma.event.count(),
137 | ]);
138 | console.info(
139 | `[getEvents] Fetched ${events.length} events (total: ${total})`,
140 | );
141 | return { events, total };
142 | } catch (err: any) {
143 | console.error(`[getEvents] Error fetching events: ${err.message}`);
144 | throw err;
145 | }
146 | }
147 |
148 | export async function refresh() {
149 | revalidatePath("/");
150 | }
--------------------------------------------------------------------------------
/lib/process.ts:
--------------------------------------------------------------------------------
1 | "use server";
2 |
3 | import { spawn } from "node:child_process";
4 | import { readdirSync, statSync } from "fs";
5 | import { join } from "path";
6 | import { createEvent } from "./db";
7 | import { revalidatePath } from "next/cache";
8 |
9 | // Helper function for consistent error handling
10 | function handleProcessError(
11 | functionName: string,
12 | id: string | null,
13 | error: Error,
14 | reject: (reason?: any) => void,
15 | ) {
16 | const message = `${functionName}${id ? `(${id})` : ""} process error: ${error.message}`;
17 | console.error(message);
18 | createEvent("Error", `Process error: ${error.message}`);
19 | reject(new Error(`Process error: ${error.message}`));
20 | }
21 |
22 | function handleProcessClose(
23 | functionName: string,
24 | id: string | null,
25 | code: number,
26 | errorChunks: Buffer[],
27 | reject: (reason?: any) => void,
28 | ): boolean {
29 | if (code !== 0) {
30 | const errorMessage = Buffer.concat(errorChunks).toString().trim();
31 | const message = `${functionName}${id ? `(${id})` : ""} process exited with code ${code}: ${errorMessage || "Unknown error"}`;
32 | console.error(message);
33 | createEvent(
34 | "Error",
35 | `Process exited with code ${code}: ${errorMessage || "Unknown error"}`,
36 | );
37 | reject(
38 | new Error(
39 | `Process exited with code ${code}: ${errorMessage || "Unknown error"}`,
40 | ),
41 | );
42 | return false;
43 | }
44 | return true;
45 | }
46 |
47 | function handleStderr(
48 | functionName: string,
49 | id: string | null,
50 | data: Buffer,
51 | operation: string,
52 | ) {
53 | console.error(`${functionName}${id ? `(${id})` : ""} stderr: ${data}`);
54 | createEvent("Error", `${operation} stderr: ${data}`);
55 | }
56 |
57 | export async function check() {
58 | return new Promise((resolve, reject) => {
59 | const ls = spawn("docker", [
60 | "container",
61 | "ls",
62 | "-a",
63 | "--format={{json .}}",
64 | ]);
65 | let dataChunks: Buffer[] = [];
66 | let errorChunks: Buffer[] = [];
67 |
68 | ls.stdout.on("data", (data) => {
69 | dataChunks.push(data);
70 | });
71 |
72 | ls.stderr.on("data", (data) => {
73 | errorChunks.push(data);
74 | handleStderr("check", null, data, "Check");
75 | });
76 |
77 | ls.on("error", (error) => {
78 | handleProcessError("check", null, error, reject);
79 | });
80 |
81 | ls.on("close", (code) => {
82 | if (!handleProcessClose("check", null, code ?? 1, errorChunks, reject)) {
83 | return;
84 | }
85 |
86 | try {
87 | const rawOutput = Buffer.concat(dataChunks).toString().trim();
88 | const jsonLines = rawOutput
89 | .split("\n")
90 | .filter((line) => line.trim() !== "");
91 |
92 | if (jsonLines.length === 0) {
93 | console.info("check() found no containers.");
94 | resolve({
95 | status: "success",
96 | containers: [],
97 | message: "No containers found",
98 | });
99 | return;
100 | }
101 |
102 | const containers = jsonLines.map((line) => JSON.parse(line));
103 | console.info(`check() found ${containers.length} containers.`);
104 | resolve({ status: "success", containers });
105 | } catch (err) {
106 | console.error(`check() error parsing JSON: ${(err as Error).message}`);
107 | createEvent("Error", `Error parsing JSON: ${(err as Error).message}`);
108 | if (process.env.NTFY_URL) {
109 | fetch(`${process.env.NTFY_URL}`, {
110 | method: "POST", // PUT works too
111 | body: "Error parsing JSON: ${(err as Error).message}",
112 | headers: {
113 | "X-Title": "~~SID~~ - Error",
114 | },
115 | });
116 | }
117 | reject(new Error(`Error parsing JSON: ${(err as Error).message}`));
118 | }
119 | });
120 | });
121 | }
122 |
123 | export async function stopContainer(id: string) {
124 | console.info(`Stopping container with ID: ${id}`);
125 | return new Promise((resolve, reject) => {
126 | const ls = spawn("docker", ["stop", id]);
127 | let dataChunks: Buffer[] = [];
128 | let errorChunks: Buffer[] = [];
129 |
130 | ls.stdout.on("data", (data) => {
131 | dataChunks.push(data);
132 | });
133 |
134 | ls.stderr.on("data", (data) => {
135 | errorChunks.push(data);
136 | handleStderr("stopContainer", id, data, "Stop");
137 | });
138 |
139 | ls.on("error", (error) => {
140 | handleProcessError("stopContainer", id, error, reject);
141 | });
142 |
143 | ls.on("close", (code) => {
144 | if (
145 | !handleProcessClose("stopContainer", id, code ?? 1, errorChunks, reject)
146 | ) {
147 | return;
148 | }
149 |
150 | try {
151 | const rawOutput = Buffer.concat(dataChunks).toString().trim();
152 | console.info(`stopContainer(${id}) success: ${rawOutput}`);
153 | createEvent("Success", `Container ${id} stopped successfully`);
154 | revalidatePath("/");
155 | resolve({ status: "success", output: rawOutput });
156 | } catch (err) {
157 | console.error(
158 | `stopContainer(${id}) error processing output: ${(err as Error).message}`,
159 | );
160 | createEvent(
161 | "Error",
162 | `Error processing output: ${(err as Error).message}`,
163 | );
164 | revalidatePath("/");
165 | reject(new Error(`Error processing output: ${(err as Error).message}`));
166 | }
167 | });
168 | });
169 | }
170 |
171 | export async function killContainer(id: string) {
172 | console.info(`Killing container with ID: ${id}`);
173 | return new Promise((resolve, reject) => {
174 | const ls = spawn("docker", ["kill", id]);
175 | let dataChunks: Buffer[] = [];
176 | let errorChunks: Buffer[] = [];
177 |
178 | ls.stdout.on("data", (data) => {
179 | dataChunks.push(data);
180 | });
181 |
182 | ls.stderr.on("data", (data) => {
183 | errorChunks.push(data);
184 | handleStderr("killContainer", id, data, "Kill");
185 | });
186 |
187 | ls.on("error", (error) => {
188 | handleProcessError("killContainer", id, error, reject);
189 | });
190 |
191 | ls.on("close", (code) => {
192 | if (
193 | !handleProcessClose("killContainer", id, code ?? 1, errorChunks, reject)
194 | ) {
195 | return;
196 | }
197 |
198 | try {
199 | const rawOutput = Buffer.concat(dataChunks).toString().trim();
200 | console.info(`killContainer(${id}) success: ${rawOutput}`);
201 | createEvent("Success", `Container ${id} killed successfully`);
202 | revalidatePath("/");
203 | resolve({ status: "success", output: rawOutput });
204 | } catch (err) {
205 | console.error(
206 | `killContainer(${id}) error processing output: ${(err as Error).message}`,
207 | );
208 | createEvent(
209 | "Error",
210 | `Error processing output: ${(err as Error).message}`,
211 | );
212 | revalidatePath("/");
213 | reject(new Error(`Error processing output: ${(err as Error).message}`));
214 | }
215 | });
216 | });
217 | }
218 |
219 | export async function deleteContainer(id: string) {
220 | console.info(`Deleting container with ID: ${id}`);
221 | return new Promise((resolve, reject) => {
222 | const ls = spawn("docker", ["container", "rm", "-v", id]);
223 | let dataChunks: Buffer[] = [];
224 | let errorChunks: Buffer[] = [];
225 |
226 | ls.stdout.on("data", (data) => {
227 | dataChunks.push(data);
228 | });
229 |
230 | ls.stderr.on("data", (data) => {
231 | errorChunks.push(data);
232 | handleStderr("deleteContainer", id, data, "Delete");
233 | });
234 |
235 | ls.on("error", (error) => {
236 | handleProcessError("deleteContainer", id, error, reject);
237 | });
238 |
239 | ls.on("close", (code) => {
240 | if (
241 | !handleProcessClose(
242 | "deleteContainer",
243 | id,
244 | code ?? 1,
245 | errorChunks,
246 | reject,
247 | )
248 | ) {
249 | return;
250 | }
251 |
252 | try {
253 | const rawOutput = Buffer.concat(dataChunks).toString().trim();
254 | console.info(`deleteContainer(${id}) success: ${rawOutput}`);
255 | createEvent("Success", `Container ${id} deleted successfully`);
256 | revalidatePath("/");
257 | resolve({ status: "success", output: rawOutput });
258 | } catch (err) {
259 | console.error(
260 | `deleteContainer(${id}) error processing output: ${(err as Error).message}`,
261 | );
262 | createEvent(
263 | "Error",
264 | `Error processing output: ${(err as Error).message}`,
265 | );
266 | revalidatePath("/");
267 | reject(new Error(`Error processing output: ${(err as Error).message}`));
268 | }
269 | });
270 | });
271 | }
272 |
273 | export async function restartContainer(id: string) {
274 | console.info(`Restarting container with ID: ${id}`);
275 | return new Promise((resolve, reject) => {
276 | const ls = spawn("docker", ["restart", id]);
277 | let dataChunks: Buffer[] = [];
278 | let errorChunks: Buffer[] = [];
279 |
280 | ls.stdout.on("data", (data) => {
281 | dataChunks.push(data);
282 | });
283 |
284 | ls.stderr.on("data", (data) => {
285 | errorChunks.push(data);
286 | handleStderr("restartContainer", id, data, "Restart");
287 | });
288 |
289 | ls.on("error", (error) => {
290 | handleProcessError("restartContainer", id, error, reject);
291 | });
292 |
293 | ls.on("close", (code) => {
294 | if (
295 | !handleProcessClose(
296 | "restartContainer",
297 | id,
298 | code ?? 1,
299 | errorChunks,
300 | reject,
301 | )
302 | ) {
303 | return;
304 | }
305 |
306 | try {
307 | const rawOutput = Buffer.concat(dataChunks).toString().trim();
308 | console.info(`restartContainer(${id}) success: ${rawOutput}`);
309 | createEvent("Success", `Container ${id} restarted successfully`);
310 | revalidatePath("/");
311 | resolve({ status: "success", output: rawOutput });
312 | } catch (err) {
313 | console.error(
314 | `restartContainer(${id}) error processing output: ${(err as Error).message}`,
315 | );
316 | createEvent(
317 | "Error",
318 | `Error processing output: ${(err as Error).message}`,
319 | );
320 | revalidatePath("/");
321 | reject(new Error(`Error processing output: ${(err as Error).message}`));
322 | }
323 | });
324 | });
325 | }
326 |
327 | export async function clone() {
328 | return new Promise((resolve, reject) => {
329 | const workingDir = process.env.WORKING_DIR || "/app/data";
330 | const repoRoot = process.env.REPO_URL;
331 |
332 | if (!repoRoot) {
333 | const errorMessage =
334 | "Required environment variable REPO_URL is not defined";
335 | console.error(`clone() ${errorMessage}`);
336 | createEvent("Error", "Missing REPO_URL environment variable");
337 | reject(new Error(errorMessage));
338 | return;
339 | }
340 |
341 | if (!process.env.WORKING_DIR) {
342 | console.warn(
343 | "No WORKING_DIR environment variable. Using default path `/app/data/` -- ensure this path is mounted!",
344 | );
345 | createEvent(
346 | "Info",
347 | "No WORKING_DIR environment variable. Using default path `/app/data/` -- ensure this path is mounted!",
348 | );
349 | }
350 |
351 | const repoName = process.env.REPO_NAME;
352 | const repoPath = `${workingDir}/${repoName}`;
353 |
354 | // Detect SSH or HTTPS/PAT repo URL
355 | const isSSH = repoRoot.startsWith("git@");
356 | const isPAT = repoRoot.startsWith("https://");
357 |
358 | // If using SSH, set GIT_SSH_COMMAND to avoid host key prompt (optional)
359 | // You may want to customize this for your environment
360 | const env = { ...process.env };
361 | if (isSSH) {
362 | env.GIT_SSH_COMMAND = "ssh -o StrictHostKeyChecking=accept-new";
363 | }
364 |
365 | const checkDirCmd = `if [ -d "${repoPath}" ]; then
366 | echo "exists";
367 | cd "${repoPath}" && git fetch --all && git pull && echo "${repoPath}";
368 | else
369 | echo "new";
370 | cd "${workingDir}" && git clone "${repoRoot}" && echo "${repoPath}";
371 | fi`;
372 |
373 | const ls = spawn("sh", ["-c", checkDirCmd], { env });
374 | let dataChunks: Buffer[] = [];
375 | let errorChunks: Buffer[] = [];
376 |
377 | ls.stdout.on("data", (data) => {
378 | dataChunks.push(data);
379 | });
380 |
381 | ls.stderr.on("data", (data) => {
382 | errorChunks.push(data);
383 | const stderrText = data.toString().trim();
384 |
385 | // Check if this is actually an error or just git info
386 | const isActualError =
387 | stderrText.includes("fatal:") ||
388 | stderrText.includes("error:") ||
389 | stderrText.includes("permission denied");
390 |
391 | if (isActualError) {
392 | console.error(`clone() stderr: ${data}`);
393 | createEvent("Error", `Clone stderr: ${data}`);
394 | if (process.env.NTFY_URL) {
395 | fetch(`${process.env.NTFY_URL}`, {
396 | method: "POST", // PUT works too
397 | body: `Clone stderr: ${data}`,
398 | headers: {
399 | "X-Title": "~~SID~~ - Error Cloning",
400 | },
401 | });
402 | }
403 | } else {
404 | console.info(`clone() git info: ${data}`);
405 | }
406 | });
407 |
408 | ls.on("error", (error) => {
409 | handleProcessError("clone", null, error, reject);
410 | });
411 |
412 | ls.on("close", (code) => {
413 | if (!handleProcessClose("clone", null, code ?? 1, errorChunks, reject)) {
414 | return;
415 | }
416 |
417 | try {
418 | const output = Buffer.concat(dataChunks).toString().trim();
419 | const lines = output.split("\n");
420 | const resultPath = lines[lines.length - 1];
421 |
422 | if (lines[0] === "exists") {
423 | console.info(
424 | `clone() repository already exists at ${resultPath}, pulled latest changes.`,
425 | );
426 | createEvent(
427 | "Info",
428 | `Repository already exists, pulled latest changes at ${resultPath}`,
429 | );
430 | revalidatePath("/");
431 | resolve({
432 | status: "success",
433 | path: resultPath,
434 | message: "Repository already exists, pulled latest changes",
435 | });
436 | } else {
437 | console.info(`clone() repository newly cloned to ${resultPath}.`);
438 | createEvent("Info", `Repository newly cloned to ${resultPath}`);
439 | if (process.env.NTFY_URL) {
440 | fetch(`${process.env.NTFY_URL}`, {
441 | method: "POST", // PUT works too
442 | body: `Repository newly cloned to ${resultPath}`,
443 | headers: {
444 | "X-Title": "~~SID~~ - Success",
445 | },
446 | });
447 | }
448 | revalidatePath("/");
449 | resolve({
450 | status: "success",
451 | path: resultPath,
452 | message: "Repository newly cloned",
453 | });
454 | }
455 | } catch (err) {
456 | console.error(
457 | `clone() error processing output: ${(err as Error).message}`,
458 | );
459 | createEvent(
460 | "Error",
461 | `Error processing output: ${(err as Error).message}`,
462 | );
463 | revalidatePath("/");
464 | reject(new Error(`Error processing output: ${(err as Error).message}`));
465 | }
466 | });
467 | });
468 | }
469 |
470 | export async function runDockerComposeForChangedDirs(
471 | files: string[],
472 | ): Promise<{ dir: string; result: string; error?: string }[]> {
473 | let workingDir = process.env.WORKING_DIR || "/app/data";
474 |
475 | if (!process.env.WORKING_DIR) {
476 | console.warn(
477 | "No WORKING_DIR environment variable. Using default path `/app/data/` -- ensure this path is mounted!",
478 | );
479 | }
480 |
481 | if (!process.env.REPO_URL) {
482 | throw new Error("Required environment variable REPO_URL is not set");
483 | }
484 |
485 | const repoName = process.env.REPO_URL?.split("/").pop()?.replace(".git", "");
486 | workingDir = `${workingDir}/${repoName}`;
487 |
488 | // Get unique directories from file paths
489 | const dirs = Array.from(
490 | new Set(
491 | files.map((f) => f.split("/").slice(0, -1).join("/")).filter(Boolean),
492 | ),
493 | );
494 |
495 | const results: { dir: string; result: string; error?: string }[] = [];
496 |
497 | for (const dir of dirs) {
498 | const absDir = `${workingDir}/${dir}`;
499 | console.info(`Running docker compose in: ${absDir}`);
500 |
501 | try {
502 | const result = await new Promise<string>((resolve, reject) => {
503 | const proc = spawn(
504 | "docker",
505 | ["compose", "up", "-d", "--remove-orphans"],
506 | { cwd: absDir },
507 | );
508 | let output = "";
509 | let errorOutput = "";
510 |
511 | proc.stdout.on("data", (data) => {
512 | output += data.toString();
513 | });
514 |
515 | proc.stderr.on("data", (data) => {
516 | errorOutput += data.toString();
517 | });
518 |
519 | proc.on("error", (error) => {
520 | const message = `Failed to start docker compose in ${absDir}: ${error.message}`;
521 | console.error(message);
522 | createEvent("Error", message, dir.split("/")[0]);
523 | if (process.env.NTFY_URL) {
524 | fetch(`${process.env.NTFY_URL}`, {
525 | method: "POST", // PUT works too
526 | body: `Error: ${message}`,
527 | headers: {
528 | "X-Title": "~~SID~~ - Failed to start docker compose",
529 | },
530 | });
531 | }
532 | reject(error);
533 | });
534 |
535 | proc.on("close", (code) => {
536 | if (code === 0) {
537 | console.info(
538 | `docker compose up succeeded in ${absDir}: ${output.trim()}`,
539 | );
540 | createEvent(
541 | "Success",
542 | `docker compose up succeeded in ${absDir}`,
543 | dir.split("/")[0],
544 | );
545 | if (process.env.NTFY_URL) {
546 | fetch(`${process.env.NTFY_URL}`, {
547 | method: "POST", // PUT works too
548 | body: `docker compose up succeeded in ${absDir}`,
549 | headers: {
550 | "X-Title": "~~SID~~ - Success",
551 | },
552 | });
553 | }
554 | resolve(output.trim());
555 | } else {
556 | const errorMessage =
557 | errorOutput.trim() || `Exited with code ${code}`;
558 | console.error(
559 | `docker compose up failed in ${absDir}: ${errorMessage}`,
560 | );
561 | createEvent(
562 | "Error",
563 | `docker compose up failed in ${absDir}: ${errorMessage}`,
564 | dir.split("/")[0],
565 | );
566 | if (process.env.NTFY_URL) {
567 | fetch(`${process.env.NTFY_URL}`, {
568 | method: "POST", // PUT works too
569 | body: `docker compose up failed in ${absDir}: ${errorMessage}`,
570 | headers: {
571 | "X-Title": "~~SID~~ - Failed to start docker compose",
572 | },
573 | });
574 | }
575 | reject(new Error(errorMessage));
576 | }
577 | });
578 | });
579 |
580 | results.push({ dir: absDir, result });
581 | } catch (err: any) {
582 | console.error(
583 | `Error running docker compose in ${absDir}: ${err.message}`,
584 | );
585 | createEvent(
586 | "Error",
587 | `Error running docker compose in ${absDir}: ${err.message}`,
588 | dir.split("/")[0],
589 | );
590 | results.push({ dir: absDir, result: "", error: err.message });
591 | }
592 | }
593 |
594 | return results;
595 | }
596 |
597 | export async function findAllDockerComposeFiles(): Promise<string[]> {
598 | const workingDir = process.env.WORKING_DIR || "/app/data";
599 | const repoRoot = process.env.REPO_URL;
600 |
601 | if (!repoRoot) {
602 | throw new Error("Required environment variable REPO_URL is not set");
603 | }
604 |
605 | const repoName = repoRoot.split("/").pop()?.replace(".git", "");
606 | if (!repoName) {
607 | throw new Error("Unable to determine repository name from REPO_URL");
608 | }
609 |
610 | if (!process.env.WORKING_DIR) {
611 | console.warn(
612 | "No WORKING_DIR environment variable. Using default path `/app/data/` -- ensure this path is mounted!",
613 | );
614 | }
615 |
616 | const rootDir = join(workingDir, repoName);
617 | const results: string[] = [];
618 |
619 | function walk(dir: string) {
620 | let entries: string[];
621 | try {
622 | entries = readdirSync(dir);
623 | } catch (err) {
624 | console.error(
625 | `Error reading directory ${dir}: ${(err as Error).message}`,
626 | );
627 | return;
628 | }
629 |
630 | for (const entry of entries) {
631 | const fullPath = join(dir, entry);
632 | let stats;
633 |
634 | try {
635 | stats = statSync(fullPath);
636 | } catch (err) {
637 | console.error(
638 | `Error stating path ${fullPath}: ${(err as Error).message}`,
639 | );
640 | continue;
641 | }
642 |
643 | if (stats.isDirectory()) {
644 | walk(fullPath);
645 | } else if (
646 | entry === "docker-compose.yml" ||
647 | entry === "docker-compose.yaml"
648 | ) {
649 | results.push(fullPath);
650 | }
651 | }
652 | }
653 |
654 | walk(rootDir);
655 | return results;
656 | }
657 |
658 | export async function runDockerComposeForPath(
659 | path: string,
660 | ): Promise<{ dir: string; result: string; error?: string }> {
661 | const workingDir = process.env.WORKING_DIR;
662 |
663 | if (!workingDir) {
664 | throw new Error("WORKING_DIR environment variable is not set");
665 | }
666 |
667 | // Remove leading/trailing slashes and construct absolute directory path
668 | const cleanPath = path.replace(/^\/|\/$/g, "");
669 | const absDir = `${workingDir}/${cleanPath}`;
670 | console.log("stack name: ", cleanPath.split("/")[1]);
671 | console.info(`Running docker compose in: ${absDir}`);
672 |
673 | try {
674 | const result = await new Promise<string>((resolve, reject) => {
675 | const proc = spawn(
676 | "docker",
677 | ["compose", "up", "-d", "--remove-orphans"],
678 | { cwd: absDir },
679 | );
680 | let output = "";
681 | let errorOutput = "";
682 |
683 | proc.stdout.on("data", (data) => {
684 | output += data.toString();
685 | });
686 |
687 | proc.stderr.on("data", (data) => {
688 | errorOutput += data.toString();
689 | });
690 |
691 | proc.on("error", (error) => {
692 | const message = `Failed to start docker compose in ${absDir}: ${error.message}`;
693 | console.error(message);
694 | createEvent("Error", message, cleanPath.split("/")[1]);
695 | revalidatePath("/");
696 | reject(error);
697 | });
698 |
699 | proc.on("close", (code) => {
700 | if (code === 0) {
701 | console.info(
702 | `docker compose up succeeded in ${absDir}: ${output.trim()}`,
703 | );
704 | createEvent(
705 | "Success",
706 | `docker compose up succeeded in ${absDir}`,
707 | cleanPath.split("/")[1],
708 | );
709 | revalidatePath("/");
710 | resolve(output.trim());
711 | } else {
712 | const errorMessage = errorOutput.trim() || `Exited with code ${code}`;
713 | console.error(
714 | `docker compose up failed in ${absDir}: ${errorMessage}`,
715 | );
716 | createEvent(
717 | "Error",
718 | `docker compose up failed in ${absDir}: ${errorMessage}`,
719 | cleanPath.split("/")[1],
720 | );
721 | revalidatePath("/");
722 | reject(new Error(errorMessage));
723 | }
724 | });
725 | });
726 |
727 | return { dir: absDir, result };
728 | } catch (err: any) {
729 | console.error(`Error running docker compose in ${absDir}: ${err.message}`);
730 | createEvent(
731 | "Error",
732 | `Error running docker compose in ${absDir}: ${err.message}`,
733 | cleanPath.split("/")[1],
734 | );
735 | revalidatePath("/");
736 | return { dir: absDir, result: "", error: err.message };
737 | }
738 | }
739 |
--------------------------------------------------------------------------------
/lib/utils.ts:
--------------------------------------------------------------------------------
1 | import { clsx, type ClassValue } from "clsx";
2 | import { twMerge } from "tailwind-merge";
3 |
4 | export function cn(...inputs: ClassValue[]) {
5 | return twMerge(clsx(inputs));
6 | }
--------------------------------------------------------------------------------
/middleware.ts:
--------------------------------------------------------------------------------
1 | import { NextResponse } from "next/server";
2 |
3 | export function middleware(req: Request) {
4 | const url = new URL(req.url);
5 |
6 | // Allow /api routes to bypass host validation
7 | if (url.pathname.startsWith("/api")) {
8 | return NextResponse.next();
9 | }
10 |
11 | // Check the Host header, if SID_ALLOWED_HOSTS is set
12 | const host = req.headers.get("host");
13 | const port = process.env.PORT || 3000;
14 |
15 | let allowedHosts = [`localhost:${port}`, `127.0.0.1:${port}`];
16 | const allowAll = process.env.SID_ALLOWED_HOSTS === "*";
17 | if (process.env.SID_ALLOWED_HOSTS) {
18 | allowedHosts = allowedHosts.concat(process.env.SID_ALLOWED_HOSTS.split(","));
19 | }
20 | if (!allowAll && (!host || !allowedHosts.includes(host))) {
21 | // eslint-disable-next-line no-console
22 | console.error(
23 | `Host validation failed for: ${host}. Hint: Set the SID_ALLOWED_HOSTS environment variable to allow requests from this host / port.`,
24 | );
25 | return NextResponse.json({ error: "Host validation failed. See logs for more details." }, { status: 400 });
26 | }
27 | return NextResponse.next();
28 | }
29 |
30 | export const config = {
31 | matcher: "/:path*",
32 | };
--------------------------------------------------------------------------------
/next.config.ts:
--------------------------------------------------------------------------------
1 | import type { NextConfig } from "next";
2 |
3 | const nextConfig: NextConfig = {
4 | output: 'standalone',
5 | pageExtensions: ['ts', 'tsx']
6 | };
7 |
8 | export default nextConfig;
9 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "sid",
3 | "homepage": "https://github.com/declan-wade/SID",
4 | "version": "1.0.0",
5 | "license": "MIT License",
6 | "private": false,
7 | "bugs": {
8 | "url": "https://github.com/declan-wade/SID/issues"
9 | },
10 | "scripts": {
11 | "dev": "next dev --turbopack",
12 | "build": "next build --experimental-build-mode compile",
13 | "start": "next start",
14 | "lint": "next lint",
15 | "postinstall": "prisma generate"
16 | },
17 | "dependencies": {
18 | "@octokit/core": "^7.0.3",
19 | "@octokit/webhooks-methods": "^6.0.0",
20 | "@prisma/client": "^6.11.1",
21 | "@radix-ui/react-alert-dialog": "^1.1.14",
22 | "@radix-ui/react-dialog": "^1.1.6",
23 | "@radix-ui/react-label": "^2.1.7",
24 | "@radix-ui/react-slot": "^1.2.3",
25 | "@radix-ui/react-switch": "^1.2.5",
26 | "@radix-ui/react-tooltip": "^1.1.8",
27 | "class-variance-authority": "^0.7.1",
28 | "clsx": "^2.1.1",
29 | "date-fns": "^4.1.0",
30 | "lucide-react": "^0.525.0",
31 | "next": "15.2.4",
32 | "next-themes": "^0.4.6",
33 | "prisma": "^6.11.1",
34 | "react": "^19.1.0",
35 | "react-dom": "^19.1.0",
36 | "sonner": "^2.0.3",
37 | "tailwind-merge": "^3.3.1",
38 | "tw-animate-css": "^1.3.6",
39 | "vaul": "^1.1.2"
40 | },
41 | "devDependencies": {
42 | "@tailwindcss/postcss": "^4",
43 | "@types/node": "^24",
44 | "@types/react": "^19",
45 | "@types/react-dom": "^19",
46 | "tailwindcss": "^4",
47 | "typescript": "^5"
48 | },
49 | "pnpm": {
50 | "supportedArchitectures": {
51 | "os": [
52 | "current"
53 | ],
54 | "cpu": [
55 | "x64",
56 | "arm64"
57 | ]
58 | }
59 | }
60 | }
61 |
--------------------------------------------------------------------------------
/postcss.config.mjs:
--------------------------------------------------------------------------------
1 | const config = {
2 | plugins: ["@tailwindcss/postcss"],
3 | };
4 |
5 | export default config;
6 |
--------------------------------------------------------------------------------
/prisma/migrations/20250710011123_init/migration.sql:
--------------------------------------------------------------------------------
1 | -- CreateTable
2 | CREATE TABLE "Stack" (
3 | "id" TEXT NOT NULL,
4 | "name" TEXT NOT NULL,
5 | "path" TEXT NOT NULL,
6 | "status" TEXT NOT NULL,
7 | "lastEvent" TEXT,
8 | "lastEventAt" TIMESTAMP(3),
9 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
10 | "updatedAt" TIMESTAMP(3) NOT NULL,
11 |
12 | CONSTRAINT "Stack_pkey" PRIMARY KEY ("id")
13 | );
14 |
15 | -- CreateTable
16 | CREATE TABLE "Event" (
17 | "id" TEXT NOT NULL,
18 | "type" TEXT NOT NULL,
19 | "message" TEXT NOT NULL,
20 | "stackId" TEXT,
21 | "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
22 |
23 | CONSTRAINT "Event_pkey" PRIMARY KEY ("id")
24 | );
25 |
26 | -- CreateIndex
27 | CREATE UNIQUE INDEX "Stack_name_key" ON "Stack"("name");
28 |
29 | -- AddForeignKey
30 | ALTER TABLE "Event" ADD CONSTRAINT "Event_stackId_fkey" FOREIGN KEY ("stackId") REFERENCES "Stack"("id") ON DELETE SET NULL ON UPDATE CASCADE;
31 |
--------------------------------------------------------------------------------
/prisma/migrations/migration_lock.toml:
--------------------------------------------------------------------------------
1 | # Please do not edit this file manually
2 | # It should be added in your version-control system (e.g., Git)
3 | provider = "postgresql"
4 |
--------------------------------------------------------------------------------
/prisma/schema.prisma:
--------------------------------------------------------------------------------
1 | // This is your Prisma schema file,
2 | // learn more about it in the docs: https://pris.ly/d/prisma-schema
3 |
4 | datasource db {
5 | provider = "postgresql"
6 | url = env("DB_URL")
7 | }
8 |
9 | generator client {
10 | provider = "prisma-client-js"
11 | }
12 |
13 | model Stack {
14 | id String @id @default(uuid())
15 | name String @unique
16 | path String
17 | status String
18 | lastEvent String?
19 | lastEventAt DateTime?
20 | createdAt DateTime @default(now())
21 | updatedAt DateTime @updatedAt
22 | events Event[] @relation("StackToEvents")
23 | }
24 |
25 | model Event {
26 | id String @id @default(uuid())
27 | type String
28 | message String
29 | stackId String? // Optional relation to Stack
30 | createdAt DateTime @default(now())
31 | stack Stack? @relation("StackToEvents", fields: [stackId], references: [id])
32 | }
33 |
--------------------------------------------------------------------------------
/public/file.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/public/globe.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/public/next.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/public/vercel.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/public/window.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ES2017",
4 | "lib": ["dom", "dom.iterable", "esnext"],
5 | "allowJs": true,
6 | "skipLibCheck": true,
7 | "strict": true,
8 | "noEmit": true,
9 | "esModuleInterop": true,
10 | "module": "esnext",
11 | "moduleResolution": "bundler",
12 | "resolveJsonModule": true,
13 | "isolatedModules": true,
14 | "jsx": "preserve",
15 | "incremental": true,
16 | "plugins": [
17 | {
18 | "name": "next"
19 | }
20 | ],
21 | "paths": {
22 | "@/*": ["./*"]
23 | }
24 | },
25 | "include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
26 | "exclude": ["node_modules"]
27 | }
28 |
--------------------------------------------------------------------------------