├── .env.example
├── .gitignore
├── Dockerfile
├── README.md
├── bun.lockb
├── index.js
├── package.json
└── src
    ├── api.js
    ├── api
    │   ├── deploy.js
    │   ├── index.js
    │   ├── upload.js
    │   └── utils.js
    ├── config
    │   └── logger.js
    ├── storage.js
    ├── upload.js
    └── utils.js

/.env.example:
--------------------------------------------------------------------------------
 1 | # S3-compatible storage config (Cloudflare R2 in this example)
 2 | AWS_ACCESS_KEY_ID=1234567890abcdef
 3 | AWS_SECRET_ACCESS_KEY=abcdef1234567890
 4 | AWS_BUCKET_NAME=my-cdn-bucket
 5 | AWS_REGION=auto
 6 | AWS_ENDPOINT=https://<account-id>.r2.cloudflarestorage.com
 7 | AWS_CDN_URL=https://cdn.beans.com
 8 | 
 9 | # API
10 | API_TOKEN=beans # Set a secure random string
11 | PORT=3000
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /node_modules/
2 | /splitfornpm/
3 | /.idea/
4 | /.env
5 | /package-lock.json
6 | /.history
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
 1 | # Use the official Bun image as base
 2 | FROM oven/bun:1
 3 | 
 4 | # Install curl and wget (used for the Coolify healthcheck)
 5 | RUN apt-get update && apt-get install -y curl wget
 6 | 
 7 | # Set working directory
 8 | WORKDIR /app
 9 | 
10 | # Copy package.json and bun.lockb (if it exists)
11 | COPY package*.json bun.lockb* ./
12 | 
13 | # Install dependencies
14 | RUN bun install
15 | 
16 | # Copy the rest of the application
17 | COPY . .
18 | 
19 | # Expose the port the Express server listens on
20 | EXPOSE 3000
21 | 
22 | # Start the server
23 | CMD ["bun", "run", "start"]
24 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
[flag logo]

# cdn.hackclub.com

Deep under the waves and storms there lies a vault...

[Banner]

Banner illustration by @maxwofford.

[Slack Channel badge]
16 | 17 | 18 | ## 📡 API Usage 19 | 20 | - All API endpoints require authentication via `Authorization: Bearer api-token` header 21 | - Use the API_TOKEN from your environment configuration 22 | - Failure to include a valid token will result in 401 Unauthorized responses 23 | 24 | ### V3 API (Latest) 25 | Version 3 26 | 27 | **Endpoint:** `POST https://cdn.hackclub.com/api/v3/new` 28 | 29 | **Headers:** 30 | ``` 31 | Authorization: Bearer api-token 32 | Content-Type: application/json 33 | ``` 34 | 35 | **Request Example:** 36 | ```bash 37 | curl --location 'https://cdn.hackclub.com/api/v3/new' \ 38 | --header 'Authorization: Bearer beans' \ 39 | --header 'Content-Type: application/json' \ 40 | --data '[ 41 | "https://assets.hackclub.com/flag-standalone.svg", 42 | "https://assets.hackclub.com/flag-orpheus-left.png" 43 | ]' 44 | ``` 45 | 46 | **Response:** 47 | ```json 48 | { 49 | "files": [ 50 | { 51 | "deployedUrl": "https://hc-cdn.hel1.your-objectstorage.com/s/v3/64a9472006c4472d7ac75f2d4d9455025d9838d6_flag-standalone.svg", 52 | "file": "0_64a9472006c4472d7ac75f2d4d9455025d9838d6_flag-standalone.svg", 53 | "sha": "64a9472006c4472d7ac75f2d4d9455025d9838d6", 54 | "size": 4365 55 | }, 56 | { 57 | "deployedUrl": "https://hc-cdn.hel1.your-objectstorage.com/s/v3/d926bfd9811ebfe9172187793a171a5cbcc61992_flag-orpheus-left.png", 58 | "file": "1_d926bfd9811ebfe9172187793a171a5cbcc61992_flag-orpheus-left.png", 59 | "sha": "d926bfd9811ebfe9172187793a171a5cbcc61992", 60 | "size": 8126 61 | } 62 | ], 63 | "cdnBase": "https://hc-cdn.hel1.your-objectstorage.com" 64 | } 65 | ``` 66 | 67 |
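For programmatic use, the same request can be made from JavaScript. Below is a minimal sketch using the built-in `fetch` (Bun or Node 18+); the token is read from the environment and the target host is the public instance shown above:

```js
// Upload two remote files to the CDN via the v3 API
const API_TOKEN = process.env.API_TOKEN; // your API token (see .env.example)

async function uploadToCdn(urls) {
  const res = await fetch('https://cdn.hackclub.com/api/v3/new', {
    method: 'POST',
    headers: {
      'Authorization': `Bearer ${API_TOKEN}`,
      'Content-Type': 'application/json'
    },
    body: JSON.stringify(urls)
  });
  if (!res.ok) throw new Error(`Upload failed: ${res.status}`);
  return res.json(); // { files: [...], cdnBase: "..." }
}

uploadToCdn([
  'https://assets.hackclub.com/flag-standalone.svg',
  'https://assets.hackclub.com/flag-orpheus-left.png'
]).then(result => console.log(result.files.map(f => f.deployedUrl)));
```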
68 | V2 API 69 | 70 | Version 2 71 | 72 | **Endpoint:** `POST https://cdn.hackclub.com/api/v2/new` 73 | 74 | **Headers:** 75 | ``` 76 | Authorization: Bearer api-token 77 | Content-Type: application/json 78 | ``` 79 | 80 | **Request Example:** 81 | ```json 82 | [ 83 | "https://assets.hackclub.com/flag-standalone.svg", 84 | "https://assets.hackclub.com/flag-orpheus-left.png" 85 | ] 86 | ``` 87 | 88 | **Response:** 89 | ```json 90 | { 91 | "flag-standalone.svg": "https://cdn.example.dev/s/v2/flag-standalone.svg", 92 | "flag-orpheus-left.png": "https://cdn.example.dev/s/v2/flag-orpheus-left.png" 93 | } 94 | ``` 95 |
96 | 97 |
98 | V1 API 99 | 100 | Version 1 101 | 102 | **Endpoint:** `POST https://cdn.hackclub.com/api/v1/new` 103 | 104 | **Headers:** 105 | ``` 106 | Authorization: Bearer api-token 107 | Content-Type: application/json 108 | ``` 109 | 110 | **Request Example:** 111 | ```json 112 | [ 113 | "https://assets.hackclub.com/flag-standalone.svg", 114 | "https://assets.hackclub.com/flag-orpheus-left.png" 115 | ] 116 | ``` 117 | 118 | **Response:** 119 | ```json 120 | [ 121 | "https://cdn.example.dev/s/v1/0_flag-standalone.svg", 122 | "https://cdn.example.dev/s/v1/1_flag-orpheus-left.png" 123 | ] 124 | ``` 125 |
126 | 
127 | # Technical Details
128 | 
129 | - **API uploads (v3):** objects are stored under `/s/v3/{sha1}_{sanitizedFilename}`, where `{sha1}` is the SHA-1 hash of the file contents (see the sketch below)
130 | - **Slack bot uploads:** objects are stored under `/s/{slackUserId}/{timestamp}-{random}-{sanitizedFilename}`
131 | 
132 | 
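The v3 naming scheme can be reproduced locally. This is a minimal sketch that mirrors the logic in `src/api/upload.js` (SHA-1 of the file contents plus a sanitized filename); the input values are illustrative only:

```js
const crypto = require('crypto');

// Mirrors sanitizeFileName() in src/api/upload.js
function sanitizeFileName(fileName) {
  return fileName.replace(/[^a-zA-Z0-9.-]/g, '_') || 'upload_' + Date.now();
}

// Build the object key that ends up under /s/v3/ for a downloaded file
function v3Key(url, buffer) {
  const sha = crypto.createHash('sha1').update(buffer).digest('hex');
  const name = sanitizeFileName(url.split('/').pop());
  return `s/v3/${sha}_${name}`;
}

// Example with illustrative values
console.log(v3Key('https://example.com/flag standalone.svg', Buffer.from('hello')));
// -> s/v3/aaf4c61ddcc5e8a2dabede0f3b482cd9aea9434d_flag_standalone.svg
```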
133 | 
134 | Made with 💜 for Hack Club
135 | 
-------------------------------------------------------------------------------- /bun.lockb: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hackclub/cdn/927e7ef9e2af8078466eb9beb7f1da11b8b74090/bun.lockb -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const logger = require('./src/config/logger'); 2 | 3 | logger.info('Starting CDN application 🚀'); 4 | 5 | const express = require('express'); 6 | const cors = require('cors'); 7 | const apiRoutes = require('./src/api/index.js'); 8 | 9 | // API server 10 | const expressApp = express(); 11 | expressApp.use(cors()); 12 | expressApp.use(express.json()); 13 | expressApp.use(express.urlencoded({ extended: true })); 14 | 15 | // Mount API for all versions 16 | expressApp.use('/api', apiRoutes); 17 | 18 | // redirect route to "https://github.com/hackclub/cdn" 19 | expressApp.get('/', (req, res) => { 20 | res.redirect('https://github.com/hackclub/cdn'); 21 | }); 22 | 23 | // Error handling middleware 24 | expressApp.use((err, req, res, next) => { 25 | logger.error('API Error:', { 26 | error: err.message, 27 | stack: err.stack, 28 | path: req.path, 29 | method: req.method 30 | }); 31 | res.status(500).json({ error: 'Internal server error' }); 32 | }); 33 | 34 | // Fallback route for unhandled paths 35 | expressApp.use((req, res, next) => { 36 | logger.warn(`Unhandled route: ${req.method} ${req.path}`); 37 | res.status(404).json({ error: 'Not found' }); 38 | }); 39 | 40 | // Startup LOGs 41 | (async () => { 42 | try { 43 | const port = parseInt(process.env.PORT || '4553', 10); 44 | expressApp.listen(port, () => { 45 | logger.info('CDN started successfully 🔥', { 46 | apiPort: port, 47 | startTime: new Date().toISOString() 48 | }); 49 | }); 50 | } catch (error) { 51 | logger.error('Failed to start application:', { 52 | error: error.message, 53 | stack: error.stack 54 | }); 55 | process.exit(1); 56 | } 57 | })(); 58 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cdn-v2-hackclub", 3 | "version": "1.0.0", 4 | "description": "API to upload files to S3-compatible storage with unique URLs", 5 | "main": "index.js", 6 | "scripts": { 7 | "start": "bun index.js", 8 | "dev": "bun --watch index.js" 9 | }, 10 | "dependencies": { 11 | "@aws-sdk/client-s3": "^3.478.0", 12 | "cors": "^2.8.5", 13 | "express": "^4.21.2", 14 | "multer": "^1.4.5-lts.1", 15 | "node-fetch": "^2.6.1", 16 | "p-limit": "^6.2.0", 17 | "winston": "^3.17.0" 18 | }, 19 | "author": "", 20 | "license": "MIT" 21 | } 22 | -------------------------------------------------------------------------------- /src/api.js: -------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | const multer = require('multer'); 3 | const router = express.Router(); 4 | const upload = multer({dest: 'uploads/'}); 5 | 6 | router.post('/upload', upload.single('file'), (req, res) => { 7 | if (!req.file) { 8 | return res.status(400).send('No file uploaded.'); 9 | } 10 | 11 | // Handle the uploaded file 12 | console.log('Uploaded file:', req.file); 13 | 14 | res.send('File uploaded successfully.'); 15 | }); 16 | 17 | module.exports = router; 18 | -------------------------------------------------------------------------------- 
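The standalone router in `src/api.js` above expects a multipart form upload with a single field named `file` (handled by multer). Note that it is not mounted by `index.js`, which only mounts `src/api/index.js`. If it were wired into the app, a client could exercise it roughly as in this sketch; the base URL and mount path are assumptions:

```js
// Send a multipart upload to the multer-based /upload route (Bun or Node 18+)
async function uploadLocalFile(buffer, fileName) {
  const form = new FormData();
  form.append('file', new Blob([buffer]), fileName);

  const res = await fetch('http://localhost:3000/upload', {
    method: 'POST',
    body: form // fetch sets the multipart boundary automatically
  });
  return res.text(); // "File uploaded successfully."
}
```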
/src/api/deploy.js:
--------------------------------------------------------------------------------
 1 | const logger = require('../config/logger');
 2 | const {generateUrl, getCdnUrl} = require('./utils');
 3 | 
 4 | const deployEndpoint = async (files) => {
 5 |     try {
 6 |         const deployedFiles = files.map(file => ({
 7 |             deployedUrl: generateUrl('v3', file.file),
 8 |             cdnUrl: getCdnUrl(),
 9 |             contentType: file.contentType || 'application/octet-stream',
10 |             ...file
11 |         }));
12 | 
13 |         return {
14 |             status: 200,
15 |             files: deployedFiles,
16 |             cdnBase: getCdnUrl()
17 |         };
18 |     } catch (error) {
19 |         logger.error('S3 deploy error:', error);
20 |         return {
21 |             status: 500,
22 |             files: []
23 |         };
24 |     }
25 | };
26 | 
27 | module.exports = {deployEndpoint};
--------------------------------------------------------------------------------
/src/api/index.js:
--------------------------------------------------------------------------------
 1 | const express = require('express');
 2 | const {validateToken, validateRequest, getCdnUrl} = require('./utils');
 3 | const {uploadEndpoint, handleUpload} = require('./upload');
 4 | const logger = require('../config/logger');
 5 | 
 6 | const router = express.Router();
 7 | 
 8 | // Require valid API token for all routes
 9 | router.use((req, res, next) => {
10 |     const tokenCheck = validateToken(req);
11 |     if (tokenCheck.status !== 200) {
12 |         return res.status(tokenCheck.status).json(tokenCheck.body);
13 |     }
14 |     next();
15 | });
16 | 
17 | // Health check route
18 | router.get('/health', (req, res) => {
19 |     res.status(200).json({ status: 'ok' });
20 | });
21 | 
22 | // Format response based on API version compatibility
23 | const formatResponse = (results, version) => {
24 |     switch (version) {
25 |         case 1:
26 |             return results.map(r => r.url);
27 |         case 2:
28 |             return results.reduce((acc, r, i) => {
29 |                 const fileName = r.url.split('/').pop();
30 |                 acc[`${i}${fileName}`] = r.url;
31 |                 return acc;
32 |             }, {});
33 |         default:
34 |             return {
35 |                 files: results.map((r, i) => ({
36 |                     deployedUrl: r.url,
37 |                     file: `${i}_${r.url.split('/').pop()}`,
38 |                     sha: r.sha,
39 |                     size: r.size
40 |                 })),
41 |                 cdnBase: getCdnUrl()
42 |             };
43 |     }
44 | };
45 | 
46 | // Handle bulk file uploads with version-specific responses
47 | const handleBulkUpload = async (req, res, version) => {
48 |     try {
49 |         const urls = req.body;
50 |         // Basic validation
51 |         if (!Array.isArray(urls) || !urls.length) {
52 |             return res.status(422).json({error: 'Empty/invalid file array'});
53 |         }
54 | 
55 |         const downloadAuth = req.headers?.['x-download-authorization'];
56 |         // Process all URLs concurrently
57 |         logger.debug(`Processing ${urls.length} URLs`);
58 |         const results = await Promise.all(
59 |             urls.map(url => uploadEndpoint(url, downloadAuth))
60 |         );
61 | 
62 |         res.json(formatResponse(results, version));
63 |     } catch (error) {
64 |         logger.error('Bulk upload failed:', error);
65 |         res.status(500).json({error: 'Internal server error'});
66 |     }
67 | };
68 | 
69 | // API Routes
70 | router.post('/v1/new', (req, res) => handleBulkUpload(req, res, 1)); // Legacy support
71 | router.post('/v2/new', (req, res) => handleBulkUpload(req, res, 2)); // Legacy support
72 | router.post('/v3/new', (req, res) => handleBulkUpload(req, res, 3)); // Current version
73 | router.post('/new', (req, res) => handleBulkUpload(req, res, 3)); // Alias for v3 (latest)
74 | 
75 | // Single file upload endpoint
76 | router.post('/upload', async (req, res) => {
77 |     try {
78 |         const result = await handleUpload(req);
79 | 
res.status(result.status).json(result.body); 80 | } catch (error) { 81 | logger.error('S3 upload handler error:', error); 82 | res.status(500).json({error: 'Storage upload failed'}); 83 | } 84 | }); 85 | 86 | module.exports = router; -------------------------------------------------------------------------------- /src/api/upload.js: -------------------------------------------------------------------------------- 1 | const fetch = require('node-fetch'); 2 | const crypto = require('crypto'); 3 | const {uploadToStorage} = require('../storage'); 4 | const {generateUrl} = require('./utils'); 5 | const logger = require('../config/logger'); 6 | 7 | // Sanitize file name for storage 8 | function sanitizeFileName(fileName) { 9 | let sanitizedFileName = fileName.replace(/[^a-zA-Z0-9.-]/g, '_'); 10 | if (!sanitizedFileName) { 11 | sanitizedFileName = 'upload_' + Date.now(); 12 | } 13 | return sanitizedFileName; 14 | } 15 | 16 | // Handle remote file upload to S3 storage 17 | const uploadEndpoint = async (url, downloadAuth = null) => { 18 | try { 19 | logger.debug('Starting download', { url }); 20 | const headers = {}; 21 | 22 | if (downloadAuth) { 23 | headers['Authorization'] = downloadAuth.startsWith('Bearer ') 24 | ? downloadAuth 25 | : `Bearer ${downloadAuth}`; 26 | } 27 | 28 | const response = await fetch(url, { headers }); 29 | 30 | if (!response.ok) { 31 | const error = new Error(`Download failed: ${response.statusText}`); 32 | error.statusCode = response.status; 33 | error.code = 'DOWNLOAD_FAILED'; 34 | if (response.status === 401 || response.status === 403) { 35 | error.code = 'AUTH_FAILED'; 36 | error.message = 'Authentication failed for protected resource'; 37 | } 38 | throw error; 39 | } 40 | 41 | // Generate unique filename using SHA1 (hash) of file contents 42 | const buffer = await response.buffer(); 43 | const sha = crypto.createHash('sha1').update(buffer).digest('hex'); 44 | const originalName = url.split('/').pop(); 45 | const sanitizedFileName = sanitizeFileName(originalName); 46 | const fileName = `${sha}_${sanitizedFileName}`; 47 | 48 | // Upload to S3 storage 49 | logger.debug(`Uploading: ${fileName}`); 50 | const uploadResult = await uploadToStorage('s/v3', fileName, buffer, response.headers.get('content-type')); 51 | if (uploadResult.success === false) { 52 | throw new Error(`Storage upload failed: ${uploadResult.error}`); 53 | } 54 | 55 | return { 56 | url: generateUrl('s/v3', fileName), 57 | sha, 58 | size: buffer.length, 59 | type: response.headers.get('content-type') 60 | }; 61 | } catch (error) { 62 | logger.error('Upload process failed', { 63 | url, 64 | error: error.message, 65 | code: error.code, 66 | statusCode: error.statusCode, 67 | stack: error.stack 68 | }); 69 | 70 | const statusCode = error.statusCode || 500; 71 | const errorResponse = { 72 | error: { 73 | message: error.message, 74 | code: error.code || 'INTERNAL_ERROR', 75 | details: error.details || null, 76 | url: url 77 | }, 78 | success: false 79 | }; 80 | 81 | throw { statusCode, ...errorResponse }; 82 | } 83 | }; 84 | 85 | // Express request handler for file uploads 86 | const handleUpload = async (req) => { 87 | try { 88 | const url = req.body || await req.text(); 89 | const downloadAuth = req.headers?.['x-download-authorization']?.toString(); 90 | 91 | if (url.includes('files.slack.com') && !downloadAuth) { 92 | return { 93 | status: 400, 94 | body: { 95 | error: { 96 | message: 'X-Download-Authorization required for Slack files', 97 | code: 'AUTH_REQUIRED', 98 | details: 'Slack files require 
authentication' 99 | }, 100 | success: false 101 | } 102 | }; 103 | } 104 | 105 | const result = await uploadEndpoint(url, downloadAuth); 106 | return { status: 200, body: result }; 107 | } catch (error) { 108 | return { 109 | status: error.statusCode || 500, 110 | body: { 111 | error: error.error || { 112 | message: 'Internal server error', 113 | code: 'INTERNAL_ERROR' 114 | }, 115 | success: false 116 | } 117 | }; 118 | } 119 | }; 120 | 121 | module.exports = {uploadEndpoint, handleUpload}; 122 | -------------------------------------------------------------------------------- /src/api/utils.js: -------------------------------------------------------------------------------- 1 | const getCdnUrl = () => process.env.AWS_CDN_URL; 2 | 3 | const generateUrl = (version, fileName) => { 4 | return `${getCdnUrl()}/${version}/${fileName}`; 5 | }; 6 | 7 | const validateToken = (req) => { 8 | const token = req.headers.authorization?.split('Bearer ')[1]; 9 | if (!token || token !== process.env.API_TOKEN) { 10 | return { 11 | status: 401, 12 | body: {error: 'Unauthorized - Invalid or missing API token'} 13 | }; 14 | } 15 | return {status: 200}; 16 | }; 17 | 18 | const validateRequest = (req) => { 19 | // First check token 20 | const tokenCheck = validateToken(req); 21 | if (tokenCheck.status !== 200) { 22 | return tokenCheck; 23 | } 24 | 25 | // Then check method (copied the thing from old api maybe someone is insane and uses the status and not the code) 26 | if (req.method === 'OPTIONS') { 27 | return {status: 204, body: {status: 'YIPPE YAY. YOU HAVE CLEARANCE TO PROCEED.'}}; 28 | } 29 | if (req.method !== 'POST') { 30 | return { 31 | status: 405, 32 | body: {error: 'Method not allowed, use POST'} 33 | }; 34 | } 35 | return {status: 200}; 36 | }; 37 | 38 | module.exports = { 39 | validateRequest, 40 | validateToken, 41 | generateUrl, 42 | getCdnUrl 43 | }; 44 | -------------------------------------------------------------------------------- /src/config/logger.js: -------------------------------------------------------------------------------- 1 | const winston = require('winston'); 2 | 3 | const logger = winston.createLogger({ 4 | level: 'info', 5 | format: winston.format.combine( 6 | winston.format.timestamp(), 7 | winston.format.colorize(), 8 | winston.format.printf(({ level, message, timestamp, ...meta }) => { 9 | let output = `${timestamp} ${level}: ${message}`; 10 | if (Object.keys(meta).length > 0) { 11 | output += ` ${JSON.stringify(meta)}`; 12 | } 13 | return output; 14 | }) 15 | ), 16 | transports: [new winston.transports.Console()] 17 | }); 18 | 19 | module.exports = logger; -------------------------------------------------------------------------------- /src/storage.js: -------------------------------------------------------------------------------- 1 | const { S3Client, PutObjectCommand } = require('@aws-sdk/client-s3'); 2 | const crypto = require('crypto'); 3 | const logger = require('./config/logger'); 4 | const {generateFileUrl} = require('./utils'); 5 | 6 | const MAX_FILE_SIZE = 2 * 1024 * 1024 * 1024; // 2GB in bytes 7 | const CONCURRENT_UPLOADS = 3; // Max concurrent uploads (messages) 8 | 9 | // processed messages 10 | const processedMessages = new Map(); 11 | 12 | let uploadLimit; 13 | 14 | async function initialize() { 15 | const pLimit = (await import('p-limit')).default; 16 | uploadLimit = pLimit(CONCURRENT_UPLOADS); 17 | } 18 | 19 | // Check if the message is older than 24 hours for when the bot was offline 20 | function isMessageTooOld(eventTs) { 21 | const eventTime = 
parseFloat(eventTs) * 1000; 22 | const currentTime = Date.now(); 23 | const timeDifference = currentTime - eventTime; 24 | const maxAge = 24 * 60 * 60 * 1000; // 24 hours in milliseconds 25 | return timeDifference > maxAge; 26 | } 27 | 28 | // check if the message has already been processed 29 | function isMessageProcessed(messageTs) { 30 | return processedMessages.has(messageTs); 31 | } 32 | 33 | function markMessageAsProcessing(messageTs) { 34 | processedMessages.set(messageTs, true); 35 | } 36 | 37 | // Processing reaction 38 | async function addProcessingReaction(client, event, fileMessage) { 39 | try { 40 | await client.reactions.add({ 41 | name: 'beachball', 42 | timestamp: fileMessage.ts, 43 | channel: event.channel_id 44 | }); 45 | } catch (error) { 46 | logger.error('Failed to add processing reaction:', error.message); 47 | } 48 | } 49 | 50 | // sanitize file names and ensure it's not empty (I don't even know if that's possible but let's be safe) 51 | function sanitizeFileName(fileName) { 52 | let sanitizedFileName = fileName.replace(/[^a-zA-Z0-9.-]/g, '_'); 53 | if (!sanitizedFileName) { 54 | sanitizedFileName = 'upload_' + Date.now(); 55 | } 56 | return sanitizedFileName; 57 | } 58 | 59 | // Generate a unique file name 60 | function generateUniqueFileName(fileName) { 61 | const sanitizedFileName = sanitizeFileName(fileName); 62 | const uniqueFileName = `${Date.now()}-${crypto.randomBytes(16).toString('hex')}-${sanitizedFileName}`; 63 | return uniqueFileName; 64 | } 65 | 66 | // upload files to the /s/ directory 67 | async function processFiles(fileMessage, client) { 68 | const uploadedFiles = []; 69 | const failedFiles = []; 70 | 71 | logger.debug('Starting file processing', { 72 | userId: fileMessage.user, 73 | fileCount: fileMessage.files?.length || 0 74 | }); 75 | 76 | const files = fileMessage.files || []; 77 | for (const file of files) { 78 | logger.debug('Processing file', { 79 | name: file.name, 80 | size: file.size, 81 | type: file.mimetype, 82 | id: file.id 83 | }); 84 | 85 | if (file.size > MAX_FILE_SIZE) { 86 | logger.warn('File exceeds size limit', { 87 | name: file.name, 88 | size: file.size, 89 | limit: MAX_FILE_SIZE 90 | }); 91 | failedFiles.push(file.name); 92 | continue; 93 | } 94 | 95 | try { 96 | logger.debug('Fetching file from Slack', { 97 | name: file.name, 98 | url: file.url_private 99 | }); 100 | 101 | const response = await fetch(file.url_private, { 102 | headers: {Authorization: `Bearer ${process.env.SLACK_BOT_TOKEN}`} 103 | }); 104 | 105 | if (!response.ok) { 106 | throw new Error(`Slack download failed: ${response.status} ${response.statusText}`); 107 | } 108 | 109 | const buffer = await response.buffer(); 110 | const contentType = file.mimetype || 'application/octet-stream'; 111 | const uniqueFileName = generateUniqueFileName(file.name); 112 | const userDir = `s/${fileMessage.user}`; 113 | 114 | const uploadResult = await uploadLimit(() => 115 | uploadToStorage(userDir, uniqueFileName, buffer, contentType) 116 | ); 117 | 118 | if (uploadResult.success === false) { 119 | throw new Error(uploadResult.error); 120 | } 121 | 122 | const url = generateFileUrl(userDir, uniqueFileName); 123 | uploadedFiles.push({ 124 | name: uniqueFileName, 125 | url, 126 | contentType 127 | }); 128 | } catch (error) { 129 | logger.error('File processing failed', { 130 | fileName: file.name, 131 | error: error.message, 132 | stack: error.stack, 133 | slackFileId: file.id, 134 | userId: fileMessage.user 135 | }); 136 | failedFiles.push(file.name); 137 | } 138 | } 139 | 140 | 
logger.debug('File processing complete', { 141 | successful: uploadedFiles.length, 142 | failed: failedFiles.length 143 | }); 144 | 145 | return {uploadedFiles, failedFiles}; 146 | } 147 | 148 | // update reactions based on success 149 | async function updateReactions(client, event, fileMessage, success) { 150 | try { 151 | await client.reactions.remove({ 152 | name: 'beachball', 153 | timestamp: fileMessage.ts, 154 | channel: event.channel_id 155 | }); 156 | await client.reactions.add({ 157 | name: success ? 'white_check_mark' : 'x', 158 | timestamp: fileMessage.ts, 159 | channel: event.channel_id 160 | }); 161 | } catch (error) { 162 | logger.error('Failed to update reactions:', error.message); 163 | } 164 | } 165 | 166 | // find a file message 167 | async function findFileMessage(event, client) { 168 | try { 169 | const fileInfo = await client.files.info({ 170 | file: event.file_id, 171 | include_shares: true 172 | }); 173 | 174 | if (!fileInfo.ok || !fileInfo.file) { 175 | throw new Error('Could not get file info'); 176 | } 177 | 178 | const channelShare = fileInfo.file.shares?.public?.[event.channel_id] || 179 | fileInfo.file.shares?.private?.[event.channel_id]; 180 | 181 | if (!channelShare || !channelShare.length) { 182 | throw new Error('No share info found for this channel'); 183 | } 184 | 185 | // Get the exact message using the ts from share info 186 | const messageTs = channelShare[0].ts; 187 | 188 | const messageInfo = await client.conversations.history({ 189 | channel: event.channel_id, 190 | latest: messageTs, 191 | limit: 1, 192 | inclusive: true 193 | }); 194 | 195 | if (!messageInfo.ok || !messageInfo.messages.length) { 196 | throw new Error('Could not find original message'); 197 | } 198 | 199 | return messageInfo.messages[0]; 200 | } catch (error) { 201 | logger.error('Error finding file message:', error); 202 | return null; 203 | } 204 | } 205 | 206 | async function sendResultsMessage(client, channelId, fileMessage, uploadedFiles, failedFiles) { 207 | let message = `Hey <@${fileMessage.user}>, `; 208 | if (uploadedFiles.length > 0) { 209 | message += `here ${uploadedFiles.length === 1 ? 
'is your link' : 'are your links'}:\n`;
210 |         message += uploadedFiles.map(f => `• ${f.name}: ${f.url}`).join('\n');
211 |     }
212 |     if (failedFiles.length > 0) {
213 |         message += `\n\nFailed to process: ${failedFiles.join(', ')}`;
214 |     }
215 | 
216 |     await client.chat.postMessage({
217 |         channel: channelId,
218 |         thread_ts: fileMessage.ts,
219 |         text: message
220 |     });
221 | }
222 | 
223 | async function handleError(client, channelId, fileMessage, reactionAdded) {
224 |     if (fileMessage && reactionAdded) {
225 |         try {
226 |             await client.reactions.remove({
227 |                 name: 'beachball',
228 |                 timestamp: fileMessage.ts,
229 |                 channel: channelId
230 |             });
231 |         } catch (cleanupError) {
232 |             if (cleanupError.data?.error !== 'no_reaction') {
233 |                 logger.error('Cleanup error:', cleanupError);
234 |             }
235 |         }
236 |         try {
237 |             await client.reactions.add({
238 |                 name: 'x',
239 |                 timestamp: fileMessage.ts,
240 |                 channel: channelId
241 |             });
242 |         } catch (cleanupError) {
243 |             logger.error('Cleanup error:', cleanupError);
244 |         }
245 |     }
246 | }
247 | 
248 | async function handleFileUpload(event, client) {
249 |     let fileMessage = null;
250 |     let reactionAdded = false;
251 | 
252 |     try {
253 |         if (isMessageTooOld(event.event_ts)) return;
254 | 
255 |         fileMessage = await findFileMessage(event, client);
256 |         if (!fileMessage || isMessageProcessed(fileMessage.ts)) return;
257 | 
258 |         markMessageAsProcessing(fileMessage.ts);
259 |         await addProcessingReaction(client, event, fileMessage);
260 |         reactionAdded = true;
261 | 
262 |         const {uploadedFiles, failedFiles} = await processFiles(fileMessage, client);
263 |         await sendResultsMessage(client, event.channel_id, fileMessage, uploadedFiles, failedFiles);
264 | 
265 |         await updateReactions(client, event, fileMessage, failedFiles.length === 0);
266 | 
267 |     } catch (error) {
268 |         logger.error('Upload failed:', error.message);
269 |         await handleError(client, event.channel_id, fileMessage, reactionAdded);
270 |         throw error;
271 |     }
272 | }
273 | 
274 | const s3Client = new S3Client({
275 |     region: process.env.AWS_REGION,
276 |     endpoint: process.env.AWS_ENDPOINT,
277 |     credentials: {
278 |         accessKeyId: process.env.AWS_ACCESS_KEY_ID,
279 |         secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY
280 |     }
281 | });
282 | 
283 | async function uploadToStorage(userDir, uniqueFileName, buffer, contentType = 'application/octet-stream') {
284 |     try {
285 |         const params = {
286 |             Bucket: process.env.AWS_BUCKET_NAME,
287 |             Key: `${userDir}/${uniqueFileName}`,
288 |             Body: buffer,
289 |             ContentType: contentType,
290 |             CacheControl: 'public, immutable, max-age=31536000'
291 |         };
292 | 
293 |         logger.info(`Uploading: ${uniqueFileName}`);
294 |         await s3Client.send(new PutObjectCommand(params));
295 |         return {success: true}; // callers check result.success / result.error
296 |     } catch (error) {
297 |         logger.error(`Upload failed: ${error.message}`, {
298 |             path: `${userDir}/${uniqueFileName}`,
299 |             error: error.message
300 |         });
301 |         return {success: false, error: error.message};
302 |     }
303 | }
304 | 
305 | module.exports = {
306 |     handleFileUpload,
307 |     initialize,
308 |     uploadToStorage
309 | };
310 | 
--------------------------------------------------------------------------------
/src/upload.js:
--------------------------------------------------------------------------------
 1 | const fs = require('fs');
 2 | const path = require('path');
 3 | const {uploadToStorage} = require('./storage');
 4 | const {generateFileUrl} = require('./utils');
 5 | const logger = require('./config/logger');
 6 | 
 7 | // Handle individual file upload
 8 | const handleUpload = async (file) => {
 9 |     try {
10 |         const buffer = fs.readFileSync(file.path);
11 |         const fileName = path.basename(file.originalname);
12 |         // Content type for S3 (falls back to octet-stream)
13 |         const contentType = file.mimetype || 'application/octet-stream';
14 |         const uniqueFileName = `${Date.now()}-${fileName}`;
15 | 
16 |         // Upload to S3
17 |         logger.debug(`Uploading: ${uniqueFileName}`);
18 |         const uploaded = await uploadToStorage('s/v3', uniqueFileName, buffer, contentType);
19 |         if (!uploaded.success) throw new Error(uploaded.error || 'Storage upload failed');
20 | 
21 |         return {
22 |             name: fileName,
23 |             url: generateFileUrl('s/v3', uniqueFileName),
24 |             contentType
25 |         };
26 |     } catch (error) {
27 |         logger.error('Upload failed:', error);
28 |         throw error;
29 |     } finally {
30 |         // Clean up the temporary file
31 |         fs.unlinkSync(file.path);
32 |     }
33 | };
34 | 
35 | module.exports = {handleUpload};
36 | 
--------------------------------------------------------------------------------
/src/utils.js:
--------------------------------------------------------------------------------
1 | // Build the public CDN URL for a stored object
2 | 
3 | function generateFileUrl(userDir, uniqueFileName) {
4 |     const cdnUrl = process.env.AWS_CDN_URL;
5 |     return `${cdnUrl}/${userDir}/${uniqueFileName}`;
6 | }
7 | 
8 | module.exports = {generateFileUrl};
--------------------------------------------------------------------------------