├── uploads └── .gitkeep ├── tests ├── uploads │ ├── .gitkeep │ └── test.txt ├── s3.test.js └── local.test.js ├── .babelrc ├── .DS_Store ├── assets ├── .DS_Store └── git │ ├── .DS_Store │ └── homepage.png ├── Dockerfile ├── docker-bake.hcl ├── docker-compose.yml ├── docker-compose.prod.yml ├── config └── version.config.js ├── sharex.sxcu ├── package.json ├── routes └── file.routes.js ├── .env.example ├── .github └── workflows │ └── docker-build.yml ├── engines ├── local.engine.js └── s3.engine.js ├── index.js ├── controllers └── file.controller.js ├── views └── index.ejs ├── README.md └── .gitignore /uploads/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/uploads/.gitkeep: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/uploads/test.txt: -------------------------------------------------------------------------------- 1 | test file content -------------------------------------------------------------------------------- /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "presets": ["@babel/preset-env"] 3 | } 4 | -------------------------------------------------------------------------------- /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/filecoffee/filehost/HEAD/.DS_Store -------------------------------------------------------------------------------- /assets/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/filecoffee/filehost/HEAD/assets/.DS_Store -------------------------------------------------------------------------------- /assets/git/.DS_Store: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/filecoffee/filehost/HEAD/assets/git/.DS_Store -------------------------------------------------------------------------------- /assets/git/homepage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/filecoffee/filehost/HEAD/assets/git/homepage.png -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:24-alpine 2 | WORKDIR /usr/src/app 3 | COPY package*.json ./ 4 | RUN npm install 5 | COPY . . 6 | EXPOSE 3000 7 | 8 | CMD ["node", "index.js"] 9 | -------------------------------------------------------------------------------- /docker-bake.hcl: -------------------------------------------------------------------------------- 1 | target "default" { 2 | context = "." 3 | dockerfile = "Dockerfile" 4 | targets = ["build"] 5 | tags = ["filehost:latest"] 6 | no-cache = true 7 | platforms = ["linux/amd64", "linux/arm64"] 8 | } -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | app: 3 | container_name: filehost 4 | build: . 
5 | ports: 6 | - "${PORT}:${PORT}" 7 | volumes: 8 | - .:/usr/src/app 9 | - ./uploads:/usr/src/app/uploads 10 | env_file: 11 | - .env 12 | environment: 13 | NODE_ENV: development 14 | -------------------------------------------------------------------------------- /docker-compose.prod.yml: -------------------------------------------------------------------------------- 1 | services: 2 | app: 3 | container_name: filehost 4 | image: ghcr.io/filecoffee/filehost:latest # or use a specific tag 5 | restart: unless-stopped 6 | ports: 7 | - "127.0.0.1:${PORT}:${PORT}" 8 | volumes: 9 | - ./uploads:/usr/src/app/uploads 10 | env_file: 11 | - .env 12 | environment: 13 | NODE_ENV: production -------------------------------------------------------------------------------- /config/version.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | /** 3 | * DO NOT TOUCH. This is the version of the application. 4 | * It'll be updated by the owner of the repository. 5 | * 6 | * 0.0.0X - Small changes 7 | * 0.0.X - Bug fixes 8 | * 0.X.0 - New features 9 | * X.0.0 - Major changes 10 | * 11 | * We use SemVer for versioning. 
12 | */ 13 | version: "0.0.1A", 14 | }; 15 | -------------------------------------------------------------------------------- /sharex.sxcu: -------------------------------------------------------------------------------- 1 | { 2 | "Version": "13.7.0", 3 | "Name": "filehost by file.coffee", 4 | "DestinationType": "ImageUploader", 5 | "RequestMethod": "POST", 6 | "RequestURL": "http://example.com/upload?api=YOUR_API_KEY", 7 | "Body": "MultipartFormData", 8 | "FileFormName": "file", 9 | "URL": "$json:url$", 10 | "ThumbnailURL": "$json:thumb_url$", 11 | "DeletionURL": "$json:delete_url$" 12 | } 13 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "scripts": { 3 | "test": "jest" 4 | }, 5 | "jest": { 6 | "transform": { 7 | "^.+\\.js$": "babel-jest" 8 | }, 9 | "testEnvironment": "node" 10 | }, 11 | "dependencies": { 12 | "aws-sdk": "^2.1632.0", 13 | "dotenv": "^16.4.5", 14 | "ejs": "^3.1.10", 15 | "express": "^4.19.2", 16 | "express-rate-limit": "^7.3.1", 17 | "express-slow-down": "^2.0.3", 18 | "helmet": "^7.1.0", 19 | "mime-types": "^2.1.35", 20 | "multer": "^1.4.5-lts.1", 21 | "nanoid": "^3.3.7" 22 | }, 23 | "devDependencies": { 24 | "@babel/core": "^7.24.6", 25 | "@babel/preset-env": "^7.24.6", 26 | "babel-jest": "^29.7.0", 27 | "jest": "^29.7.0", 28 | "supertest": "^7.0.0" 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /routes/file.routes.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const { uploadFile, getFile } = require("../controllers/file.controller"); 3 | 4 | const router = express.Router(); 5 | 6 | const apiKeys = process.env.API_KEYS.split(","); 7 | const allowPublicUploads = process.env.ALLOW_PUBLIC ?? 
false; 8 | 9 | const authenticate = (req, res, next) => { 10 | const apiKey = req.headers["x-api-key"] || req.query.api; 11 | if (!apiKey || !apiKeys.includes(apiKey)) { 12 | if (allowPublicUploads) { 13 | req.isPublicUpload = true; 14 | next(); 15 | } else { 16 | return res.status(403).json({ error: "Forbidden" }); 17 | } 18 | } else { 19 | next(); 20 | } 21 | }; 22 | 23 | router.post("/upload", authenticate, uploadFile); 24 | router.get("/u/:filename", getFile); 25 | 26 | module.exports = router; 27 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | # Your host details 2 | HOSTER_EMAIL=hoster@file.coffee 3 | HOSTER_DOMAIN=https://file.coffee 4 | 5 | # This can be 'local' or 's3' 6 | STORAGE_MODE=local 7 | 8 | # If you are using S3 storage, these are your S3 connection details 9 | AWS_ACCESS_KEY_ID=your_aws_access_key_id 10 | AWS_SECRET_ACCESS_KEY=your_aws_secret_access_key 11 | AWS_REGION=your_aws_region 12 | S3_BUCKET_NAME=your_s3_bucket_name 13 | S3_ENDPOINT=your_s3_endpoint 14 | 15 | # If you are using local storage, this is the path where the files will be uploaded 16 | LOCAL_UPLOAD_PATH=uploads 17 | 18 | # This is the port where the server will run 19 | PORT=3000 20 | 21 | # Comma-separated list of API keys 22 | API_KEYS=key1,key2,key3 23 | 24 | # This is the maximum file size that can be uploaded and the max file name length. '-1' is unlimited file size, not recommended. 25 | FILE_NAME_LENGTH=10 26 | FILE_MAX_SIZE_MB=30 27 | -------------------------------------------------------------------------------- /.github/workflows/docker-build.yml: -------------------------------------------------------------------------------- 1 | name: File Host Docker Build and Push GHCR 2 | 3 | # Configures this workflow to run every time a tag matching `release/**` is pushed. 
4 | on: 5 | push: 6 | tags: 7 | - release/** 8 | 9 | # Defines two custom environment variables for the workflow. These are used for the Container registry domain, and a name for the Docker image that this workflow builds. 10 | env: 11 | REGISTRY: ghcr.io 12 | IMAGE_NAME: ${{ github.repository }} 13 | 14 | jobs: 15 | docker: 16 | runs-on: ubuntu-latest 17 | steps: 18 | - 19 | name: Checkout 20 | uses: actions/checkout@v4 21 | - 22 | name: Log in to the Container registry 23 | uses: docker/login-action@v3 24 | with: 25 | registry: ${{ env.REGISTRY }} 26 | username: ${{ github.actor }} 27 | password: ${{ secrets.GITHUB_TOKEN }} 28 | - 29 | name: Set up QEMU 30 | uses: docker/setup-qemu-action@v3 31 | - 32 | name: Set up Docker Buildx 33 | uses: docker/setup-buildx-action@v3 34 | - 35 | name: Extract version from tag 36 | id: get_version 37 | run: echo "VERSION=${GITHUB_REF#refs/tags/release/}" >> $GITHUB_OUTPUT 38 | - 39 | name: Build and push web filehost image to GHCR 40 | uses: docker/bake-action@v6 41 | with: 42 | source: . 
43 | push: true 44 | targets: default 45 | files: | 46 | docker-bake.hcl 47 | set: | 48 | *.tags=ghcr.io/${{ github.repository_owner }}/filehost:latest 49 | *.tags=ghcr.io/${{ github.repository_owner }}/filehost:${{ steps.get_version.outputs.VERSION }} -------------------------------------------------------------------------------- /engines/local.engine.js: -------------------------------------------------------------------------------- 1 | const multer = require("multer"); 2 | const path = require("path"); 3 | const mime = require("mime-types"); 4 | const fs = require("fs"); 5 | const { nanoid } = require("nanoid"); 6 | 7 | const initializeLocalStorage = (multerOptions, fileNameLength, uploadPath) => { 8 | const storage = multer.diskStorage({ 9 | destination: (req, file, cb) => { 10 | cb(null, uploadPath); 11 | }, 12 | filename: (req, file, cb) => { 13 | const ext = mime.extension(file.mimetype); 14 | const randomName = nanoid(fileNameLength); 15 | cb(null, `${randomName}.${ext}`); 16 | }, 17 | }); 18 | 19 | const upload = multer({ storage: storage, ...multerOptions }); 20 | 21 | const writeFile = (req, res, next) => { 22 | upload.single("file")(req, res, (err) => { 23 | if (err) { 24 | return res.status(500).json({ error: err.message }); 25 | } 26 | req.filePath = req.file.filename; 27 | next(); 28 | }); 29 | }; 30 | 31 | const findFile = (filename, res) => { 32 | const filePath = path.join(uploadPath, filename); 33 | fs.access(filePath, fs.constants.F_OK, (err) => { 34 | if (err) { 35 | return res.status(404).json({ error: "File not found" }); 36 | } 37 | res.sendFile(filePath); 38 | }); 39 | }; 40 | 41 | const gatherStatistics = () => { 42 | let totalUploads = 0; 43 | let totalSize = 0; 44 | 45 | const files = fs.readdirSync(uploadPath); 46 | files.forEach((file) => { 47 | const filePath = path.join(uploadPath, file); 48 | const stats = fs.statSync(filePath); 49 | totalUploads++; 50 | totalSize += stats.size; 51 | }); 52 | 53 | return { totalUploads, totalSize 
}; 54 | }; 55 | 56 | return { writeFile, findFile, gatherStatistics }; 57 | }; 58 | 59 | module.exports = initializeLocalStorage; 60 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | require("dotenv").config(); 2 | const express = require("express"); 3 | const fileRoutes = require("./routes/file.routes"); 4 | const helmet = require("helmet"); 5 | 6 | const { version } = require("./config/version.config"); 7 | 8 | const app = express(); 9 | const port = process.env.PORT; 10 | const hosterEmail = process.env.HOSTER_EMAIL; 11 | 12 | app.set("view engine", "ejs"); 13 | app.use(fileRoutes); 14 | app.use(helmet()); 15 | 16 | const s3 = require("./engines/s3.engine"); 17 | const local = require("./engines/local.engine"); 18 | const storageMode = process.env.STORAGE_MODE || "local"; 19 | 20 | // Todo: refactor this way. 21 | const fileNameLength = parseInt(process.env.FILE_NAME_LENGTH, 10) || 10; 22 | const multerOptions = { 23 | limits: parseInt(process.env.FILE_MAX_SIZE_MB, 10) * 1024 * 1024, 24 | }; 25 | 26 | app.get("/", async (req, res) => { 27 | let storageEngine; 28 | 29 | if (storageMode === "local") { 30 | storageEngine = local( 31 | multerOptions, 32 | fileNameLength, 33 | process.env.LOCAL_UPLOAD_PATH, 34 | ); 35 | } else if (storageMode === "s3") { 36 | const s3Config = { 37 | accessKeyId: process.env.AWS_ACCESS_KEY_ID, 38 | secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY, 39 | region: process.env.AWS_REGION, 40 | bucketName: process.env.S3_BUCKET_NAME, 41 | endpoint: process.env.S3_ENDPOINT, 42 | }; 43 | storageEngine = s3(multerOptions, fileNameLength, s3Config); 44 | } else { 45 | throw new Error("Invalid STORAGE_MODE"); 46 | } 47 | 48 | const { totalUploads, totalSize } = await storageEngine.gatherStatistics(); 49 | 50 | const kbToMB = totalSize / 1024 / 1024; 51 | 52 | res.render("index", { 53 | totalUploads: totalUploads, 54 
| totalSize: kbToMB.toFixed(2), 55 | hosterEmail: hosterEmail, 56 | version: version, 57 | }); 58 | }); 59 | 60 | app.listen(port, () => { 61 | console.log(`Server is running on port ${port}`); 62 | }); 63 | -------------------------------------------------------------------------------- /tests/s3.test.js: -------------------------------------------------------------------------------- 1 | const request = require("supertest"); 2 | const express = require("express"); 3 | const AWS = require("aws-sdk"); 4 | const initializeS3Storage = require("../engines/s3.engine"); 5 | const { uploadFile, getFile } = require("../controllers/file.controller"); 6 | 7 | const app = express(); 8 | const multerOptions = { limits: { fileSize: 1024 * 1024 } }; // 1MB limit 9 | const publicMulterOptions = { limits: { fileSize: 512 * 1024 } }; // 512KB limit 10 | const fileNameLength = 10; 11 | 12 | const s3Config = { 13 | accessKeyId: "fake-access-key-id", 14 | secretAccessKey: "fake-secret-access-key", 15 | region: "fake-region", 16 | bucketName: "fake-bucket-name", 17 | }; 18 | 19 | AWS.S3.prototype.upload = jest.fn((params, callback) => { 20 | callback(null, { Location: `https://fake-s3-url/${params.Key}` }); 21 | }); 22 | 23 | AWS.S3.prototype.getObject = jest.fn((params, callback) => { 24 | callback(null, { 25 | ContentType: "text/plain", 26 | ContentLength: 17, 27 | Body: Buffer.from("test file content"), 28 | }); 29 | }); 30 | 31 | const storageEngine = initializeS3Storage( 32 | multerOptions, 33 | fileNameLength, 34 | s3Config, 35 | ); 36 | 37 | app.post("/upload", (req, res) => { 38 | storageEngine.writeFile(req, res, () => { 39 | res.status(200).json({ 40 | message: "File uploaded successfully", 41 | url: `http://localhost:3000/u/${req.filePath}`, 42 | }); 43 | }); 44 | }); 45 | 46 | app.get("/u/:filename", (req, res) => { 47 | storageEngine.findFile(req.params.filename, res); 48 | }); 49 | 50 | describe("S3 Storage Engine", () => { 51 | it("should upload a file successfully", 
async () => { 52 | const response = await request(app) 53 | .post("/upload") 54 | .attach("file", Buffer.from("test file content"), "test.txt") 55 | .set("x-api-key", "valid-api-key"); 56 | 57 | expect(response.status).toBe(200); 58 | expect(response.body.message).toBe("File uploaded successfully"); 59 | expect(response.body.url).toMatch(/http:\/\/localhost:3000\/u\/.+\.txt/); 60 | }); 61 | 62 | it("should retrieve a file successfully", async () => { 63 | const response = await request(app).get("/u/test.txt"); 64 | 65 | expect(response.status).toBe(200); 66 | expect(response.text).toBe("test file content"); 67 | }); 68 | }); 69 | -------------------------------------------------------------------------------- /tests/local.test.js: -------------------------------------------------------------------------------- 1 | const request = require("supertest"); 2 | const express = require("express"); 3 | const path = require("path"); 4 | const fs = require("fs"); 5 | const initializeLocalStorage = require("../engines/local.engine"); 6 | const { uploadFile, getFile } = require("../controllers/file.controller"); 7 | 8 | const app = express(); 9 | const uploadPath = path.join(__dirname, "uploads"); 10 | const multerOptions = { limits: { fileSize: 1024 * 1024 } }; // 1MB limit 11 | const publicMulterOptions = { limits: { fileSize: 512 * 1024 } }; // 512KB limit 12 | const fileNameLength = 10; 13 | 14 | if (!fs.existsSync(uploadPath)) { 15 | fs.mkdirSync(uploadPath); 16 | } 17 | 18 | /** 19 | * Ensure we clean up the uploads directory after each test 20 | */ 21 | afterEach(() => { 22 | const files = fs.readdirSync(uploadPath); 23 | files.forEach((file) => { 24 | if (file !== "test.txt" && file !== ".gitkeep") { 25 | fs.unlinkSync(path.join(uploadPath, file)); 26 | } 27 | }); 28 | }); 29 | 30 | const storageEngine = initializeLocalStorage( 31 | multerOptions, 32 | fileNameLength, 33 | uploadPath, 34 | ); 35 | 36 | app.post("/upload", (req, res) => { 37 | 
storageEngine.writeFile(req, res, () => { 38 | res.status(200).json({ 39 | message: "File uploaded successfully", 40 | url: `http://localhost:3000/u/${req.filePath}`, 41 | }); 42 | }); 43 | }); 44 | 45 | app.get("/u/:filename", (req, res) => { 46 | storageEngine.findFile(req.params.filename, res); 47 | }); 48 | 49 | describe("Local Storage Engine", () => { 50 | it("should upload a file successfully", async () => { 51 | const response = await request(app) 52 | .post("/upload") 53 | .attach("file", Buffer.from("test file content"), "test.txt") 54 | .set("x-api-key", "valid-api-key"); 55 | 56 | expect(response.status).toBe(200); 57 | expect(response.body.message).toBe("File uploaded successfully"); 58 | expect(response.body.url).toMatch(/http:\/\/localhost:3000\/u\/.+\.txt/); 59 | }); 60 | 61 | it("should retrieve a file successfully", async () => { 62 | const filePath = path.join(uploadPath, "test.txt"); 63 | fs.writeFileSync(filePath, "test file content"); 64 | 65 | const response = await request(app).get("/u/test.txt"); 66 | 67 | expect(response.status).toBe(200); 68 | expect(response.text).toBe("test file content"); 69 | }); 70 | }); 71 | -------------------------------------------------------------------------------- /controllers/file.controller.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const rateLimit = require("express-rate-limit"); 3 | const slowDown = require("express-slow-down"); 4 | const initializeLocalStorage = require("../engines/local.engine"); 5 | const initializeS3Storage = require("../engines/s3.engine"); 6 | 7 | const app = express(); 8 | 9 | const storageMode = process.env.STORAGE_MODE || "local"; 10 | const fileNameLength = parseInt(process.env.FILE_NAME_LENGTH, 10) || 10; 11 | const multerOptions = { 12 | limits: parseInt(process.env.FILE_MAX_SIZE_MB, 10) * 1024 * 1024, 13 | }; 14 | 15 | let storageEngine; 16 | 17 | if (storageMode === "local") { 18 | 
storageEngine = initializeLocalStorage( 19 | multerOptions, 20 | fileNameLength, 21 | process.env.LOCAL_UPLOAD_PATH, 22 | ); 23 | } else if (storageMode === "s3") { 24 | const s3Config = { 25 | accessKeyId: process.env.AWS_ACCESS_KEY_ID, 26 | secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY, 27 | region: process.env.AWS_REGION, 28 | bucketName: process.env.S3_BUCKET_NAME, 29 | endpoint: process.env.S3_ENDPOINT, 30 | }; 31 | storageEngine = initializeS3Storage(multerOptions, fileNameLength, s3Config); 32 | } else { 33 | throw new Error("Invalid STORAGE_MODE"); 34 | } 35 | 36 | const uploadFile = (req, res) => { 37 | storageEngine.writeFile(req, res, () => { 38 | const fileHostDomain = 39 | process.env.FILEHOST_DOMAIN || `https://${req.get("host")}`; 40 | res.status(200).json({ 41 | message: "File uploaded successfully", 42 | url: `${fileHostDomain}/u/${req.filePath}`, 43 | }); 44 | }); 45 | }; 46 | 47 | const getFile = (req, res) => { 48 | const filename = req.params.filename; 49 | storageEngine.findFile(filename, res); 50 | }; 51 | 52 | // Rate limiting middleware 53 | const limiter = rateLimit({ 54 | windowMs: 15 * 60 * 1000, // 15 minutes 55 | max: 100, // Limit each IP to 100 requests per windowMs 56 | message: "Too many requests from this IP, please try again after 15 minutes", 57 | }); 58 | 59 | // Slow down middleware 60 | const speedLimiter = slowDown({ 61 | windowMs: 15 * 60 * 1000, // 15 minutes 62 | delayAfter: 75, // Allow 75 fast requests per 15 minutes. 63 | delayMs: (hits) => hits * 100, // Increase delay by 100ms for each request after 5 requests 64 | }); 65 | 66 | app.use("/u/:filename", limiter, speedLimiter, getFile); 67 | 68 | module.exports = { uploadFile, getFile }; 69 | -------------------------------------------------------------------------------- /views/index.ejs: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | filehost by file.coffee 7 | 8 | 20 | 21 | 22 |
23 |
24 |

Truly your filehost

25 |

Free, open-source, safe, filehosting.

26 |
27 |
28 |
29 |

Statistics

30 |
31 | Total uploads: 32 | <%= totalUploads %> 33 |
34 |
35 | Total size: 36 | <%= totalSize %> MB 37 |
38 |
39 |
40 |

Contact

41 |

Contact the hoster: <%= hosterEmail %>

42 |
43 |
44 | 47 | 48 |
49 | v<%= version %> 50 |
51 |
52 | 53 | 54 | -------------------------------------------------------------------------------- /engines/s3.engine.js: -------------------------------------------------------------------------------- 1 | const multer = require("multer"); 2 | const mime = require("mime-types"); 3 | const AWS = require("aws-sdk"); 4 | const { nanoid } = require("nanoid"); 5 | 6 | const initializeS3Storage = (multerOptions, fileNameLength, s3Config) => { 7 | const s3 = new AWS.S3({ 8 | accessKeyId: s3Config.accessKeyId, 9 | secretAccessKey: s3Config.secretAccessKey, 10 | endpoint: s3Config.endpoint, 11 | s3ForcePathStyle: true, 12 | signatureVersion: "v4", 13 | }); 14 | 15 | const storage = multer.memoryStorage(); 16 | const upload = multer({ storage: storage, ...multerOptions }); 17 | 18 | const writeFile = (req, res, next) => { 19 | upload.single("file")(req, res, (err) => { 20 | if (err) { 21 | return res.status(500).json({ error: err.message }); 22 | } 23 | const ext = mime.extension(req.file.mimetype); 24 | const randomName = nanoid(fileNameLength); 25 | const params = { 26 | Bucket: s3Config.bucketName, 27 | Key: `${randomName}.${ext}`, 28 | Body: req.file.buffer, 29 | ContentType: req.file.mimetype, 30 | }; 31 | 32 | s3.upload(params, (err, data) => { 33 | if (err) { 34 | return res.status(500).json({ error: err.message }); 35 | } 36 | req.filePath = `${randomName}.${ext}`; 37 | next(); 38 | }); 39 | }); 40 | }; 41 | 42 | const findFile = (filename, res) => { 43 | const params = { 44 | Bucket: s3Config.bucketName, 45 | Key: filename, 46 | }; 47 | 48 | s3.getObject(params, (err, data) => { 49 | if (err) { 50 | return res.status(404).json({ error: "File not found" }); 51 | } 52 | res.writeHead(200, { 53 | "Content-Type": data.ContentType, 54 | "Content-Length": data.ContentLength, 55 | }); 56 | res.write(data.Body); 57 | res.end(); 58 | }); 59 | }; 60 | const gatherStatistics = async () => { 61 | let totalUploads = 0; 62 | let totalSize = 0; 63 | 64 | const listParams = { 65 | 
Bucket: s3Config.bucketName, 66 | }; 67 | 68 | const listObjects = async (params) => { 69 | const data = await s3.listObjectsV2(params).promise(); 70 | data.Contents.forEach((item) => { 71 | totalUploads++; 72 | totalSize += item.Size; 73 | }); 74 | 75 | if (data.IsTruncated) { 76 | params.ContinuationToken = data.NextContinuationToken; 77 | await listObjects(params); 78 | } 79 | }; 80 | 81 | await listObjects(listParams); 82 | 83 | return { totalUploads, totalSize }; 84 | }; 85 | 86 | return { writeFile, findFile, gatherStatistics }; 87 | }; 88 | 89 | module.exports = initializeS3Storage; 90 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # filehost 📁 2 | An open source file host that you can easily selfhost. Made by the creators of the original file.coffee that's now sunset. 3 | 4 | ![https://raw.githubusercontent.com/filecoffee/filehost/main/assets/git/homepage.png](https://raw.githubusercontent.com/filecoffee/filehost/main/assets/git/homepage.png) 5 | 6 | ## Simply start your own file host 🚀 7 | filehost is a one-click (or one-command) setup file host that you can deploy wherever you want. Full control over your own data. No limits. 8 | 9 | ### Use Railway (one-click) 🚂 10 | Hit the Railway button and have your filehost up and running in a few minutes. The button also supports the project since we get a small referral bonus. It costs (for a simple host) a few cents to a dollar per month to host it depending on the bandwidth. If you expect a lot of traffic, we highly recommend setting it up on your own server using Docker since that is a lot more cost efficient. This is just the easiest for a personal host. 11 | 12 | [![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/template/pv_x6t?referralCode=QsZ-bg) 13 | 14 | 15 | ### DIY - Docker! 🐳 16 | Copy the `docker-compose.prod.yml`. 
17 | Simply run `docker compose -f .\docker-compose.prod.yml up` on your server after you've configured the `.env` and it's all up and running. 18 | 19 | ``` 20 | Note: We're still actively working on the product and it is currently in early alpha stages. We currently recommend also making a backup or choosing S3 as a host with the project. 21 | ``` 22 | 23 | We also recommend forking the project and deploying your forked version to avoid issues when we push updates. You can then easily merge the changes in your forked version. 24 | 25 | ## Roadmap 🛣️ 26 | - [x] File uploads 27 | - [x] Retrieving files 28 | - [x] Unit tests 29 | - [x] File size limits 30 | - [x] ShareX integration 31 | - [x] Allow public uploads 32 | - [x] Basic API keys 33 | - [ ] Whitelisting file types 34 | - [ ] User authentication <-- Currently in development 35 | - [ ] File deletion 36 | - [ ] File expiration 37 | - [ ] File management 38 | - [ ] File statistics 39 | - [ ] File search 40 | - [ ] File tagging using AI 41 | - [ ] File previews 42 | - [x] Rate limiting 43 | - [x] Slowdown on too many requests 44 | - [ ] Advertising support 45 | - [ ] NSFW detection and filtering 46 | 47 | ### S3 Compatible Storage 48 | For the s3 compatible storage engine, we recommend using Contabo Object Storage. It's cheap (2,50/mth for 250GB with unlimited bandwidth at 80mbps) and really easy to set up. Just make an account, get the object storage, make a bucket and fill in the details in the `.env` and it _just works_. 49 | 50 | ## Note 51 | It's currently a big work in progress. But stay patient, something you can deploy will be ready a.s.a.p. 
52 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | .pnpm-debug.log* 9 | 10 | /tests/uploads/* 11 | !/tests/uploads/test.txt 12 | !/tests/uploads/.gitkeep 13 | 14 | /uploads/* 15 | !/uploads/.gitkeep 16 | 17 | # Diagnostic reports (https://nodejs.org/api/report.html) 18 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 19 | 20 | # Runtime data 21 | pids 22 | *.pid 23 | *.seed 24 | *.pid.lock 25 | 26 | # Directory for instrumented libs generated by jscoverage/JSCover 27 | lib-cov 28 | 29 | # Coverage directory used by tools like istanbul 30 | coverage 31 | *.lcov 32 | 33 | # nyc test coverage 34 | .nyc_output 35 | 36 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 37 | .grunt 38 | 39 | # Bower dependency directory (https://bower.io/) 40 | bower_components 41 | 42 | # node-waf configuration 43 | .lock-wscript 44 | 45 | # Compiled binary addons (https://nodejs.org/api/addons.html) 46 | build/Release 47 | 48 | # Dependency directories 49 | node_modules/ 50 | jspm_packages/ 51 | 52 | # Snowpack dependency directory (https://snowpack.dev/) 53 | web_modules/ 54 | 55 | # TypeScript cache 56 | *.tsbuildinfo 57 | 58 | # Optional npm cache directory 59 | .npm 60 | 61 | # Optional eslint cache 62 | .eslintcache 63 | 64 | # Optional stylelint cache 65 | .stylelintcache 66 | 67 | # Microbundle cache 68 | .rpt2_cache/ 69 | .rts2_cache_cjs/ 70 | .rts2_cache_es/ 71 | .rts2_cache_umd/ 72 | 73 | # Optional REPL history 74 | .node_repl_history 75 | 76 | # Output of 'npm pack' 77 | *.tgz 78 | 79 | # Yarn Integrity file 80 | .yarn-integrity 81 | 82 | # dotenv environment variable files 83 | .env 84 | .env.development.local 85 | .env.test.local 86 | .env.production.local 87 | .env.local 88 | 89 | # parcel-bundler 
cache (https://parceljs.org/) 90 | .cache 91 | .parcel-cache 92 | 93 | # Next.js build output 94 | .next 95 | out 96 | 97 | # Nuxt.js build / generate output 98 | .nuxt 99 | dist 100 | 101 | # Gatsby files 102 | .cache/ 103 | # Comment in the public line in if your project uses Gatsby and not Next.js 104 | # https://nextjs.org/blog/next-9-1#public-directory-support 105 | # public 106 | 107 | # vuepress build output 108 | .vuepress/dist 109 | 110 | # vuepress v2.x temp and cache directory 111 | .temp 112 | .cache 113 | 114 | # Docusaurus cache and generated files 115 | .docusaurus 116 | 117 | # Serverless directories 118 | .serverless/ 119 | 120 | # FuseBox cache 121 | .fusebox/ 122 | 123 | # DynamoDB Local files 124 | .dynamodb/ 125 | 126 | # TernJS port file 127 | .tern-port 128 | 129 | # Stores VSCode versions used for testing VSCode extensions 130 | .vscode-test 131 | 132 | # yarn v2 133 | .yarn/cache 134 | .yarn/unplugged 135 | .yarn/build-state.yml 136 | .yarn/install-state.gz 137 | .pnp.* 138 | CLAUDE.md 139 | --------------------------------------------------------------------------------