├── logs └── .gitkeep ├── src ├── domain │ ├── limit │ │ ├── claim │ │ │ ├── members.js │ │ │ ├── impactDAO.js │ │ │ ├── ens.js │ │ │ ├── privateFiles.js │ │ │ ├── fileverseDoc.js │ │ │ ├── whiteboard.js │ │ │ ├── publicFiles.js │ │ │ ├── safe.js │ │ │ ├── lens.js │ │ │ └── index.js │ │ ├── index.js │ │ ├── extendStorage.js │ │ ├── getStorageUse.js │ │ ├── claimStorage.js │ │ └── getStorageStatus.js │ ├── task │ │ ├── tasks │ │ │ ├── ownsFarcasterHandle.js │ │ │ ├── ownsENSHandle.js │ │ │ ├── getRank.js │ │ │ ├── ownsSafeMultiSig.js │ │ │ ├── ownsGitcoinPassport.js │ │ │ ├── ownsLensHandle.js │ │ │ ├── getStorage.js │ │ │ └── index.js │ │ ├── index.js │ │ ├── completeMultipleTask.js │ │ ├── completeTask.js │ │ ├── levelUp.js │ │ ├── verifyTask.js │ │ └── getTaskStatus.js │ ├── contants │ │ ├── index.js │ │ ├── response.js │ │ └── job.js │ ├── file │ │ ├── utils.js │ │ ├── index.js │ │ ├── findall.js │ │ ├── getSizeByContract.js │ │ └── create.js │ ├── hashResolver │ │ ├── index.js │ │ ├── resolveHash.js │ │ ├── constants.js │ │ └── getGatewayUrl.js │ ├── publicPortal │ │ ├── index.js │ │ ├── create.js │ │ └── find.js │ ├── analytics.js │ ├── log │ │ ├── index.js │ │ ├── create.js │ │ ├── ddoc.js │ │ ├── getByFile.js │ │ ├── getByContract.js │ │ └── process.js │ ├── content.js │ ├── jobs │ │ ├── create.js │ │ ├── index.js │ │ ├── update.js │ │ └── get.js │ ├── contract │ │ ├── jsonRpcProvider.js │ │ ├── provider.js │ │ ├── index.js │ │ └── abi.json │ ├── index.js │ ├── ipfs │ │ ├── interface.js │ │ ├── index.js │ │ ├── web3storage.js │ │ ├── pinata.js │ │ └── filebase.js │ ├── cache │ │ └── index.js │ └── upload.js ├── interface │ ├── middleware │ │ ├── canView.js │ │ ├── canComment.js │ │ ├── validator.js │ │ ├── isPublic.js │ │ ├── index.js │ │ ├── canListTask.js │ │ ├── canCheckLimit.js │ │ ├── canUpdateLimit.js │ │ ├── canCheckLimitUse.js │ │ ├── canCompleteTask.js │ │ ├── canViewAnalytics.js │ │ ├── isAuthenticated.js │ │ ├── errorHandler.js │ │ └── 
canUpload.js │ ├── portal │ │ ├── index.js │ │ ├── file │ │ │ ├── index.js │ │ │ └── getPortal.js │ │ └── indexPortal │ │ │ ├── index.js │ │ │ ├── triggerStatus.js │ │ │ ├── enable.js │ │ │ └── trigger.js │ ├── limit │ │ ├── legacyUse.js │ │ ├── claim.js │ │ ├── check.js │ │ ├── use.js │ │ ├── extend.js │ │ └── index.js │ ├── fileList │ │ ├── index.js │ │ └── fileList.js │ ├── content │ │ ├── index.js │ │ └── content.js │ ├── analytics │ │ ├── index.js │ │ ├── file.js │ │ └── contract.js │ ├── log │ │ ├── index.js │ │ ├── ddoc.js │ │ ├── ddocSignup.js │ │ └── create.js │ ├── task │ │ ├── list.js │ │ ├── levelUp.js │ │ ├── verify.js │ │ ├── complete.js │ │ └── index.js │ ├── index.js │ └── upload │ │ ├── index.js │ │ ├── public.js │ │ ├── upload.js │ │ └── comment.js ├── infra │ ├── asyncHandler.js │ ├── database │ │ ├── models │ │ │ ├── index.js │ │ │ ├── stat.js │ │ │ ├── task.js │ │ │ ├── log.js │ │ │ ├── portal.js │ │ │ ├── file.js │ │ │ ├── limit.js │ │ │ └── job.js │ │ └── index.js │ ├── errorHandler.js │ ├── reporter.js │ ├── collaboratorKey.js │ ├── logger.js │ └── ucan.js ├── index.js └── app.js ├── README.md ├── config ├── index.js └── .env.example ├── package.json └── .gitignore /logs/.gitkeep: -------------------------------------------------------------------------------- 1 | This is the directory for server logs -------------------------------------------------------------------------------- /src/domain/limit/claim/members.js: -------------------------------------------------------------------------------- 1 | async function canClaim() { 2 | return false; 3 | } 4 | 5 | module.exports = canClaim; 6 | -------------------------------------------------------------------------------- /src/domain/limit/claim/impactDAO.js: -------------------------------------------------------------------------------- 1 | async function canClaim() { 2 | return false; 3 | } 4 | 5 | module.exports = canClaim; 6 | 
-------------------------------------------------------------------------------- /src/interface/middleware/canView.js: -------------------------------------------------------------------------------- 1 | async function canView(req, res, next) { 2 | next(); 3 | } 4 | 5 | module.exports = canView; 6 | -------------------------------------------------------------------------------- /src/interface/middleware/canComment.js: -------------------------------------------------------------------------------- 1 | async function canComment(req, res, next) { 2 | next(); 3 | } 4 | 5 | module.exports = canComment; 6 | -------------------------------------------------------------------------------- /src/domain/task/tasks/ownsFarcasterHandle.js: -------------------------------------------------------------------------------- 1 | function ownsFarcasterHandle(invokerAddress) { 2 | return false; 3 | } 4 | 5 | module.exports = ownsFarcasterHandle; 6 | -------------------------------------------------------------------------------- /src/interface/middleware/validator.js: -------------------------------------------------------------------------------- 1 | const { validate, ValidationError, Joi } = require('express-validation'); 2 | 3 | module.exports = { validate, Joi, ValidationError }; 4 | -------------------------------------------------------------------------------- /src/domain/contants/index.js: -------------------------------------------------------------------------------- 1 | 2 | const JobConst = require('./job'); 3 | const Response = require('./response'); 4 | 5 | module.exports = { 6 | JobConst, 7 | Response, 8 | } -------------------------------------------------------------------------------- /src/domain/file/utils.js: -------------------------------------------------------------------------------- 1 | 2 | function getFileVisibility(file) { 3 | return file.tags.includes('private') ? 
'private' : 'public'; 4 | } 5 | 6 | module.exports = { getFileVisibility }; -------------------------------------------------------------------------------- /src/domain/hashResolver/index.js: -------------------------------------------------------------------------------- 1 | const resolveIpfsHash = require('./resolveHash'); 2 | const getGatewayUrl = require('./getGatewayUrl'); 3 | 4 | module.exports = { resolveIpfsHash, getGatewayUrl }; -------------------------------------------------------------------------------- /src/domain/publicPortal/index.js: -------------------------------------------------------------------------------- 1 | const updateOrCreate = require('./create'); 2 | const { findOne, findAll } = require('./find'); 3 | 4 | module.exports = { findOne, findAll, updateOrCreate }; 5 | -------------------------------------------------------------------------------- /src/domain/file/index.js: -------------------------------------------------------------------------------- 1 | const create = require('./create'); 2 | const getSizeByContract = require('./getSizeByContract'); 3 | const { findAll, findOne } = require('./findall'); 4 | 5 | 6 | module.exports = { create, getSizeByContract, findAll, findOne }; 7 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Fileverse Storage 2 | Fileverse Storage is the service that handles file uploads from authorized UCANs, supporting [dDocs](https://ddocs.new/), Fileverse Portal, and other Fileverse applications. It enables decentralized, secure, and privacy-preserving file storage. 
3 | -------------------------------------------------------------------------------- /src/domain/analytics.js: -------------------------------------------------------------------------------- 1 | const getByFile = require('./log/getByFile'); 2 | const getByContract = require('./log/getByContract'); 3 | const _analytics = {}; 4 | 5 | _analytics.getByFile = getByFile; 6 | _analytics.getByContract = getByContract; 7 | 8 | module.exports = _analytics; 9 | -------------------------------------------------------------------------------- /src/infra/asyncHandler.js: -------------------------------------------------------------------------------- 1 | const asyncHandler = (fn) => (req, res, next) => 2 | Promise.resolve(fn(req, res, next)).catch(next); 3 | 4 | const asyncHandlerArray = (resolvers) => { 5 | return resolvers.map(asyncHandler); 6 | }; 7 | 8 | module.exports = { asyncHandler, asyncHandlerArray }; 9 | -------------------------------------------------------------------------------- /src/domain/task/index.js: -------------------------------------------------------------------------------- 1 | const completeTask = require('./completeTask'); 2 | const getTaskStatus = require('./getTaskStatus'); 3 | const verifyTask = require('./verifyTask'); 4 | const levelUp = require('./levelUp'); 5 | 6 | module.exports = { completeTask, getTaskStatus, verifyTask, levelUp }; 7 | -------------------------------------------------------------------------------- /src/interface/portal/index.js: -------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | const router = express.Router(); 3 | 4 | const file = require('./file'); 5 | const index = require('./indexPortal'); 6 | 7 | router.use('/file', file); 8 | router.use('/index', index); 9 | 10 | module.exports = router; 11 | -------------------------------------------------------------------------------- /src/domain/log/index.js: 
-------------------------------------------------------------------------------- 1 | const create = require('./create'); 2 | const getByFile = require('./getByFile'); 3 | const getByContract = require('./getByContract'); 4 | const { ddocCreate, ddocSignup } = require('./ddoc'); 5 | 6 | module.exports = { create, getByFile, getByContract, ddocCreate, ddocSignup }; 7 | -------------------------------------------------------------------------------- /src/domain/task/tasks/ownsENSHandle.js: -------------------------------------------------------------------------------- 1 | const provider = require('../../contract/jsonRpcProvider'); 2 | 3 | async function ownsENSHandle(invokerAddress) { 4 | const name = await provider.lookupAddress(invokerAddress); 5 | return !!(name && name.endsWith(".eth")); 6 | } 7 | 8 | module.exports = ownsENSHandle; 9 | -------------------------------------------------------------------------------- /src/domain/contants/response.js: -------------------------------------------------------------------------------- 1 | 2 | const GenericResp = { 3 | error: null, 4 | message: "", 5 | data: {}, 6 | } 7 | 8 | const RespMsg = { 9 | SUCCESS: "SUCCESS", 10 | FAILED: "FAILED", 11 | NOT_FOUND: "NOT_FOUND", 12 | ERROR: "ERROR", 13 | } 14 | 15 | module.exports = { GenericResp, RespMsg }; -------------------------------------------------------------------------------- /src/domain/limit/index.js: -------------------------------------------------------------------------------- 1 | const claimStorage = require('./claimStorage'); 2 | const getStorageStatus = require('./getStorageStatus'); 3 | const getStorageUse = require('./getStorageUse'); 4 | const extendStorage = require('./extendStorage'); 5 | module.exports = { claimStorage, getStorageStatus, getStorageUse, extendStorage }; 6 | -------------------------------------------------------------------------------- /src/domain/file/findall.js: 
-------------------------------------------------------------------------------- 1 | const { File } = require("../../infra/database/models"); 2 | 3 | async function findAll(invokerAddress) { 4 | return await File.find({ invokerAddress }); 5 | } 6 | 7 | 8 | async function findOne(ipfsHash) { 9 | return await File.findOne({ ipfsHash }); 10 | } 11 | 12 | module.exports = { findAll, findOne }; -------------------------------------------------------------------------------- /src/domain/limit/claim/ens.js: -------------------------------------------------------------------------------- 1 | const provider = require('../../contract/jsonRpcProvider'); 2 | 3 | // check if address has .eth ens name 4 | async function canClaim({ invokerAddress }) { 5 | const name = await provider.lookupAddress(invokerAddress); 6 | return !!(name && name.endsWith(".eth")); 7 | } 8 | 9 | module.exports = canClaim; 10 | -------------------------------------------------------------------------------- /src/domain/limit/claim/privateFiles.js: -------------------------------------------------------------------------------- 1 | const { File } = require("../../../infra/database/models"); 2 | 3 | async function canClaim({ contractAddress }) { 4 | const data = await File.findOne({ 5 | contractAddress: contractAddress.toLowerCase(), 6 | tags: "private", 7 | }); 8 | return !!data; 9 | } 10 | 11 | module.exports = canClaim; 12 | -------------------------------------------------------------------------------- /src/domain/limit/claim/fileverseDoc.js: -------------------------------------------------------------------------------- 1 | const { File } = require("../../../infra/database/models"); 2 | 3 | async function canClaim({ contractAddress }) { 4 | const data = await File.findOne({ 5 | contractAddress: contractAddress.toLowerCase(), 6 | tags: "fileverse_document", 7 | }); 8 | return !!data; 9 | } 10 | 11 | module.exports = canClaim; 12 | 
-------------------------------------------------------------------------------- /src/domain/limit/claim/whiteboard.js: -------------------------------------------------------------------------------- 1 | const { File } = require("../../../infra/database/models"); 2 | 3 | async function canClaim({ contractAddress }) { 4 | const data = await File.findOne({ 5 | contractAddress: contractAddress.toLowerCase(), 6 | tags: "fileverse_whiteboard", 7 | }); 8 | return !!data; 9 | } 10 | 11 | module.exports = canClaim; 12 | -------------------------------------------------------------------------------- /config/index.js: -------------------------------------------------------------------------------- 1 | const dotenv = require('dotenv'); 2 | const path = require('path'); 3 | dotenv.config({ 4 | path: path.resolve( 5 | __dirname, 6 | `./${process.env.NODE_ENV ? process.env.NODE_ENV : ''}.env`, 7 | ), 8 | }); 9 | const config = process.env; 10 | config.SERVICE_NAME = config.SERVICE_NAME || 'storage-service'; 11 | module.exports = config; 12 | -------------------------------------------------------------------------------- /src/domain/content.js: -------------------------------------------------------------------------------- 1 | const { Readable } = require('stream'); 2 | const GetIpfsService = require('./ipfs'); 3 | const Cache = require('./cache'); 4 | const cache = new Cache(); 5 | 6 | async function content(ipfsHash) { 7 | const stream = await GetIpfsService().get(ipfsHash); 8 | return { 9 | contentStream: stream, 10 | }; 11 | } 12 | 13 | module.exports = content; 14 | -------------------------------------------------------------------------------- /src/domain/task/tasks/getRank.js: -------------------------------------------------------------------------------- 1 | function getRank({ collectedPoints }) { 2 | if (collectedPoints >= 680) { 3 | return "open-sourcerer-gold"; 4 | } 5 | if (collectedPoints >= 430) { 6 | return "open-sourcerer-orange"; 7 | } 8 | if 
(collectedPoints >= 130) { 9 | return "pathfinder"; 10 | } 11 | return "explorer"; 12 | } 13 | 14 | module.exports = getRank; 15 | -------------------------------------------------------------------------------- /src/infra/database/models/index.js: -------------------------------------------------------------------------------- 1 | require('../'); 2 | 3 | const _models = { 4 | Log: require('./log').model, 5 | File: require('./file').model, 6 | Limit: require('./limit').model, 7 | Task: require('./task').model, 8 | Stat: require('./stat').model, 9 | Portal: require('./portal').model, 10 | Job: require('./job').model, 11 | }; 12 | 13 | module.exports = _models; 14 | -------------------------------------------------------------------------------- /src/domain/limit/claim/publicFiles.js: -------------------------------------------------------------------------------- 1 | const { File } = require("../../../infra/database/models"); 2 | 3 | async function canClaim({ contractAddress }) { 4 | const data = await File.find({ 5 | contractAddress: contractAddress.toLowerCase(), 6 | tags: "public", 7 | }) 8 | .limit(4) 9 | .lean(); 10 | return !!(data.length > 3); 11 | } 12 | 13 | module.exports = canClaim; 14 | -------------------------------------------------------------------------------- /src/domain/jobs/create.js: -------------------------------------------------------------------------------- 1 | const { Job } = require('../../infra/database/models'); 2 | 3 | async function createJob(jobType, jobData, contractAddress) { 4 | // create new job 5 | const job = await new Job({ 6 | jobType, 7 | jobData, 8 | contractAddress, 9 | }); 10 | 11 | await job.save(); 12 | return job; 13 | } 14 | 15 | module.exports = { createJob }; -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | const config = require('../config'); 2 | const logger = 
require('./infra/logger'); 3 | 4 | const app = require('./app'); 5 | 6 | // Here you set the PORT and IP of the server 7 | const port = config.PORT || 8001; 8 | const ip = config.IP || '127.0.0.1'; 9 | 10 | app.listen({ port, ip }, () => 11 | logger.info(`🚀 Server ready at http://${ip}:${port}`), 12 | ); 13 | 14 | module.exports = app; 15 | -------------------------------------------------------------------------------- /src/domain/contract/jsonRpcProvider.js: -------------------------------------------------------------------------------- 1 | const { ethers } = require("ethers"); 2 | const provider = require("./provider"); 3 | 4 | function JsonRpcProvider() { 5 | const networkProviderUrl = provider.getNetworkUrl('eth_mainnet'); 6 | const networkProvider = new ethers.providers.JsonRpcProvider( 7 | networkProviderUrl 8 | ); 9 | return networkProvider; 10 | } 11 | 12 | module.exports = JsonRpcProvider(); 13 | -------------------------------------------------------------------------------- /src/domain/contants/job.js: -------------------------------------------------------------------------------- 1 | const Status = { 2 | Pending: 'pending', 3 | Processing: 'processing', 4 | Failed: 'failed', 5 | Completed: 'completed' 6 | } 7 | 8 | const Type = { 9 | PublicPortal: 'public-portal', 10 | } 11 | 12 | const RetryLimit = 3; 13 | const ProcessLimit = 10; 14 | 15 | module.exports = { 16 | RetryLimit, 17 | Type, 18 | ProcessLimit, 19 | Status 20 | }; 21 | 22 | -------------------------------------------------------------------------------- /src/domain/jobs/index.js: -------------------------------------------------------------------------------- 1 | const { getJobByUuid, getAvailableJobs, getJobByContractAddress } = require('./get'); 2 | const { createJob } = require('./create'); 3 | const { updateJobStatus, updateJobRetries } = require('./update'); 4 | 5 | 6 | module.exports = { 7 | getJobByUuid, 8 | createJob, 9 | getAvailableJobs, 10 | updateJobStatus, 11 | 
updateJobRetries, 12 | getJobByContractAddress 13 | }; 14 | -------------------------------------------------------------------------------- /src/interface/limit/legacyUse.js: -------------------------------------------------------------------------------- 1 | const { limit } = require("../../domain"); 2 | 3 | async function legacyUse(req, res) { 4 | const { contractAddress } = req; 5 | console.log({ contractAddress }); 6 | const data = await limit.getStorageUse({ contractAddress }); 7 | console.log({ data }); 8 | res.json({ ...data, storageLimit: data.storageLimit + data.extraStorage }); 9 | } 10 | 11 | module.exports = [legacyUse]; 12 | -------------------------------------------------------------------------------- /src/interface/fileList/index.js: -------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | 3 | const router = express.Router(); 4 | 5 | const { 6 | asyncHandlerArray, 7 | } = require('../../infra/asyncHandler'); 8 | 9 | // domain 10 | const { fileList, getUniqueFile } = require('./fileList'); 11 | 12 | router.get('/hash', asyncHandlerArray([getUniqueFile])); 13 | router.get('/invoker', asyncHandlerArray([fileList])); 14 | 15 | module.exports = router; 16 | -------------------------------------------------------------------------------- /src/domain/limit/claim/safe.js: -------------------------------------------------------------------------------- 1 | const axios = require('axios'); 2 | 3 | async function canClaim({ invokerAddress }) { 4 | const response = await axios({ 5 | method: 'get', 6 | url: `https://safe-transaction-mainnet.safe.global/api/v1/owners/${invokerAddress}/safes/`, 7 | }); 8 | const { safes } = response && response.data || {}; 9 | const exist = safes && safes.length || null; 10 | return !!exist; 11 | } 12 | 13 | module.exports = canClaim; 14 | -------------------------------------------------------------------------------- /src/interface/content/index.js: 
-------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | 3 | const router = express.Router(); 4 | 5 | const { 6 | asyncHandler, 7 | asyncHandlerArray, 8 | } = require('../../infra/asyncHandler'); 9 | 10 | // domain 11 | const content = require('./content'); 12 | 13 | // middlewares 14 | const { canView } = require('../middleware'); 15 | 16 | router.get('/', asyncHandler(canView), asyncHandlerArray(content)); 17 | 18 | module.exports = router; 19 | -------------------------------------------------------------------------------- /src/domain/index.js: -------------------------------------------------------------------------------- 1 | const upload = require("./upload"); 2 | const content = require("./content"); 3 | const analytics = require("./analytics"); 4 | const contract = require("./contract"); 5 | const log = require("./log"); 6 | const file = require("./file"); 7 | const limit = require("./limit"); 8 | const task = require("./task"); 9 | 10 | module.exports = { 11 | upload, 12 | content, 13 | analytics, 14 | contract, 15 | log, 16 | file, 17 | limit, 18 | task, 19 | }; 20 | -------------------------------------------------------------------------------- /src/domain/task/tasks/ownsSafeMultiSig.js: -------------------------------------------------------------------------------- 1 | const axios = require('axios'); 2 | 3 | async function ownsSafeMultiSig(invokerAddress) { 4 | const response = await axios({ 5 | method: 'get', 6 | url: `https://safe-transaction-mainnet.safe.global/api/v1/owners/${invokerAddress}/safes/`, 7 | }); 8 | const { safes } = response && response.data || {}; 9 | const exist = safes && safes.length || null; 10 | return !!exist; 11 | } 12 | 13 | module.exports = ownsSafeMultiSig; 14 | -------------------------------------------------------------------------------- /src/interface/portal/file/index.js: 
-------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | const router = express.Router(); 3 | 4 | const { 5 | asyncHandler, 6 | asyncHandlerArray, 7 | } = require('../../../infra/asyncHandler'); 8 | 9 | 10 | const { 11 | getPortalHandler, 12 | getAllPortalHandler 13 | } = require('./getPortal'); 14 | 15 | router.get('/:fileId', asyncHandlerArray([getPortalHandler])); 16 | router.get('/', asyncHandlerArray([getAllPortalHandler])); 17 | 18 | module.exports = router; -------------------------------------------------------------------------------- /src/domain/hashResolver/resolveHash.js: -------------------------------------------------------------------------------- 1 | const axios = require('axios'); 2 | const IPFS_BASE_URLS = require('./constants'); 3 | 4 | /** 5 | * Fetches metadata from IPFS for a given hash. 6 | * @param {string} hash - The IPFS hash to fetch metadata for. 7 | */ 8 | async function resolveIpfsHash(hash) { 9 | const fetchPromises = IPFS_BASE_URLS.map(url => axios.get(url + hash)); 10 | const result = await Promise.any(fetchPromises); 11 | return result?.data; 12 | } 13 | 14 | module.exports = resolveIpfsHash; -------------------------------------------------------------------------------- /src/domain/ipfs/interface.js: -------------------------------------------------------------------------------- 1 | class IpfsStorageInterface { 2 | constructor() { 3 | 4 | } 5 | async upload(readableStreamForFile, { name, attribute, filesize }) { 6 | throw new Error('No Ipfs storage service found'); 7 | } 8 | 9 | async get({ ipfsUrl }) { 10 | throw new Error('No Ipfs storage service found'); 11 | } 12 | 13 | async remove({ ipfsHash }) { 14 | throw new Error('No Ipfs storage service found'); 15 | } 16 | } 17 | 18 | module.exports = IpfsStorageInterface; -------------------------------------------------------------------------------- /src/domain/log/create.js: 
-------------------------------------------------------------------------------- 1 | const { Log } = require('../../infra/database/models'); 2 | const process = require('./process'); 3 | 4 | async function create(eventName, { fileId, contractAddress, invokerAddress, ipfsHash, tags }) { 5 | const logObject = await new Log({ eventName, fileId, contractAddress, invokerAddress, ipfsHash, tags }).save(); 6 | const safeLog = logObject.safeObject(); 7 | // process logs 8 | await process(contractAddress, invokerAddress, safeLog) 9 | return safeLog; 10 | } 11 | 12 | module.exports = create; 13 | -------------------------------------------------------------------------------- /config/.env.example: -------------------------------------------------------------------------------- 1 | HOST= 2 | DEFAULT_IPFS_SERVICE= 3 | DEFAULT_STORAGE_LIMIT= 4 | ETH_GOERLI_RPC_URL= 5 | ETH_MAINNET_RPC_URL= 6 | ETH_SEPOLIA_RPC_URL= 7 | GNOSIS_MAINNET_RPC_URL= 8 | GNOSIS_TESTNET_RPC_URL= 9 | FILEVERSE_TESTNET_RPC_URL= 10 | GITCOIN_API_KEY= 11 | MONGOURI= 12 | NETWORK_PROVIDER_URL= 13 | NODE_ENV= 14 | PINATA_API_KEY= 15 | PINATA_SECRET_KEY= 16 | SERVICE_DID= 17 | SLACK_REPORTER= 18 | WEB3STORAGE_TOKEN= 19 | FILEBASE_ACCESS_KEY= 20 | FILEBASE_SECRET= 21 | FILEBASE_BUCKET_NAME= 22 | GITCOIN_API_KEY= 23 | X_API_KEY= -------------------------------------------------------------------------------- /src/infra/errorHandler.js: -------------------------------------------------------------------------------- 1 | const reporter = require('./reporter'); 2 | const _errorHandler = {}; 3 | 4 | _errorHandler.throwError = ({ code = 500, message, token, req = {} }) => { 5 | const error = new Error(message); 6 | error.code = code; 7 | error.token = token; 8 | error.req = req; 9 | error.address = req.address; 10 | // Error Reporting to Slack 11 | const errorMessage = `Message: ${message}\nError Code: ${code}`; 12 | reporter.reportError(errorMessage).catch(console.log); 13 | throw error; 14 | }; 15 | 16 | 
module.exports = _errorHandler; 17 | -------------------------------------------------------------------------------- /src/domain/hashResolver/constants.js: -------------------------------------------------------------------------------- 1 | const config = require('../../../config'); 2 | 3 | /** 4 | * Array of IPFS base URLs. 5 | * If `config.IPFS_BASE_URLS` is defined, it will be split by comma (',') to form the array. 6 | * Otherwise, a default array of IPFS base URLs will be used. 7 | */ 8 | const IPFS_BASE_URLS = config.IPFS_BASE_URLS ? config.IPFS_BASE_URLS.split(',') : [ 9 | 'https://ipfs.io/ipfs/', 10 | 'https://w3s.link/ipfs/', 11 | 'https://dweb.link/ipfs/', 12 | 'https://ipfs.fileverse.io/ipfs/', 13 | ]; 14 | 15 | module.exports = IPFS_BASE_URLS; -------------------------------------------------------------------------------- /src/domain/publicPortal/create.js: -------------------------------------------------------------------------------- 1 | const { Portal } = require('../../infra/database/models'); 2 | 3 | async function updateOrCreate(fileId, contractAddress, files, resolvedContent, resolvedMetadata) { 4 | try { 5 | const portal = await Portal.findOneAndUpdate({ fileId, contractAddress }, { files, resolvedContent, resolvedMetadata }, { new: true, upsert: true }); 6 | return portal; 7 | } catch (error) { 8 | console.error('Error updating portal:', error); 9 | return null; 10 | } 11 | } 12 | 13 | module.exports = updateOrCreate; 14 | -------------------------------------------------------------------------------- /src/interface/middleware/isPublic.js: -------------------------------------------------------------------------------- 1 | const ErrorHandler = require('../../infra/errorHandler'); 2 | 3 | const X_API_KEY = process.env.X_API_KEY; 4 | 5 | async function isPublic(req, res, next) { 6 | const xApiKey = req.header("x-api-key"); 7 | if (xApiKey === X_API_KEY) { 8 | return next(); 9 | } else { 10 | return ErrorHandler.throwError({ 11 | code: 401, 12 
| message: `unauthenticated request, please contact admin for api-key.`, 13 | req, 14 | }); 15 | } 16 | } 17 | 18 | module.exports = isPublic; 19 | -------------------------------------------------------------------------------- /src/domain/task/tasks/ownsGitcoinPassport.js: -------------------------------------------------------------------------------- 1 | const axios = require('axios'); 2 | const config = require('../../../../config'); 3 | 4 | const apiKey = config.GITCOIN_API_KEY; 5 | 6 | async function ownsGitcoinPassport(invokerAddress) { 7 | const response = await axios({ 8 | method: 'get', 9 | url: `https://api.scorer.gitcoin.co/registry/stamps/${invokerAddress}`, 10 | headers: { 'X-API-KEY': apiKey }, 11 | }); 12 | const { items } = response && response.data || {}; 13 | const exist = items[0] || null; 14 | return !!exist; 15 | } 16 | 17 | module.exports = ownsGitcoinPassport; 18 | -------------------------------------------------------------------------------- /src/interface/analytics/index.js: -------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | 3 | const router = express.Router(); 4 | 5 | const { 6 | asyncHandler, 7 | asyncHandlerArray, 8 | } = require('../../infra/asyncHandler'); 9 | 10 | const contractAnalytics = require('./contract'); 11 | const fileAnalytics = require('./file'); 12 | 13 | // middlewares 14 | const { canViewAnalytics } = require('../middleware'); 15 | 16 | router.get('/', asyncHandler(canViewAnalytics), asyncHandlerArray(contractAnalytics)); 17 | router.get('/:fileId', asyncHandler(canViewAnalytics), asyncHandlerArray(fileAnalytics)); 18 | 19 | module.exports = router; 20 | -------------------------------------------------------------------------------- /src/infra/reporter.js: -------------------------------------------------------------------------------- 1 | const config = require('../../config'); 2 | const axios = require('axios'); 3 | 4 | async function 
reportError(message) { 5 | env = config.HOST ? config.HOST : 'local'; 6 | slackHook = config.SLACK_REPORTER; 7 | if (env !== 'local') { 8 | await axios.post(slackHook, { 9 | "blocks": [ 10 | { 11 | "type": "section", 12 | "text": { 13 | "type": "mrkdwn", 14 | "text": "*Error* ```" + message + "```\n*Environment:* " + env.toUpperCase() 15 | } 16 | } 17 | ] 18 | }); 19 | } 20 | } 21 | 22 | module.exports = { reportError }; 23 | -------------------------------------------------------------------------------- /src/interface/log/index.js: -------------------------------------------------------------------------------- 1 | const { 2 | asyncHandler, 3 | asyncHandlerArray, 4 | } = require('../../infra/asyncHandler'); 5 | const express = require('express'); 6 | const router = express.Router(); 7 | 8 | const canView = require('../middleware/canView'); 9 | 10 | const create = require('./create'); 11 | const ddocCreate = require('./ddoc'); 12 | const ddocSignUp = require('./ddocSignup'); 13 | 14 | router.post('/create', asyncHandler(canView), asyncHandlerArray(create)); 15 | router.get('/create/ddoc', asyncHandlerArray(ddocCreate)); 16 | router.post('/signup/ddoc', asyncHandlerArray(ddocSignUp)); 17 | 18 | module.exports = router; 19 | -------------------------------------------------------------------------------- /src/domain/hashResolver/getGatewayUrl.js: -------------------------------------------------------------------------------- 1 | const axios = require('axios'); 2 | const IPFS_BASE_URLS = require('./constants'); 3 | 4 | /** 5 | * Fetches metadata from IPFS for a given hash. 6 | * @param {string} hash - The IPFS hash to fetch metadata for. 
const axios = require('axios');
const IPFS_BASE_URLS = require('./constants');

/**
 * Resolves an IPFS hash to the first gateway URL that serves it.
 * @param {string} hash - The IPFS hash to resolve.
 * @returns {Promise<string|null>} URL of the first gateway that responded,
 *   or null when every gateway fails.
 */
async function getGatewayUrl(hash) {
  const fetchPromises = IPFS_BASE_URLS.map(url => axios.get(url + hash));
  try {
    // Promise.any rejects with an AggregateError when *all* gateways fail;
    // the original let that rejection escape, so the `return null`
    // fallback branch was unreachable.
    const firstResolvedPromise = await Promise.any(fetchPromises);
    return firstResolvedPromise.config.url;
  } catch (err) {
    return null;
  }
}

module.exports = getGatewayUrl;
const { Log } = require('../../infra/database/models');

/**
 * Persists a log row for the given ddoc event and returns its safe view.
 * Shared by both exported event recorders (bodies were duplicated before).
 * @param {string} eventName - One of the DDOC_* event names.
 * @param {string} invokerAddress - Address that triggered the event.
 */
async function createDdocLog(eventName, invokerAddress) {
  const logObject = await new Log({
    eventName,
    invokerAddress,
  }).save();
  return logObject.safeObject();
}

// Records a "document created" event.
async function ddocCreate(invokerAddress) {
  return createDdocLog("DDOC_CREATE", invokerAddress);
}

// Records a "user signed up" event.
async function ddocSignup(invokerAddress) {
  return createDdocLog("DDOC_SIGNUP", invokerAddress);
}

module.exports = { ddocCreate, ddocSignup };
const { task } = require('../../domain');
const { validator } = require('../middleware');
const { Joi, validate } = validator;

// Identity headers required on every level-up request.
const levelUpValidation = {
  headers: Joi.object({
    contract: Joi.string().required(),
    invoker: Joi.string().required(),
    chain: Joi.string().required(),
  }).unknown(true),
};

/**
 * POST handler: attempts to level up the invoker on the given portal and
 * reports whether the domain call succeeded.
 */
async function levelUp(req, res) {
  const { contractAddress, invokerAddress } = req;
  const success = await task.levelUp({ contractAddress, invokerAddress });
  res.json({ success });
}

module.exports = [validate(levelUpValidation), levelUp];
const ErrorHandler = require('../../infra/errorHandler');

/**
 * Middleware: only authenticated requests may list tasks for a portal.
 * Responds 401 when no invoker address is present, 403 otherwise.
 */
async function canListTask(req, res, next) {
  const { invokerAddress, contractAddress } = req;
  if (!req.isAuthenticated) {
    return ErrorHandler.throwError({
      code: invokerAddress ? 403 : 401,
      message: `${invokerAddress} does not have permission to check tasks for portal ${contractAddress}`,
      req,
    });
  }
  next();
}

module.exports = canListTask;
const ErrorHandler = require('../../infra/errorHandler');

/**
 * Middleware: only authenticated requests may update a portal's limit.
 * Responds 401 when no invoker address is present, 403 otherwise.
 */
async function canUpdateLimit(req, res, next) {
  const { invokerAddress, contractAddress } = req;
  if (!req.isAuthenticated) {
    return ErrorHandler.throwError({
      code: invokerAddress ? 403 : 401,
      message: `${invokerAddress} does not have permission to update limit for portal ${contractAddress}`,
      req,
    });
  }
  next();
}

module.exports = canUpdateLimit;
const ErrorHandler = require('../../infra/errorHandler');

/**
 * Middleware: only authenticated requests may view analytics for a portal.
 * Responds 401 when no invoker address is present, 403 otherwise.
 */
async function canViewAnalytics(req, res, next) {
  const invokerAddress = req.invokerAddress;
  const contractAddress = req.contractAddress;
  if (req.isAuthenticated) {
    next();
  } else {
    let statusCode = 403;
    if (!invokerAddress) {
      statusCode = 401;
    }
    return ErrorHandler.throwError({
      code: statusCode,
      // Fixed copy-pasted message: this guard protects analytics viewing,
      // not file upload.
      message: `${invokerAddress} does not have permission to view analytics for portal ${contractAddress}`,
      req,
    });
  }
}

module.exports = canViewAnalytics;
const { tasks } = require('./tasks');
const { Task } = require("../../infra/database/models");

/**
 * Marks a client-side task as completed by the invoker for a portal.
 * @returns {Promise<boolean>} false when the task is unknown or not a
 *   CLIENT-verified task; true once the completion has been stored.
 */
async function completeTask({ contractAddress, invokerAddress, taskId }) {
  const taskFound = tasks.find(elem => elem.taskId === taskId);
  // An unknown taskId previously threw (`taskFound.activityType` on undefined).
  if (!taskFound || taskFound.activityType !== "CLIENT") return false;
  const taskStatus = await Task.findOne({ contractAddress }).lean();
  const taskMap = (taskStatus && taskStatus.taskMap) || {};
  taskMap[taskId] = invokerAddress;
  await Task.findOneAndUpdate(
    { contractAddress },
    { $set: { taskMap } },
    { upsert: true }
  );
  return true;
}

module.exports = completeTask;
/**
 * Maps a rank to the amount of unlockable storage it grants.
 * Unknown ranks unlock nothing.
 * @param {{rank: string}} params - Object carrying the invoker's rank name.
 * @returns {{totalUnlockableStorage: number, unlockedStorage: number, storageUnit: string}}
 */
function getStorage({ rank }) {
  const totalUnlockableStorage = 1000000000;
  // Fraction of the total pool unlocked at each rank (table-driven
  // replacement for the original if-chain).
  const unlockShare = new Map([
    ['explorer', 0.20],
    ['pathfinder', 0.40],
    ['open-sourcerer-orange', 0.80],
    ['open-sourcerer-yellow', 1],
  ]);
  const share = unlockShare.get(rank) ?? 0;
  return {
    totalUnlockableStorage,
    unlockedStorage: totalUnlockableStorage * share,
    storageUnit: "byte",
  };
}
const { Portal } = require('../../infra/database/models');


/**
 * Looks up a single public portal by file id and contract address.
 * Lookup failures are logged and reported as null, so callers treat
 * "not found" and "query failed" identically.
 */
async function findOne(fileId, contractAddress) {
  try {
    return await Portal.findOne({ fileId, contractAddress });
  } catch (error) {
    console.error('Error finding portal:', error);
    return null;
  }
}

/**
 * Lists every public portal registered under a contract address.
 * Returns an empty array when the query fails.
 */
async function findAll(contractAddress) {
  try {
    return await Portal.find({ contractAddress });
  } catch (error) {
    console.error('Error finding portals:', error);
    return [];
  }
}

module.exports = { findOne, findAll };
const { analytics, file } = require('../../domain');
const { validator } = require('../middleware');
const { Joi, validate } = validator;

// Only the contract header is required to read portal-wide analytics.
const analyticsByContractValidation = {
  headers: Joi.object({
    contract: Joi.string().required(),
  }).unknown(true),
};

/**
 * GET handler: portal-level analytics, enriched with the total file size
 * stored under the contract.
 */
async function analyticsByContract(req, res) {
  const { contractAddress } = req;
  const data = await analytics.getByContract({ contractAddress });
  const { totalFileSize } = await file.getSizeByContract({ contractAddress });
  data.totalFileSize = totalFileSize;
  res.json(data);
}

module.exports = [validate(analyticsByContractValidation), analyticsByContract];
const { Limit } = require("../../infra/database/models");

/**
 * Converts 1 GB of a portal's extendable-storage allowance into extra storage.
 * @param {{contractAddress: string}} params - Portal to extend.
 * @throws {Error} when the contract has no limit row or no allowance left.
 * @returns {Promise<boolean>} true on success.
 */
async function extendStorage({ contractAddress }) {
  const limit = await Limit.findOne({ contractAddress });

  // A missing row previously crashed on property access below.
  if (!limit) {
    throw new Error("Contract not found");
  }
  // Previously `limit.extendableStorage && limit.extendableStorage <= 0`
  // skipped the guard when the allowance was exactly 0 (falsy) or unset,
  // letting the balance be decremented below zero.
  if (!limit.extendableStorage || limit.extendableStorage <= 0) {
    throw new Error("No storage available to extend");
  }

  const resp = await Limit.findOneAndUpdate(
    { contractAddress },
    {
      $inc: {
        extendableStorage: -1000000000,
        extraStorage: 1000000000,
      },
    },
    { new: true }
  );

  if (!resp) {
    throw new Error("Contract not found");
  }

  return true;
}

module.exports = extendStorage;
const { File, Limit } = require("../../infra/database/models");

/**
 * Persists an uploaded file's metadata and bumps the portal's storage use.
 * @returns {Promise<object>} The stored file's safe (public) representation.
 */
async function create({
  fileId,
  chainId,
  fileSize,
  ipfsHash,
  gatewayUrl,
  contractAddress,
  invokerAddress,
  tags,
}) {
  const fileDoc = new File({
    fileId,
    chainId,
    fileSize,
    ipfsHash,
    gatewayUrl,
    contractAddress,
    invokerAddress,
    tags,
  });
  const file = await fileDoc.save();

  // Track cumulative usage per contract; create the limit row on first upload.
  await Limit.updateOne(
    { contractAddress },
    {
      $inc: { storageUse: fileSize },
      $setOnInsert: { contractAddress },
    },
    { upsert: true }
  );
  return file.safeObject();
}

module.exports = create;
12 | } 13 | return ErrorHandler.throwError({ 14 | code: statusCode, 15 | message: `invokerAddress: ${invokerAddress} and contractAddress: ${contractAddress} is not authenticated to make this request.`, 16 | req, 17 | }); 18 | } 19 | } 20 | 21 | module.exports = isAuthenticated; 22 | -------------------------------------------------------------------------------- /src/infra/collaboratorKey.js: -------------------------------------------------------------------------------- 1 | const PortalContract = require('../domain/contract'); 2 | const MemberCreds = require('node-cache'); 3 | const cache = new MemberCreds({ stdTTL: 60 }); 4 | 5 | module.exports = async function collaboratorKey({ contractAddress, invokerAddress, chainId }) { 6 | const network = PortalContract.networkFromChainId(chainId); 7 | const cacheKey = `${contractAddress}_${invokerAddress}`.toLowerCase(); 8 | let editDid = cache.get(cacheKey); 9 | if (!editDid) { 10 | const portalContract = new PortalContract(contractAddress, network); 11 | const keys = await portalContract.getCollaboratorKeys(invokerAddress); 12 | editDid = keys.editDid; 13 | cache.set(cacheKey, editDid); 14 | } 15 | return editDid; 16 | } 17 | -------------------------------------------------------------------------------- /src/infra/logger.js: -------------------------------------------------------------------------------- 1 | const bunyan = require('bunyan'); 2 | const config = require('../../config'); 3 | 4 | const logger = bunyan.createLogger({ 5 | name: config.SERVICE_NAME, 6 | streams: [ 7 | { 8 | level: 'debug', 9 | stream: process.stdout, 10 | }, 11 | { 12 | type: 'rotating-file', 13 | level: 'info', 14 | path: `logs/${config.SERVICE_NAME}-debug.log`, 15 | period: '1d', // daily rotation 16 | count: 10, // keep 10 back copies 17 | }, 18 | { 19 | type: 'rotating-file', 20 | level: 'error', 21 | path: `logs/${config.SERVICE_NAME}-error.log`, 22 | period: '1d', // daily rotation 23 | count: 10, // keep 10 back copies 24 | }, 25 | ], 
const reporter = require('../../infra/reporter');
const { ValidationError } = require('./validator');

/**
 * Final Express error handler: reports every error to Slack (best-effort,
 * never blocks the response), then answers with the error's status code
 * and message. Express identifies it by its 4-argument signature.
 */
// eslint-disable-next-line no-unused-vars
function expressErrorHandler(err, req, res, next) {
  // Error Reporting to Slack
  const errorMessage = `Message: ${err.message}\nError Code: ${err.statusCode || err.code}`;
  reporter.reportError(errorMessage).catch(console.log);
  if (err instanceof ValidationError) {
    return res
      .status(err.statusCode)
      .json({ message: err.message, token: err.token });
  }
  res
    .status(err.code || 500)
    .json({ message: err.message, token: err.token });
  // Removed the trailing `next()`: the response has already been sent, and
  // passing control on afterwards can hand the request to middleware that
  // tries to write to an already-completed response.
}

module.exports = expressErrorHandler;
const { task } = require('../../domain');
const { validator } = require('../middleware');
const { Joi, validate } = validator;

// Identity headers plus the task being completed.
const completeValidation = {
  headers: Joi.object({
    contract: Joi.string().required(),
    invoker: Joi.string().required(),
    chain: Joi.string().required(),
  }).unknown(true),
  body: Joi.object({
    taskId: Joi.string().required(),
  }),
};

/** POST handler: records a client-side task completion for the invoker. */
async function complete(req, res) {
  const { contractAddress, invokerAddress } = req;
  const { taskId } = req.body;
  const success = await task.completeTask({ contractAddress, invokerAddress, taskId });
  res.json({ success });
}

module.exports = [validate(completeValidation), complete];
const config = require('../../../config');

// Network identifier -> name of the config key holding its RPC url.
const RPC_CONFIG_KEYS = {
  eth_goerli: 'ETH_GOERLI_RPC_URL',
  eth_sepolia: 'ETH_SEPOLIA_RPC_URL',
  eth_mainnet: 'ETH_MAINNET_RPC_URL',
  polygon_mainnet: 'POLYGON_MAINNET_RPC_URL',
  gnosis_mainnet: 'GNOSIS_MAINNET_RPC_URL',
  gnosis_testnet: 'GNOSIS_TESTNET_RPC_URL',
};

/** Returns the configured RPC url for a network, or null when unknown. */
function getNetworkUrl(network) {
  return fromNetworks(network);
}

// Table-driven replacement for the original if-chain; hasOwnProperty guard
// keeps prototype keys (e.g. 'toString') resolving to null like before.
function fromNetworks(network) {
  if (!Object.prototype.hasOwnProperty.call(RPC_CONFIG_KEYS, network)) {
    return null;
  }
  return config[RPC_CONFIG_KEYS[network]];
}

module.exports = { getNetworkUrl };
const { log } = require("../../domain");
const { validator } = require("../middleware");
const { Joi, validate } = validator;

// Event name and file id are mandatory; hash and tags are optional extras.
const createValidation = {
  body: Joi.object({
    eventName: Joi.string().required(),
    fileId: Joi.string().required(),
    ipfsHash: Joi.string().optional(),
    tags: Joi.array().optional(),
  }),
};

/** POST handler: records a file event log for the authenticated portal. */
async function create(req, res) {
  const { contractAddress, invokerAddress } = req;
  const { eventName, fileId, ipfsHash, tags } = req.body;
  const payload = {
    fileId,
    contractAddress,
    invokerAddress,
    ipfsHash,
    tags,
  };
  res.json(await log.create(eventName, payload));
}

module.exports = [validate(createValidation), create];
/**
 * PUT /limit/extend handler: grants the portal its extendable extra storage.
 *
 * Responds with { success, message }; domain errors are mapped to a 400 with
 * the error message rather than propagating to the generic error handler.
 * (Removed a leftover `console.log` that dumped request identifiers on every
 * call; `chainId` was only read for that debug line.)
 */
async function extendHandler(req, res) {
  const { contractAddress, invokerAddress } = req;
  try {
    const data = await limit.extendStorage({ contractAddress, invokerAddress });
    return res.json({ success: data, message: 'Storage extended' });
  } catch (error) {
    return res.status(400).json({ success: false, message: error.message });
  }
}
/**
 * Moves a job document into one of the known lifecycle states and persists it.
 * Throws "Invalid job status" when `status` is not a recognized JobConst value.
 */
async function updateJobStatus(job, status) {
  const { Pending, Processing, Completed, Failed } = constants.JobConst.Status;
  const isAllowed = [Pending, Processing, Completed, Failed].includes(status);
  if (!isAllowed) {
    throw new Error('Invalid job status');
  }
  job.status = status;
  job.updatedAt = new Date();
  await job.save();
}
/**
 * POST /upload/public handler: stores a file for an (optionally anonymous)
 * invoker, logs the upload event, and returns the created file record.
 *
 * Upload failures now propagate to the async error middleware. The previous
 * `.catch(console.log)` swallowed the real error and then crashed with a
 * TypeError when reading `createdFile.ipfsHash` on `undefined`.
 */
async function uploadPublicFn(req, res) {
  const { invokerAddress, chainId } = req;
  const { tags } = req.query;

  const createdFile = await upload({
    invokerAddress,
    chainId,
    file: req.files?.file,
    tags,
  });

  await Log.create('upload-public', { invokerAddress, ipfsHash: createdFile.ipfsHash, tags });
  res.json(createdFile);
}
// Fallback base limit (~200 MB) when DEFAULT_STORAGE_LIMIT is not configured;
// keeps this module consistent with getStorageUse's default.
const BASE_STORAGE_LIMIT = 200000000;

/**
 * Recomputes a portal's storage limit from its currently claimable bonuses and
 * persists both the new limit and the map of applied claims.
 *
 * Fixes two issues: `Number(config.DEFAULT_STORAGE_LIMIT)` yielded NaN when the
 * env var was unset (poisoning storageLimit), and `.map` was used purely for
 * side effects.
 *
 * @returns {Promise<Object>} the freshly re-read storage status.
 */
async function claimStorage({ contractAddress, invokerAddress }) {
  const status = await getStorageStatus({
    contractAddress,
    invokerAddress,
    setCache: true,
  });
  const configured = Number(config.DEFAULT_STORAGE_LIMIT);
  let storageLimit = Number.isFinite(configured) ? configured : BASE_STORAGE_LIMIT;
  const claimsMap = {};
  for (const claim of status.claims) {
    if (claim.canClaim) {
      claimsMap[claim.id] = claim.storage;
      storageLimit += claim.storage;
    }
  }
  await Limit.findOneAndUpdate(
    { contractAddress },
    { $set: { storageLimit, claimsMap } },
    { upsert: true }
  );
  // Re-read so the response reflects the freshly persisted limits.
  return getStorageStatus({ contractAddress, invokerAddress, setCache: true });
}
const mongoose = require('mongoose');
const { Schema } = mongoose;

// Per-contract tag-statistics snapshot model.
const _stat = {};

_stat.schema = new Schema({
  contractAddress: {
    type: String,
    lowercase: true,
    required: false,
    index: true,
  },
  tagStats: { type: Schema.Types.Mixed },
  timeStamp: {
    type: Date,
    required: true,
    default: Date.now,
  },
});

// Refresh the timestamp on every save so it reflects the last write.
_stat.schema.pre('save', function (next) {
  this.timeStamp = Date.now();
  next();
});

// Returns only the API-safe subset of this document's fields.
_stat.schema.methods.safeObject = function () {
  const safeFields = ['_id', 'contractAddress', 'tagStats', 'timeStamp'];
  return Object.fromEntries(safeFields.map((field) => [field, this[field]]));
};

_stat.model = mongoose.model('stats', _stat.schema);

module.exports = _stat;
const mongoose = require('mongoose');
const { Schema } = mongoose;

// Per-contract task progress: completed tasks (taskMap) and the current rank.
const _task = {};

_task.schema = new Schema({
  contractAddress: {
    type: String,
    lowercase: true,
    required: true,
    index: true,
  },
  rank: { type: String, default: 'explorer' },
  taskMap: { type: Schema.Types.Mixed },
  timeStamp: {
    type: Date,
    required: true,
    default: Date.now,
  },
});

// Keep the timestamp current on every write.
_task.schema.pre('save', function (next) {
  this.timeStamp = Date.now();
  next();
});

// Returns only the API-safe subset of this document's fields.
_task.schema.methods.safeObject = function () {
  const safeFields = ['_id', 'contractAddress', 'taskMap', 'rank', 'timeStamp'];
  return Object.fromEntries(safeFields.map((field) => [field, this[field]]));
};

// Note: this model is stored in the 'points' collection, not 'tasks'.
_task.model = mongoose.model('points', _task.schema);

module.exports = _task;
/**
 * Fetches up to `limit` jobs that are still eligible for processing: status
 * Pending or Processing with retries below the configured retry limit,
 * oldest-updated first.
 */
async function getAvailableJobs(limit) {
  const { Pending, Processing } = constants.JobConst.Status;
  const query = {
    status: { $in: [Pending, Processing] },
    retries: { $lt: constants.JobConst.RetryLimit },
  };
  return Job.find(query).sort({ updatedAt: 1 }).limit(limit).exec();
}
// Day-granularity date format shared by the grouping and label stages below.
const DAY_FORMAT = "%Y-%m-%d";

/**
 * Builds a per-day, per-event time series of log entries for one file of a
 * portal contract. Returns { contractAddress, fileId, dataPoints } where each
 * data point carries eventName, count, dateLabel, and an epoch-ms timeStamp.
 */
async function getByFile({ contractAddress, fileId }) {
  const address = contractAddress.toLowerCase();
  const pipeline = [
    {
      $match: {
        contractAddress: address,
        fileId,
      },
    },
    {
      $group: {
        // One bucket per (day, eventName) pair.
        _id: {
          time: { $dateToString: { format: DAY_FORMAT, date: "$timeStamp" } },
          eventName: "$eventName",
        },
        eventName: { $first: "$eventName" },
        count: { $sum: 1 },
        dateLabel: {
          $first: {
            $dateToString: { format: DAY_FORMAT, date: "$timeStamp" },
          },
        },
        // Milliseconds since epoch, for client-side sorting/plotting.
        timeStamp: {
          $first: { $subtract: ["$timeStamp", new Date("1970-01-01")] },
        },
      },
    },
    { $sort: { timeStamp: 1 } },
    { $project: { _id: 0 } },
  ];
  const dataPoints = await Log.aggregate(pipeline);
  return { contractAddress: address, fileId, dataPoints };
}
constants.Response.RespMsg.SUCCESS; 17 | resp.data = { 18 | uuid: job.uuid, 19 | status: job.status, 20 | contractAddress: job.contractAddress, 21 | jobData: job.jobData, 22 | }; 23 | res.status(200).send(resp); 24 | } catch (err) { 25 | // Log and send an error response if there is an error in getting the job 26 | const error = "Error in getting job: " + err; 27 | console.log(error); 28 | resp.message = constants.Response.RespMsg.ERROR; 29 | resp.error = error; 30 | res.status(500).send(resp); 31 | } 32 | } 33 | 34 | module.exports = processStatus; -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | # Logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | 8 | # Runtime data 9 | pids 10 | *.pid 11 | *.seed 12 | *.pid.lock 13 | 14 | # Directory for instrumented libs generated by jscoverage/JSCover 15 | lib-cov 16 | 17 | # Coverage directory used by tools like istanbul 18 | coverage 19 | 20 | # nyc test coverage 21 | .nyc_output 22 | 23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 24 | .grunt 25 | 26 | # Bower dependency directory (https://bower.io/) 27 | bower_components 28 | 29 | # node-waf configuration 30 | .lock-wscript 31 | 32 | # Compiled binary addons (https://nodejs.org/api/addons.html) 33 | build/Release 34 | 35 | # Dependency directories 36 | node_modules/ 37 | jspm_packages/ 38 | 39 | # TypeScript v1 declaration files 40 | typings/ 41 | 42 | # Optional npm cache directory 43 | .npm 44 | 45 | # Optional eslint cache 46 | .eslintcache 47 | 48 | # Optional REPL history 49 | .node_repl_history 50 | 51 | # Output of 'npm pack' 52 | *.tgz 53 | 54 | # Yarn Integrity file 55 | .yarn-integrity 56 | 57 | # dotenv environment variables file 58 | production.env 59 | development.env 60 | *.env 61 | 62 | # next.js build output 63 | .next 64 | *.log.* 65 | 
'use strict';

/*
 * This file exports the app that is used by the server to expose the routes.
 * And make the routes visible.
 */

const express = require('express');
const logger = require('morgan');
const cors = require('cors');
const helmet = require('helmet');

const router = require('./interface');
const { errorHandler } = require('./interface/middleware');
const { asyncHandler } = require('./infra/asyncHandler');
const ucan = require('./infra/ucan');

// Express App
const app = express();

// parse application/x-www-form-urlencoded
app.use(express.urlencoded({ extended: false }));

// parse application/json
app.use(express.json());

// Use default logger for now
app.use(logger('combined'));
app.use(cors());
// NOTE(review): CSP and frameguard are disabled here — presumably so portal
// content can be embedded/framed; confirm this is intentional.
app.use(
  helmet({
    contentSecurityPolicy: false,
    frameguard: false,
  }),
);

// UCAN verification is registered before every route below, including /ping.
app.use(asyncHandler(ucan.verify));
// This is to check if the service is online or not
app.use('/ping', function (req, res) {
  res.json({ reply: 'pong' });
  res.end();
});

app.use('/', router);

// Registered after all routes so it receives errors forwarded by the handlers above.
app.use(errorHandler);

// Export the express app instance
module.exports = app;
/**
 * Adds `addedStorage` bytes on top of the portal's current storage limit and
 * persists it, creating the Limit document if it does not exist yet.
 */
async function updateStorageLimit({ contractAddress, addedStorage }) {
  const { storageLimit } = await getStorageUse({ contractAddress });
  const update = { $set: { storageLimit: storageLimit + addedStorage } };
  await Limit.findOneAndUpdate({ contractAddress }, update, { upsert: true });
}
/**
 * POST /upload/comment handler: validates a JSON comment payload and stores it.
 *
 * Fixes: guards the missing-file case (previously `file.mimetype` threw a
 * TypeError on undefined), turns malformed JSON into a 400 instead of an
 * unhandled throw, and lets upload failures propagate instead of responding
 * with `undefined` after `.catch(console.log)`.
 */
async function uploadCommentFn(req, res) {
  const file = req.files?.file;
  if (!file || file.mimetype !== "application/json") {
    return ErrorHandler.throwError({
      code: 400,
      message: `File must be a JSON file`,
      req,
    });
  }

  // Malformed JSON is a client error, not a server crash.
  let jsonData;
  try {
    jsonData = JSON.parse(file.data.toString());
  } catch (parseError) {
    return ErrorHandler.throwError({
      code: 400,
      message: `File must contain valid JSON`,
      req,
    });
  }

  const { error } = commentSchema.validate(jsonData);
  if (error) {
    return ErrorHandler.throwError({
      code: 400,
      message: error.details[0].message,
      req,
    });
  }

  const createdFile = await upload({ file });
  res.json(createdFile);
}
/**
 * Builds per-day, per-event analytics for all logs of a portal contract, plus
 * total download and view counts.
 *
 * Fixes: the two count queries are independent, so they run in parallel, and
 * the deprecated Mongoose `count()` is replaced with `countDocuments()`.
 *
 * @returns {Promise<Object>} { contractAddress, dataPoints, downloads, views }
 */
async function getByContract({ contractAddress }) {
  contractAddress = contractAddress.toLowerCase();
  const res = await Log.aggregate([
    {
      $match: {
        contractAddress,
      },
    },
    {
      $group: {
        // One bucket per (day, eventName) pair.
        _id: {
          time: { $dateToString: { format: "%Y-%m-%d", date: "$timeStamp" } },
          eventName: "$eventName",
        },
        eventName: { $first: "$eventName" },
        count: { $sum: 1 },
        dateLabel: {
          $first: {
            $dateToString: { format: "%Y-%m-%d", date: "$timeStamp" },
          },
        },
        // Milliseconds since epoch, for sorting/plotting.
        timeStamp: {
          $first: { $subtract: ["$timeStamp", new Date("1970-01-01")] },
        },
      },
    },
    { $sort: { timeStamp: 1 } },
    {
      $project: {
        _id: 0,
      },
    },
  ]);

  const [downloads, views] = await Promise.all([
    Log.find({ eventName: "download", contractAddress }).countDocuments(),
    Log.find({ eventName: "view", contractAddress }).countDocuments(),
  ]);

  return { contractAddress, dataPoints: res, downloads, views };
}
=> elem.taskId); 15 | if (!onchainTaskIds.includes(taskId)) return false; 16 | let status = false; 17 | if (taskId === "OWN_ENS_DOMAIN") { 18 | status = await ownsENSHandle(invokerAddress); 19 | } 20 | if (taskId === "OWN_FARCASTER_HANDLE") { 21 | status = await ownsFarcasterHandle(invokerAddress); 22 | } 23 | if (taskId === "OWN_LENS_HANDLE") { 24 | status = await ownsLensHandle(invokerAddress); 25 | } 26 | if (taskId === "OWN_SAFE_MULTISIG") { 27 | status = await ownsSafeMultiSig(invokerAddress); 28 | } 29 | if (taskId === "OWN_GITCOIN_PASSPORT") { 30 | status = await ownsGitcoinPassport(invokerAddress); 31 | } 32 | if (!status) return false; 33 | const taskStatus = await Task.findOne({ contractAddress }).lean(); 34 | const taskMap = (taskStatus && taskStatus.taskMap) || {}; 35 | taskMap[taskId] = invokerAddress; 36 | await Task.findOneAndUpdate( 37 | { contractAddress }, 38 | { $set: { taskMap } }, 39 | { upsert: true } 40 | ); 41 | return true; 42 | } 43 | 44 | module.exports = verifyTask; 45 | -------------------------------------------------------------------------------- /src/infra/database/index.js: -------------------------------------------------------------------------------- 1 | // Bring Mongoose into the app 2 | const mongoose = require('mongoose'); 3 | const config = require('../../../config'); 4 | const logger = require('../logger'); 5 | 6 | // Build the connection string 7 | const dbURI = config.MONGOURI || 'mongodb://localhost/boilerplate_graphql'; 8 | 9 | // Create the database connection 10 | mongoose.connect(dbURI, (err) => { 11 | if (err) { 12 | logger.info('DB Error: ', err); 13 | throw err; 14 | } else { 15 | logger.info(dbURI); 16 | logger.info('MongoDB Connected'); 17 | } 18 | }); 19 | 20 | // CONNECTION EVENTS 21 | // When successfully connected 22 | mongoose.connection.on('connected', function () { 23 | logger.info('Mongoose default connection open to ' + dbURI); 24 | }); 25 | 26 | // If the connection throws an error 27 | 
mongoose.connection.on('error', function (err) { 28 | logger.info('Mongoose default connection error: ' + err); 29 | }); 30 | 31 | // When the connection is disconnected 32 | mongoose.connection.on('disconnected', function () { 33 | logger.info('Mongoose default connection disconnected'); 34 | }); 35 | 36 | // If the Node process ends, close the Mongoose connection 37 | process.on('SIGINT', function () { 38 | mongoose.connection.close(function () { 39 | logger.info( 40 | 'Mongoose default connection disconnected through app termination', 41 | ); 42 | throw new Error( 43 | 'Mongoose default connection disconnected through app termination', 44 | ); 45 | }); 46 | }); 47 | -------------------------------------------------------------------------------- /src/interface/portal/indexPortal/enable.js: -------------------------------------------------------------------------------- 1 | const Job = require('../../../domain/jobs'); 2 | const constants = require('../../../domain/contants'); 3 | const Task = require('../../../domain/task') 4 | 5 | async function enablePortalHadler(req, res) { 6 | let resp = { ...constants.Response.GenericResp }; 7 | const { invokerAddress, contractAddress, chainId } = req; 8 | const { publicLayoutFileId } = req.body; 9 | 10 | const jobBody = { 11 | chainId: chainId, 12 | publicLayoutFileId: publicLayoutFileId, 13 | } 14 | 15 | try { 16 | const newJob = await Job.createJob(constants.JobConst.Type.PublicPortal, jobBody, contractAddress); 17 | resp.message = constants.Response.RespMsg.SUCCESS; 18 | resp.data = { 19 | uuid: newJob.uuid, 20 | status: newJob.status, 21 | contractAddress: newJob.contractAddress, 22 | jobData: newJob.jobData, 23 | }; 24 | const taskId = "EDIT_PUBLIC_PORTAL"; 25 | await Task.completeTask({ contractAddress, invokerAddress, taskId }); 26 | res.status(200).send(resp); 27 | } catch (err) { 28 | // Log and send an error response if there is an error in creating the job 29 | const error = "Error in creating job: " + err; 30 | 
const PassThrough = require('stream').PassThrough;
const request = require('request');
const { Web3Storage } = require('web3.storage');
const IpfsStorageInterface = require('./interface');
const config = require('./../../../config');

// IPFS backend backed by web3.storage; implements the upload/get/remove
// contract declared by IpfsStorageInterface.
class Web3StorageService extends IpfsStorageInterface {
  constructor() {
    super();
    this.client = new Web3Storage({ token: config.WEB3STORAGE_TOKEN });
  }

  // Uploads a single named stream and returns the normalized file record, or
  // null when web3.storage yields no CID.
  // NOTE(review): `attribute` and `filesize` are accepted but unused here —
  // presumably part of the shared backend interface; confirm against the
  // pinata/filebase implementations.
  async upload(readableStreamForFile, { name, attribute, filesize }) {
    const cid = await this.client.put([
      { name, stream: () => readableStreamForFile },
    ]);
    if (!cid) return null;
    const info = await this.client.status(cid);
    return {
      ipfsUrl: `https://w3s.link/ipfs/${cid}/${name}`,
      ipfsHash: `${cid}/${name}`,
      ipfsStorage: 'web3.storage',
      pinSize: info.dagSize,
      timestamp: (new Date(info.created)).getTime(),
    };
  }

  // Streams the file from the w3s.link gateway. Despite its name, `ipfsUrl`
  // here is the `${cid}/${name}` hash path, not a full URL.
  async get({ ipfsUrl }) {
    if (!ipfsUrl) {
      return null;
    }
    const authenticUrl = `https://w3s.link/ipfs/${ipfsUrl}`;
    const ipfsStream = new PassThrough();
    request(authenticUrl).pipe(ipfsStream);
    return ipfsStream;
  }

  // Deletes by root CID — the part of `ipfsHash` before the first '/'.
  async remove({ ipfsHash }) {
    if (!ipfsHash) {
      return null;
    }
    const hashes = ipfsHash.split('/');
    return await this.client.delete(hashes[0]);
  }
}

module.exports = Web3StorageService;
const mongoose = require('mongoose');
const { Schema } = mongoose;

// Public-portal snapshot: the file list plus resolved content/metadata for one
// (contractAddress, fileId) pair.
const _portal = {};

_portal.schema = new Schema({
  contractAddress: {
    type: String,
    lowercase: true,
    required: true,
    index: true,
  },
  fileId: {
    type: String,
    required: true,
    index: true,
  },
  files: {
    type: Array,
    default: [],
    required: true,
  },
  resolvedContent: {
    type: JSON,
    required: true,
  },
  resolvedMetadata: {
    type: JSON,
    required: true,
  },
  createdAt: {
    type: Date,
    default: Date.now,
    required: true,
  },
  updatedAt: {
    type: Date,
    default: Date.now,
    required: true,
  },
});

// Returns only the API-safe subset of this document's fields.
_portal.schema.methods.safeObject = function () {
  const safeFields = [
    '_id',
    'contractAddress',
    'fileId',
    'files',
    'resolvedContent',
    'resolvedMetadata',
    'createdAt',
    'updatedAt',
  ];
  return Object.fromEntries(safeFields.map((field) => [field, this[field]]));
};

_portal.model = mongoose.model('portal', _portal.schema);

module.exports = _portal;
const mongoose = require('mongoose');
const { Schema } = mongoose;

const _file = {};

// Mongoose schema describing one uploaded file: who uploaded it, which
// portal/chain it belongs to, where it lives on IPFS, and its tags.
_file.schema = new Schema({
  invokerAddress: { type: String, index: true },
  contractAddress: { type: String, lowercase: true, required: false, index: true },
  gatewayUrl: { type: String, default: null, required: false },
  fileId: { type: String },
  chainId: { type: String },
  ipfsHash: { type: String },
  fileSize: { type: Number },
  tags: { type: [String], index: true, default: [] },
  namespace: { type: String, default: null, required: false },
  timeStamp: { type: Date, required: true, default: Date.now },
});

// Refresh the timestamp on every save.
_file.schema.pre('save', function (next) {
  this.timeStamp = Date.now();
  next();
});

// Whitelisted projection safe to expose to API consumers.
// NOTE(review): chainId is absent from this whitelist in the original —
// preserved as-is; confirm whether that omission is intentional.
_file.schema.methods.safeObject = function () {
  const safeFields = [
    '_id',
    'invokerAddress',
    'contractAddress',
    'gatewayUrl',
    'fileId',
    'ipfsHash',
    'fileSize',
    'tags',
    'namespace',
    'timeStamp',
  ];
  const projection = {};
  for (const field of safeFields) {
    // eslint-disable-next-line security/detect-object-injection
    projection[field] = this[field];
  }
  return projection;
};

_file.model = mongoose.model('files', _file.schema);

module.exports = _file;
const config = require('../../../config');
const S3 = require('aws-sdk/clients/s3');

/**
 * Thin wrapper around the AWS S3 client, used as a file cache backend.
 * Credentials and bucket details come from the application config.
 */
class S3Service {
  constructor() {
    this.accessKeyId = config.S3_ACCESS_KEY_ID;
    this.secretAccessKey = config.S3_SECRET_ACCESS_KEY;
    this.bucketName = config.S3_BUCKET_NAME;
    this.region = config.S3_BUCKET_REGION;
    this.s3 = new S3({
      accessKeyId: this.accessKeyId,
      secretAccessKey: this.secretAccessKey,
      bucketName: this.bucketName,
      region: this.region,
    });
  }

  // Stub: cache-queuing is not implemented; callers always get an empty URL.
  queue(file) {
    return { cachedUrl: '' };
  }

  // Normalise an S3 upload result into the shape callers expect.
  formatFile(file) {
    const { key } = file;
    return {
      s3Key: key,
      s3Url: `https://${config.S3_BUCKET_NAME}/${key}`,
    };
  }

  /**
   * Upload raw data to S3 under `<base>/<name>` and return its key/url.
   */
  async upload(data, { name, mimetype, base = 'files' }) {
    const uploaded = await this.s3
      .upload({
        Body: data,
        Key: `${base}/${name}`,
        ContentType: mimetype,
        Bucket: this.bucketName,
      })
      .promise();
    return this.formatFile(uploaded);
  }

  // Fetch an object's body by key; null when no key is given.
  async get({ s3Key }) {
    if (!s3Key) {
      return null;
    }
    const object = await this.s3
      .getObject({ Key: s3Key, Bucket: this.bucketName })
      .promise();
    return object && object.Body;
  }

  // Delete an object by key.
  async remove({ s3Key }) {
    await this.s3
      .deleteObject({ Key: s3Key, Bucket: this.bucketName })
      .promise();
  }
}

module.exports = S3Service;
const config = require("../../../../config");
const mongoose = require("mongoose");
const { Schema } = mongoose;

const _limit = {};

// Per-portal storage accounting: base allowance, bytes used, and extra
// storage earned through claims.
_limit.schema = new Schema({
  contractAddress: { type: String, lowercase: true, required: false, index: true },
  invokerAddress: { type: String, lowercase: true, required: false, index: true, default: null },
  // Base allowance in bytes; falls back to 200MB when not configured.
  storageLimit: { type: Number, default: config.DEFAULT_STORAGE_LIMIT || 200000000 },
  storageUse: { type: Number, default: 0 },
  // Additional storage granted via claims.
  extraStorage: { type: Number, default: 0 },
  extendableStorage: { type: Number, default: 1000000000 }, // 1GB
  unit: { type: String, default: "bytes" },
  // Map of claim id -> claim record for claims already redeemed.
  claimsMap: { type: Schema.Types.Mixed },
  timeStamp: { type: Date, required: true, default: Date.now },
});

// Refresh the timestamp on every save.
_limit.schema.pre("save", function (next) {
  this.timeStamp = Date.now();
  next();
});

// Whitelisted projection safe to return to clients.
_limit.schema.methods.safeObject = function () {
  const safeFields = [
    "_id",
    "contractAddress",
    "storageLimit",
    "timeStamp",
    "extraStorage",
  ];
  const projection = {};
  for (const field of safeFields) {
    // eslint-disable-next-line security/detect-object-injection
    projection[field] = this[field];
  }
  return projection;
};

_limit.model = mongoose.model("limits", _limit.schema);

module.exports = _limit;
const getStorageUse = require("../../domain/limit/getStorageUse");
const reporter = require("../../infra/reporter");

// True when the portal/invoker has consumed its full storage allowance
// (base limit plus claimed extras). With neither address present there is
// nothing to check, so the limit is treated as not breached.
async function checkStorageLimit(contractAddress, invokerAddress) {
  if (!contractAddress && !invokerAddress) {
    return false;
  }
  const { storageLimit, extraStorage, storageUse } = await getStorageUse({
    contractAddress,
    invokerAddress,
  });
  return storageUse >= storageLimit + extraStorage;
}

// Consume any unread request body before responding, so a mid-upload
// client connection is not left hanging when we reject the request.
function drainReq(req, res, statusCode, message) {
  const respond = () => res.status(statusCode).json({ error: message });
  if (!req.readable) {
    respond();
    return;
  }
  req.resume();
  req.on("end", respond);
}

/**
 * Express middleware gating uploads: rejects unauthenticated requests and
 * requests from portals that have exhausted their storage allowance.
 */
async function canUpload(req, res, next) {
  const { invokerAddress, contractAddress } = req;

  if (!req.isAuthenticated) {
    // 403 when we know who is asking, 401 when we don't.
    const statusCode = invokerAddress ? 403 : 401;
    const message = `invokerAddress: ${invokerAddress} does not have permission to upload file for subdomain: ${contractAddress}`;
    reporter.reportError(message).catch(console.log);
    drainReq(req, res, statusCode, message);
    return;
  }

  const storageLimitBreached = await checkStorageLimit(
    contractAddress,
    invokerAddress
  );
  if (storageLimitBreached) {
    const statusCode = 400;
    const message = `Storage for ${contractAddress} is full, please either claim more storage or contact us on twitter @fileverse`;
    reporter.reportError(message).catch(console.log);
    drainReq(req, res, statusCode, message);
    return;
  }

  next();
}

module.exports = canUpload;
const config = require("../../../config");
const { Limit } = require("../../infra/database/models");
const { claims } = require("./claim");

const NodeCache = require("node-cache");
// Formatted claim lists cached per invoker+contract pair (5 minute TTL).
const storageClaimChache = new NodeCache({ stdTTL: 300 });

/**
 * Build the list of storage claims, annotated with claimed/canClaim status
 * for the given invoker/portal pair. Results are cached for 5 minutes
 * unless removeCache forces a refresh.
 *
 * @param {string} invokerAddress
 * @param {string} contractAddress
 * @param {Object|undefined} claimsMap - claim id -> record for already-redeemed claims.
 * @param {boolean} removeCache - bypass and refresh the cache when true.
 * @returns {Promise<Array<Object>>} formatted claim descriptors.
 */
async function formatClaims(invokerAddress, contractAddress, claimsMap, removeCache = false) {
  const cacheKey = `${invokerAddress}_${contractAddress}`;
  // BUG FIX: the original assigned the freshly computed list to an
  // undeclared `claimData` (implicit global) and returned that, so cache
  // hits returned undefined/stale data. Use `claimsData` consistently.
  let claimsData = storageClaimChache.get(cacheKey);
  if (!claimsData || removeCache) {
    const promises = claims.map(async (elem) => {
      const object = {
        id: elem.id,
        name: elem.name,
        logo: elem.logo,
        storage: elem.storage,
        unit: elem.unit,
        type: elem.type,
        enabled: elem.enabled,
        claimed: Boolean(claimsMap && claimsMap[elem.id]),
      };
      // A failing claim-check must not break the whole status call.
      object.canClaim = await elem
        .canClaim({ invokerAddress, contractAddress })
        .catch((error) => {
          console.log(error);
          return false;
        });
      return object;
    });
    claimsData = await Promise.all(promises);
    storageClaimChache.set(cacheKey, claimsData);
  }
  return claimsData;
}

/**
 * Report the storage status for a portal: its limit, per-claim status,
 * and how much extra storage can still be unlocked.
 */
async function getStorageStatus({ contractAddress, invokerAddress, setCache = false }) {
  const limit = await Limit.findOne({ contractAddress });

  return {
    contractAddress,
    storageLimit: (limit && limit.storageLimit) || config.DEFAULT_STORAGE_LIMIT,
    claims: await formatClaims(invokerAddress, contractAddress, limit?.claimsMap, setCache),
    extendableStorage: limit?.extendableStorage ?? 10000000,
  };
}

module.exports = getStorageStatus;
const mongoose = require('mongoose');
const { Schema } = mongoose;
const constants = require('../../../domain/contants');
const uuidv4 = require('uuid').v4;


const _job = {};

// Mongoose schema for a background job queued against a portal contract.
_job.schema = new Schema({
  uuid: {
    type: String,
    lowercase: true,
    required: true,
    index: true,
    // BUG FIX: pass the generator function itself, not its result.
    // `default: uuidv4()` was evaluated once at schema-definition time,
    // so every job that relied on the default shared the same uuid.
    default: uuidv4,
  },
  contractAddress: {
    type: String,
    required: true,
    index: true,
  },
  jobType: {
    type: String,
    required: true,
    index: true,
  },
  retries: {
    type: Number,
    default: 0,
    // BUG FIX: `Range: {min, max}` is not a mongoose option and was silently
    // ignored; use the built-in Number min/max validators instead.
    min: 0,
    max: constants.JobConst.RetryLimit,
    required: true,
  },
  jobData: {
    type: JSON,
    required: true,
    default: {},
  },
  status: {
    type: String,
    default: constants.JobConst.Status.Pending,
    required: true,
    index: true,
  },
  createdAt: {
    type: Date,
    default: Date.now,
    required: true,
  },
  updatedAt: {
    type: Date,
    default: Date.now,
    required: true,
  },
});

// Whitelisted projection safe to return to clients
// (contractAddress is intentionally excluded).
_job.schema.methods.safeObject = function () {
  const safeFields = [
    '_id',
    'uuid',
    'jobType',
    'retries',
    'jobData',
    'status',
    'createdAt',
    'updatedAt',
  ];
  const newSafeObject = {};
  safeFields.forEach((elem) => {
    // eslint-disable-next-line security/detect-object-injection
    newSafeObject[elem] = this[elem];
  });
  return newSafeObject;
};

_job.model = mongoose.model('job', _job.schema);

module.exports = _job;
const { getFileVisibility } = require('../../domain/file/utils');
const File = require('../../domain/file');

// Template for every response body; handlers spread-copy it before use.
const genericResp = {
  message: "",
  data: [],
  error: null
}

// Project each file down to the metadata fields clients are allowed to see.
function getResponse(files) {
  return files.map((file) => {
    const { ipfsHash, gatewayUrl, fileId, chainId, contractAddress } = file;
    const visibility = getFileVisibility(file);

    return { ipfsHash, gatewayUrl, visibility, fileId, chainId, contractAddress };
  });
}

/**
 * GET handler: look up a single file by its ipfsHash query parameter.
 * 400 when ipfsHash is missing; otherwise 200 with data or a not-found message.
 */
async function getUniqueFile(req, resp) {
  const { ipfsHash } = req.query;
  const response = { ...genericResp };
  if (!ipfsHash) {
    response.error = 'ipfsHash is required';
    return resp.status(400).json(response);
  }
  try {
    const file = await File.findOne(ipfsHash);
    if (!file) {
      response.message = 'no files found for given ipfsHash';
      return resp.status(200).json(response);
    }
    response.data = getResponse([file]);
    response.message = "SUCCESS";
  } catch (error) {
    response.error = error.message;
  }

  resp.status(200).json(response);
}

/**
 * GET handler: list files for the authenticated invoker.
 * 401 when unauthenticated; 400 when no files were found.
 */
async function fileList(req, resp) {
  const response = { ...genericResp };

  if (!req.isAuthenticated) {
    response.error = 'UNAUTHORISED REQUEST';
    return resp.status(401).json(response);
  }
  const { invokerAddress } = req.query;

  // CONSISTENCY FIX: mirror getUniqueFile's error handling — previously an
  // exception from File.findAll escaped this handler unhandled.
  try {
    const files = await File.findAll(invokerAddress);
    if (!files) {
      response.message = 'no files found for given invokerAddress'
      return resp.status(400).json(response);
    }
    response.data = getResponse(files);
    response.message = "SUCCESS";
  } catch (error) {
    response.error = error.message;
  }
  resp.status(200).json(response);
}

module.exports = { fileList, getUniqueFile };
const { Readable } = require('stream');
const File = require('./file');
const Cache = require('./cache');
const { GetIpfsService } = require('./ipfs');
const cache = new Cache();

/**
 * Uploads a file to IPFS and saves its metadata to the database.
 * @param {Object} options - The upload options.
 * @param {string} options.fileId - The ID of the file.
 * @param {string} options.chainId - The ID of the blockchain.
 * @param {string} options.contractAddress - The address of the smart contract.
 * @param {Object} options.file - The file object containing name, mimetype, and data.
 * @param {string} options.invokerAddress - The address of the invoker.
 * @param {Array} options.tags - The tags associated with the file.
 * @param {string} [options.namespace] - Optional storage namespace the file belongs to.
 * @returns {Object} - The uploaded file metadata.
 */
async function upload({ fileId, chainId, contractAddress, file, invokerAddress, tags, namespace }) {
  // Extract file metadata
  const { name, mimetype, data } = file;

  // Create a readable stream from file data
  const stream = Readable.from(data);
  stream.path = name;
  // Calculate file size
  const filesize = data.length;

  console.log('Uploading file:', mimetype, ", size:", filesize, ", bytes from contract:", contractAddress, ", invoker:", invokerAddress);
  // Upload file to IPFS
  const ipfsFile = await GetIpfsService().upload(stream, { name, filesize });

  // Queue file for caching
  const cachedFile = await cache.queue(ipfsFile);

  // Add file metadata to the database.
  // BUG FIX: `namespace` was accepted by this function but never persisted,
  // even though the file schema defines a `namespace` field — pass it through.
  await File.create({
    chainId,
    fileId,
    ipfsHash: ipfsFile?.ipfsHash,
    gatewayUrl: ipfsFile?.ipfsUrl,
    contractAddress,
    invokerAddress,
    fileSize: ipfsFile?.pinSize,
    tags: tags || [],
    namespace,
  });

  // Return uploaded file metadata
  return {
    ipfsUrl: ipfsFile?.ipfsUrl,
    ipfsHash: ipfsFile?.ipfsHash,
    ipfsStorage: ipfsFile?.ipfsStorage,
    cachedUrl: cachedFile?.cachedUrl,
    fileSize: ipfsFile?.pinSize,
    mimetype,
    fileId,
    contractAddress,
  };
}

module.exports = upload;
const qs = require('querystring');
const mime = require('mime-types');
const File = require('../../domain/file')
const { content } = require('../../domain');
const { validator } = require('../middleware');
const { Joi, validate } = validator;

const Log = require('../../domain/log');

// Request validation: contract header plus the file-identifying query params.
const contentValidation = {
  headers: Joi.object({
    contract: Joi.string().required(),
  }).unknown(true),
  query: Joi.object({
    ipfsHash: Joi.string().required(),
    fileId: Joi.string().required(),
    filename: Joi.string().required(),
    mimetype: Joi.string().required(),
    download: Joi.boolean().optional(),
  }),
};

/**
 * Handles the request and response for retrieving content.
 * Streams the IPFS content back to the client, logging either a
 * 'download' or 'view' event depending on the `download` query flag.
 * @param {Object} req - The request object.
 * @param {Object} res - The response object.
 */
async function contentFn(req, res) {
  // Extract the contract from the request headers
  const { contract } = req.headers;
  // Extract the necessary query parameters from the request
  const { ipfsHash, fileId, filename, mimetype, download } = req.query;
  // Retrieve the content stream using the ipfsHash
  const { contentStream } = await content(ipfsHash);

  // Set the response headers
  const header = {
    'Content-Type': mimetype,
  };

  // FIX: removed a stray `console.log(contentStream)` debug statement that
  // dumped the raw stream object into the logs on every request.
  if (download) {
    // Set Content-Disposition so the browser downloads rather than renders.
    header['Content-Disposition'] = `attachment; filename="${qs.escape(
      filename,
    )}"`;
    // Log the download event
    await Log.create('download', { contractAddress: contract, fileId, ipfsHash });
  } else {
    // Log the view event
    await Log.create('view', { contractAddress: contract, fileId, ipfsHash });
  }
  // Write the response headers and pipe the content stream to the response
  res.writeHead(200, header);
  contentStream.pipe(res);
}

module.exports = [validate(contentValidation), contentFn];
const PassThrough = require('stream').PassThrough;
const request = require('request');
const config = require('../../../config');
const pinataSDK = require('@pinata/sdk');
const IpfsStorageInterface = require('./interface');


/**
 * IPFS storage backend backed by the Pinata pinning service.
 * Implements the shared IpfsStorageInterface (upload / get / remove).
 */
class Pinata extends IpfsStorageInterface {
  constructor() {
    super();
    this.apiKey = config.PINATA_API_KEY;
    this.secretApiKey = config.PINATA_SECRET_KEY;
    this.pinataGateway = config.PINATA_GATEWAY || 'https://ipfs.fileverse.io/ipfs';
    this.pinata = pinataSDK(this.apiKey, this.secretApiKey);
  }

  // Normalise a Pinata pin result into the shape shared by all ipfs backends.
  formatFile(file) {
    const { IpfsHash, PinSize, Timestamp } = file;
    return {
      ipfsUrl: `${this.pinataGateway}/${IpfsHash}`,
      ipfsHash: IpfsHash,
      ipfsStorage: 'pinata',
      pinSize: PinSize,
      timestamp: Timestamp,
    };
  }

  /**
   * Pin a readable stream to IPFS via Pinata, attaching any attributes
   * as pinata metadata key/values. Rethrows upload failures after logging.
   */
  async upload(readableStreamForFile, { name, attributes, filesize }) {
    const keyvalues = {};
    for (const attribute of attributes || []) {
      keyvalues[attribute.trait_type] = attribute.value;
    }
    const options = {
      pinataMetadata: { name, keyvalues },
      pinataOptions: { cidVersion: 0 },
    };

    try {
      console.time("Upload to Pinata duration");
      const pinned = await this.pinata.pinFileToIPFS(
        readableStreamForFile,
        options,
      );
      console.timeEnd("Upload to Pinata duration");
      return this.formatFile(pinned);
    } catch (e) {
      console.log("error while uploading to pinata", e);
      throw e;
    }
  }

  // Stream the pinned content back from the gateway URL; null when absent.
  async get({ ipfsUrl }) {
    if (!ipfsUrl) {
      return null;
    }
    const ipfsStream = new PassThrough();
    request(ipfsUrl).pipe(ipfsStream);
    return ipfsStream;
  }

  // Best-effort unpin; failures are logged, never thrown.
  async unPinFile(ipfsHash) {
    try {
      await this.pinata.unpin(ipfsHash);
    } catch (e) {
      console.log(e.reason);
    }
  }

  async remove({ ipfsHash }) {
    if (!ipfsHash) {
      return null;
    }
    return this.unPinFile(ipfsHash);
  }
}

module.exports = Pinata;
const ens = require("./ens");
const lens = require("./lens");
const safe = require("./safe");
const publicFiles = require("./publicFiles");
const whiteboard = require("./whiteboard");
const privateFiles = require("./privateFiles");
const impactDAO = require("./impactDAO");
const members = require("./members");

// Registry of every storage claim a portal can complete for extra storage.
// Each entry: `canClaim` is an async predicate ({invokerAddress, contractAddress}) -> bool,
// `storage` is the reward amount in `unit`, `type` marks whether the claim is
// verified against an external service or internal app activity, and
// `enabled: false` entries are defined but not currently offered.
module.exports = {
  claims: [
    {
      id: "ENS",
      name: "Own an ENS Domain",
      logo: "",
      storage: 100000000,
      unit: "bytes",
      canClaim: ens,
      type: "External",
      enabled: true,
    },
    {
      id: "LENS",
      name: "Own a Lens handle",
      logo: "",
      storage: 100000000,
      unit: "bytes",
      canClaim: lens,
      type: "External",
      enabled: true,
    },
    {
      id: "SAFE",
      name: "Own a Safe multisig",
      logo: "",
      storage: 100000000,
      unit: "bytes",
      canClaim: safe,
      type: "External",
      enabled: true,
    },
    {
      // Disabled: impactDAO's canClaim is currently a stub returning false.
      id: "IMPACTDAO",
      name: "Impact DAO",
      logo: "",
      storage: 100000000,
      unit: "bytes",
      canClaim: impactDAO,
      type: "External",
      enabled: false,
    },
    {
      id: "PUBLIC_FILES",
      name: "Upload 3 Public Files",
      logo: "",
      storage: 200000000,
      unit: "bytes",
      canClaim: publicFiles,
      type: "Internal",
      enabled: true,
    },
    {
      id: "WHITEBOARD",
      name: "Create & Upload 1 Whiteboard",
      logo: "",
      storage: 100000000,
      unit: "bytes",
      canClaim: whiteboard,
      type: "Internal",
      enabled: true,
    },
    {
      id: "PRIVATE_FILES",
      name: "Created 1 Private File",
      logo: "",
      storage: 100000000,
      unit: "bytes",
      canClaim: privateFiles,
      type: "Internal",
      enabled: true,
    },
    {
      // Disabled: members' canClaim is currently a stub returning false.
      id: "MEMBERS",
      name: "Have 3 Portal Members",
      logo: "",
      storage: 100000000,
      unit: "bytes",
      canClaim: members,
      type: "Internal",
      enabled: false,
    },
  ],
};
const abi = require('./abi.json');
const { ethers } = require("ethers");
const provider = require('./provider');

/**
 * Read-only wrapper around a deployed Portal smart contract.
 * Binds the contract ABI to a JSON-RPC provider for the given network.
 */
class PortalContract {
  /**
   * @param {string} contractAddress - deployed portal contract address.
   * @param {string} network - network key understood by provider.getNetworkUrl.
   */
  constructor(contractAddress, network) {
    this.contractAddress = contractAddress;
    this.contractABI = abi;
    this.networkProviderUrl = provider.getNetworkUrl(network);
    this.networkProvider = new ethers.providers.JsonRpcProvider(this.networkProviderUrl);
    this.contractInstance = new ethers.Contract(this.contractAddress, this.contractABI, this.networkProvider);
  }

  // Number of files registered on the portal.
  async getFileCount() {
    return this.contractInstance.getFileCount();
  }

  // On-chain file record by id.
  async getFile(fileId) {
    return this.contractInstance.files(fileId);
  }

  async getCollaboratorList() {
    return this.contractInstance.getCollaborators();
  }

  async getCollaboratorCount() {
    return this.contractInstance.getCollaboratorCount();
  }

  async isCollaborator(address) {
    return this.contractInstance.isCollaborator(address);
  }

  // Returns the on-chain member record for the address.
  async isMember(address) {
    return this.contractInstance.members(address);
  }

  // View/edit DIDs registered for a collaborator.
  async getCollaboratorKeys(address) {
    const keyDetail = await this.contractInstance.collaboratorKeys(address);
    return { account: address, viewDid: keyDetail[0], editDid: keyDetail[1] };
  }

  // Case-insensitive comparison against the contract owner.
  async isOwner(address) {
    const owner = await this.contractInstance.owner();
    return address.toLowerCase() === owner.toLowerCase();
  }

  /**
   * Map a chain id to its provider network key.
   * IDIOM: replaced the original if-chain with a lookup table; behavior is
   * identical, including the 'eth_goerli' fallback for missing/unknown ids.
   */
  static networkFromChainId(chainId) {
    if (!chainId) {
      return 'eth_goerli';
    }
    const networkByChainId = {
      1: 'eth_mainnet',
      5: 'eth_goerli',
      100: 'gnosis_mainnet',
      137: 'polygon_mainnet',
      8420: 'fileverse_testnet',
      10200: 'gnosis_testnet',
      11155111: 'eth_sepolia',
    };
    // eslint-disable-next-line security/detect-object-injection
    return networkByChainId[Number(chainId)] || 'eth_goerli';
  }
}

module.exports = PortalContract;
const config = require("../../../config");
const { Task } = require("../../infra/database/models");
const { tasks, getRank, getStorage } = require("./tasks");

const NodeCache = require("node-cache");
// Formatted task lists cached per invoker+contract pair (5 minute TTL).
const taskChache = new NodeCache({ stdTTL: 300 });

/**
 * Build the task list annotated with completion status for an
 * invoker/portal pair; cached unless removeCache forces a refresh.
 */
async function formatTasks({
  invokerAddress,
  contractAddress,
  taskMap,
  removeCache,
}) {
  const cacheKey = `${invokerAddress}_${contractAddress}`;
  let tasksData = taskChache.get(cacheKey);
  if (!tasksData || removeCache) {
    const promises = tasks.map(async (elem) => {
      const object = {};
      object.taskId = elem.taskId;
      // NOTE(review): activityType mirrors taskId here — confirm it should
      // not come from a distinct field on the task definition.
      object.activityType = elem.taskId;
      object.name = elem.name;
      object.points = elem.points;
      object.type = elem.type;
      object.category = elem.category;
      object.completed = Boolean(taskMap[elem.taskId]);
      return object;
    });
    tasksData = await Promise.all(promises);
    taskChache.set(cacheKey, tasksData);
  }
  return tasksData;
}

/**
 * Aggregate the task list into points totals, the rank implied by those
 * points, whether the user can level up, and storage unlock figures.
 */
async function formatTaskStatus({
  invokerAddress,
  contractAddress,
  taskMap,
  currentRank,
  removeCache,
}) {
  const tasks = await formatTasks({
    invokerAddress,
    contractAddress,
    taskMap,
    removeCache,
  });
  let totalPoints = 0;
  let collectedPoints = 0;
  // IDIOM: use a plain loop for the side-effecting accumulation
  // (the original used `.map` and discarded its result).
  for (const elem of tasks) {
    totalPoints += elem.points;
    if (elem.completed) {
      collectedPoints += elem.points;
    }
  }
  const newRank = getRank({
    collectedPoints,
  });
  // Level-up is possible whenever the recomputed rank differs from the stored one.
  const canLevelUp = currentRank !== newRank;
  const { totalUnlockableStorage, unlockedStorage, storageUnit } = getStorage({
    rank: currentRank,
  });
  return {
    tasks,
    totalPoints,
    collectedPoints,
    rank: currentRank,
    totalUnlockableStorage,
    unlockedStorage,
    storageUnit,
    canLevelUp,
  };
}

/**
 * Public entry point: task status for a portal, defaulting to a fresh
 * (cache-busting) read and the 'explorer' rank when none is stored.
 */
async function getTaskStatus({
  contractAddress,
  invokerAddress,
  setCache = true,
}) {
  const taskStatus = await Task.findOne({ contractAddress });
  const currentRank = (taskStatus && taskStatus.rank) || 'explorer';
  const {
    tasks,
    totalPoints,
    collectedPoints,
    totalUnlockableStorage,
    unlockedStorage,
    storageUnit,
    canLevelUp,
  } = await formatTaskStatus({
    invokerAddress,
    contractAddress,
    taskMap: (taskStatus && taskStatus.taskMap) || {},
    currentRank,
    removeCache: setCache,
  });
  return {
    tasks,
    rank: currentRank,
    canLevelUp,
    totalPoints,
    collectedPoints,
    totalUnlockableStorage,
    unlockedStorage,
    storageUnit,
  };
}

module.exports = getTaskStatus;
const Portal = require('../../../domain/publicPortal');
const constants = require('../../../domain/contants');
const Job = require('../../../domain/jobs');


// Project a portal document into the response shape.
function toPortalResponse(portal) {
  return {
    portalAddress: portal.contractAddress,
    fileId: portal.fileId,
    normalisedFiles: portal.files,
    resolvedContent: portal.resolvedContent,
    resolvedMetadata: portal.resolvedMetadata,
    createdAt: portal.createdAt ? portal.createdAt : null,
    updatedAt: portal.updatedAt ? portal.updatedAt : null,
  };
}

// Project a queue-job document into the response shape.
function toJobResponse(job) {
  return {
    uuid: job.uuid,
    status: job.status,
    jobData: job.jobData,
    createdAt: job.createdAt ? job.createdAt : null,
    updatedAt: job.updatedAt ? job.updatedAt : null,
  };
}

/**
 * GET handler: all indexed portals plus pending indexing jobs for the
 * requesting contract. Always responds 200; NOT_FOUND when both are empty.
 */
async function getAllPortalHandler(req, res) {
  let resp = { ...constants.Response.GenericResp };
  const { contractAddress } = req;

  const portals = await Portal.findAll(contractAddress);
  const respPortals = portals.map(toPortalResponse);

  const queueJobs = await Job.getJobByContractAddress(contractAddress);
  const respJobs = queueJobs.map(toJobResponse);

  resp.data = {
    portals: respPortals,
    queueJobs: respJobs
  }

  if (resp.data.portals.length > 0 || resp.data.queueJobs.length > 0) {
    resp.message = constants.Response.RespMsg.SUCCESS;
  } else {
    resp.message = constants.Response.RespMsg.NOT_FOUND;
    resp.error = "Portal not found for contractAddress: " + contractAddress;
  }


  res.status(200).send(resp);
}

/**
 * GET handler: one portal by fileId; falls back to any queued indexing
 * jobs for that file before reporting NOT_FOUND. Always responds 200.
 */
async function getPortalHandler(req, res) {
  let resp = { ...constants.Response.GenericResp };
  const { contractAddress } = req;
  const { fileId } = req.params;

  const portal = await Portal.findOne(fileId, contractAddress);
  if (portal) {
    resp.message = constants.Response.RespMsg.SUCCESS;
    resp.data = toPortalResponse(portal);
    res.status(200).send(resp);
    return;
  }

  const queueJobs = await Job.getJobByContractAddress(contractAddress);
  if (queueJobs.length > 0) {
    resp.message = constants.Response.RespMsg.SUCCESS;
    // BUG FIX: the original used `.map` with an `if` and no `else`, which
    // filled `data` with `undefined` entries for every non-matching job.
    // Filter to the requested fileId before projecting.
    resp.data = queueJobs
      .filter((job) => job.jobData.fileId === fileId)
      .map(toJobResponse);
    res.status(200).send(resp);
    return;
  }

  resp.message = constants.Response.RespMsg.NOT_FOUND;
  resp.error = "Portal not found for fileId: " + fileId;
  res.status(200).send(resp)
}

module.exports = { getPortalHandler, getAllPortalHandler };
invokerDid, 54 | } 55 | ], 56 | }); 57 | return result.ok; 58 | } catch (error) { 59 | console.error('Error verifying UCAN with contract address:', error); 60 | return false; 61 | } 62 | } 63 | 64 | async function validateInvokerAddress(invokerAddress, token) { 65 | try { 66 | const result = await ucans.verify(token, { 67 | audience: serviceDID, 68 | requiredCapabilities: [ 69 | { 70 | capability: { 71 | with: { scheme: "storage", hierPart: invokerAddress }, 72 | can: { namespace: "file", segments: ["CREATE", "GET"] } 73 | }, 74 | rootIssuer: invokerAddress, 75 | } 76 | ], 77 | }); 78 | return result.ok; 79 | } catch (error) { 80 | console.error('Error verifying UCAN with invoker address:', error); 81 | return false; 82 | } 83 | } 84 | 85 | let verify = async (req, res, next) => { 86 | const contractAddress = req.headers?.contract; 87 | const invokerAddress = req.headers?.invoker; 88 | const chainId = req.headers?.chain; 89 | const namespace = req.headers?.namespace; 90 | 91 | req.requestId = uuidv4(); 92 | req.isAuthenticated = false; 93 | req.invokerAddress = invokerAddress; 94 | req.contractAddress = contractAddress; 95 | req.chainId = chainId; 96 | req.namespace = namespace; 97 | console.log('req.requestId: ', req.requestId); 98 | 99 | // Express headers are auto converted to lowercase 100 | let token = req.headers['authorization']; 101 | if (!token || !invokerAddress) { 102 | return next(); 103 | } 104 | 105 | token = token.startsWith('Bearer ') ? 
/**
 * Marks gamification tasks complete based on the portal's accumulated tag
 * statistics and the tags present on the log entry being processed.
 * Delegates the actual completion to completeMultipleTask; does nothing when
 * no rule matches.
 * @param {Object} args
 * @param {string} args.contractAddress - Portal contract address.
 * @param {string} args.invokerAddress - Address credited with the tasks.
 * @param {string[]} args.currentTags - Tags on the current log entry.
 * @param {Object<string,number>} args.tagStats - Running per-tag counters.
 */
async function completeTasks({
  contractAddress,
  invokerAddress,
  currentTags,
  tagStats,
}) {
  // Each rule: a counter threshold on one tag, an optional co-occurring
  // visibility tag, and the task unlocked. Order matters — taskIds are
  // forwarded in rule order.
  const rules = [
    { tag: "fileverse_files", min: 3, also: "public", taskId: "UPLOAD_PUBLIC_FILE" },
    { tag: "fileverse_files", min: 1, also: "private", taskId: "UPLOAD_PRIVATE_FILE" },
    { tag: "fileverse_documents", min: 1, also: "public", taskId: "PUBLISH_PUBLIC_DDOC" },
    { tag: "fileverse_whiteboard", min: 1, also: "private", taskId: "PUBLISH_PRIVATE_WHITEBOARD" },
    { tag: "fileverse_dpage", min: 1, also: "public", taskId: "PUBLISH_PUBLIC_DPAGE" },
    { tag: "fileverse_chat", min: 1, taskId: "CHAT_ON_FILE" },
    { tag: "fileverse_dpage", min: 3, taskId: "CREATE_PUBLISH_3_DPAGES" },
    { tag: "fileverse_documents", min: 3, taskId: "CREATE_PUBLISH_3_DDOCS" },
    { tag: "fileverse_whiteboard", min: 10, also: "public", taskId: "PUBLISH_10_PUBLIC_WHITEBOARDS" },
    { tag: "fileverse_documents", min: 10, also: "public", taskId: "PUBLISH_10_PUBLIC_DDOCS" },
    { tag: "fileverse_chat", min: 10, taskId: "COMMENT_ON_10_FILES" },
  ];

  const taskIds = rules
    .filter(({ tag, min, also }) =>
      tagStats[tag] >= min &&
      currentTags.includes(tag) &&
      (also === undefined || currentTags.includes(also)))
    .map(({ taskId }) => taskId);

  if (taskIds.length === 0) return;
  await completeMultipleTask({ contractAddress, invokerAddress, taskIds });
}
/**
 * IPFS storage backend backed by Filebase's S3-compatible object store and
 * pinning service. All methods are best-effort: failures are logged and the
 * method resolves to undefined rather than throwing to the caller.
 * @class
 * @extends IpfsStorageInterface
 */
class FileBase extends IpfsStorageInterface {
  /**
   * Reads Filebase credentials and the target bucket from config and
   * prepares the object/pin managers used by the other methods.
   */
  constructor() {
    super();
    const accessKey = config.FILEBASE_ACCESS_KEY;
    const secret = config.FILEBASE_SECRET;
    const bucketName = config.FILEBASE_BUCKET_NAME;

    this.objectManager = new ObjectManager(accessKey, secret, {
      bucket: bucketName
    });

    this.pinManager = new PinManager(accessKey, secret, {
      bucket: bucketName
    });
  }

  /**
   * Uploads a file to IPFS via Filebase.
   * @param {ReadableStream} readableStreamForFile - Readable stream of the file content.
   * @param {Object} options - Upload options.
   * @param {string} options.name - Object name used as the Filebase key.
   * @param {string} options.attribute - File attribute (currently unused by this backend).
   * @param {number} options.filesize - File size in bytes, echoed back as pinSize.
   * @returns {Promise<Object|undefined>} Upload result (ipfsUrl, ipfsHash,
   *   storage type, pinSize, timestamp), or undefined on failure.
   */
  async upload(readableStreamForFile, { name, attribute, filesize }) {
    try {
      const response = await this.objectManager.upload(name, readableStreamForFile);
      const cid = response.cid;

      return {
        ipfsUrl: `https://ipfs.filebase.io/ipfs/${cid}`,
        ipfsHash: `${cid}`,
        ipfsStorage: 'filebase',
        pinSize: filesize,
        timestamp: Date.now(),
      };
    } catch (error) {
      console.error("Error while uploading object to filebase:", error);
    }
  }

  /**
   * Retrieves IPFS content through the Filebase gateway.
   * @param {Object} options
   * @param {string} options.ipfsUrl - Hash/URL of the IPFS content to fetch.
   * @returns {Promise<ReadableStream|undefined>} Readable stream of the
   *   content, or undefined on failure.
   */
  async get({ ipfsUrl }) {
    try {
      const resp = await this.pinManager.download(ipfsUrl, {
        endpoint: "https://ipfs.filebase.io/"
      });

      return Readable.from(resp);
    } catch (error) {
      console.error("Error while getting object from filebase:", error);
    }
  }

  /**
   * Unpins a file from Filebase given its IPFS hash.
   * @param {Object} options
   * @param {string} options.ipfsHash - IPFS hash, possibly of the form "<cid>/<path>".
   * @returns {Promise<*|undefined>} Result of the pin deletion, or undefined on failure.
   */
  async remove({ ipfsHash }) {
    try {
      // Only the root CID is pinned, so strip any "/<path>" suffix.
      const [rootCid] = ipfsHash.split('/');

      // BUGFIX: previously called getRequestId({ hashes }) while the method
      // destructures { ipfsHash }, so the lookup always ran on undefined.
      const requestid = await this.getRequestId({ ipfsHash: rootCid });
      if (!requestid) {
        // BUGFIX: was `throw error(...)` — that called console.error (a
        // no-throw log) and then threw undefined. Throw a real Error so the
        // catch below logs something meaningful.
        throw new Error("No object found for the given IPFS hash");
      }

      return await this.pinManager.delete(requestid);
    } catch (error) {
      console.error("Error while removing object from filebase:", error);
    }
  }

  /**
   * Looks up the Filebase pin request ID for a CID.
   * @param {Object} options
   * @param {string} options.ipfsHash - The IPFS CID to look up.
   * @returns {Promise<string|undefined>} The request ID, or undefined when no
   *   pin exists or the lookup fails.
   */
  async getRequestId({ ipfsHash }) {
    try {
      const resp = await this.pinManager.list({ cid: [ipfsHash] });
      if (resp['count'] == 0) {
        // BUGFIX: previously logged an undeclared `cid` variable, which threw
        // a ReferenceError instead of logging.
        console.log("No pin found with the given cid: ", ipfsHash);
        return;
      }
      return resp['results'][0]['requestid'];
    } catch (error) {
      console.error("Error while getting request ID from filebase:", error);
    }
  }
}

module.exports = FileBase;
// Flattens the section -> files structure of a public layout file into a
// single array of file descriptors.
function extractFilesFromPortal(publicLayoutFile) {
  const sections = publicLayoutFile.sections;

  let resp = [];
  sections.forEach(section => {
    resp = resp.concat(section.files);
  });

  return resp;
}

// Normalises every file referenced by the public layout, resolving each
// file's IPFS hash to a gateway URL (null when the file has no hash).
async function getNormalizedFiles(publicLayoutFile) {
  const files = extractFilesFromPortal(publicLayoutFile);

  const normalisedFiles = [];
  for (const file of files) {
    const ipfsHash = file?.metadata?.ipfsHash;

    const gatewayUrl = ipfsHash ? await HASH.getGatewayUrl(ipfsHash) : null;

    normalisedFiles.push({
      name: file.metadata.name,
      type: file.type,
      mimeType: file.metadata.mimeType,
      ipfsHash: ipfsHash,
      gatewayUrl: gatewayUrl,
      metadata: file.metadata
    });
  }

  return normalisedFiles;
}

// Processes one indexing job end-to-end: reads the public layout file from
// the portal contract, resolves its content/metadata from IPFS, normalises
// the referenced files and upserts the portal record. On any failure the
// job's retry counter is bumped instead of failing the request.
async function processJobs(job) {
  const jobData = job.jobData;
  const contractAddress = job.contractAddress;
  const chainId = jobData.chainId;
  const publicLayoutFileId = jobData.publicLayoutFileId;

  try {
    await Job.updateJobStatus(job, constants.JobConst.Status.Processing);

    // Create an instance of PortalContract with the contract address and chain ID
    const network = PortalContract.networkFromChainId(chainId);
    const portalContract = new PortalContract(contractAddress, network);
    // Get the content hash and metadata hash from the portal contract
    let { metadataIPFSHash, contentIPFSHash } = await portalContract.getFile(publicLayoutFileId);
    // Resolve the content hash to get the actual public layout content
    const publicLayoutContent = await HASH.resolveIpfsHash(contentIPFSHash);

    // Some portals store the metadata hash with a client-side prefix; strip it
    // before resolving.
    if (metadataIPFSHash.startsWith("fileverse_public_portal_metadata_file_")) {
      metadataIPFSHash = metadataIPFSHash.replace("fileverse_public_portal_metadata_file_", "");
    }
    const publicLayoutMetadata = await HASH.resolveIpfsHash(metadataIPFSHash);

    // Get the normalized files from the public layout file
    const normalisedFiles = await getNormalizedFiles(publicLayoutContent);

    // BUGFIX: the previous call used `fileId = publicLayoutFileId` style
    // "named arguments", which JavaScript does not have — each one was an
    // assignment expression that created an implicit global and then passed
    // the value positionally anyway. Pass plain positional arguments.
    await Portal.updateOrCreate(
      publicLayoutFileId,
      contractAddress,
      normalisedFiles,
      publicLayoutContent,
      publicLayoutMetadata
    );

    await Job.updateJobStatus(job, constants.JobConst.Status.Completed);
  }
  catch (err) {
    await Job.updateJobRetries(job);
    console.log("Error in indexing portal files for jobUuid:", job.uuid, "with error: ", err);
  }
}

// Fetches the available jobs and dedupes them so that at most one job — the
// most recently created — survives per (contractAddress, publicLayoutFileId).
async function getJobsToProcess() {
  const jobsToProcess = await Job.getAvailableJobs(constants.JobConst.ProcessLimit);

  const jobMap = {};
  for (const job of jobsToProcess) {
    const jobData = job.jobData;
    const contractAddress = job.contractAddress;
    const publicLayoutFileId = jobData.publicLayoutFileId;

    const jobKey = `${contractAddress}_${publicLayoutFileId}`;

    // Keep only the latest job per key.
    if (jobMap[jobKey]) {
      const existingJob = jobMap[jobKey];
      if (existingJob.createdAt < job.createdAt) {
        jobMap[jobKey] = job;
      }
    } else {
      jobMap[jobKey] = job;
    }
  }

  return Object.values(jobMap);
}

// Cron-triggered endpoint: authenticates via a shared secret header, kicks
// off processing of the queued indexing jobs and replies immediately.
async function triggerJobProcessing(req, res) {
  const CRON_SECRET_KEY = config.CRON_SECRET_KEY;
  const secretKey = req.headers['x-secret-key'];

  if (secretKey !== CRON_SECRET_KEY) {
    res.status(401).send({ error: 'Unauthorized' });
    return;
  }

  const jobsToProcess = await getJobsToProcess();
  const jobProcessPromises = jobsToProcess.map((job) => { return processJobs(job); });
  // Deliberately fire-and-forget so the cron caller gets a fast response.
  // BUGFIX: attach a catch so an unexpected rejection cannot surface as an
  // unhandled promise rejection (processJobs catches its own errors, but
  // Job.updateJobRetries inside its catch block may still reject).
  Promise.all(jobProcessPromises).catch((err) => {
    console.log("Error while processing portal indexing jobs:", err);
  });

  res.status(200).send({});
}

module.exports = triggerJobProcessing;
const getRank = require('./getRank');
const getStorage = require('./getStorage');
const ownsENSHandle = require('./ownsENSHandle');
const ownsFarcasterHandle = require('./ownsFarcasterHandle');
const ownsLensHandle = require('./ownsLensHandle');
const ownsSafeMultiSig = require('./ownsSafeMultiSig');
const ownsGitcoinPassport = require('./ownsGitcoinPassport');

// Task-domain entry point: re-exports the individual task helpers and a
// static catalogue of every gamification task.
// - activityType "CLIENT": completion is reported by the client application.
// - activityType "AUTO": completion is detected server-side (stats or onchain checks).
// - category groups tasks into DISCOVERY, ACHIEVEMENT and ONCHAIN tiers.
module.exports = {
  getRank,
  getStorage,
  ownsENSHandle,
  ownsFarcasterHandle,
  ownsLensHandle,
  ownsSafeMultiSig,
  ownsGitcoinPassport,
  tasks: [
    // --- DISCOVERY tasks ---
    {
      taskId: "EDIT_PUBLIC_PORTAL",
      name: "Edit your public portal",
      activityType: "CLIENT",
      points: 50,
      type: "CREATE",
      category: "DISCOVERY",
    },
    {
      taskId: "PRIVATE_KEY_DOWNLOAD",
      name: "Download your encryption keys",
      activityType: "CLIENT",
      points: 50,
      type: "SECURITY",
      category: "DISCOVERY",
    },
    {
      taskId: "UPLOAD_PUBLIC_FILE",
      name: "Upload 3 public files",
      activityType: "AUTO",
      points: 10,
      type: "UPLOAD",
      category: "DISCOVERY",
    },
    {
      taskId: "UPLOAD_PRIVATE_FILE",
      name: "Upload a private file",
      activityType: "AUTO",
      points: 10,
      type: "UPLOAD",
      category: "DISCOVERY",
    },
    {
      taskId: "PUBLISH_PUBLIC_DPAGE",
      name: "Publish a public dPage",
      activityType: "AUTO",
      points: 10,
      type: "PUBLISH",
      category: "DISCOVERY",
    },
    {
      taskId: "PUBLISH_PRIVATE_WHITEBOARD",
      name: "Publish a private whiteboard",
      activityType: "AUTO",
      points: 10,
      type: "PUBLISH",
      category: "DISCOVERY",
    },
    {
      taskId: "PUBLISH_PUBLIC_DDOC",
      name: "Publish a public dDoc",
      activityType: "AUTO",
      points: 10,
      type: "PUBLISH",
      category: "DISCOVERY",
    },
    {
      taskId: "CHAT_ON_FILE",
      name: "Use chat on a file",
      activityType: "CLIENT",
      points: 10,
      type: "CHAT",
      category: "DISCOVERY",
    },
    {
      taskId: "TRY_LIVE_COLLABORATION",
      name: "Try a live collaboration",
      activityType: "CLIENT",
      points: 10,
      type: "SHARE",
      category: "DISCOVERY",
    },
    {
      taskId: "INVITE_PORTAL_COLLABORATOR",
      name: "Invite a collaborator",
      activityType: "CLIENT",
      points: 10,
      type: "GROUP",
      category: "DISCOVERY",
    },
    // --- ACHIEVEMENT tasks ---
    {
      taskId: "CREATE_TWITTER_POST",
      name: "Share the dPage on X",
      activityType: "CLIENT",
      points: 50,
      type: "CREATE",
      category: "ACHIEVEMENT",
    },
    {
      taskId: "CREATE_PUBLISH_3_DPAGES",
      name: "Create and publish 3 dPages",
      activityType: "AUTO",
      points: 25,
      type: "CREATE",
      category: "ACHIEVEMENT",
    },
    {
      taskId: "CREATE_PUBLISH_3_DDOCS",
      name: "Create and publish 3 dDocs",
      activityType: "AUTO",
      points: 25,
      type: "CREATE",
      category: "ACHIEVEMENT",
    },
    {
      taskId: "COMMENT_ON_10_FILES",
      name: "Comment on 10 Files",
      activityType: "AUTO",
      points: 50,
      type: "CHAT",
      category: "ACHIEVEMENT",
    },
    {
      taskId: "PUBLISH_10_PUBLIC_WHITEBOARDS",
      name: "Publish 10 Public whiteboards",
      activityType: "AUTO",
      points: 50,
      type: "CREATE",
      category: "ACHIEVEMENT",
    },
    {
      taskId: "PUBLISH_10_PUBLIC_DDOCS",
      name: "Publish 10 Public dDocs",
      activityType: "AUTO",
      points: 50,
      type: "CREATE",
      category: "ACHIEVEMENT",
    },
    {
      taskId: "CREATE_LENSTER_POST",
      name: "Share a dPage on Hey",
      activityType: "CLIENT",
      points: 50,
      type: "CREATE",
      category: "ACHIEVEMENT",
    },
    // --- ONCHAIN tasks (entries continue on the following source lines) ---
    {
taskId: "OWN_ENS_DOMAIN", 156 | name: "Own a ENS Domain", 157 | activityType: "AUTO", 158 | points: 50, 159 | type: "ENS", 160 | category: "ONCHAIN", 161 | }, 162 | { 163 | taskId: "OWN_FARCASTER_HANDLE", 164 | name: "Own a Farcaster Handle", 165 | activityType: "AUTO", 166 | points: 50, 167 | type: "FARCASTER", 168 | category: "ONCHAIN", 169 | }, 170 | { 171 | taskId: "OWN_LENS_HANDLE", 172 | name: "Own a Lens Handle", 173 | activityType: "AUTO", 174 | points: 50, 175 | type: "LENS", 176 | category: "ONCHAIN", 177 | }, 178 | { 179 | taskId: "OWN_SAFE_MULTISIG", 180 | name: "Own a Safe Multisig", 181 | activityType: "AUTO", 182 | points: 50, 183 | type: "SAFE", 184 | category: "ONCHAIN", 185 | }, 186 | { 187 | taskId: "OWN_GITCOIN_PASSPORT", 188 | name: "Own a Gitcoin Passport", 189 | activityType: "AUTO", 190 | points: 50, 191 | type: "GITCOIN", 192 | category: "ONCHAIN", 193 | }, 194 | ], 195 | }; 196 | -------------------------------------------------------------------------------- /src/domain/contract/abi.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "inputs": [ 4 | { 5 | "internalType": "string", 6 | "name": "_metadataIPFSHash", 7 | "type": "string" 8 | }, 9 | { 10 | "internalType": "string", 11 | "name": "_ownerViewDid", 12 | "type": "string" 13 | }, 14 | { 15 | "internalType": "string", 16 | "name": "_ownerEditDid", 17 | "type": "string" 18 | }, 19 | { 20 | "internalType": "address", 21 | "name": "owner", 22 | "type": "address" 23 | }, 24 | { 25 | "internalType": "address", 26 | "name": "_trustedForwarder", 27 | "type": "address" 28 | }, 29 | { 30 | "components": [ 31 | { 32 | "internalType": "bytes32", 33 | "name": "portalEncryptionKeyVerifier", 34 | "type": "bytes32" 35 | }, 36 | { 37 | "internalType": "bytes32", 38 | "name": "portalDecryptionKeyVerifier", 39 | "type": "bytes32" 40 | }, 41 | { 42 | "internalType": "bytes32", 43 | "name": "memberEncryptionKeyVerifier", 44 | "type": "bytes32" 45 | }, 
46 | { 47 | "internalType": "bytes32", 48 | "name": "memberDecryptionKeyVerifier", 49 | "type": "bytes32" 50 | } 51 | ], 52 | "internalType": "struct PortalKeyVerifiers.KeyVerifier", 53 | "name": "_keyVerifier", 54 | "type": "tuple" 55 | } 56 | ], 57 | "stateMutability": "nonpayable", 58 | "type": "constructor" 59 | }, 60 | { 61 | "anonymous": false, 62 | "inputs": [ 63 | { 64 | "indexed": true, 65 | "internalType": "address", 66 | "name": "account", 67 | "type": "address" 68 | }, 69 | { 70 | "indexed": true, 71 | "internalType": "address", 72 | "name": "by", 73 | "type": "address" 74 | } 75 | ], 76 | "name": "AddedCollaborator", 77 | "type": "event" 78 | }, 79 | { 80 | "anonymous": false, 81 | "inputs": [ 82 | { 83 | "indexed": true, 84 | "internalType": "uint256", 85 | "name": "fileId", 86 | "type": "uint256" 87 | }, 88 | { 89 | "indexed": false, 90 | "internalType": "string", 91 | "name": "metadataIPFSHash", 92 | "type": "string" 93 | }, 94 | { 95 | "indexed": false, 96 | "internalType": "string", 97 | "name": "contentIPFSHash", 98 | "type": "string" 99 | }, 100 | { 101 | "indexed": false, 102 | "internalType": "string", 103 | "name": "gateIPFSHash", 104 | "type": "string" 105 | }, 106 | { 107 | "indexed": true, 108 | "internalType": "address", 109 | "name": "by", 110 | "type": "address" 111 | } 112 | ], 113 | "name": "AddedFile", 114 | "type": "event" 115 | }, 116 | { 117 | "anonymous": false, 118 | "inputs": [ 119 | { 120 | "indexed": true, 121 | "internalType": "uint256", 122 | "name": "fileId", 123 | "type": "uint256" 124 | }, 125 | { 126 | "indexed": false, 127 | "internalType": "string", 128 | "name": "metadataIPFSHash", 129 | "type": "string" 130 | }, 131 | { 132 | "indexed": false, 133 | "internalType": "string", 134 | "name": "contentIPFSHash", 135 | "type": "string" 136 | }, 137 | { 138 | "indexed": false, 139 | "internalType": "string", 140 | "name": "gateIPFSHash", 141 | "type": "string" 142 | }, 143 | { 144 | "indexed": true, 145 | "internalType": 
"address", 146 | "name": "by", 147 | "type": "address" 148 | } 149 | ], 150 | "name": "EditedFile", 151 | "type": "event" 152 | }, 153 | { 154 | "anonymous": false, 155 | "inputs": [ 156 | { 157 | "indexed": true, 158 | "internalType": "address", 159 | "name": "previousOwner", 160 | "type": "address" 161 | }, 162 | { 163 | "indexed": true, 164 | "internalType": "address", 165 | "name": "newOwner", 166 | "type": "address" 167 | } 168 | ], 169 | "name": "OwnershipTransferStarted", 170 | "type": "event" 171 | }, 172 | { 173 | "anonymous": false, 174 | "inputs": [ 175 | { 176 | "indexed": true, 177 | "internalType": "address", 178 | "name": "previousOwner", 179 | "type": "address" 180 | }, 181 | { 182 | "indexed": true, 183 | "internalType": "address", 184 | "name": "newOwner", 185 | "type": "address" 186 | } 187 | ], 188 | "name": "OwnershipTransferred", 189 | "type": "event" 190 | }, 191 | { 192 | "anonymous": false, 193 | "inputs": [ 194 | { 195 | "indexed": true, 196 | "internalType": "address", 197 | "name": "account", 198 | "type": "address" 199 | } 200 | ], 201 | "name": "RegisteredCollaboratorKeys", 202 | "type": "event" 203 | }, 204 | { 205 | "anonymous": false, 206 | "inputs": [ 207 | { 208 | "indexed": true, 209 | "internalType": "address", 210 | "name": "account", 211 | "type": "address" 212 | }, 213 | { 214 | "indexed": true, 215 | "internalType": "address", 216 | "name": "by", 217 | "type": "address" 218 | } 219 | ], 220 | "name": "RemovedCollaborator", 221 | "type": "event" 222 | }, 223 | { 224 | "anonymous": false, 225 | "inputs": [ 226 | { 227 | "indexed": true, 228 | "internalType": "address", 229 | "name": "account", 230 | "type": "address" 231 | } 232 | ], 233 | "name": "RemovedCollaboratorKeys", 234 | "type": "event" 235 | }, 236 | { 237 | "anonymous": false, 238 | "inputs": [ 239 | { 240 | "indexed": false, 241 | "internalType": "bytes32", 242 | "name": "portalEncryptionKeyVerifier", 243 | "type": "bytes32" 244 | }, 245 | { 246 | "indexed": false, 
247 | "internalType": "bytes32", 248 | "name": "portalDecryptionKeyVerifier", 249 | "type": "bytes32" 250 | }, 251 | { 252 | "indexed": false, 253 | "internalType": "bytes32", 254 | "name": "memberEncryptionKeyVerifier", 255 | "type": "bytes32" 256 | }, 257 | { 258 | "indexed": false, 259 | "internalType": "bytes32", 260 | "name": "memberDecryptionKeyVerifier", 261 | "type": "bytes32" 262 | } 263 | ], 264 | "name": "UpdatedKeyVerifiers", 265 | "type": "event" 266 | }, 267 | { 268 | "anonymous": false, 269 | "inputs": [ 270 | { 271 | "indexed": false, 272 | "internalType": "string", 273 | "name": "metadataIPFSHash", 274 | "type": "string" 275 | }, 276 | { 277 | "indexed": true, 278 | "internalType": "address", 279 | "name": "by", 280 | "type": "address" 281 | } 282 | ], 283 | "name": "UpdatedPortalMetadata", 284 | "type": "event" 285 | }, 286 | { 287 | "inputs": [], 288 | "name": "acceptOwnership", 289 | "outputs": [], 290 | "stateMutability": "nonpayable", 291 | "type": "function" 292 | }, 293 | { 294 | "inputs": [ 295 | { 296 | "internalType": "address", 297 | "name": "collaborator", 298 | "type": "address" 299 | } 300 | ], 301 | "name": "addCollaborator", 302 | "outputs": [], 303 | "stateMutability": "nonpayable", 304 | "type": "function" 305 | }, 306 | { 307 | "inputs": [ 308 | { 309 | "internalType": "string", 310 | "name": "_metadataIPFSHash", 311 | "type": "string" 312 | }, 313 | { 314 | "internalType": "string", 315 | "name": "_contentIPFSHash", 316 | "type": "string" 317 | }, 318 | { 319 | "internalType": "string", 320 | "name": "_gateIPFSHash", 321 | "type": "string" 322 | }, 323 | { 324 | "internalType": "enum FileversePortal.FileType", 325 | "name": "filetype", 326 | "type": "uint8" 327 | }, 328 | { 329 | "internalType": "uint256", 330 | "name": "version", 331 | "type": "uint256" 332 | } 333 | ], 334 | "name": "addFile", 335 | "outputs": [], 336 | "stateMutability": "nonpayable", 337 | "type": "function" 338 | }, 339 | { 340 | "inputs": [ 341 | { 342 | 
"internalType": "address", 343 | "name": "", 344 | "type": "address" 345 | } 346 | ], 347 | "name": "collaboratorKeys", 348 | "outputs": [ 349 | { 350 | "internalType": "string", 351 | "name": "viewDid", 352 | "type": "string" 353 | }, 354 | { 355 | "internalType": "string", 356 | "name": "editDid", 357 | "type": "string" 358 | } 359 | ], 360 | "stateMutability": "view", 361 | "type": "function" 362 | }, 363 | { 364 | "inputs": [ 365 | { 366 | "internalType": "uint256", 367 | "name": "fileId", 368 | "type": "uint256" 369 | }, 370 | { 371 | "internalType": "string", 372 | "name": "_metadataIPFSHash", 373 | "type": "string" 374 | }, 375 | { 376 | "internalType": "string", 377 | "name": "_contentIPFSHash", 378 | "type": "string" 379 | }, 380 | { 381 | "internalType": "string", 382 | "name": "_gateIPFSHash", 383 | "type": "string" 384 | }, 385 | { 386 | "internalType": "enum FileversePortal.FileType", 387 | "name": "filetype", 388 | "type": "uint8" 389 | }, 390 | { 391 | "internalType": "uint256", 392 | "name": "version", 393 | "type": "uint256" 394 | } 395 | ], 396 | "name": "editFile", 397 | "outputs": [], 398 | "stateMutability": "nonpayable", 399 | "type": "function" 400 | }, 401 | { 402 | "inputs": [ 403 | { 404 | "internalType": "uint256", 405 | "name": "", 406 | "type": "uint256" 407 | } 408 | ], 409 | "name": "files", 410 | "outputs": [ 411 | { 412 | "internalType": "string", 413 | "name": "metadataIPFSHash", 414 | "type": "string" 415 | }, 416 | { 417 | "internalType": "string", 418 | "name": "contentIPFSHash", 419 | "type": "string" 420 | }, 421 | { 422 | "internalType": "string", 423 | "name": "gateIPFSHash", 424 | "type": "string" 425 | }, 426 | { 427 | "internalType": "enum FileversePortal.FileType", 428 | "name": "fileType", 429 | "type": "uint8" 430 | }, 431 | { 432 | "internalType": "uint256", 433 | "name": "version", 434 | "type": "uint256" 435 | } 436 | ], 437 | "stateMutability": "view", 438 | "type": "function" 439 | }, 440 | { 441 | "inputs": [], 
442 | "name": "getCollaboratorCount", 443 | "outputs": [ 444 | { 445 | "internalType": "uint256", 446 | "name": "", 447 | "type": "uint256" 448 | } 449 | ], 450 | "stateMutability": "view", 451 | "type": "function" 452 | }, 453 | { 454 | "inputs": [], 455 | "name": "getCollaboratorKeysCount", 456 | "outputs": [ 457 | { 458 | "internalType": "uint256", 459 | "name": "", 460 | "type": "uint256" 461 | } 462 | ], 463 | "stateMutability": "view", 464 | "type": "function" 465 | }, 466 | { 467 | "inputs": [], 468 | "name": "getCollaborators", 469 | "outputs": [ 470 | { 471 | "internalType": "address[]", 472 | "name": "", 473 | "type": "address[]" 474 | } 475 | ], 476 | "stateMutability": "view", 477 | "type": "function" 478 | }, 479 | { 480 | "inputs": [], 481 | "name": "getFileCount", 482 | "outputs": [ 483 | { 484 | "internalType": "uint256", 485 | "name": "", 486 | "type": "uint256" 487 | } 488 | ], 489 | "stateMutability": "view", 490 | "type": "function" 491 | }, 492 | { 493 | "inputs": [ 494 | { 495 | "internalType": "address", 496 | "name": "account", 497 | "type": "address" 498 | } 499 | ], 500 | "name": "isCollaborator", 501 | "outputs": [ 502 | { 503 | "internalType": "bool", 504 | "name": "", 505 | "type": "bool" 506 | } 507 | ], 508 | "stateMutability": "view", 509 | "type": "function" 510 | }, 511 | { 512 | "inputs": [ 513 | { 514 | "internalType": "address", 515 | "name": "forwarder", 516 | "type": "address" 517 | } 518 | ], 519 | "name": "isTrustedForwarder", 520 | "outputs": [ 521 | { 522 | "internalType": "bool", 523 | "name": "", 524 | "type": "bool" 525 | } 526 | ], 527 | "stateMutability": "view", 528 | "type": "function" 529 | }, 530 | { 531 | "inputs": [ 532 | { 533 | "internalType": "uint256", 534 | "name": "", 535 | "type": "uint256" 536 | } 537 | ], 538 | "name": "keyVerifiers", 539 | "outputs": [ 540 | { 541 | "internalType": "bytes32", 542 | "name": "portalEncryptionKeyVerifier", 543 | "type": "bytes32" 544 | }, 545 | { 546 | "internalType": 
"bytes32", 547 | "name": "portalDecryptionKeyVerifier", 548 | "type": "bytes32" 549 | }, 550 | { 551 | "internalType": "bytes32", 552 | "name": "memberEncryptionKeyVerifier", 553 | "type": "bytes32" 554 | }, 555 | { 556 | "internalType": "bytes32", 557 | "name": "memberDecryptionKeyVerifier", 558 | "type": "bytes32" 559 | } 560 | ], 561 | "stateMutability": "view", 562 | "type": "function" 563 | }, 564 | { 565 | "inputs": [], 566 | "name": "metadataIPFSHash", 567 | "outputs": [ 568 | { 569 | "internalType": "string", 570 | "name": "", 571 | "type": "string" 572 | } 573 | ], 574 | "stateMutability": "view", 575 | "type": "function" 576 | }, 577 | { 578 | "inputs": [], 579 | "name": "owner", 580 | "outputs": [ 581 | { 582 | "internalType": "address", 583 | "name": "", 584 | "type": "address" 585 | } 586 | ], 587 | "stateMutability": "view", 588 | "type": "function" 589 | }, 590 | { 591 | "inputs": [], 592 | "name": "pendingOwner", 593 | "outputs": [ 594 | { 595 | "internalType": "address", 596 | "name": "", 597 | "type": "address" 598 | } 599 | ], 600 | "stateMutability": "view", 601 | "type": "function" 602 | }, 603 | { 604 | "inputs": [ 605 | { 606 | "internalType": "string", 607 | "name": "viewDid", 608 | "type": "string" 609 | }, 610 | { 611 | "internalType": "string", 612 | "name": "editDid", 613 | "type": "string" 614 | } 615 | ], 616 | "name": "registerCollaboratorKeys", 617 | "outputs": [], 618 | "stateMutability": "nonpayable", 619 | "type": "function" 620 | }, 621 | { 622 | "inputs": [ 623 | { 624 | "internalType": "address", 625 | "name": "prevCollaborator", 626 | "type": "address" 627 | }, 628 | { 629 | "internalType": "address", 630 | "name": "collaborator", 631 | "type": "address" 632 | } 633 | ], 634 | "name": "removeCollaborator", 635 | "outputs": [], 636 | "stateMutability": "nonpayable", 637 | "type": "function" 638 | }, 639 | { 640 | "inputs": [], 641 | "name": "removeCollaboratorKeys", 642 | "outputs": [], 643 | "stateMutability": "nonpayable", 
644 | "type": "function" 645 | }, 646 | { 647 | "inputs": [], 648 | "name": "renounceOwnership", 649 | "outputs": [], 650 | "stateMutability": "nonpayable", 651 | "type": "function" 652 | }, 653 | { 654 | "inputs": [ 655 | { 656 | "internalType": "address", 657 | "name": "newOwner", 658 | "type": "address" 659 | } 660 | ], 661 | "name": "transferOwnership", 662 | "outputs": [], 663 | "stateMutability": "nonpayable", 664 | "type": "function" 665 | }, 666 | { 667 | "inputs": [ 668 | { 669 | "internalType": "bytes32", 670 | "name": "portalEncryptionKeyVerifier", 671 | "type": "bytes32" 672 | }, 673 | { 674 | "internalType": "bytes32", 675 | "name": "portalDecryptionKeyVerifier", 676 | "type": "bytes32" 677 | }, 678 | { 679 | "internalType": "bytes32", 680 | "name": "memberEncryptionKeyVerifier", 681 | "type": "bytes32" 682 | }, 683 | { 684 | "internalType": "bytes32", 685 | "name": "memberDecryptionKeyVerifier", 686 | "type": "bytes32" 687 | } 688 | ], 689 | "name": "updateKeyVerifiers", 690 | "outputs": [], 691 | "stateMutability": "nonpayable", 692 | "type": "function" 693 | }, 694 | { 695 | "inputs": [ 696 | { 697 | "internalType": "string", 698 | "name": "_metadataIPFSHash", 699 | "type": "string" 700 | } 701 | ], 702 | "name": "updateMetadata", 703 | "outputs": [], 704 | "stateMutability": "nonpayable", 705 | "type": "function" 706 | } 707 | ] --------------------------------------------------------------------------------