├── .nvmrc
├── frontend
├── src
│ ├── index.js
│ ├── components
│ │ ├── Logo.css
│ │ ├── ItemList.css
│ │ ├── Header.css
│ │ ├── Logo.jsx
│ │ ├── Header.jsx
│ │ ├── FileExplorer.css
│ │ ├── Content.css
│ │ ├── PlacementList.jsx
│ │ ├── Sidebar.jsx
│ │ ├── Sidebar.css
│ │ ├── AssignmentList.jsx
│ │ ├── Content.jsx
│ │ ├── ItemList.jsx
│ │ └── FileExplorer.jsx
│ ├── main.jsx
│ ├── App.jsx
│ ├── App.css
│ ├── index.css
│ └── assets
│ │ └── react.svg
├── public
│ ├── arfleet-logo.png
│ └── vite.svg
├── vite.config.js
├── .gitignore
├── index.html
├── .eslintrc.cjs
└── package.json
├── package.json
├── backend
├── resources
│ ├── public
│ │ └── arfleet-logo.png
│ └── logo.txt
├── src
│ ├── arweave
│ │ ├── index.js
│ │ ├── deal.js
│ │ ├── marketplace.js
│ │ ├── passes.js
│ │ └── ao.js
│ ├── .sequelizerc
│ ├── provider
│ │ ├── announce.js
│ │ ├── background
│ │ │ ├── challengesQueue.js
│ │ │ ├── challengeResponse.js
│ │ │ └── decryptChunksQueue.js
│ │ ├── repl.js
│ │ └── index.js
│ ├── utils
│ │ ├── constants.js
│ │ ├── color.js
│ │ ├── backgroundQueue.js
│ │ ├── b64.js
│ │ └── index.js
│ ├── db
│ │ ├── models
│ │ │ ├── Provider.js
│ │ │ ├── index.js
│ │ │ ├── AssignmentChunk.js
│ │ │ ├── Assignment.js
│ │ │ ├── PSPlacement.js
│ │ │ ├── PSPlacementChunk.js
│ │ │ ├── Placement.js
│ │ │ ├── base.js
│ │ │ ├── PlacementChunk.js
│ │ │ └── Chunk.js
│ │ ├── makemigration.js
│ │ ├── index.js
│ │ └── migrate.js
│ ├── client
│ │ ├── apiStore.js
│ │ ├── background
│ │ │ ├── placementChunkQueue.js
│ │ │ ├── providerAnnouncements.js
│ │ │ ├── assignmentQueue.js
│ │ │ └── placementQueue.js
│ │ ├── index.js
│ │ └── deployer.js
│ ├── cmd
│ │ └── index.js
│ ├── encryption
│ │ ├── rsa_keypair.js
│ │ ├── rsa_sign.js
│ │ ├── aes_encrypt.js
│ │ ├── aes_decrypt.js
│ │ ├── rsa_verify.js
│ │ ├── rsa_decrypt.js
│ │ └── rsa_encrypt.js
│ ├── wallet
│ │ └── index.js
│ ├── api
│ │ └── index.js
│ ├── config
│ │ └── index.js
│ └── index.js
└── package.json
├── .gitignore
├── lua
├── libs
│ ├── hex.lua
│ ├── sha256.lua
│ └── base64.lua
├── ArFleetMarketplace.lua
└── ArFleetDeal.lua
├── frontend_old
├── package.json
└── src
│ └── index.js
├── reset
├── arfleet
├── LICENSE.md
├── README.md
└── doc
└── doc.md
/.nvmrc:
--------------------------------------------------------------------------------
1 | 21
--------------------------------------------------------------------------------
/frontend/src/index.js:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "dependencies": {
3 | "cors": "^2.8.5"
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/frontend/public/arfleet-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aoacc/arfleet-js/HEAD/frontend/public/arfleet-logo.png
--------------------------------------------------------------------------------
/backend/resources/public/arfleet-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/aoacc/arfleet-js/HEAD/backend/resources/public/arfleet-logo.png
--------------------------------------------------------------------------------
/frontend/src/components/Logo.css:
--------------------------------------------------------------------------------
1 | .logo {
2 | /* width: 100px; */
3 | height: 50px;
4 | }
5 | .logo img {
6 | /* width: 100%; */
7 | height: 100%;
8 | }
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | backend/node_modules
3 | frontend/node_modules
4 | backend/dist
5 | frontend/dist
6 | .env
7 | testdir
8 | testdir2
9 | _local
10 | .DS_Store
11 |
--------------------------------------------------------------------------------
/frontend/src/components/ItemList.css:
--------------------------------------------------------------------------------
1 | .list-group {
2 | /* no radius */
3 | border-radius: 0 !important;
4 | }
5 |
6 | .list-group-item {
7 | padding: 0.2rem 0.5rem !important;
8 | }
--------------------------------------------------------------------------------
/backend/src/arweave/index.js:
--------------------------------------------------------------------------------
// Arweave SDK client pointed at a local node/gateway (e.g. arlocal, which
// listens on 1984 by default).
// NOTE(review): host/port are hard-coded for local development — confirm
// whether this should come from config for non-dev environments.
const Arweave = require('arweave');

const arweave = Arweave.init({
    host: '127.0.0.1',
    port: 1984,
    protocol: 'http'
});

// Shared singleton: every require of this module gets the same client.
module.exports = arweave;
--------------------------------------------------------------------------------
/frontend/vite.config.js:
--------------------------------------------------------------------------------
1 | import { defineConfig } from 'vite'
2 | import react from '@vitejs/plugin-react'
3 |
4 | // https://vitejs.dev/config/
5 | export default defineConfig({
6 | plugins: [react()],
7 | })
8 |
--------------------------------------------------------------------------------
/frontend/src/components/Header.css:
--------------------------------------------------------------------------------
1 | header {
2 | display: flex;
3 | align-items: flex-start;
4 | justify-content: space-between;
5 |
6 | background-color: rgba(0, 0, 0, 0.1);
7 | padding: 1rem;
8 | color: white;
9 | }
10 |
--------------------------------------------------------------------------------
/backend/src/.sequelizerc:
--------------------------------------------------------------------------------
1 | // .sequelizerc
2 |
3 | const path = require('path');
4 |
5 | module.exports = {
6 | 'models-path': path.resolve('backend', 'src', 'db', 'models'),
7 | 'seeders-path': path.resolve('backend', 'src', 'db', 'seeders'),
8 | 'migrations-path': path.resolve('backend', 'src', 'migrations')
9 | };
--------------------------------------------------------------------------------
/frontend/src/components/Logo.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 | import './Logo.css'
3 |
4 | const Logo = () => {
5 | return (
6 |
7 |

8 |
9 | )
10 | }
11 |
12 | export default Logo
--------------------------------------------------------------------------------
/lua/libs/hex.lua:
--------------------------------------------------------------------------------
-- Decode a hexadecimal string into the raw byte string it represents.
-- Each two-character pair is parsed as one byte.
function HexToBytes(str)
    local bytes = str:gsub('..', function(pair)
        return string.char(tonumber(pair, 16))
    end)
    return bytes -- assign-then-return drops gsub's second (count) result
end

-- Encode a raw byte string as lowercase hexadecimal, two characters per byte.
function BytesToHex(str)
    local hex = str:gsub('.', function(byte)
        return string.format('%02x', string.byte(byte))
    end)
    return hex
end
--------------------------------------------------------------------------------
/frontend/src/components/Header.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 | import Logo from './Logo'
3 |
4 | import './Header.css'
5 |
6 | const Header = () => {
7 | return (
8 |
9 |
10 | {/* Add other header content here */}
11 |
12 | )
13 | }
14 |
15 | export default Header
--------------------------------------------------------------------------------
/frontend/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | pnpm-debug.log*
8 | lerna-debug.log*
9 |
10 | node_modules
11 | dist
12 | dist-ssr
13 | *.local
14 |
15 | # Editor directories and files
16 | .vscode/*
17 | !.vscode/extensions.json
18 | .idea
19 | .DS_Store
20 | *.suo
21 | *.ntvs*
22 | *.njsproj
23 | *.sln
24 | *.sw?
25 |
--------------------------------------------------------------------------------
/frontend/src/main.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 | import ReactDOM from 'react-dom/client'
3 | import App from './App.jsx'
4 | import './index.css'
5 |
6 | import 'bootstrap/dist/css/bootstrap.min.css';
7 | import 'bootstrap/dist/js/bootstrap.bundle.min.js';
8 |
9 | ReactDOM.createRoot(document.getElementById('root')).render(
10 |
11 |
12 | ,
13 | )
14 |
--------------------------------------------------------------------------------
/frontend_old/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "arfleet-frontend",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1",
8 | "client": "node src/index.js client",
9 | "provider": "node src/index.js provider"
10 | },
11 | "author": "",
12 | "license": "ISC",
13 | "dependencies": {
14 | "express": "^4.18.2"
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/backend/src/provider/announce.js:
--------------------------------------------------------------------------------
1 | const config = require('../config');
2 | const marketplace = require('../arweave/marketplace');
3 |
4 | const announce = async(provider, connectionStrings = null) => {
5 | await marketplace.announce(provider, connectionStrings);
6 |
7 | const announcement = await marketplace.getAnnouncement(provider.address);
8 |
9 | console.log("Persisted Announcement:", announcement);
10 | }
11 |
12 | module.exports = {
13 | announce
14 | }
--------------------------------------------------------------------------------
/frontend/src/App.jsx:
--------------------------------------------------------------------------------
1 | import { useState } from 'react'
2 | import './App.css'
3 |
4 | import Header from './components/Header'
5 | import Sidebar from './components/Sidebar'
6 | import Content from './components/Content'
7 |
8 | function App() {
9 | // const []
10 |
11 | return (
12 |
13 |
14 |
15 |
16 |
17 |
18 |
19 | )
20 | }
21 |
22 | export default App
--------------------------------------------------------------------------------
/reset:
--------------------------------------------------------------------------------
# Wipe all local ArFleet state (both roles) and rebuild it from scratch.
rm -rf ~/.arfleet-client
rm -rf ~/.arfleet-provider
# Drop generated migrations so makemigration below starts clean.
rm backend/src/migrations/*
mkdir -p ~/.arfleet-client
mkdir -p ~/.arfleet-provider
# Regenerate the migration from the models, then apply it for both roles.
./arfleet client makemigration
./arfleet client migrate
./arfleet provider migrate
# Restore developer wallets if present in the untracked _local directory.
if [ -f ./_local/keys/client.json ]; then
    cp ./_local/keys/client.json ~/.arfleet-client/wallet.json
fi

if [ -f ./_local/keys/provider.json ]; then
    cp ./_local/keys/provider.json ~/.arfleet-provider/wallet.json
fi

echo "Done"
--------------------------------------------------------------------------------
/frontend/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Vite + React
8 |
9 |
10 |
11 |
12 |
13 |
14 |
--------------------------------------------------------------------------------
/backend/src/utils/constants.js:
--------------------------------------------------------------------------------
// Byte sizes (binary multiples).
const KB = 2 ** 10;
const MB = 2 ** 20;
const GB = 2 ** 30;
const TB = 2 ** 40;
const PB = 2 ** 50;

// Arweave currency units: 1 AR = 10^12 winston.
const WINSTON = 1;
const AR = 10 ** 12;

// Durations, expressed in milliseconds.
const SECOND = 1000;
const MINUTE = 60 * SECOND;
const HOUR = 60 * MINUTE;
const DAY = 24 * HOUR;
const WEEK = 7 * DAY;
const MONTH = 30 * DAY; // calendar approximation
const YEAR = 365 * DAY; // non-leap year
17 |
18 | module.exports = {
19 | KB, MB, GB, TB, PB,
20 | WINSTON, AR,
21 | SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, YEAR
22 | };
--------------------------------------------------------------------------------
/frontend/src/components/FileExplorer.css:
--------------------------------------------------------------------------------
1 | .file-explorer {
2 | /* padding: 1rem; */
3 | }
4 |
5 | .file-explorer h2 {
6 | /* margin-bottom: 1rem; */
7 | }
8 |
9 | .file-item {
10 | display: flex;
11 | align-items: center;
12 | padding: 0.5rem;
13 | cursor: pointer;
14 | }
15 |
16 | .file-item:hover {
17 | background-color: #f0f0f0;
18 | }
19 |
20 | .file-item i {
21 | margin-right: 0.5rem;
22 | }
23 |
24 | .file-item.folder i {
25 | color: #ffd700;
26 | }
27 |
28 | .file-item.file i {
29 | color: #4a4a4a;
30 | }
--------------------------------------------------------------------------------
/frontend/src/components/Content.css:
--------------------------------------------------------------------------------
1 | .content {
2 | display: flex;
3 | flex-direction: column;
4 | height: 100%;
5 | }
6 |
7 | .content-top {
8 | display: flex;
9 | flex: 1;
10 | border-bottom: 1px solid #e0e0e0;
11 | }
12 |
13 | .content-top-left,
14 | .content-top-right {
15 | flex: 1;
16 | /* padding: 1rem; */
17 | overflow-y: auto;
18 | }
19 |
20 | .content-top-left {
21 | border-right: 1px solid #e0e0e0;
22 | }
23 |
24 | .content-bottom {
25 | flex: 1;
26 | /* padding: 1rem; */
27 | overflow-y: auto;
28 | }
--------------------------------------------------------------------------------
/backend/src/db/models/Provider.js:
--------------------------------------------------------------------------------
const Model = require('./base');
const Sequelize = require('sequelize');

/**
 * Sequelize model for a storage provider known to this node.
 * The class body is intentionally empty: the previous constructor only
 * forwarded its arguments to super(), which the implicit default
 * constructor already does.
 */
class Provider extends Model {
}

Provider.init(
    {
        // Provider identifier used as the primary key (presumably the
        // provider's Arweave address — verify against announce flow).
        id: {type: Sequelize.DataTypes.STRING, unique: true, primaryKey: true},
        // NOTE(review): free-form JSON blobs; their schemas are defined by
        // the announcement flow and are not visible in this file.
        connection_strings: {type: Sequelize.DataTypes.JSON, allowNull: true},
        prices: {type: Sequelize.DataTypes.JSON, allowNull: true},
    },
    {
        indexes: [
        ]
    }
);

module.exports = { Provider };
--------------------------------------------------------------------------------
/arfleet:
--------------------------------------------------------------------------------
#!/usr/bin/env node

// Launcher: starts the backend for every invocation, and additionally the
// frontend dev server for long-running client/provider sessions.

const { spawn } = require('child_process');

// Arguments after "arfleet" are forwarded verbatim to both children.
const args = process.argv.slice(2);

// First argument selects the mode (e.g. "client" or "provider").
const mode = args[0];

// One-shot subcommands run a task and exit; the frontend is not needed.
const ONE_SHOT_COMMANDS = ['store', 'makemigration', 'migrate', 'transferpass'];

// Start backend (unused return values removed: the children inherit stdio
// and we never interact with the handles).
spawn('node', ['backend/src/index.js', ...args], { stdio: 'inherit' });

// Start frontend only for interactive client/provider sessions.
if ((mode === 'client' || mode === 'provider') && !args.some((a) => ONE_SHOT_COMMANDS.includes(a))) {
    spawn('node', ['frontend/src/index.js', ...args], { stdio: 'inherit' });
}
--------------------------------------------------------------------------------
/frontend/.eslintrc.cjs:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | root: true,
3 | env: { browser: true, es2020: true },
4 | extends: [
5 | 'eslint:recommended',
6 | 'plugin:react/recommended',
7 | 'plugin:react/jsx-runtime',
8 | 'plugin:react-hooks/recommended',
9 | ],
10 | ignorePatterns: ['dist', '.eslintrc.cjs'],
11 | parserOptions: { ecmaVersion: 'latest', sourceType: 'module' },
12 | settings: { react: { version: '18.2' } },
13 | plugins: ['react-refresh'],
14 | rules: {
15 | 'react/jsx-no-target-blank': 'off',
16 | 'react-refresh/only-export-components': [
17 | 'warn',
18 | { allowConstantExport: true },
19 | ],
20 | },
21 | }
22 |
--------------------------------------------------------------------------------
/frontend/src/components/PlacementList.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 | import ItemList from './ItemList'
3 |
4 | const PlacementList = ({ assignmentId, onSelect }) => {
5 | const placements = [
6 | {
7 | id: 'a2138c42542a57e1c413aab0949aa3e3',
8 | name: 'Placement 1'
9 | },
10 | {
11 | id: '081f4231c215d86cc92490c56fbbb59e',
12 | name: 'Placement 2'
13 | }
14 | ];
15 |
16 | return (
17 |
18 | {/*
Placements
*/}
19 |
20 |
21 | )
22 | }
23 |
24 | export default PlacementList
--------------------------------------------------------------------------------
/backend/src/client/apiStore.js:
--------------------------------------------------------------------------------
1 | const config = require('../config');
2 | const utils = require('../utils');
3 | const deployer = require('../client/deployer');
4 |
5 | const apiStore = async(req, res) => {
6 | try {
7 | const path = req.body.path;
8 | console.log("Storing path:", path);
9 |
10 | const storeInfo = await deployer.store(path);
11 | res.json({ assignmentId: storeInfo.hash, message: "Queued for storage: " + path });
12 | // res.send("Queued for storage: " + path);
13 | } catch (error) {
14 | console.error(error);
15 | res.status(500).send("Error storing path: " + error.message);
16 | }
17 | }
18 |
19 | module.exports = {
20 | apiStore
21 | };
22 |
--------------------------------------------------------------------------------
/backend/src/cmd/index.js:
--------------------------------------------------------------------------------
1 | const config = require('../config');
2 | const nodepath = require('path');
3 |
4 | const client_store = async (path) => {
5 | // Call API with the path
6 | const axios = require('axios');
7 |
8 | const API_URL = "http://" + config.client.apiServer.host + ":" + config.client.apiServer.port;
9 |
10 | try {
11 | const fullpath = nodepath.resolve(path);
12 | const response = await axios.post(API_URL + '/store', { path: fullpath });
13 |
14 | console.log(response.data);
15 | } catch (error) {
16 | console.error("Error connecting to the API:", error.message);
17 | process.exit(1);
18 | }
19 | }
20 |
21 | module.exports = {
22 | client_store
23 | };
--------------------------------------------------------------------------------
/frontend/src/components/Sidebar.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 | import './Sidebar.css'
3 |
4 | const Sidebar = () => {
5 | return (
6 |
16 | )
17 | }
18 |
19 | export default Sidebar
--------------------------------------------------------------------------------
/backend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "arfleet-backend",
3 | "version": "1.0.0",
4 | "description": "",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "echo \"Error: no test specified\" && exit 1",
8 | "client": "node src/index.js client",
9 | "provider": "node src/index.js provider"
10 | },
11 | "author": "",
12 | "license": "ISC",
13 | "dependencies": {
14 | "@permaweb/aoconnect": "^0.0.51",
15 | "arweave": "^1.15.0",
16 | "axios": "^1.6.8",
17 | "body-parser": "^1.20.2",
18 | "commander": "^12.0.0",
19 | "express": "^4.19.2",
20 | "mime-types": "^2.1.35",
21 | "sequelize": "^6.37.2",
22 | "sequelize-auto-migrations": "github:pointnetwork/sequelize-auto-migrations",
23 | "sqlite3": "^5.1.7",
24 | "umzug": "^3.7.0"
25 | }
26 | }
27 |
--------------------------------------------------------------------------------
/frontend/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "frontend",
3 | "private": true,
4 | "version": "0.0.0",
5 | "type": "module",
6 | "scripts": {
7 | "dev": "vite",
8 | "build": "vite build",
9 | "lint": "eslint . --ext js,jsx --report-unused-disable-directives --max-warnings 0",
10 | "preview": "vite preview"
11 | },
12 | "dependencies": {
13 | "bootstrap": "^5.3.3",
14 | "react": "^18.3.1",
15 | "react-dom": "^18.3.1"
16 | },
17 | "devDependencies": {
18 | "@types/react": "^18.3.3",
19 | "@types/react-dom": "^18.3.0",
20 | "@vitejs/plugin-react": "^4.3.1",
21 | "eslint": "^8.57.0",
22 | "eslint-plugin-react": "^7.34.3",
23 | "eslint-plugin-react-hooks": "^4.6.2",
24 | "eslint-plugin-react-refresh": "^0.4.7",
25 | "vite": "^5.3.4"
26 | }
27 | }
28 |
--------------------------------------------------------------------------------
/frontend/src/App.css:
--------------------------------------------------------------------------------
1 | html, body {
2 | background: black;
3 | }
4 |
5 | #root {
6 | margin: 0 auto;
7 | }
8 |
9 | .app-container {
10 | display: flex;
11 | flex-direction: column;
12 | height: 100vh;
13 | }
14 |
15 | .main-container {
16 | display: flex;
17 | flex: 1;
18 | overflow: hidden;
19 | }
20 |
21 | main {
22 | flex: 1;
23 | /* padding: 1rem; */
24 | background-color: #222;
25 | overflow-y: auto;
26 | }
27 |
28 | @keyframes logo-spin {
29 | from {
30 | transform: rotate(0deg);
31 | }
32 | to {
33 | transform: rotate(360deg);
34 | }
35 | }
36 |
37 | @media (prefers-reduced-motion: no-preference) {
38 | a:nth-of-type(2) .logo {
39 | animation: logo-spin infinite 20s linear;
40 | }
41 | }
42 |
43 | .card {
44 | padding: 2em;
45 | }
46 |
47 | .read-the-docs {
48 | color: #888;
49 | }
--------------------------------------------------------------------------------
/frontend_old/src/index.js:
--------------------------------------------------------------------------------
// Legacy frontend entry point (superseded by the Vite app in /frontend).
const express = require('express');
const app = express();
const port = 3000; // You can change this port number if needed

// Parse mode and submode, use argv
const mode = process.argv[2];
const submode = process.argv[3];

// Nothing to do for "client store". A top-level `return` is legal here
// because CommonJS wraps each module body in a function.
if (mode === 'client' && submode === 'store') {
    return;
}

// Start the server if mode is 'server'
if (mode === 'server') {
    // Middleware to parse JSON requests
    app.use(express.json());

    // Define a simple route
    app.get('/', (req, res) => {
        res.send('Hello from the server!');
    });

    // Start the server
    app.listen(port, () => {
        console.log(`Server is running on http://localhost:${port}`);
    });
} else {
    console.log("Start frontend here!!!");
}

// TODO:
// React
// Bootstrap
--------------------------------------------------------------------------------
/frontend/src/components/Sidebar.css:
--------------------------------------------------------------------------------
1 | aside {
2 | width: 200px;
3 | /* background-color: #f0f0f0; */
4 | background-color: black;
5 | padding: 1rem;
6 | overflow-y: auto;
7 | }
8 |
9 | .sidebar-nav ul {
10 | list-style-type: none;
11 | padding: 0;
12 | margin: 0;
13 | }
14 |
15 | .sidebar-nav li {
16 | margin-bottom: 0;
17 | }
18 |
19 | .sidebar-nav a {
20 | display: flex;
21 | align-items: center;
22 | padding: 10px 15px;
23 | text-decoration: none;
24 | color: white;
25 | font-size: 14px;
26 | transition: background-color 0.3s ease;
27 | }
28 |
29 | .sidebar-nav a:hover {
30 | /* background-color: #e0e0e0; */
31 | color: rgb(255, 216, 143);
32 | }
33 |
34 | .sidebar-nav i {
35 | margin-right: 10px;
36 | width: 20px;
37 | text-align: center;
38 | }
39 |
40 | /* Active item styling */
41 | .sidebar-nav a.active {
42 | /* background-color: #d0d0d0; */
43 | font-weight: bold;
44 | }
--------------------------------------------------------------------------------
/backend/src/utils/color.js:
--------------------------------------------------------------------------------
// ANSI terminal escape codes, built once at module load instead of being
// re-created on every call.
const COLORS = Object.freeze({
    reset: "\x1b[0m",
    bright: "\x1b[1m",
    dim: "\x1b[2m",
    underscore: "\x1b[4m",
    blink: "\x1b[5m",
    reverse: "\x1b[7m",
    hidden: "\x1b[8m",

    black: "\x1b[30m",
    red: "\x1b[31m",
    green: "\x1b[32m",
    yellow: "\x1b[33m",
    blue: "\x1b[34m",
    magenta: "\x1b[35m",
    cyan: "\x1b[36m",
    white: "\x1b[37m",

    bgBlack: "\x1b[40m",
    bgRed: "\x1b[41m",
    bgGreen: "\x1b[42m",
    bgYellow: "\x1b[43m",
    bgBlue: "\x1b[44m",
    bgMagenta: "\x1b[45m",
    bgCyan: "\x1b[46m",
    bgWhite: "\x1b[47m",
});

/**
 * Wrap `text` in the ANSI escape sequence for `color` for terminal output.
 * Unknown color names fall back to the reset code, so the text is printed
 * unstyled. A reset is always appended so styling does not leak.
 * @param {string} text - text to colorize
 * @param {string} color - key of COLORS, e.g. "red", "bgBlue", "bright"
 * @returns {string} text wrapped in escape sequences
 */
function color(text, color) {
    return `${COLORS[color] || COLORS.reset}${text}${COLORS.reset}`;
}
33 |
34 | module.exports = { color };
35 |
--------------------------------------------------------------------------------
/backend/src/encryption/rsa_keypair.js:
--------------------------------------------------------------------------------
1 | const config = require('../config');
2 | const crypto = require('crypto');
3 |
/**
 * Generate an RSA keypair; modulus size comes from config.rsa_encryption.bits.
 * @returns {Promise<{public_key: string, private_key: string}>} PEM-encoded
 *   SPKI public key and PKCS#8 private key.
 */
const generateKeyPair = async() => {
    return new Promise((resolve, reject) => {
        crypto.generateKeyPair('rsa', {
            modulusLength: config.rsa_encryption.bits,
            // publicExponent: PUBEXP, // todo: supposedly 3 makes it faster for decryption than encryption
            publicKeyEncoding: {
                type: 'spki',
                format: 'pem'
            },
            privateKeyEncoding: {
                type: 'pkcs8',
                format: 'pem',
                // cipher: 'aes-256-cbc',
                // passphrase: 'top secret'
            }
        }, (err, publicKey, privateKey) => {
            if (err) {
                // BUG FIX: must return after rejecting; previously execution
                // fell through and also called resolve() with undefined keys.
                reject(new Error('Error: ' + err));
                return;
            }

            resolve({public_key: publicKey, private_key: privateKey});
        });
    })
}
26 |
27 | module.exports = { generateKeyPair };
28 |
--------------------------------------------------------------------------------
/frontend/src/components/AssignmentList.jsx:
--------------------------------------------------------------------------------
1 | import React from 'react'
2 | import ItemList from './ItemList'
3 |
4 | const AssignmentList = ({ onSelect }) => {
5 | const assignments = [
6 | {
7 | id: 'a2138c42542a57e1c413aab0949aa3e3',
8 | name: 'Assignment 1'
9 | },
10 | {
11 | id: '081f4231c215d86cc92490c56fbbb59e',
12 | name: 'Assignment 2'
13 | },
14 | {
15 | id: '1bfd09afae8b4b7fde31ac5e6005342e',
16 | name: 'Assignment 3'
17 | },
18 | {
19 | id: 'b1946ac92492d2347c6235b4d2611184',
20 | name: 'Assignment 4'
21 | },
22 | {
23 | id: '591785b794601e212b260e25925636fd',
24 | name: 'Assignment 5'
25 | }
26 | ]
27 |
28 | return (
29 |
30 | {/*
Assignments
*/}
31 | {/* List assignments here */}
32 |
33 |
34 | )
35 | }
36 |
37 | export default AssignmentList
--------------------------------------------------------------------------------
/frontend/src/components/Content.jsx:
--------------------------------------------------------------------------------
1 | import React, { useState } from 'react'
2 | import AssignmentList from './AssignmentList'
3 | import PlacementList from './PlacementList'
4 | import FileExplorer from './FileExplorer'
5 | import './Content.css'
6 |
7 | const Content = () => {
8 | const [selectedAssignment, setSelectedAssignment] = useState(null)
9 | const [selectedPlacement, setSelectedPlacement] = useState(1)
10 |
11 | return (
12 |
13 |
21 |
22 |
23 |
24 |
25 | )
26 | }
27 |
28 | export default Content
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | Copyright (c) 2012-2024 Scott Chacon and others
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining
4 | a copy of this software and associated documentation files (the
5 | "Software"), to deal in the Software without restriction, including
6 | without limitation the rights to use, copy, modify, merge, publish,
7 | distribute, sublicense, and/or sell copies of the Software, and to
8 | permit persons to whom the Software is furnished to do so, subject to
9 | the following conditions:
10 |
11 | The above copyright notice and this permission notice shall be
12 | included in all copies or substantial portions of the Software.
13 |
14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/backend/src/encryption/rsa_sign.js:
--------------------------------------------------------------------------------
1 | #!/bin/env
2 |
3 | const fs = require('fs');
4 | const crypto = require('crypto');
5 | const utils = require('../utils');
6 | const config = require('../config');
7 |
8 | const BITS = config.rsa_encryption.bits;
9 |
/**
 * Produce an RSA "signature" of a file by encrypting its entire contents
 * with the private key (PKCS#1 v1.5 padding). It can be verified by
 * decrypting with the matching public key and comparing to the file.
 * Note: privateEncrypt only accepts inputs smaller than the RSA modulus
 * minus the padding overhead, so this is limited to small files.
 * @param {string} filePath - file whose contents are signed
 * @param {string|Buffer} privKey - PEM-encoded private key
 * @returns {string} hex-encoded signature
 */
function signFile(filePath, privKey) {
    const contents = fs.readFileSync(filePath);
    const signature = crypto.privateEncrypt(
        {
            key: privKey,
            padding: crypto.constants.RSA_PKCS1_PADDING,
        },
        contents
    );
    return signature.toString('hex');
}
19 |
// Child-process IPC entry point: the parent sends
// { command: 'sign', filePath, privKey } and receives
// { command: 'sign', success, signature? } back.
process.on('message', async (message) => {
    if (message.command === 'sign') {
        const { filePath, privKey } = message;

        try {
            const signature = signFile(filePath, privKey);

            // send response to master process
            process.send({ 'command': 'sign', 'success': true, 'signature': signature });
        } catch (e) {
            // Report failure instead of crashing the worker; the error is
            // only logged here, not forwarded to the parent.
            console.error(e);
            process.send({ 'command': 'sign', 'success': false });
        }
    }
});
35 |
36 | module.exports = { signFile };
--------------------------------------------------------------------------------
/backend/src/db/makemigration.js:
--------------------------------------------------------------------------------
1 | const nodepath = require('path');
2 |
/**
 * Translate a path given relative to this script's own directory into an
 * equivalent path relative to the process's current working directory.
 * @param {string} x - path relative to the directory of process.argv[1]
 * @returns {string} the same location, expressed relative to process.cwd()
 */
const pathFromScriptToCwd = (x) => {
    const scriptDir = nodepath.dirname(process.argv[1]);
    const absolutePath = nodepath.resolve(nodepath.join(scriptDir, x));
    return nodepath.relative(process.cwd(), absolutePath);
}
10 |
// Generate a new auto-migration by diffing the current models against the
// last migration state, via the sequelize-auto-migrations CLI.
const makeMigration = () => {
    console.log("Making migration...")

    // A little hack: prepare sequelize-auto-migrations for reading from the current datadir config.
    // Its CLI reads its options from process.argv, so we fake a command line
    // here before requiring it.
    process.argv = [
        './arfleet',
        'makemigration',
        '--models-path',
        // 'dist/db/models',
        pathFromScriptToCwd('../src/db/models'),
        '--migrations-path',
        pathFromScriptToCwd('../src/migrations'),
        '--name',
        'automigration'
    ];

    // Models must be initialized before the migration tool inspects them.
    const {Database} = require('../db/models');
    Database.init();

    // Requiring the script runs it — it is a CLI entry point, not a library.
    require('sequelize-auto-migrations/bin/makemigration.js');

    return;
}
34 |
35 | module.exports = {
36 | makeMigration
37 | };
--------------------------------------------------------------------------------
/frontend/src/components/ItemList.jsx:
--------------------------------------------------------------------------------
1 | import React, { useState } from 'react'
2 |
3 | import './ItemList.css'
4 |
5 | const ItemList = ({ items }) => {
6 | const [activeItemId, setActiveItemId] = useState(null);
7 |
8 | const handleItemClick = (itemId) => {
9 | setActiveItemId(itemId);
10 | };
11 |
12 | return (
13 |
14 |
15 | {items.map((item) => (
16 | - handleItemClick(item.id)}
20 | style={{ cursor: 'pointer' }}
21 | >
22 |
23 |
{item.id}
24 |
{item.name}
25 |
26 |
27 | ))}
28 |
29 |
30 | )
31 | }
32 |
33 | export default ItemList
--------------------------------------------------------------------------------
/backend/src/encryption/aes_encrypt.js:
--------------------------------------------------------------------------------
1 | #!/bin/env
2 |
3 | const fs = require('fs');
4 | const crypto = require('crypto');
5 | const utils = require('../utils');
6 | const config = require('../config');
7 |
8 | const KEY = Buffer.from(config.aes_encryption.key, 'hex');
9 |
/**
 * Encrypt `filePath` into `toFile` using AES-256 with the configured key.
 *
 * @param {string} filePath - plaintext input file
 * @param {string} toFile - path for the encrypted output
 * @returns {Promise<void>} resolves once the output is fully flushed to disk,
 *   rejects on any read/cipher/write error.
 *
 * NOTE(review): crypto.createCipher is deprecated (and removed in newer Node
 * releases); it derives key+IV from the passphrase via MD5. Migrating to
 * createCipheriv changes the on-disk format, so it must be done in lock-step
 * with aes_decrypt.js -- left as-is here.
 */
function encryptFile(filePath, toFile) {
    return new Promise((resolve, reject) => {
        const readStream = fs.createReadStream(filePath);
        const writeStream = fs.createWriteStream(toFile);

        const cipher = crypto.createCipher('aes256', KEY);

        readStream.on('error', reject);
        cipher.on('error', reject);
        writeStream.on('error', reject);
        writeStream.on('finish', resolve);

        readStream.pipe(cipher).pipe(writeStream);
    });
}

// Child-process entry point: encrypt a chunk on request from the parent and
// report the hash of the encrypted result back.
process.on('message', async (message) => {
    if (message.command === 'encrypt') {
        const {filePath, chunkId, linkId} = message;

        const suffix = '.' + linkId + '.enc';
        const encryptedPath = filePath + suffix;

        // Bug fix: the streams were previously kicked off and the output file
        // hashed immediately, so the hash could be taken from a partially
        // written file. Await full completion before hashing.
        await encryptFile(filePath, encryptedPath);

        process.send({
            'command': 'encrypt',
            'success': true,
            'chunkId': chunkId,
            'linkId': linkId,
            // todo: hash while encrypting instead of re-reading the file
            'hash': utils.hashFnHex(fs.readFileSync(encryptedPath))
        });
    }
});

module.exports = {encryptFile};
--------------------------------------------------------------------------------
/backend/src/encryption/aes_decrypt.js:
--------------------------------------------------------------------------------
1 | #!/bin/env
2 |
3 | const fs = require('fs');
4 | const crypto = require('crypto');
5 | const utils = require('../utils');
6 | const config = require('../config');
7 |
8 | const KEY = Buffer.from(config.aes_encryption.key, 'hex');
9 |
/**
 * Decrypt `fileIn` into `fileOut` using AES-256 with the configured key.
 *
 * @param {string} fileIn - encrypted input file
 * @param {string} fileOut - path for the decrypted output
 * @returns {Promise<void>} resolves once the output is fully flushed to disk,
 *   rejects on any read/decipher/write error.
 *
 * NOTE(review): crypto.createDecipher is deprecated (removed in newer Node);
 * migrate together with aes_encrypt.js to keep the formats in sync.
 */
function decryptFile(fileIn, fileOut) {
    return new Promise((resolve, reject) => {
        const readStream = fs.createReadStream(fileIn);
        const writeStream = fs.createWriteStream(fileOut);

        const decipher = crypto.createDecipher('aes256', KEY);

        readStream.on('error', reject);
        decipher.on('error', reject);
        writeStream.on('error', reject);
        writeStream.on('finish', resolve);

        readStream.pipe(decipher).pipe(writeStream);
    });
}

// Child-process entry point: decrypt a chunk on request from the parent and
// report content hashes of both input and output back.
process.on('message', async (message) => {
    if (message.command === 'decrypt') {
        const {fileIn, fileOut, chunkId} = message;

        try {
            // Bug fix: previously the streams were fire-and-forget, so the
            // try/catch could never observe async stream errors, and the
            // output was hashed before the write had finished.
            await decryptFile(fileIn, fileOut);
        } catch(e) {
            console.log('Error', e);
            throw e;
        }

        process.send({
            'command': 'decrypt',
            'success': true,
            'chunkId': chunkId,
            'hashIn': utils.hashFnHex(fs.readFileSync(fileIn)),
            'hashOut': utils.hashFnHex(fs.readFileSync(fileOut))
        });
    }
});

module.exports = {decryptFile};
--------------------------------------------------------------------------------
/backend/src/arweave/deal.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const nodepath = require('path');
3 |
// Lazily resolve the shared AO instance (deferred require avoids a circular
// dependency at module load time).
const ao = () => {
    return require('./ao').getAoInstance();
}
7 |
8 | const config = require('../config');
9 |
// Read a Lua source file from the repository's top-level `lua/` directory.
const loadLuaSourceFile = (filename) => {
    const luaDir = nodepath.join(__dirname, '..', '..', '..', 'lua');
    return fs.readFileSync(nodepath.join(luaDir, filename), 'utf-8');
}
14 |
/**
 * Spawn a new ArFleet deal process on AO.
 *
 * Concatenates the Lua helper libraries and the deal contract into one source
 * blob, spawns the process, then Evals `extra_lines` inside it (used to
 * inject deal parameters).
 *
 * @param {string} extra_lines - Lua statements evaluated in the new process
 * @returns {Promise<string>} the spawned process id
 */
const spawnDeal = async(extra_lines) => {
    // Fix: removed unused `thisScriptPath` local.
    const sources = [
        loadLuaSourceFile('libs/hex.lua'),
        loadLuaSourceFile('libs/sha256.lua'),
        loadLuaSourceFile('libs/base64.lua'),
        loadLuaSourceFile('ArFleetDeal.lua'),
    ];

    const sources_concat = sources.join('\n\n');

    const process_id = await ao().spawn(sources_concat, [{name: "Name", value: "arfleet-deal"}]); // todo 3: why not working in explorer?

    await ao().sendAction(process_id, "Eval", extra_lines);

    return process_id;
}
32 |
// Transfer collateral into a previously spawned deal process.
const sendCollateral = async(process_id, collateral) => {
    await ao().sendAction(process_id, "SendCollateral", collateral);
}

module.exports = {
    spawnDeal,
    sendCollateral
}
--------------------------------------------------------------------------------
/backend/src/wallet/index.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const arweave = require('../arweave');
3 | const utils = require('../utils');
4 | const config = require('../config');
5 |
/**
 * Thin wrapper around an Arweave JWK wallet stored on disk.
 * The key material is re-read from disk on each use; only the derived
 * address is cached.
 */
class Wallet {
    constructor(path) {
        this.walletPath = path;
        this._address = null; // lazily resolved by getAddress()
    }

    // Raw JWK JSON as a string, read fresh from disk on every call.
    readPrivateKey() {
        return fs.readFileSync(this.walletPath, 'utf8');
    }

    // Derive (and cache) the wallet's public address.
    async getAddress() {
        if (this._address === null) {
            const jwk = JSON.parse(this.readPrivateKey());
            this._address = await arweave.wallets.jwkToAddress(jwk);
        }
        return this._address;
    }

    // Sign arbitrary data with the wallet's private key.
    async sign(data) {
        const jwk = JSON.parse(this.readPrivateKey());
        const signer = await arweave.wallets.jwkToSigner(jwk);
        return signer.sign(data);
    }
}
29 |
// Generate a fresh JWK wallet and persist it at `path`.
// Security fix: the key file is now created with owner-only permissions
// (0600) instead of the default umask-derived mode.
const createWallet = async(path) => {
    const key = await arweave.wallets.generate();
    fs.writeFileSync(path, JSON.stringify(key), { mode: 0o600 });
};

// Load the wallet from the datadir, generating a new one on first run.
const initWallet = async() => {
    const walletPath = utils.getDatadir(config.walletPath);

    if (!fs.existsSync(walletPath)) {
        await createWallet(walletPath);
    }

    return new Wallet(walletPath);
};
44 |
45 | module.exports = {
46 | initWallet
47 | }
--------------------------------------------------------------------------------
/backend/src/db/models/index.js:
--------------------------------------------------------------------------------
const { Chunk } = require('./Chunk');
const { Assignment } = require('./Assignment');
const { AssignmentChunk } = require('./AssignmentChunk');
const { Placement } = require('./Placement');
const { PlacementChunk } = require('./PlacementChunk');

// PS* models are the provider-side counterparts of the client-side models.
const { PSPlacement } = require('./PSPlacement');
const { PSPlacementChunk } = require('./PSPlacementChunk');

const {Database} = require('../index');
// NOTE(review): Database.client is only populated after Database.init() has
// been called; this module assumes init() ran before it is required -- verify.
const sequelize = Database.client;

// Dependencies
// File.belongsToMany(Chunk, {through: FileMap});
// Chunk.belongsToMany(File, {through: FileMap});
// FileMap.belongsTo(Chunk);
// FileMap.belongsTo(File);
// File.hasMany(FileMap);
// Chunk.hasMany(FileMap);

// Client-side associations: an assignment owns its chunks and placements;
// a placement owns its placement chunks.
AssignmentChunk.belongsTo(Assignment);
Assignment.hasMany(AssignmentChunk);

PlacementChunk.belongsTo(Placement);
Placement.hasMany(PlacementChunk);

Assignment.hasMany(Placement);
Placement.belongsTo(Assignment);

// Provider-side association.
PSPlacementChunk.belongsTo(PSPlacement);
PSPlacement.hasMany(PSPlacementChunk);

module.exports = {
    Chunk,
    Assignment,
    AssignmentChunk,
    Placement,
    PlacementChunk,

    PSPlacement,
    PSPlacementChunk,

    Database,
    sequelize
};
46 |
--------------------------------------------------------------------------------
/backend/src/db/index.js:
--------------------------------------------------------------------------------
1 | const utils = require('../utils');
2 | const config = require('../config');
3 | const path = require('path');
4 |
5 | const {Sequelize, Transaction} = require('sequelize');
6 |
7 | // const log = logger.child({module: 'Sequelize'});
8 |
/**
 * Lazy singleton around the Sequelize connection.
 * init() builds the client from config.db on first call and returns the
 * same instance on every subsequent call.
 */
class Database {
    static client;

    static init() {
        if (Database.client) {
            return Database.client;
        }

        const dbConfig = config.db;
        Database.client = new Sequelize(dbConfig.database, dbConfig.username, dbConfig.password, {
            dialect: dbConfig.dialect,
            define: dbConfig.define,
            storage: utils.getDatadir(dbConfig.storage),
            transactionType: dbConfig.transactionType,
            retry: {max: dbConfig.retry.max},
            logQueryParameters: true,
            // logging: dbConfig.enable_db_logging ? log.trace.bind(log) : false,
            logging: dbConfig.enable_db_logging,
            isolationLevel: Transaction.ISOLATION_LEVELS.SERIALIZABLE
        });

        return Database.client;
    }
}
37 |
38 | module.exports = {Database};
39 |
--------------------------------------------------------------------------------
/frontend/src/index.css:
--------------------------------------------------------------------------------
/* Global design tokens and dark-theme defaults (Vite starter base). */
:root {
  font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif;
  line-height: 1.5;
  font-weight: 400;

  color-scheme: light dark;
  color: rgba(255, 255, 255, 0.87);
  background-color: #242424;

  font-synthesis: none;
  text-rendering: optimizeLegibility;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
}

/* Links */
a {
  font-weight: 500;
  color: #646cff;
  text-decoration: inherit;
}
a:hover {
  color: #535bf2;
}

/* Full-height app shell: body and #root stretch to the viewport. */
body {
  margin: 0;
  padding: 0;
  height: 100vh;
}

#root {
  height: 100%;
}

h1 {
  font-size: 3.2em;
  line-height: 1.1;
}

/* Buttons */
button {
  border-radius: 8px;
  border: 1px solid transparent;
  padding: 0.6em 1.2em;
  font-size: 1em;
  font-weight: 500;
  font-family: inherit;
  background-color: #1a1a1a;
  cursor: pointer;
  transition: border-color 0.25s;
}
button:hover {
  border-color: #646cff;
}
button:focus,
button:focus-visible {
  outline: 4px auto -webkit-focus-ring-color;
}

/* Light-mode overrides. */
@media (prefers-color-scheme: light) {
  :root {
    color: #213547;
    background-color: #ffffff;
  }
  a:hover {
    color: #747bff;
  }
  button {
    background-color: #f9f9f9;
  }
}
--------------------------------------------------------------------------------
/frontend/public/vite.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/backend/src/encryption/rsa_verify.js:
--------------------------------------------------------------------------------
1 | #!/bin/env
2 |
3 | const fs = require('fs');
4 | const crypto = require('crypto');
5 | const utils = require('../utils');
6 | const config = require('../config');
7 |
8 | const BITS = config.rsa_encryption.bits;
9 |
/**
 * Verify a "signature" over a file's contents.
 *
 * The counterpart signer uses raw RSA: the signature is the file content
 * encrypted with the private key (PKCS#1 v1.5 padding). We recover it with
 * the public key and compare against the file bytes.
 *
 * @param {string} filePath - file whose contents were signed
 * @param {string|object} pubKey - PEM string or KeyObject of the signer
 * @param {string} signature - hex-encoded signature blob
 * @returns {boolean} true iff the decrypted signature matches the file
 */
function verifyFile(filePath, pubKey, signature) {
    const file = fs.readFileSync(filePath);
    const sigBuffer = Buffer.from(signature, "hex");

    let recovered;
    try {
        recovered = crypto.publicDecrypt({
            key: pubKey,
            padding: crypto.constants.RSA_PKCS1_PADDING,
        }, sigBuffer);
    } catch (e) {
        console.error(e);
        return false;
    }

    // Bug fix: the previous utf-8 string comparison was lossy (invalid byte
    // sequences collapse to U+FFFD), so distinct byte strings could compare
    // equal. Compare the raw bytes instead, in constant time.
    if (recovered.length !== file.length) {
        return false;
    }
    return crypto.timingSafeEqual(recovered, file);
}
28 |
// Child-process entry point: verify a file signature on request and report
// the boolean outcome back to the parent.
process.on('message', async (message) => {
    if (message.command !== 'verify') return;

    const { filePath, pubKey, signature } = message;

    try {
        const ok = verifyFile(filePath, pubKey, signature);
        process.send({ 'command': 'verify', 'success': ok });
    } catch (e) {
        console.error(e);
        process.send({ 'command': 'verify', 'success': false });
    }
});
47 |
48 | module.exports = { verifyFile };
--------------------------------------------------------------------------------
/backend/src/arweave/marketplace.js:
--------------------------------------------------------------------------------
1 | const ao = () => { return require('./ao').getAoInstance(); }
2 |
3 | const config = require('../config');
4 |
/**
 * Publish this provider's offer to the marketplace process.
 *
 * @param provider - provider instance; its connectionStrings may be updated
 * @param connectionStrings - optional override; when given, it is stored on
 *   the provider before announcing.
 *
 * Fix: the previous else-branch copied provider.connectionStrings into the
 * local parameter, which was never read afterwards -- dead code, removed.
 */
const announce = async(provider, connectionStrings = null) => {
    if (connectionStrings) {
        provider.connectionStrings = connectionStrings;
    }

    console.log(`Announcing from ${provider.address}, URL are ${provider.connectionStrings}`);

    await ao().sendActionJSON(config.marketplace, "Announce", {
        "Connection-Strings": provider.connectionStrings,
        "Storage-Capacity": await provider.getCapacityRemaining(),
        "Storage-Price-Deal": await provider.getStoragePriceDeal(),
        "Storage-Price-Upload-KB-Sec": await provider.getStoragePriceUploadKBSec(),
        "Min-Challenge-Duration": await provider.getMinChallengeDuration(),
        "Min-Storage-Duration": await provider.getMinStorageDuration(),
        "Max-Storage-Duration": await provider.getMaxStorageDuration(),
    });
}
24 |
// Fetch a single provider's announcement from the marketplace process.
const getAnnouncement = async(provider_id) => {
    const response = await ao().sendActionJSON(config.marketplace, "Get-Announcement", {"Provider": provider_id});
    return JSON.parse(response);
}

// Fetch all provider announcements currently known to the marketplace.
const getAnnouncements = async() => {
    const response = await ao().sendActionJSON(config.marketplace, "Get-Announcements", {});
    return JSON.parse(response);
}
34 |
35 | module.exports = {
36 | announce,
37 | getAnnouncement,
38 | getAnnouncements
39 | }
--------------------------------------------------------------------------------
/frontend/src/components/FileExplorer.jsx:
--------------------------------------------------------------------------------
import React, { useState, useEffect } from 'react'
import ItemList from './ItemList'
import './FileExplorer.css'

// Shows the files stored under a placement. Currently backed by mock data.
const FileExplorer = ({ placementId }) => {
const [files, setFiles] = useState([])

// Re-fetch whenever the selected placement changes.
useEffect(() => {
// Fetch files for the selected placement
if (placementId) {
fetchFiles(placementId)
}
}, [placementId])

const fetchFiles = async (id) => {
// Mock API call to fetch files
// In a real implementation, this would be an actual API call
const mockFiles = [
{ id: '1', name: 'file1.txt', type: 'file' },
{ id: '2', name: 'folder1', type: 'folder' },
{ id: '3', name: 'file2.jpg', type: 'file' },
]
setFiles(mockFiles)
}

// NOTE(review): The JSX below was garbled during extraction (element tags and
// parts of original lines 29-31 are missing), so this return block does not
// parse. The fragments suggest it renders an <ItemList> fed with `files` and
// a renderItem showing item.name when a placement is selected, and the hint
// message otherwise. Restore the markup from version control.
return (

{placementId ? (

(


{item.name}

)}
/>
) : (
Select a placement to view files
)}

)
}

export default FileExplorer
--------------------------------------------------------------------------------
/backend/src/client/background/placementChunkQueue.js:
--------------------------------------------------------------------------------
1 | const axios = require('axios');
2 | const Sequelize = require('sequelize');
3 | const { PLACEMENT_STATUS } = require('../../db/models/Placement');
4 | const { Assignment, Placement, AssignmentChunk, PlacementChunk } = require('../../db/models');
5 | const { BackgroundQueue } = require('../../utils/backgroundQueue');
6 |
/**
 * Client-side background queue that encrypts pending placement chunks.
 * When the last chunk of a placement becomes encrypted, the placement itself
 * is handed to placementQueue for its next stage.
 *
 * Fix: `let` -> `const` (the binding is never reassigned).
 */
const placementChunkQueue = new BackgroundQueue({
    REBOOT_INTERVAL: 5 * 1000,
    addCandidates: async () => {
        // All chunks that still need encryption.
        const candidates = await PlacementChunk.findAll({
            where: {
                is_encrypted: false,
            }
        });
        return candidates.map(c => c.id);
    },
    processCandidate: async (placement_chunk_id) => {
        const placementChunk = await PlacementChunk.findOrFail(placement_chunk_id);
        if (placementChunk.is_encrypted) {
            // Already handled (e.g. queued twice); nothing to do.
            return;
        }

        await placementChunk.encrypt();

        // If that was the last unencrypted chunk, advance the whole placement.
        const placement_id = placementChunk.placement_id;
        const nonEncryptedChunks = await PlacementChunk.count({
            where: {
                placement_id,
                is_encrypted: false,
            }
        });
        if (nonEncryptedChunks === 0) {
            setImmediate(async () => {
                // Lazy require avoids a circular dependency between the queues.
                const { placementQueue } = require('./placementQueue');
                placementQueue.add(placement_id);
            });
        }
    }
}, 'placementChunkQueue');

module.exports = { placementChunkQueue };
44 |
--------------------------------------------------------------------------------
/backend/src/utils/backgroundQueue.js:
--------------------------------------------------------------------------------
/**
 * Simple polling background queue.
 *
 * Every REBOOT_INTERVAL ms, `addCandidates()` is asked for new work-item ids,
 * which are appended to the queue; each queued id is then passed to
 * `processCandidate()` sequentially. Items can also be pushed externally via
 * `add(id)`.
 */
class BackgroundQueue {
    constructor(
        {
            REBOOT_INTERVAL = 5 * 1000,
            addCandidates = async () => [],
            processCandidate = async () => {}
        },
        name = "unnamed-queue"
    ) {
        this.queue = [];
        this.running = false;
        this.addCandidates = addCandidates;
        this.processCandidate = processCandidate;
        this.REBOOT_INTERVAL = REBOOT_INTERVAL;
        this.name = name;
        // Fire-and-forget: boot() runs its own loop and catches its errors.
        this.boot();
    }

    // Main polling loop. Runs until stop() is called.
    async boot() {
        console.log(`Starting queue: ${this.name}`);
        this.running = true;
        while (this.running) {
            try {
                // Pull in any new candidates.
                const candidates = await this.addCandidates();
                this.queue.push(...candidates);

                // Drain the queue one item at a time.
                while (this.queue.length > 0) {
                    const candidate = this.queue.shift();
                    await this.processCandidate(candidate);
                }
            } catch (error) {
                console.error(`Error in queue ${this.name}:`, error);
            } finally {
                // Bug fix: the sleep used to live inside the try block, so any
                // error skipped it and the loop retried immediately, spinning
                // at 100% CPU. Always pause before the next poll.
                await this.sleep(this.REBOOT_INTERVAL);
            }
        }
    }

    // Enqueue a single item id for processing on the next loop iteration.
    add(id){
        this.queue.push(id);
    }

    // Signal the boot() loop to exit after its current iteration.
    stop() {
        this.running = false;
    }

    // Utility function to pause execution for a given time
    sleep(ms) {
        return new Promise(resolve => setTimeout(resolve, ms));
    }
}
51 |
52 | module.exports = { BackgroundQueue };
53 |
--------------------------------------------------------------------------------
/backend/src/utils/b64.js:
--------------------------------------------------------------------------------
1 | const { Buffer } = require('buffer');
2 |
// Helpers for converting between buffers, UTF-8 strings, base64 and
// URL-safe base64 ("base64url") representations.

function concatBuffers(buffers) {
    return Buffer.concat(buffers);
}

function bufferToString(buffer) {
    return buffer.toString('utf-8');
}

function stringToBuffer(string) {
    return Buffer.from(string, 'utf-8');
}

function bufferTob64(buffer) {
    return buffer.toString('base64');
}

// Standard base64 -> base64url: swap URL-unsafe characters, drop padding.
function b64UrlEncode(b64String) {
    try {
        return b64String.replace(/\+/g, "-").replace(/\//g, "_").replace(/=/g, "");
    } catch (error) {
        throw new Error("Failed to encode string " + error);
    }
}

// base64url -> standard base64: restore characters, re-pad to a multiple of 4.
function b64UrlDecode(b64UrlString) {
    try {
        const restored = b64UrlString.replace(/\-/g, "+").replace(/_/g, "/");
        const remainder = restored.length % 4;
        const padding = remainder === 0 ? 0 : 4 - remainder;
        return restored.concat("=".repeat(padding));
    } catch (error) {
        throw new Error("Failed to decode string " + error);
    }
}

function bufferTob64Url(buffer) {
    return b64UrlEncode(bufferTob64(buffer));
}

function b64UrlToBuffer(b64UrlString) {
    return Buffer.from(b64UrlDecode(b64UrlString), 'base64');
}

function stringToB64Url(string) {
    return bufferTob64Url(stringToBuffer(string));
}

function b64UrlToString(b64UrlString) {
    return bufferToString(b64UrlToBuffer(b64UrlString));
}
56 |
57 | module.exports = {
58 | concatBuffers,
59 | b64UrlToString,
60 | bufferToString,
61 | stringToBuffer,
62 | stringToB64Url,
63 | b64UrlToBuffer,
64 | bufferTob64,
65 | bufferTob64Url,
66 | b64UrlEncode,
67 | b64UrlDecode
68 | };
--------------------------------------------------------------------------------
/backend/src/client/index.js:
--------------------------------------------------------------------------------
1 | const config = require('../config');
2 | const utils = require('../utils');
3 | const providerAnnouncements = require('./background/providerAnnouncements');
4 | const passes = require('../arweave/passes');
5 | const { getAoInstance } = require('../arweave/ao');
6 | const { color } = require('../utils/color');
7 | const { Assignment, Placement } = require('../db/models');
8 |
9 | let state = {};
10 |
/**
 * Client-mode node: owns the wallet, boots the background queues and serves
 * assignment/placement queries for the API layer.
 */
class Client {
    constructor({ wallet }) {
        this.wallet = wallet;
        // Fix: start() is async and was previously a floating promise -- any
        // startup failure became an unhandled rejection. Keep the promise so
        // callers may await readiness, and log failures explicitly.
        this.ready = this.start();
        this.ready.catch((err) => console.error('Client failed to start:', err));
    }

    async start() {
        this.address = await this.wallet.getAddress();

        this.ao = getAoInstance({ wallet: this.wallet });

        console.log("Datadir: ", utils.getDatadir());

        await utils.outputWalletAddressAndBalance(this.ao, this.address, config.defaultToken, config.defaultTokenDecimals, config.defaultTokenSymbol);

        await passes.startChecking(this.address);

        providerAnnouncements.startChecking();

        // Lazy require avoids a circular dependency at module load time;
        // importing the queue module is what starts its polling loop.
        const { placementChunkQueue } = require('./background/placementChunkQueue');
        void placementChunkQueue; // referencing it is enough to start the queue
    }

    // All stored assignments.
    async getAssignments() {
        const assignments = await Assignment.findAll();
        return assignments;
    }

    // All placements belonging to one assignment.
    async getPlacements(assignmentId) {
        const placements = await Placement.findAll({ where: { assignment_id: assignmentId } });
        return placements;
    }
}
44 |
let clientInstance;

// Lazily create the singleton Client. The first call must supply the initial
// state ({ wallet }); later calls simply return the existing instance.
function getClientInstance(initialState = null) {
    if (clientInstance) {
        return clientInstance;
    }
    if (!initialState) {
        throw new Error("Client is not initialized with a state");
    }
    clientInstance = new Client(initialState);
    return clientInstance;
}
55 |
56 | module.exports = getClientInstance;
57 |
--------------------------------------------------------------------------------
/backend/src/db/migrate.js:
--------------------------------------------------------------------------------
1 | const { Sequelize, QueryInterface } = require('sequelize');
2 | const { Umzug, SequelizeStorage, MigrationError } = require('umzug');
3 | const path = require('path');
4 | const { Database } = require('../db/index');
5 |
6 | const glob = require('glob');
7 |
// Run all pending migrations found in src/migrations, recording progress via
// umzug (SequelizeStorage). On a MigrationError an automatic rollback is
// attempted; any other error is re-thrown.
const migrate = async () => {
    console.log('Starting database migration');

    const sequelize = Database.init();
    const migrationsGlob = path.join(__dirname, '../../src/migrations/*.js');
    const resolvedMigrationsGlob = path.resolve(migrationsGlob);

    // List files and debug log them
    const files = glob.sync(resolvedMigrationsGlob);
    // console.log({files}, 'Migrations files');

    // Each migration receives the query interface as its umzug context.
    const context = sequelize.getQueryInterface();

    const umzug = new Umzug({
        migrations: files.map((file) => ({
            name: path.basename(file),
            path: file,
            up: async ({context}) => {
                console.log({name: path.basename(file), file}, 'Migrating');
                const migration = require(file);
                return migration.up(context, Sequelize);
            },
            down: async ({context}) => {
                const migration = require(file);
                return migration.down(context, Sequelize);
            }
        })),
        context: context,
        storage: new SequelizeStorage({sequelize}),
        // logger: log
    });

    // console.log({migrationsGlob}, 'Migrations glob');

    try {
        await umzug.up();
    } catch (e) {
        console.error(e);
        if (e instanceof MigrationError) {
            // NOTE(review): umzug.down() with no arguments reverts only the
            // most recently executed migration -- confirm this is the intended
            // rollback scope for a mid-batch failure.
            await umzug.down();
        } else {
            throw e;
        }
    }

    console.log('Database migration finished');
};

module.exports = { migrate };
--------------------------------------------------------------------------------
/backend/src/api/index.js:
--------------------------------------------------------------------------------
1 | const config = require('../config');
2 | const MODE = process.env.MODE;
3 | const apiServerConfig = config[MODE].apiServer;
4 |
/**
 * Boot the HTTP API for the current MODE ('client' or 'provider').
 * Host/port come from config[MODE].apiServer; API_PORT overrides the port.
 */
const startApi = async() => {
    const express = require('express');
    const app = express();

    // app.use(logger('dev'));
    app.use(express.json());
    app.use(express.urlencoded({ extended: false }));
    // app.use(cookieParser());
    // app.use(express.static(path.join(__dirname, 'public')));

    const host = apiServerConfig.host;
    const port = process.env.API_PORT || apiServerConfig.port;

    if (MODE === 'client') {
        const { apiStore } = require('../client/apiStore');
        app.post('/store', apiStore);

        // All assignments known to this client.
        app.get('/api/assignments', async(req, res) => {
            const getClientInstance = require('../client');
            const client = getClientInstance();
            const assignments = await client.getAssignments();
            res.send({ assignments: assignments });
        });

        // Placements belonging to one assignment.
        app.get('/api/assignments/:id', async(req, res) => {
            const getClientInstance = require('../client');
            const client = getClientInstance();
            // Bug fix: this route used to call getAssignments(req.params.id),
            // which ignores its argument and returns every assignment, while
            // the response is supposed to be the assignment's placements.
            const placements = await client.getPlacements(req.params.id);
            res.send({ placements: placements });
        });

        app.get('/api/placements/:id', async(req, res) => {
            const getClientInstance = require('../client');
            const client = getClientInstance();
            // NOTE(review): getPlacements() filters by assignment_id, but this
            // route receives a *placement* id -- it likely needs a dedicated
            // getPlacement(id) on the client. Left unchanged pending that API.
            const placement = await client.getPlacements(req.params.id);
            res.send({ placement: placement });
        });
    }

    if (MODE === 'provider') {
    }

    app.get('/', (req, res) => {
        res.send('Hello from ' + MODE + ' API!')
    });

    app.listen(port, host, () => {
        console.log(`API app listening on http://${host}:${port}`);
    });
}
55 |
56 | module.exports = {
57 | startApi
58 | }
59 |
--------------------------------------------------------------------------------
/backend/src/encryption/rsa_decrypt.js:
--------------------------------------------------------------------------------
1 | #!/bin/env
2 |
3 | const fs = require('fs');
4 | const crypto = require('crypto');
5 | const utils = require('../utils');
6 | const config = require('../config');
7 |
8 | const BITS = config.rsa_encryption.bits;
9 | const STUPID_PADDING = 1;
10 |
// Decrypt fileIn -> fileOut with raw ("textbook") RSA using the *public* key,
// undoing the custom ECB->CBC chaining applied by the encrypt counterpart.
// Synchronous: both files are fully processed before this returns.
function decryptFile(fileIn, fileOut, pubKey) {
    const readSize = BITS/8;                    // one RSA block of ciphertext
    const writeSize = readSize-STUPID_PADDING;  // plaintext bytes per block

    let fe = fs.openSync(fileIn, 'r');
    let fd = fs.openSync(fileOut, 'w+');
    let c = 0;
    let previousReadBuffer = Buffer.alloc(readSize); // initial buffer for CBC mode (zero "IV")
    while (true) {
        let readBuffer = Buffer.alloc(readSize);
        let bytesRead = fs.readSync(fe, readBuffer, 0, readSize, null);

        // NOTE(review): when the input length is an exact multiple of
        // readSize, the final iteration reads 0 bytes yet still decrypts and
        // writes the zero-filled buffer before breaking below -- confirm the
        // encryptor guarantees a trailing partial block.
        let decrypted = crypto.publicDecrypt({key: pubKey, padding: crypto.constants.RSA_NO_PADDING}, readBuffer);

        // Turning ECB mode into CBC mode
        // (XOR this block's plaintext with the previous *ciphertext* block)
        let mixIn = Buffer.alloc(writeSize);
        previousReadBuffer.copy(mixIn, STUPID_PADDING, STUPID_PADDING, writeSize);
        decrypted = utils.xorBuffersInPlace(decrypted, mixIn);

        // for STUPID_PADDING==1, decrypted now has decrypted data starting at byte 1, and byte 0 should be 0x00
        // todo: validate that the stupid padding bytes are 0x00 (or valid)?
        // todo: validate that the rest of the padding is filled with 0x00

        // writeSync(fd, buffer, offset, length): skip the padding byte(s).
        fs.writeSync(fd, decrypted, STUPID_PADDING, decrypted.length-STUPID_PADDING);

        if (bytesRead !== readSize) break;

        previousReadBuffer = readBuffer;

        c++;
    }

    fs.closeSync(fe);
    fs.closeSync(fd);
}
46 |
// Child-process entry point: decrypt a chunk on request from the parent and
// report content hashes of both input and output back. decryptFile here is
// fully synchronous, so the result files are complete before hashing.
process.on('message', async (message) => {
    if (message.command === 'decrypt') {
        const {fileIn, fileOut, chunkId, pubKey} = message;

        try {
            decryptFile(fileIn, fileOut, pubKey);
        } catch(e) {
            console.log('Error', e);
            throw e;
        }

        // send response to master process
        // todo from encrypt.js: todo: reading the file AGAIN??? can't you hash it while encrypting?
        process.send({ 'command': 'decrypt', 'success': true, 'chunkId': chunkId, 'hashIn': utils.hashFnHex(fs.readFileSync(fileIn)), 'hashOut': utils.hashFnHex(fs.readFileSync(fileOut)) });
    }
});

module.exports = {decryptFile};
--------------------------------------------------------------------------------
/lua/ArFleetMarketplace.lua:
--------------------------------------------------------------------------------
1 | local json = require("json")
2 |
-- Process state: provider announcements and per-provider/token prices.
State = {
    Announcements = {},
    Prices = {},
}

-- Minimal logging helper.
function Log(msg)
    print(msg)
end
11 |
-- The Handle function must be defined before we use it.
-- Registers an AO handler for the given Action tag. The wrapped callback
-- receives (msg, JSON-decoded msg.Data); a non-nil return value is sent
-- back to the caller as the reply.
function Handle(type, fn)
    Handlers.add(
        type,
        Handlers.utils.hasMatchingTag("Action", type),
        function(msg)
            local Data = json.decode(msg.Data)
            local Result = fn(msg, Data)
            if Result == nil then
                return
            end
            Handlers.utils.reply(Result)(msg)
        end
    )
end
27 |
-- Store/replace a provider's announcement, keyed by sender address.
-- NOTE(review): the client's announce() also sends price/duration fields
-- (e.g. Storage-Price-Deal); only the two fields below are persisted --
-- confirm the rest are intentionally dropped.
Handle("Announce", function(msg, Data)
    local From = msg.From

    local ConnectionStrings = Data["Connection-Strings"]
    local StorageCapacity = Data["Storage-Capacity"]

    State.Announcements[From] = {
        ConnectionStrings = ConnectionStrings,
        StorageCapacity = StorageCapacity
    }
end)
39 |
-- Record or update the price a provider charges for a given token.
-- Bug fix: the callback previously declared only (msg), so `Data` resolved to
-- the global environment (nil) and indexing it raised an error. Handle()
-- passes the decoded payload as the second argument.
Handle("Update-Price", function(msg, Data)
    local From = msg.From

    local Token = Data["Token"]
    local Price = Data["Price"]

    -- Only providers that have announced themselves may set prices.
    -- (From, Token) pairs are unique; later updates overwrite earlier ones.
    if State.Announcements[From] ~= nil then
        if State.Prices[From] == nil then
            State.Prices[From] = {}
        end

        State.Prices[From][Token] = Price
    end
end)
65 |
-- Return all provider announcements as JSON.
Handle("Get-Announcements", function(msg)
    return json.encode(State.Announcements)
end)

-- Return one provider's announcement as JSON.
Handle("Get-Announcement", function(msg, Data)
    local Provider = Data["Provider"]
    return json.encode(State.Announcements[Provider])
end)

-- Return the full price table as JSON.
Handle("Get-Prices", function(msg)
    return json.encode(State.Prices)
end)

-- Irreversibly renounce ownership of this process.
Handle("Disconnect-Ownership", function(msg)
    Owner = ""
end)

-- Return the current owner address ("" once ownership is renounced).
Handle("Get-Owner", function(msg)
    return Owner
end)
86 |
87 |
--------------------------------------------------------------------------------
/backend/src/db/models/AssignmentChunk.js:
--------------------------------------------------------------------------------
1 | const Model = require('./base');
2 | const Sequelize = require('sequelize');
3 | const utils = require('../../utils');
4 |
// Join row between an Assignment and a Chunk: records which chunk sits at
// which position (pos) within the assignment.
class AssignmentChunk extends Model {
    constructor(...args) {
        super(...args);
    }

    // Filesystem location (under the datadir) where a chunk's data is stored.
    static getPath(chunk_id) {
        return utils.getDatadir('/assignment_chunks/' + chunk_id);
    }
}

AssignmentChunk.init(
    {
        id: {type: Sequelize.DataTypes.STRING, unique: true, primaryKey: true},
        assignment_id: {type: Sequelize.DataTypes.STRING, allowNull: false},
        pos: {type: Sequelize.DataTypes.INTEGER, allowNull: true},
        chunk_id: {type: Sequelize.DataTypes.STRING, allowNull: true},
        // dl_status: {
        //     type: Sequelize.DataTypes.STRING,
        //     defaultValue: CHUNK_DOWNLOAD_STATUS.NOT_STARTED
        // },
        // ul_status: {
        //     type: Sequelize.DataTypes.STRING,
        //     defaultValue: CHUNK_UPLOAD_STATUS.NOT_STARTED
        // },
        // retry_count: {type: Sequelize.DataTypes.INTEGER, defaultValue: 0},
        // validation_retry_count: {type: Sequelize.DataTypes.INTEGER, defaultValue: 0},
        // txid: {type: Sequelize.DataTypes.STRING, allowNull: true},
        // expires: {type: Sequelize.DataTypes.BIGINT, allowNull: true}
    },
    {
        // Lookups are typically "chunks of assignment X ordered by position".
        indexes: [
            { fields: ['assignment_id', 'pos'] },
            // {fields: ['ul_status']},
            // {fields: ['dl_status']}
        ]
    }
);
42 |
43 | // NOTE: These hooks are not working when using .update(). Had to hook into ::update() method
44 |
45 | // const modificationHook = (m) => {
46 | // // if (m.changed() && m.changed().includes('ul_status')) {
47 | // // markChunkUlStatusInCache(m.id, m.changed().ul_status);
48 | // // processQueue(EventTypes.CHUNK_UPLOAD_STATUS_CHANGED, m.id);
49 | // // }
50 | // // if (m.changed() && m.changed().includes('dl_status')) {
51 | // // processQueue(EventTypes.CHUNK_DOWNLOAD_STATUS_CHANGED, m.id);
52 | // // }
53 | // };
54 |
55 | // Chunk.addHook('afterDestroy', (m) => modificationHook(m));
56 | // Chunk.addHook('afterUpdate', (m) => modificationHook(m));
57 | // Chunk.addHook('afterSave', (m) => modificationHook(m));
58 | // Chunk.addHook('afterUpsert', (m) => modificationHook(m[0]));
59 |
60 | module.exports = { AssignmentChunk };
--------------------------------------------------------------------------------
/backend/src/provider/background/challengesQueue.js:
--------------------------------------------------------------------------------
1 | const { PSPlacement } = require('../../db/models');
2 | const { PS_PLACEMENT_STATUS } = require('../../db/models/PSPlacement');
3 | const Sequelize = require('sequelize');
4 | const { BackgroundQueue } = require('../../utils/backgroundQueue');
5 | const prepareChallengeResponse = require('./challengeResponse');
6 |
/**
 * Provider-side background queue that answers storage challenges.
 * Polls for completed placements whose next_challenge time has passed,
 * fetches a challenge from the deal process, submits the proof, and
 * schedules the next verification.
 *
 * Fix: `let` -> `const` (the binding is never reassigned).
 */
const challengesQueue = new BackgroundQueue({
    REBOOT_INTERVAL: 5 * 1000,
    addCandidates: async () => {
        // Completed placements that are due for a challenge.
        const candidates = await PSPlacement.findAll({
            where: {
                status: PS_PLACEMENT_STATUS.COMPLETED,
                next_challenge: {
                    [Sequelize.Op.lt]: Date.now()
                }
            }
        });
        return candidates.map(c => c.id);
    },
    processCandidate: async (placement_id) => {
        console.log('Processing challenge for placement: ', placement_id);

        const placement = await PSPlacement.findOrFail(placement_id);

        // Ask the deal process for a fresh challenge.
        // (Lazy require avoids a circular dependency at module load time.)
        const { getAoInstance } = require('../../arweave/ao');
        const challenge = await getAoInstance().sendAction(placement.process_id, 'GetChallenge', '');

        // The deal process signals failures as plain "Error: ..." strings.
        if (challenge.startsWith('Error:')) {
            console.log('Can\'t obtain challenge: ', challenge);
            if (challenge.includes('Not activated')){
                // The deal never activated; stop challenging this placement.
                placement.status = PS_PLACEMENT_STATUS.FAILED;
                await placement.save();
            }
            return;
        }

        console.log('Challenge: ', challenge);
        const challengeResponse = await prepareChallengeResponse(placement, challenge);
        console.log('Challenge response: ', challengeResponse);

        const challengeResult = await getAoInstance().sendActionJSON(placement.process_id, 'SubmitChallenge', challengeResponse);
        console.log('Challenge result: ', challengeResult);

        // Read back the process state to learn when the next check is due.
        const state = await getAoInstance().getState(placement.process_id);
        console.log('State: ', state);

        // NextVerification is a unix timestamp in seconds.
        const nextChallenge = state["NextVerification"];
        placement.next_challenge = new Date(nextChallenge * 1000);
        await placement.save();
    }
}, 'challengesQueue');
57 |
58 | module.exports = challengesQueue;
59 |
--------------------------------------------------------------------------------
/backend/src/db/models/Assignment.js:
--------------------------------------------------------------------------------
1 | const Model = require('./base');
2 | const Sequelize = require('sequelize');
3 |
/**
 * An upload assignment: a piece of client data split into chunks, with
 * redundancy/duration targets tracked by the fields declared in init() below.
 *
 * The explicit pass-through constructor was removed: a derived class without
 * a constructor implicitly forwards all arguments to super(), so behavior is
 * identical.
 */
class Assignment extends Model {}
9 |
// Schema for the assignments table. Most fields are nullable because a row
// is created first and filled in as the upload pipeline progresses
// (NOTE(review): assumed from the nullable columns — confirm).
Assignment.init(
    {
        // Primary key; a caller-supplied string id, not an autoincrement.
        id: {type: Sequelize.DataTypes.STRING, unique: true, primaryKey: true},
        size: {type: Sequelize.DataTypes.INTEGER, allowNull: true}, // total data size — presumably bytes; confirm
        chunk_count: {type: Sequelize.DataTypes.INTEGER, allowNull: true},
        root_hash: {type: Sequelize.DataTypes.STRING, allowNull: true},
        desired_redundancy: {type: Sequelize.DataTypes.INTEGER, allowNull: true},   // target copies across providers
        achieved_redundancy: {type: Sequelize.DataTypes.INTEGER, allowNull: false, defaultValue: 0}, // copies confirmed so far
        desired_storage_duration: {type: Sequelize.DataTypes.INTEGER, allowNull: true},
        is_active: {type: Sequelize.DataTypes.BOOLEAN, allowNull: false, defaultValue: false},
        // dl_status: {
        //     type: Sequelize.DataTypes.STRING,
        //     defaultValue: CHUNK_DOWNLOAD_STATUS.NOT_STARTED
        // },
        // ul_status: {
        //     type: Sequelize.DataTypes.STRING,
        //     defaultValue: CHUNK_UPLOAD_STATUS.NOT_STARTED
        // },
        // retry_count: {type: Sequelize.DataTypes.INTEGER, defaultValue: 0},
        // validation_retry_count: {type: Sequelize.DataTypes.INTEGER, defaultValue: 0},
        // txid: {type: Sequelize.DataTypes.STRING, allowNull: true},
        // expires: {type: Sequelize.DataTypes.BIGINT, allowNull: true}
    },
    {
        indexes: [
            // {fields: ['ul_status']},
            // {fields: ['dl_status']}
        ]
    }
);

// NOTE: These hooks are not working when using .update(). Had to hook into ::update() method

// const modificationHook = (m) => {
//     // if (m.changed() && m.changed().includes('ul_status')) {
//     //     markChunkUlStatusInCache(m.id, m.changed().ul_status);
//     //     processQueue(EventTypes.CHUNK_UPLOAD_STATUS_CHANGED, m.id);
//     // }
//     // if (m.changed() && m.changed().includes('dl_status')) {
//     //     processQueue(EventTypes.CHUNK_DOWNLOAD_STATUS_CHANGED, m.id);
//     // }
// };

// Chunk.addHook('afterDestroy', (m) => modificationHook(m));
// Chunk.addHook('afterUpdate', (m) => modificationHook(m));
// Chunk.addHook('afterSave', (m) => modificationHook(m));
// Chunk.addHook('afterUpsert', (m) => modificationHook(m[0]));

module.exports = { Assignment };
--------------------------------------------------------------------------------
/backend/src/encryption/rsa_encrypt.js:
--------------------------------------------------------------------------------
1 | #!/bin/env
2 |
3 | const fs = require('fs');
4 | const crypto = require('crypto');
5 | const path = require('path');
6 | const utils = require('../utils');
7 | const config = require('../config');
8 |
9 | const BITS = config.rsa_encryption.bits;
10 | const STUPID_PADDING = 1;
11 |
/**
 * Encrypts `filePath` into `toFile` using raw RSA (RSA_NO_PADDING) applied
 * block-by-block, chaining blocks CBC-style: the previous ciphertext block
 * (minus its first STUPID_PADDING bytes) is XORed into the next plaintext
 * block before encryption.
 *
 * Each plaintext block holds readSize = BITS/8 - STUPID_PADDING bytes placed
 * at offset STUPID_PADDING in a zeroed writeSize buffer, so the leading byte
 * stays 0x00 and the block value is always below the RSA modulus.
 *
 * NOTE(review): this uses crypto.privateEncrypt, so anyone holding the
 * matching PUBLIC key can decrypt — this provides verifiability, not
 * confidentiality from the public-key holder. Confirm that is intentional.
 *
 * @param {string} filePath source file, read in readSize slices
 * @param {string} toFile destination file (created/overwritten)
 * @param {string|Buffer} privKey PEM private key for crypto.privateEncrypt
 */
function encryptFile(filePath, toFile, privKey) {
    const fromFile = path.resolve(filePath);

    const writeSize = BITS/8;
    const readSize = writeSize-STUPID_PADDING;

    // todo: check that file exists and it's a file?

    let fd = fs.openSync(fromFile, 'r');
    let fe = fs.openSync(toFile, 'w+');
    let c = 0;
    let encrypted = Buffer.alloc(readSize); // initial empty buffer for CBC mode
    while (true) {
        let buffer = Buffer.alloc(writeSize);
        let bytesRead = fs.readSync(fd, buffer, STUPID_PADDING, readSize, null);

        // Turning ECB mode into CBC mode

        // Note: We do encrypted.slice to never have the first STUPID_PADDING bytes not 0x00 even after XOR
        let mixIn = Buffer.alloc(readSize);
        encrypted.copy(mixIn, STUPID_PADDING, STUPID_PADDING, readSize);

        buffer = utils.xorBuffersInPlace(buffer, mixIn);

        try {
            encrypted = crypto.privateEncrypt({key: privKey, padding: crypto.constants.RSA_NO_PADDING}, buffer);
        } catch(e) {
            console.log('crypto.privateEncrypt returned error: '+e);
            console.log('Initial buffer:', buffer.length+" bytes:", buffer.toString('hex'), buffer.toString());
            console.log({privKey});
            console.log({c});
            throw e;
        }

        fs.writeSync(fe, encrypted, 0, encrypted.length);

        // A short (or zero) read marks the last block; its zero-padded
        // remainder is still encrypted and written above.
        // NOTE(review): a file whose size is an exact multiple of readSize
        // produces one extra all-zero block — presumably trimmed on decrypt
        // via the stored original size; confirm against the decrypt path.
        if (bytesRead !== readSize) {
            break;
        }

        c++;
    }
    fs.closeSync(fd);
    fs.closeSync(fe);
}
57 |
// Child-process entry point: the parent forks this module and sends
// { command: 'encrypt', filePath, chunkId, privKey, linkId } messages.
// On completion we reply with success plus the hash of the encrypted file.
process.on('message', async (message) => {
    if (message.command === 'encrypt') {
        const {filePath, chunkId, privKey, linkId} = message;

        // Encrypted copy is written next to the source as "<file>.<linkId>.enc".
        const suffix = '.'+linkId+'.enc';
        const encryptedPath = filePath + suffix;

        encryptFile(filePath, encryptedPath, privKey);

        // send response to master process
        // todo: reading the file AGAIN??? can't you hash it while encrypting?
        process.send({ 'command': 'encrypt', 'success': true, 'chunkId': chunkId, 'linkId': linkId, 'hash': utils.hashFnHex(fs.readFileSync(encryptedPath)) });
    }
});

module.exports = {encryptFile};
--------------------------------------------------------------------------------
/backend/src/provider/repl.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const nodepath = require('path');
3 | const utils = require('../utils');
4 | const config = require('../config');
5 | const { getAoInstance } = require('../arweave/ao');
6 |
/**
 * Starts the interactive provider REPL on stdin/stdout.
 *
 * Prints the ASCII logo (terminals >= 80 columns only), then reads commands
 * in a loop: announce, balance, help, exit/quit. Closing the REPL exits the
 * whole process.
 *
 * Fixes vs. original:
 *  - `cmd` was assigned without declaration (implicit global; throws in
 *    strict mode) — now `const`.
 *  - the `announce` case declared lexical bindings directly inside the
 *    switch clause — now wrapped in a block scope.
 *
 * @param {object} provider the running Provider instance (wallet, ao, address)
 */
const startProviderRepl = async (provider) => {
    const readline = require('readline');

    // check that the terminal width is at least 80
    const terminal_width = process.stdout.columns;
    if (terminal_width >= 80) {
        const logo_txt = fs.readFileSync(nodepath.join(__dirname, '..', '..', 'resources/logo.txt'), 'utf-8');

        // print logo
        console.log('\x1b[32m%s\x1b[0m', logo_txt); // Green color
    }

    console.log("Enter 'help' to see a list of commands.");

    const rl = readline.createInterface({
        input: process.stdin,
        output: process.stdout,
        prompt: '> '
    });

    rl.prompt();

    rl.on('line', async(line) => {
        line = line.trim();
        const cmd = line.split(' ')[0];

        switch (cmd) {
            case 'announce': {
                console.log('Announcing...');
                const { announce } = require('./announce');
                // Connection strings may come from the env or as the command argument.
                const connectionStrings = process.env.CONNECTION_STRINGS || line.split(' ')[1];
                await announce(provider, connectionStrings);
                break;
            }
            case 'balance':
                await utils.outputWalletAddressAndBalance(provider.ao, provider.address, config.defaultToken, config.defaultTokenDecimals, config.defaultTokenSymbol);
                break;
            case 'help':
                console.log('Commands:');
                console.log('  announce - Announce the provider to the given connection strings');
                console.log('  balance - Show the balance of the provider');
                console.log('  help - Show this help message');
                console.log('  exit - Exit the REPL');
                console.log('  quit - Exit the REPL');
                break;
            case 'exit':
            case 'quit':
                rl.close();
                break;
            case '':
                break;
            default:
                console.log('Invalid command: "' + line.trim() + '"');
                break;
        }
        rl.prompt();
    }).on('close', () => {
        process.exit(0); // todo: warning, might kill background processes! need to shut down gracefully
    });
}
66 |
67 | module.exports = {
68 | startProviderRepl
69 | }
--------------------------------------------------------------------------------
/backend/src/config/index.js:
--------------------------------------------------------------------------------
1 | const { KB, MB, GB, TB, PB, WINSTON, AR, SECOND, MINUTE, HOUR, DAY, WEEK, MONTH, YEAR } = require('../utils/constants');
2 |
// Default configuration for both client and provider nodes.
const defaultConfig = {
    walletPath: 'wallet.json', // keyfile path, relative to the datadir
    client: {
        defaultDatadir: '~/.arfleet-client',
        apiServer: {
            host: '127.0.0.1', // local-only API
            port: 8885
        },
        defaultDesiredRedundancy: 1,
        defaultDesiredStorageDuration: 6 * MONTH,
        fetchAnnouncementsInterval: 1 * MINUTE, // how often to poll provider announcements
        defaultMaxChallengeDuration: 1 * WEEK,
    },
    provider: {
        defaultDatadir: '~/.arfleet-provider',
        apiServer: {
            host: '127.0.0.1', // local-only API
            port: 8886
        },
        publicServer: {
            host: '0.0.0.0', // exposed to clients
            port: 8890
        },
        defaultStorageCapacity: 1 * GB,
        defaultStoragePriceDeal: 1 * WINSTON,
        defaultStoragePriceUploadKBSec: 1 * WINSTON,
        defaultMinStorageDuration: 1 * DAY,
        defaultMaxStorageDuration: 6 * MONTH,
        defaultMinChallengeDuration: 1 * DAY,
    },
    db: {
        // Sequelize-wide model options: snake_case columns + timestamps.
        define: {
            underscored: true,
            timestamps: true,
            createdAt: 'created_at',
            updatedAt: 'updated_at'
        },
        dialect: 'sqlite',
        storage: 'arfleet.db', // sqlite file, relative to the datadir (assumed — confirm)
        transactionType: 'DEFERRED',
        retry: {
            max: 5
        },
        enable_db_logging: false
    },
    chunkSize: 4096, // chunk size — presumably bytes; confirm
    _chunkSize: 2048, // NOTE(review): looks unused/legacy — confirm before removing
    // Magic prologue strings tagging ArFleet data structures on disk/wire —
    // changing them would break compatibility with existing data.
    chunkinfoPrologue: 'ARFLEET\x05\x06\xf5\xf6*INFO',
    directoryPrologue: 'ARFLEET\x05\x06\xf5\xf6*DIR',
    encryptedChunkPrologue: 'ARFLEET\x05\x06\xf5\xf6*ENC',

    // AO process / token identifiers (presumably testnet — confirm).
    marketplace: '-jydy0Gqhtdf2ilVR0zbGrizkx4GJXfcvpJYXzQxwlU',
    aoScheduler: '_GQ33BkPtZrqxA84vM8Zk-N2aO0toNNu_C-l-rawrBA',
    aosModule: '9afQ1PLf2mrshqCTZEzzJTR2gWaC9zNPnYgYEqg1Pt4',
    defaultToken: 'xU9zFkq3X2ZQ6olwNVvr1vUWIjc3kXTWr7xKQD6dh10',
    defaultTokenDecimals: 12,
    defaultTokenSymbol: 'wAR',

    passes: {
        // ArFleet:Genesis pass asset; holders gate testnet participation.
        address: 'kBQOWxXVSj21ZhLqMTFEIJllEal1z_l8YgRRdxIm7pw',
        fetchPassesInterval: 5 * MINUTE,
    },

    // AO endpoints, overridable via environment variables.
    aoConfig: {
        MU_URL: process.env.MU_URL || "https://mu.ao-testnet.xyz",
        CU_URL: process.env.CU_URL || "https://cu.ao-testnet.xyz",
        // GATEWAY_URL: "https://arweave.net",
        GATEWAY_URL: process.env.GATEWAY_URL || "https://arweave-search.goldsky.com",
    },

    rsa_encryption: {
        bits: 1024, // RSA modulus size used by the chunk encryption pipeline
    }
};

module.exports = defaultConfig;
79 |
--------------------------------------------------------------------------------
/backend/src/provider/background/challengeResponse.js:
--------------------------------------------------------------------------------
1 | const { PSPlacementChunk } = require('../../db/models');
2 | const utils = require('../../utils');
3 | const fs = require('fs');
4 |
/**
 * Builds the response to a storage challenge.
 *
 * The challenge is a string of '0'/'1' characters describing a root-to-leaf
 * path through the placement's full merkle tree ('0' = left, '1' = right).
 * For every visited node the walk records its stored value and the recomputed
 * hashes of its children. The returned object contains:
 *   - Path:      [value, left_value, right_value] for each non-leaf node,
 *   - Leaf:      the raw stored chunk for the reached leaf, base64-encoded,
 *   - Challenge: the challenge echoed back.
 *
 * @param placement  PSPlacement row with merkle_tree_full populated
 * @param challenge  bit string issued by the placement's AO process
 * @returns {Promise<object>} response suitable for the SubmitChallenge action
 * @throws if the challenge contains a non-bit character, or the leaf's chunk
 *         is not found locally
 */
const prepareChallengeResponse = async(placement, challenge) => {
    const result = {};

    const walk = [];

    let idx = 0;
    let next;
    let node = placement.merkle_tree_full;

    console.log(JSON.stringify(node));

    while(true) {
        const add = {
            value: node.value,
            left_value: node.left ? node.left.value : null,
            right_value: node.right ? node.right.value : null
        };

        // recalc hash
        // hash  = H(left || right); hash0 = H(0x00 || left || right).
        // NOTE(review): the 0x00 prefix presumably domain-separates the
        // bottom level — confirm against the AO contract's verification.
        const left_part = (add.left_value) ? Buffer.from(add.left_value, 'hex') : Buffer.alloc(0)
        const right_part = (add.right_value) ? Buffer.from(add.right_value, 'hex') : Buffer.alloc(0)
        add.hash = utils.hashFnHex(Buffer.concat([left_part, right_part]));
        add.hash0 = utils.hashFnHex(Buffer.concat([Buffer.from([0x00]), left_part, right_part]));

        walk.push(add);

        // Descend left on '0', right on '1', per the current challenge bit.
        const bit = challenge[idx];
        let bit_log = "[[CH]] "+idx+" "+bit+" ";
        if (bit === '0') {
            next = node.left;
            bit_log += "L ";
        } else if (bit === '1') {
            next = node.right;
            bit_log += "R ";
        } else {
            throw new Error('Invalid bit: '+bit);
        }
        bit_log += node.value + " " + add.hash + " " + add.hash0 + " next " + JSON.stringify(next ? next.value : null);
        console.log(bit_log);

        // Reached a leaf (no child in the chosen direction): stop.
        if (!next) {
            break;
        }

        node = next;
        idx++; // increment index
    }

    // remove last node
    // (the walk's final entry is the leaf itself — it is reported via Leaf,
    // not Path)
    const leaf = walk.pop();

    // in the last node now, replace hash with hash0
    const last = walk.pop();
    last.hash = last.hash0;
    walk.push(last);

    result.Path = walk.map((n) => {
        return [n.value, n.left_value, n.right_value];
    });

    // reverse
    // Look up the stored chunk whose encrypted id equals the leaf value.
    const chunk = await PSPlacementChunk.findOneBy('encrypted_chunk_id', leaf.value);
    // console.log('Searching for chunk: ', leaf.value);
    if (!chunk) throw new Error('Chunk not found: '+leaf.value); // todo: handle better

    const chunk_path = PSPlacementChunk.getPath(chunk.id);
    const data = fs.readFileSync(chunk_path, null);
    const data_base64 = data.toString('base64');

    result.Leaf = data_base64;

    result.Challenge = challenge;

    return result;
}

module.exports = prepareChallengeResponse;
--------------------------------------------------------------------------------
/backend/src/db/models/PSPlacement.js:
--------------------------------------------------------------------------------
1 | const Model = require('./base');
2 | const Sequelize = require('sequelize');
3 |
/**
 * Provider-side record of a storage placement (a deal with one client),
 * backed by an AO process identified by `process_id`.
 *
 * The explicit pass-through constructor was removed: a derived class without
 * a constructor implicitly forwards all arguments to super(), so behavior is
 * identical.
 */
class PSPlacement extends Model {
    /** Throws unless `client_id` matches the client that owns this placement. */
    validateOwnership(client_id) {
        if (this.client_id !== client_id) {
            throw new Error('Client does not own this placement');
        }
    }

    /** Throws unless the placement is currently in the given status. */
    validateStatus(status) {
        if (this.status !== status) {
            throw new Error(`Placement status is not ${status}`);
        }
    }

    /**
     * Reads the placement's AO process state and returns how much collateral
     * is still owed (RequiredCollateral minus ReceivedCollateral).
     * @returns {Promise<number>}
     */
    async getCollateralLeftToSend() {
        // Required lazily — NOTE(review): presumably avoids a circular
        // require; confirm.
        const { getAoInstance } = require('../../arweave/ao');
        const state = await getAoInstance().getState(this.process_id)

        return state["RequiredCollateral"] - state["ReceivedCollateral"];
    }
}
28 |
// Lifecycle states of a provider-side placement.
// Fix: declared with `const` — the original bare assignment created an
// implicit global (and throws in strict mode / ES modules).
const PS_PLACEMENT_STATUS = {
    CREATED: 'created',
    ACCEPTED: 'accepted',
    FAILED: 'failed',
    COMPLETED: 'completed', // fully stored; eligible for periodic challenges
}
35 |
// Schema for the provider-side placements table.
PSPlacement.init(
    {
        id: {type: Sequelize.DataTypes.STRING, unique: true, primaryKey: true},
        client_id: {type: Sequelize.DataTypes.STRING, allowNull: false}, // owner; enforced by validateOwnership()
        merkle_root: {type: Sequelize.DataTypes.STRING, allowNull: true},
        merkle_tree_full: {type: Sequelize.DataTypes.JSON, allowNull: true}, // full tree used to answer challenges
        process_id: {type: Sequelize.DataTypes.STRING, allowNull: true}, // backing AO process
        public_key: {type: Sequelize.DataTypes.STRING, allowNull: true}, // client key used to decrypt received chunks
        expires: {type: Sequelize.DataTypes.BIGINT, allowNull: true},
        next_challenge: {type: Sequelize.DataTypes.DATE, allowNull: true}, // when the next storage challenge is due
        is_collaterized: {type: Sequelize.DataTypes.BOOLEAN, allowNull: false, defaultValue: false},
        status: {
            type: Sequelize.DataTypes.STRING,
            defaultValue: PS_PLACEMENT_STATUS.CREATED
        }
    },
    {
        indexes: [
            // {fields: ['ul_status']},
            // {fields: ['dl_status']}
        ]
    }
);

// // NOTE: These hooks are not working when using .update(). Had to hook into ::update() method

// const modificationHook = (m) => {
//     if (m.changed() && m.changed().includes('status')) {
//         const { placementQueue } = require('../../client/background/placementQueue');
//         placementQueue.add(m.id);
//     }
// };

// Placement.addHook('afterDestroy', (m) => modificationHook(m));
// Placement.addHook('afterUpdate', (m) => modificationHook(m));
// Placement.addHook('afterSave', (m) => modificationHook(m));
// Placement.addHook('afterUpsert', (m) => modificationHook(m[0]));

module.exports = { PSPlacement, PS_PLACEMENT_STATUS };
75 |
--------------------------------------------------------------------------------
/backend/src/provider/index.js:
--------------------------------------------------------------------------------
1 | const utils = require('../utils');
2 | const config = require('../config');
3 | const { getAoInstance } = require('../arweave/ao');
4 | const passes = require('../arweave/passes');
5 | const { color } = require('../utils/color');
6 |
/**
 * The provider node: announces capacity, runs the public server and
 * background queues (challenges, chunk decryption), and serves the REPL.
 *
 * Capacity/price/duration getters currently return config defaults.
 */
class Provider {
    /**
     * @param {{wallet: object}} param0 wallet used for the address and AO signing
     */
    constructor({ wallet }) {
        this.wallet = wallet;
        // Fire-and-forget async startup.
        // NOTE(review): a rejection from start() is unhandled here — consider
        // surfacing startup failures.
        this.start();
    }

    // Boots the provider: address/balance output, pass checking, public
    // server, background queues, then the interactive REPL.
    async start() {
        this.address = await this.wallet.getAddress();

        this.ao = getAoInstance({ wallet: this.wallet });

        console.log("Starting provider...");
        console.log("Datadir: ", utils.getDatadir());

        await utils.outputWalletAddressAndBalance(this.ao, this.address, config.defaultToken, config.defaultTokenDecimals, config.defaultTokenSymbol);

        await passes.startChecking(this.address);

        const { startPublicServer } = require('./server');
        const result = await startPublicServer();
        this.externalIP = result.externalIP;
        this.connectionStrings = result.connectionStrings;

        // Requiring these modules constructs and starts the queues as a side effect.
        const challengesQueue = require('./background/challengesQueue.js');
        challengesQueue; // start the queue

        const decryptChunksQueue = require('./background/decryptChunksQueue.js');
        decryptChunksQueue; // start the queue

        const { startProviderRepl } = require('./repl.js');
        await startProviderRepl(this);
    }

    /**
     * Remaining capacity = limit - used, clamped at 0.
     * Fix: getCapacityLimit()/getCapacityUsed() are async — the original
     * subtracted two Promises, which always produced NaN.
     */
    async getCapacityRemaining() {
        const limit = await this.getCapacityLimit();
        const used = await this.getCapacityUsed();
        return Math.max(0, limit - used);
    }

    async getCapacityLimit() {
        return config.provider.defaultStorageCapacity; // todo: allow user to adjust
    }

    async getCapacityUsed() {
        return 0; // todo
    }

    async getStoragePriceDeal() {
        return config.provider.defaultStoragePriceDeal; // todo: allow user to adjust
    }

    async getStoragePriceUploadKBSec() {
        return config.provider.defaultStoragePriceUploadKBSec; // todo: allow user to adjust
    }

    async getMinChallengeDuration() {
        return config.provider.defaultMinChallengeDuration; // todo: allow user to adjust
    }

    async getMinStorageDuration() {
        return config.provider.defaultMinStorageDuration; // todo: allow user to adjust
    }

    async getMaxStorageDuration() {
        return config.provider.defaultMaxStorageDuration; // todo: allow user to adjust
    }
}
72 |
// Module-level singleton; created on the first initialized call.
let providerInstance;

/**
 * Returns the Provider singleton, constructing it on first use.
 * The very first call must supply an initial state; later calls may omit it.
 *
 * @param {object|null} initialState constructor args for Provider (first call only)
 * @throws {Error} if no instance exists yet and no initial state was given
 */
function getProviderInstance(initialState = null) {
    if (providerInstance) {
        return providerInstance;
    }

    if (!initialState) throw new Error("Provider is not initialized with a state");

    providerInstance = new Provider(initialState);
    return providerInstance;
}
83 |
84 | module.exports = getProviderInstance;
--------------------------------------------------------------------------------
/backend/src/arweave/passes.js:
--------------------------------------------------------------------------------
1 | const config = require('../config');
2 | const { MINUTE } = require('../utils/constants');
3 | const axios = require('axios');
4 | const { getAoInstance } = require('./ao');
5 | const { color } = require('../utils/color');
6 | let passes = null;
7 |
/**
 * Fetches current ArFleet:Genesis pass balances from the passes AO process
 * and caches addresses with a positive balance in the module-level `passes`.
 * On failure the previous cache is kept and the error is logged.
 *
 * @param {boolean} firstTime     print a summary, and a warning if `ourAddress` holds no pass
 * @param {string|null} ourAddress our wallet address, used only for the first-time report
 */
const checkPasses = async(firstTime = false, ourAddress = null) => {
    console.log("Checking passes...");
    try {
        const passAddress = config.passes.address;

        const ao = getAoInstance();
        const response = await ao.dryRun(passAddress, "Info");

        const passesReturned = response.Balances;

        // Balances arrive as strings; convert values to numbers...
        const passesDestringified = Object.fromEntries(
            Object.entries(passesReturned).map(([key, value]) => [key, Number(value)])
        );

        // ...and keep only addresses that actually hold at least one pass.
        const passesFiltered = Object.fromEntries(
            Object.entries(passesDestringified).filter(([key, value]) => value > 0)
        );

        // Fix: was an implicit global assignment (throws in strict mode).
        const passesToSave = passesFiltered;

        // Defensive check kept from the original; Object.fromEntries always
        // returns an object, so this is not expected to trigger.
        if (!passesToSave) {
            console.error(color("WARNING: No passes returned from AO, not saving", "red"));
            return;
        }

        passes = passesToSave;

        if (firstTime) {
            console.log(Object.keys(passes).length.toString() + " ArFleet:Genesis passes found");
            if (ourAddress) {
                if (hasPass(ourAddress)) {
                    console.log(color("✅ You have an ArFleet:Genesis pass! 🎉", "green"));
                } else {
                    console.log("");
                    console.log(color("WARNING: You don't have an ArFleet:Genesis pass to participate in the testnet! 😢", "red"));
                    console.log("");
                    console.log(color("Providers/clients on testnet won't be able to connect to you without a valid pass.", "red"));
                    console.log("");
                    console.log(color("ArFleet:Genesis passes are this asset on Bazar: https://bazar.arweave.dev/#/asset/"+config.passes.address+"", "red"));
                    console.log("");
                    console.log(color("Send the pass to your address here: " + ourAddress, "red"));
                }
            }
        }

        // Success!
    } catch(e) {
        console.error(e);
    }
}
58 |
// True-ish when `address` holds at least one pass. Note the return value is
// not strictly boolean: it is null/undefined when the cache is empty or the
// address is unknown — callers in this repo use it only in boolean context.
const hasPass = (address) => {
    // console.log({passes})
    return passes && passes[address] && passes[address] > 0;
}
63 |
/**
 * Runs an immediate pass check (with first-time reporting for `ourAddress`)
 * and schedules periodic re-checks.
 *
 * Fix: the interval is defined at config.passes.fetchPassesInterval; the
 * original read config.fetchPassesInterval, which is undefined, so the
 * hard-coded fallback was always used.
 */
const startChecking = async(ourAddress = null) => {
    await checkPasses(true, ourAddress);

    // Leave default value here so it doesn't become 0 if unset
    setInterval(checkPasses, config.passes.fetchPassesInterval || 5 * MINUTE);
}
70 |
// Latest pass balances fetched by checkPasses(), or null before the first
// successful fetch.
const getPasses = () => passes;
74 |
75 | module.exports = {
76 | checkPasses,
77 | startChecking,
78 | getPasses,
79 | hasPass
80 | }
--------------------------------------------------------------------------------
/backend/src/provider/background/decryptChunksQueue.js:
--------------------------------------------------------------------------------
1 | const { PSPlacement, PSPlacementChunk } = require('../../db/models');
2 | const Sequelize = require('sequelize');
3 | const { BackgroundQueue } = require('../../utils/backgroundQueue');
4 | const { decryptFile } = require('../../encryption/rsa_decrypt');
5 | const utils = require('../../utils');
6 | const fs = require('fs');
7 | const nodepath = require('path');
8 |
// Background worker: decrypts every received-but-not-yet-decrypted placement
// chunk, trims it to its original size, and records the resulting chunk id.
let decryptChunksQueue = new BackgroundQueue({
    REBOOT_INTERVAL: 10 * 1000, // re-scan every 10 seconds
    // Candidates: chunks that were received and have an encrypted id but no
    // original (decrypted) id yet.
    addCandidates: async () => {
        const candidates = await PSPlacementChunk.findAll({
            where: {
                is_received: true,
                encrypted_chunk_id: {
                    [Sequelize.Op.ne]: null
                },
                original_chunk_id: null
            }
        });
        const ids = candidates.map(c => c.id);
        return ids;
    },
    processCandidate: async (placement_chunk_id) => {
        // console.log('Decrypting placement chunk: ', placement_chunk_id);

        const placement_chunk = await PSPlacementChunk.findOrFail(placement_chunk_id);
        // console.log('Placement chunk: ', placement_chunk);

        // console.log('Placement chunk: ', placement_chunk);

        // read
        const encrypted_chunk_path = PSPlacementChunk.getPath(placement_chunk.id);
        const decrypted_chunk_path = PSPlacementChunk.getDecryptedPath(placement_chunk.id);
        utils.mkdirp(nodepath.dirname(decrypted_chunk_path));

        // get the key
        // The client's public key on the placement is what decrypts chunks
        // (they were privateEncrypt-ed on the client side).
        const placement = await PSPlacement.findOrFail(placement_chunk.placement_id);
        const public_key = placement.public_key;
        if (!public_key) {
            // console.log('No public key to decrypt placement chunk: ', placement_chunk.id);
            // todo: mark as decryption failed
            // No key yet: leave the chunk as a candidate and retry next sweep.
            return;
        }

        console.log('Decrypting placement chunk: ', placement_chunk_id);

        // decrypt
        decryptFile(encrypted_chunk_path, decrypted_chunk_path, public_key);

        // original size
        // Raw-RSA block encryption pads the final block, so cut the
        // decrypted file back down to the recorded original size.
        if (placement_chunk.original_size !== null) {
            const dataToCut = fs.readFileSync(decrypted_chunk_path, null);
            const cutData = dataToCut.slice(0, placement_chunk.original_size);
            fs.writeFileSync(decrypted_chunk_path, cutData, null);
        }

        // update
        // console.log('[Updating placement chunk]: ', placement_chunk);
        placement_chunk.is_decrypted = true;
        placement_chunk.original_chunk_id = utils.hashFnHex(fs.readFileSync(decrypted_chunk_path, null));
        await placement_chunk.save();

        // console.log('Decrypted placement chunk: ', placement_chunk.id, '->', placement_chunk.original_chunk_id);
        // console.log('[DecryptChunksQueue] Decrypted placement chunk: ', placement_chunk);
    }
}, 'decryptChunksQueue');

module.exports = decryptChunksQueue;
70 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ArFleet Project
2 |
3 |
4 |
5 |
6 | [https://arfleet.io](https://arfleet.io)
7 |
8 | > [!NOTE]
9 | > More documentation for ArFleet is available at [docs.arfleet.io](https://docs.arfleet.io).
10 |
11 | ## Overview
12 |
13 | ArFleet is a protocol built on and for Arweave+ao designed to facilitate the purchase of time-limited data storage from permissionless peers, eliminating the requirement of a third-party for enforcement.
14 |
15 | NOTE: For testnet, only holders of an [ArFleet Genesis Pass](https://ao-bazar.arweave.dev/#/asset/kBQOWxXVSj21ZhLqMTFEIJllEal1z_l8YgRRdxIm7pw) will be able to use the provisioned testnet providers.
16 |
17 | ## Getting Started
18 |
19 | ### Prerequisites
20 |
21 | - Node.js (v14 or later)
22 | - npm or Yarn
23 | - Git
24 |
25 | ### Installation
26 |
27 | 1. Clone the repository:
28 | ```bash
29 | git clone https://github.com/aoacc/arfleet-js.git
30 | ```
31 | 2. Navigate to the backend directory and install dependencies:
32 | ```bash
33 | cd arfleet-js/backend
34 | npm install
35 | ```
36 | 3. Navigate to the frontend directory and install dependencies:
37 | ```bash
38 | cd ../frontend
39 | npm install
40 | ```
41 | 4. Go back to the project root directory:
42 | ```bash
43 | cd ..
44 | ```
45 | ### Running the Application
46 |
47 | Run:
48 |
49 | ```bash
50 | ./arfleet client
51 | ```
52 |
53 | if you are a client.
54 |
55 | Or,
56 |
57 | ```bash
58 | ./arfleet provider
59 | ```
60 |
61 | if you are a provider.
62 |
63 | When your client is running, you can use the `./arfleet client store <path>` command to store your data.
64 |
65 | Note: in the current version, the data is available publicly.
66 |
67 | ## Updates
68 |
69 | 1. To pull the latest updates from the main repo run:
70 | ```bash
71 | git pull origin master
72 | ```
73 | 2. Navigate to the backend directory and install dependencies:
74 | ```bash
75 | cd arfleet-js/backend
76 | npm install
77 | ```
78 | 3. Navigate to the frontend directory and install dependencies:
79 | ```bash
80 | cd ../frontend
81 | npm install
82 | ```
83 |
84 | ## Transfer Pass
85 |
86 | To transfer your pass to another address, use the `./arfleet (client or provider) transferpass <address>` command — in place of `(client or provider)`, choose exactly one.
87 |
88 | ## Wallet Location
89 |
90 | If you need access to the wallet generated by the node, you can find it in your home directory, then `.arfleet-client` or `.arfleet-provider` folder, then `wallet.json`.
91 |
92 | ## Using A Custom Wallet Address
93 |
94 | To use your own wallet address, save its keyfile from the wallet extension and rename it `wallet.json`. Then, replace the existing `wallet.json` file in the `.arfleet-client` or `.arfleet-provider` folder with your file. This will configure your Client or Provider node to use the desired wallet address.
95 |
96 | ## Contributing
97 |
98 | Contributions are welcome! Feel free to open an issue or a pull request
99 |
100 | ## License
101 |
102 | This project is licensed under the MIT License - see the [LICENSE.md](LICENSE.md) file for details.
103 |
--------------------------------------------------------------------------------
/backend/src/client/background/providerAnnouncements.js:
--------------------------------------------------------------------------------
1 | const marketplace = require('../../arweave/marketplace');
2 | const { hasPass } = require('../../arweave/passes');
3 | const config = require('../../config');
4 | const axios = require('axios');
5 |
// Provider announcements keyed by provider id; rebuilt on every poll.
let announcements = [];

/**
 * Polls the marketplace for provider announcements (logged, then discarded),
 * then repopulates the cache from the locally-known announcement endpoints.
 */
async function checkAnnouncements() {
    announcements = await marketplace.getAnnouncements();
    console.log("Announcements:", announcements);

    announcements = [];

    // And now check local announcements
    checkLocalAnnouncements();
}
17 |
/**
 * Queries a fixed list of well-known announcement endpoints (the local
 * provider plus the arfleet.io testnet providers) and stores every
 * announcement found into the module-level cache, keyed by ProviderId.
 * Unreachable endpoints are logged and skipped.
 */
async function checkLocalAnnouncements() {
    const port = config.provider.publicServer.port;
    const endpoints = [
        `http://localhost:${port}/announcement`,
        `https://p1.arfleet.io/announcement`,
        `https://p2.arfleet.io/announcement`,
        `https://p3.arfleet.io/announcement`,
    ];

    for (const endpoint of endpoints) {
        try {
            console.log("Looking for local announcement");
            const response = await axios.get(endpoint);
            const announcement = response.data.announcement;
            if (!announcement) {
                console.log("No local announcement found");
                continue;
            }
            console.log("Local announcement:", announcement);
            announcements[announcement.ProviderId] = announcement;
            console.log("Announcements:", announcements);
        } catch(e) {
            console.log("No local announcement available: can't connect to", endpoint);
            // Do nothing
        }
    }
}
44 |
/**
 * Fires an immediate announcements poll and schedules periodic re-polls at
 * the configured interval.
 */
async function startChecking() {
    checkAnnouncements(); // initial poll, fire-and-forget

    // Leave default value here so it doesn't become 0 if unset
    const interval = config.client.fetchAnnouncementsInterval || 1 * 60 * 1000;
    setInterval(checkAnnouncements, interval);
}
51 |
52 | // todo: validate the announcement schema, because some bugs might come from there (parsing it, default values etc.)
53 | const getProvidersToConnect = () => {
54 | let result = [];
55 | for (const [provider, announcement] of Object.entries(announcements)) {
56 | // check if has a pass
57 | if (!hasPass(provider)) {
58 | // console.log("Provider", provider, "has no pass");
59 | continue;
60 | }
61 |
62 | result.push({
63 | address: provider,
64 | connectionStrings: announcement["ConnectionStrings"],
65 | storageCapacity: announcement["Storage-Capacity"],
66 | storagePriceDeal: announcement["Storage-Price-Deal"],
67 | storagePriceUploadKBSec: announcement["Storage-Price-Upload-KB-Sec"],
68 | minChallengeDuration: announcement["Min-Challenge-Duration"],
69 | minStorageDuration: announcement["Min-Storage-Duration"],
70 | maxStorageDuration: announcement["Max-Storage-Duration"],
71 | // todo: report version
72 | // todo: if any of the values are missing or wrong, invalidate
73 | })
74 | }
75 |
76 | console.log("Providers to connect:", result.length);
77 |
78 | return result;
79 | }
80 |
81 | module.exports = {
82 | startChecking,
83 | announcements,
84 | getProvidersToConnect,
85 | }
--------------------------------------------------------------------------------
/doc/doc.md:
--------------------------------------------------------------------------------
1 | [] announcements - how will clients find providers, and how can they become ones behind nat? and what if i want to test on localhost [] turn/proxy server? at least for now?
2 | [] how to give people tokens to test
3 |
4 | [] signature + encrypt
5 |
6 | [] retrieval discovery. dht?
7 | [] proper frontend
8 |
9 | - [] retrieval
10 |
11 | [] instead of polling every 5 seconds, trigger changes immediately; the 5-second interval could then be extended [] accelerate instead of 5 s (and chunks too)
12 | [] accelerate in general
13 | [] rsa in separate processes [] multithreaded encryption
14 |
15 | [] UI
16 | [] private vs public vs unlisted files
17 |
18 | [] will fail on another `store` because assignment already exists, fix this.
19 |
20 | [] pricing and deal duration
21 |
22 | [] delete storage we don't have to keep anymore (slashed or expired) [] provider: delete chunks that we don't have to keep because the deal never finalized
23 | [] warnings for the provider in ui when the next challenge is, so they stay online during that time
24 |
25 | [] allow to flip the switch and not accept any more deals
26 |
27 | [] from time to time check deals, and if something is wrong, recover the data and send to another provider
28 | [] advanced: allow to transfer the storage deal to another provider (from the provider itself)
29 |
30 | - [] better errors from api
31 |
32 | [] clean the connection string from extra stuff and only leave host and port and known protocol - for security
33 | [] if some provider has been dead for a long while, don't contact them, even with other placements
34 | [] if the deal doesn't move for some time, fail it. same from provider side!
35 |
36 |
37 | [] what happens if a file is uploaded with one of the prologues? security check
38 |
39 | [] maintain peers addr book
40 |
41 |
42 | what will we have to keep after the container is stored?
43 |
44 | 1. providers (storage link counterparty)
45 | 2. (key we don't have to store, derivable)
46 | 3. encrypted tree root of the container - that's the container id
47 | 4. the root directory index id. if it's in several chunks, it will give us many pointers into the same container tree
48 |
49 | Moving parts
50 | - Core
51 | - Config
52 | - Utils
53 | - Datadir for client/provider
54 | - Cli tool
55 | - Arweave wallet
56 | - Sqlite3 database / sequelize ORM / automigrations
57 | - Connect to Arweave and ao
58 | - RSA encrypt/decrypt
59 | - RSA sign/verify
60 | - AES256 encrypt/decrypt
61 | - Signing/verifying requests/responses between parties
62 | - Lua Modules
63 | - ArFleetMarketplace.lua
64 | - ArFleetDeal.lua
65 | - libraries: sha256, base64, hex
66 | - Client
67 | - Local API
68 | - Store files/directories
69 | - Assignment
70 | - Chunkify
71 | - Merklize
72 | - Build file index
73 | - Build directory index
74 | - Reach out to Marketplace to get announcements
75 | - Auto create placements when redundancy < desired
76 | - Placement
77 | - Find provider
78 | - Negotiate
79 | - Encrypt with RSA
80 | - Merklize replica
81 | - Spawn the deal process
82 | - Fund the deal
83 | - Transfer
84 | - Finalize
85 | - Retrieve files
86 | - Background queues
87 | - Continue after restart (queue boot)
88 | - Provider
89 | - Local API
90 | - Public API (for clients to reach out to)
91 | - ... same functionality as client but from the other side ...
92 | - REPL
93 | - "announce" / "announce [IP:port]"
--------------------------------------------------------------------------------
/backend/src/db/models/PSPlacementChunk.js:
--------------------------------------------------------------------------------
1 | const Model = require('./base');
2 | const Sequelize = require('sequelize');
3 | const fs = require('fs');
4 | const utils = require('../../utils');
5 | const nodepath = require('path');
6 |
class PSPlacementChunk extends Model {
    constructor(...args) {
        super(...args);
    }

    // Filesystem location of the encrypted chunk as received from the client.
    static getPath(placement_chunk_id) {
        return utils.getDatadir('/ps_placement_chunks/' + placement_chunk_id);
    }

    // Filesystem location of the decrypted copy of the chunk.
    static getDecryptedPath(placement_chunk_id) {
        return utils.getDatadir('/ps_placement_chunks_decrypted/' + placement_chunk_id);
    }

    /**
     * Load chunk data by id: try the encrypted copy first, then fall back
     * to the decrypted copy (trimmed to its original size).
     *
     * @param {string} chunk_id - encrypted or original chunk id
     * @returns {Promise<Buffer>} the chunk contents
     * @throws {Error} if neither an encrypted nor a decrypted chunk exists
     */
    static async getData(chunk_id) {
        try {
            const chunk = await PSPlacementChunk.findOneByOrFail('encrypted_chunk_id', chunk_id);
            // Bug fix: this branch previously called `res.send(data)` with an
            // undefined `res` (copy-paste from a route handler), which threw
            // a ReferenceError and always fell through to the fallback below.
            return fs.readFileSync(PSPlacementChunk.getPath(chunk.id));
        } catch(e) {
            try {
                const chunk = await PSPlacementChunk.findOneByOrFail('original_chunk_id', chunk_id);
                let data = fs.readFileSync(PSPlacementChunk.getDecryptedPath(chunk.id));

                const original_size = chunk.original_size;
                if (original_size) {
                    // The decrypted file may be padded; cut it back down to
                    // the original chunk length.
                    data = data.slice(0, original_size);
                }

                return data;
            } catch(e) {
                // 404
                throw new Error('Chunk not found: ' + chunk_id);
            }
        }
    }
}
50 |
// Schema: one row per chunk within a provider-side (PS) placement.
// `original_*` describes the plaintext chunk, `encrypted_*` the on-disk
// encrypted form; `pos` is the chunk's position within the placement.
PSPlacementChunk.init(
    {
        id: {type: Sequelize.DataTypes.STRING, unique: true, primaryKey: true},
        placement_id: {type: Sequelize.DataTypes.STRING, allowNull: false}, // shouldn't it be p_s_placement_id?
        original_chunk_id: {type: Sequelize.DataTypes.STRING, allowNull: true},
        original_size: {type: Sequelize.DataTypes.BIGINT, allowNull: true},
        encrypted_chunk_id: {type: Sequelize.DataTypes.STRING, allowNull: true},
        is_received: {type: Sequelize.DataTypes.BOOLEAN, allowNull: false, defaultValue: false},
        pos: {type: Sequelize.DataTypes.INTEGER, allowNull: true},
    },
    {
        indexes: [
            {fields: ['placement_id']},
            {fields: ['original_chunk_id']},
            {fields: ['encrypted_chunk_id']},
            {fields: ['placement_id', 'pos']},
            {fields: ['placement_id', 'is_received']},
            // {fields: ['ul_status']},
            // {fields: ['dl_status']}
        ]
    }
);

// NOTE: These hooks are not working when using .update(). Had to hook into ::update() method
// NOTE(review): the commented-out hooks below still say "Chunk" — looks like
// a copy-paste remnant from Chunk.js; update if ever re-enabled.

// const modificationHook = (m) => {
//     // if (m.changed() && m.changed().includes('ul_status')) {
//     //     markChunkUlStatusInCache(m.id, m.changed().ul_status);
//     //     processQueue(EventTypes.CHUNK_UPLOAD_STATUS_CHANGED, m.id);
//     // }
//     // if (m.changed() && m.changed().includes('dl_status')) {
//     //     processQueue(EventTypes.CHUNK_DOWNLOAD_STATUS_CHANGED, m.id);
//     // }
// };

// Chunk.addHook('afterDestroy', (m) => modificationHook(m));
// Chunk.addHook('afterUpdate', (m) => modificationHook(m));
// Chunk.addHook('afterSave', (m) => modificationHook(m));
// Chunk.addHook('afterUpsert', (m) => modificationHook(m[0]));

module.exports = { PSPlacementChunk };
--------------------------------------------------------------------------------
/backend/src/db/models/Placement.js:
--------------------------------------------------------------------------------
1 | const Model = require('./base');
2 | const Sequelize = require('sequelize');
3 | const { generateKeyPair } = require('../../encryption/rsa_keypair');
4 |
class Placement extends Model {
    constructor(...args) {
        super(...args);
    }

    // Generate a fresh RSA keypair for this placement and persist it.
    async startEncryption() {
        const { private_key, public_key } = await generateKeyPair();
        this.private_key = private_key;
        this.public_key = public_key;
        await this.save();
    }

    // The connection string used to reach the provider for this placement.
    getConnectionString() {
        return this.provider_connection_strings[0]; // todo: go through all in the future/certain %
    }
}
21 |
/**
 * Lifecycle states of a placement, in rough order of progression.
 * Bug fix: the original assignment had no declaration keyword, creating an
 * implicit global (and throwing in strict mode / ES modules). Declared with
 * `const` and frozen so the states cannot be mutated at runtime.
 */
const PLACEMENT_STATUS = Object.freeze({
    CREATED: 'created',
    UNAVAILABLE: 'unavailable',
    INITIALIZED: 'initialized',
    ENCRYPTED: 'encrypted',
    PROCESS_SPAWNED: 'process_spawned',
    FUNDED: 'funded',
    ACCEPTED: 'accepted',
    TRANSFERRED: 'transferred',
    FAILED: 'failed',
    COMPLETED: 'completed',
})
34 |
// Schema for a client-side placement: the pairing of an assignment with a
// specific provider, plus the keypair and deal process state.
Placement.init(
    {
        // deterministic id: "<assignment_id>_<provider_address>" (see assignmentQueue)
        id: {type: Sequelize.DataTypes.STRING, unique: true, primaryKey: true},
        assignment_id: {type: Sequelize.DataTypes.STRING, allowNull: false},
        provider_id: {type: Sequelize.DataTypes.STRING, allowNull: false},
        provider_connection_strings: {type: Sequelize.DataTypes.JSON, allowNull: true},
        merkle_root: {type: Sequelize.DataTypes.STRING, allowNull: true},
        merkle_tree: {type: Sequelize.DataTypes.JSON, allowNull: true},
        // id of the spawned deal process (see PROCESS_SPAWNED status)
        process_id: {type: Sequelize.DataTypes.STRING, allowNull: true},
        // RSA keypair set by startEncryption()
        private_key: {type: Sequelize.DataTypes.STRING, allowNull: true},
        public_key: {type: Sequelize.DataTypes.STRING, allowNull: true},
        expires: {type: Sequelize.DataTypes.BIGINT, allowNull: true},
        is_funded: {type: Sequelize.DataTypes.BOOLEAN, allowNull: false, defaultValue: false},
        required_reward: {type: Sequelize.DataTypes.BIGINT, allowNull: true},
        required_collateral: {type: Sequelize.DataTypes.BIGINT, allowNull: true},
        error_was: {type: Sequelize.DataTypes.STRING, allowNull: true},
        // one of PLACEMENT_STATUS; changes are pushed to the placement queue
        // via the afterUpdate/afterSave hooks defined below in this file
        status: {
            type: Sequelize.DataTypes.STRING,
            defaultValue: PLACEMENT_STATUS.CREATED
        }
        // dl_status: {
        //     type: Sequelize.DataTypes.STRING,
        //     defaultValue: CHUNK_DOWNLOAD_STATUS.NOT_STARTED
        // },
        // ul_status: {
        //     type: Sequelize.DataTypes.STRING,
        //     defaultValue: CHUNK_UPLOAD_STATUS.NOT_STARTED
        // },
        // retry_count: {type: Sequelize.DataTypes.INTEGER, defaultValue: 0},
        // validation_retry_count: {type: Sequelize.DataTypes.INTEGER, defaultValue: 0},
        // txid: {type: Sequelize.DataTypes.STRING, allowNull: true},
        // expires: {type: Sequelize.DataTypes.BIGINT, allowNull: true}
    },
    {
        indexes: [
            // {fields: ['ul_status']},
            // {fields: ['dl_status']}
        ]
    }
);
75 |
76 | // NOTE: These hooks are not working when using .update(). Had to hook into ::update() method
77 |
// Whenever a placement's status changes, notify the placement queue so the
// background worker picks it up. placementQueue is required lazily here,
// presumably to avoid a circular import — confirm before hoisting.
const modificationHook = (m) => {
    const changed = m.changed();
    if (!changed || !changed.includes('status')) return;
    const { placementQueue } = require('../../client/background/placementQueue');
    placementQueue.add(m.id);
};

Placement.addHook('afterDestroy', (m) => modificationHook(m));
Placement.addHook('afterUpdate', (m) => modificationHook(m));
Placement.addHook('afterSave', (m) => modificationHook(m));
Placement.addHook('afterUpsert', (m) => modificationHook(m[0]));
89 |
90 | module.exports = { Placement, PLACEMENT_STATUS };
91 |
--------------------------------------------------------------------------------
/backend/src/db/models/base.js:
--------------------------------------------------------------------------------
1 | // Brought over from https://raw.githubusercontent.com/pointnetwork/pointnetwork/develop/src/db/models/base.ts
2 |
3 | const sequelize_lib = require('sequelize');
4 | const {Database} = require('..');
5 | const {ModelAttributes, InitOptions, Transaction, UpsertOptions} = sequelize_lib;
6 |
/**
 * Shared base class for all models: lazily opens one Sequelize connection,
 * defaults attributes to NOT NULL, and adds small finder helpers.
 */
class Model extends sequelize_lib.Model {
    constructor(...args) {
        super(...args);
    }

    // Lazily initialized Sequelize connection shared by every model.
    static get connection() {
        if (!Model._connection) {
            Model._connection = Database.init();
        }
        return Model._connection;
    }

    /**
     * Wraps Sequelize's init: injects the shared connection and applies
     * `allowNull: false` as the default for every attribute definition.
     */
    static init(attributes, options) {
        const defaultOptions = {sequelize: Model.connection};
        options = Object.assign({}, defaultOptions, options);

        const defaultAttributeOptions = {allowNull: false};
        for (const fieldName in attributes) {
            const v = attributes[fieldName];
            if (v === null || v.constructor.name !== 'Object') {
                throw Error(
                    'Oops, I didn\'t think of how to handle this case: the options for attribute \'' +
                    fieldName +
                    '\' are not an object (value: ' +
                    v +
                    ')'
                );
            }
            attributes[fieldName] = Object.assign({}, defaultAttributeOptions, v);
        }

        super.init(attributes, options);
    }

    // Upsert by id, then re-fetch the row (upsert alone doesn't reliably
    // return the instance).
    static async findByIdOrCreate(
        id,
        defaults,
        transaction) {
        const options = {returning: true};
        if (transaction) options.transaction = transaction;

        const upsertResult = await this.upsert(Object.assign({}, {id}, defaults), options);

        if (upsertResult[1] !== null) throw new Error('upsertResult[1] !== null');

        return await this.findOrFail(id, options);
    }

    async refresh() {
        return await this.reload();
    }

    static async allBy(field, value) {
        return await this.findAll({where: {[field]: value}});
    }

    static async findOneBy(field, value) {
        const collection = await this.findAll({
            where: {[field]: value},
            limit: 1 // Note: we only want one instance
        });
        return collection.length < 1 ? null : collection[0];
    }

    static async findOneByOrFail(field, value) {
        const one = await this.findOneBy(field, value);
        if (one === null) {
            // Bug fix: in a static method `this` is the class itself, so the
            // model name is `this.name`; `this.constructor.name` always
            // evaluated to "Function".
            throw Error(
                'Row not found: Model ' + this.name + ', ' + field + ' #' + value
            );
        } // todo: sanitize!
        return one;
    }

    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    static async find(id, ...args) {
        return await this.findByPk(id, ...args);
    }

    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    static async findOrFail(id, ...args) {
        const result = await this.findByPk(id, ...args);
        if (!result) throw Error('Row not found: Model ' + this.name + ', id #' + id); // todo: sanitize! (same this.name fix as above)
        return result;
    }

    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    static transaction(...args) {
        return this.connection.transaction(...args);
    }

}
103 |
104 | module.exports = Model;
--------------------------------------------------------------------------------
/frontend/src/assets/react.svg:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/backend/resources/logo.txt:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 | ....:::::::::::::
7 | ..::::..... .....
8 | ..::..
9 | .::.. --.
10 | ..:.. :==-.
11 | .::. .-+-.
12 | ... .-+-.
13 | .=+:
14 | :+=.
15 | .++.
16 | =+:
17 | .. ++.
18 | .:. .................................... .++.
19 | ::.::::::-----=====+++++*****######%%%%%%%%%%%%%%%%%%%#. :*=
20 | .:...........::::::::::-----------=====================-::::::::: +*.
21 | ::...::::::------======++++++******######%%%%%%%%%%%%%%%%%%%%%%%#: :*-
22 | .::...........::::::::::-----------=========================:.... *+
23 | .::...::::::------======++++++******########################+ +*.
24 | .-..........::::::::::----------==========+++++++++++++++++++++=. +#.
25 | .-......::::::::-------=======++++++++**************************: +#.
26 | .-:......:::::::-------=======++++++++*********************= *#
27 | .-:.......::::::::---------========++++++++++++++++++++++++- .#+
28 | :-..::::----====++++****###############*: -#-
29 | .-:.....:::::::-------==================. *#
30 | --::::::-----=====+++++*****#####%%%%%%%%%%%%%%%%%%#: =#-
31 | .--................::::::::::::::::::::::::::::::::: :#+
32 | .-: :#*.
33 | .=- :#*.
34 | .=-. -#+
35 | .-=: =-
36 | :=-.
37 | .-=-. -+.
38 | .-=-: .-**-
39 | :=+-. :+#+:
40 | .-=+-:. .-+#*-.
41 | .:=++=-:. .:=+#*=:
42 | .:-=+++==----:::----=++**+=-:.
43 | ..::---====---::.
44 |
45 |
46 |
47 |
--------------------------------------------------------------------------------
/backend/src/db/models/PlacementChunk.js:
--------------------------------------------------------------------------------
1 | const Model = require('./base');
2 | const Sequelize = require('sequelize');
3 | const fs = require('fs');
4 | const utils = require('../../utils');
5 | const { encryptFile } = require('../../encryption/rsa_encrypt');
6 | const nodepath = require('path');
7 |
class PlacementChunk extends Model {
    constructor(...args) {
        super(...args);
    }

    // Encrypt this chunk's original data with the placement's private key,
    // hash the ciphertext, and record both facts on this row.
    // NOTE(review): encryptFile() is not awaited — this assumes it completes
    // synchronously before the readFileSync below; confirm in rsa_encrypt.
    async encrypt() {
        // required lazily, presumably to avoid a circular import — confirm
        const { Placement, AssignmentChunk } = require('.');
        const placementChunk = this;
        const placement = await Placement.findOrFail(placementChunk.placement_id);
        const original_chunk_id = placementChunk.original_chunk_id;
        const original_chunk_path = AssignmentChunk.getPath(original_chunk_id);
        const placement_chunk_path = PlacementChunk.getPath(placementChunk.id);

        // create directory if not exists
        utils.mkdirp(nodepath.dirname(placement_chunk_path))

        encryptFile(original_chunk_path, placement_chunk_path, placement.private_key);

        // hash the chunk
        const data = fs.readFileSync(placement_chunk_path, null);
        const hash = utils.hashFnHex(data);

        placementChunk.is_encrypted = true;
        placementChunk.encrypted_chunk_id = hash;
        // console.log('Writing Encrypted chunk ID: ', placementChunk.encrypted_chunk_id);

        await placementChunk.save();
    }

    // Filesystem location of the encrypted chunk on the client side.
    static getPath(placement_chunk_id) {
        return utils.getDatadir('/placement_chunks/' + placement_chunk_id);
    }
}
41 |
// Schema: one row per chunk within a client-side placement; tracks whether
// the chunk has been encrypted and whether it was sent to the provider.
PlacementChunk.init(
    {
        id: {type: Sequelize.DataTypes.STRING, unique: true, primaryKey: true},
        placement_id: {type: Sequelize.DataTypes.STRING, allowNull: false},
        is_encrypted: {type: Sequelize.DataTypes.BOOLEAN, allowNull: false, defaultValue: false},
        is_sent: {type: Sequelize.DataTypes.BOOLEAN, allowNull: false, defaultValue: false},
        original_chunk_id: {type: Sequelize.DataTypes.STRING, allowNull: true},
        encrypted_chunk_id: {type: Sequelize.DataTypes.STRING, allowNull: true},
        original_size: {type: Sequelize.DataTypes.BIGINT, allowNull: true},
        pos: {type: Sequelize.DataTypes.INTEGER, allowNull: true},
        // dl_status: {
        //     type: Sequelize.DataTypes.STRING,
        //     defaultValue: CHUNK_DOWNLOAD_STATUS.NOT_STARTED
        // },
        // ul_status: {
        //     type: Sequelize.DataTypes.STRING,
        //     defaultValue: CHUNK_UPLOAD_STATUS.NOT_STARTED
        // },
        // retry_count: {type: Sequelize.DataTypes.INTEGER, defaultValue: 0},
        // validation_retry_count: {type: Sequelize.DataTypes.INTEGER, defaultValue: 0},
        // txid: {type: Sequelize.DataTypes.STRING, allowNull: true},
        // expires: {type: Sequelize.DataTypes.BIGINT, allowNull: true}
    },
    {
        indexes: [
            {fields: ['placement_id']},
            {fields: ['is_encrypted']},
            {fields: ['is_sent']},
            {fields: ['original_chunk_id']},
            {fields: ['encrypted_chunk_id']},
            {fields: ['placement_id', 'pos']},
            {fields: ['placement_id', 'is_sent']},
            // {fields: ['ul_status']},
            // {fields: ['dl_status']}
        ]
    }
);

// NOTE: These hooks are not working when using .update(). Had to hook into ::update() method

// const modificationHook = (m) => {
//     // if (m.changed() && m.changed().includes('ul_status')) {
//     //     markChunkUlStatusInCache(m.id, m.changed().ul_status);
//     //     processQueue(EventTypes.CHUNK_UPLOAD_STATUS_CHANGED, m.id);
//     // }
//     // if (m.changed() && m.changed().includes('dl_status')) {
//     //     processQueue(EventTypes.CHUNK_DOWNLOAD_STATUS_CHANGED, m.id);
//     // }
// };

// Chunk.addHook('afterDestroy', (m) => modificationHook(m));
// Chunk.addHook('afterUpdate', (m) => modificationHook(m));
// Chunk.addHook('afterSave', (m) => modificationHook(m));
// Chunk.addHook('afterUpsert', (m) => modificationHook(m[0]));

module.exports = { PlacementChunk };
--------------------------------------------------------------------------------
/backend/src/db/models/Chunk.js:
--------------------------------------------------------------------------------
1 | const Model = require('./base');
2 | const Sequelize = require('sequelize');
3 | // import {processQueue, EventTypes} from '../../client/storage/callbacks';
4 | // import {UPLOAD_RETRY_LIMIT} from '../../client/storage/config';
5 | // import {markChunkUlStatusInCache} from '../../client/storage/progress';
6 |
7 | // export const CHUNK_DOWNLOAD_STATUS = {
8 | // NOT_STARTED: 'NOT_STARTED',
9 | // ENQUEUED: 'ENQUEUED',
10 | // IN_PROGRESS: 'IN_PROGRESS',
11 | // COMPLETED: 'COMPLETED',
12 | // FAILED: 'FAILED'
13 | // };
14 |
15 | // export const CHUNK_UPLOAD_STATUS = {
16 | // NOT_STARTED: 'NOT_STARTED',
17 | // ENQUEUED: 'ENQUEUED',
18 | // IN_PROGRESS: 'IN_PROGRESS',
19 | // COMPLETED: 'COMPLETED',
20 | // VALIDATING: 'VALIDATING',
21 | // FAILED: 'FAILED'
22 | // };
23 |
// A raw content-addressed chunk row. Upload/download status tracking
// (ul_status/dl_status, retry logic, status-change callbacks) has been
// disabled; the removed machinery lives in version control history.
class Chunk extends Model {
    constructor(...args) {
        super(...args);
    }
}
66 |
// Schema: a chunk keyed by a string id; `size` is the chunk's byte length.
Chunk.init(
    {
        id: {type: Sequelize.DataTypes.STRING, unique: true, primaryKey: true},
        size: {type: Sequelize.DataTypes.INTEGER, allowNull: true},
        // dl_status: {
        //     type: Sequelize.DataTypes.STRING,
        //     defaultValue: CHUNK_DOWNLOAD_STATUS.NOT_STARTED
        // },
        // ul_status: {
        //     type: Sequelize.DataTypes.STRING,
        //     defaultValue: CHUNK_UPLOAD_STATUS.NOT_STARTED
        // },
        // retry_count: {type: Sequelize.DataTypes.INTEGER, defaultValue: 0},
        // validation_retry_count: {type: Sequelize.DataTypes.INTEGER, defaultValue: 0},
        // txid: {type: Sequelize.DataTypes.STRING, allowNull: true},
        // expires: {type: Sequelize.DataTypes.BIGINT, allowNull: true}
    },
    {
        indexes: [
            // {fields: ['ul_status']},
            // {fields: ['dl_status']}
        ]
    }
);
91 |
92 | // NOTE: These hooks are not working when using .update(). Had to hook into ::update() method
93 |
// Currently a no-op: the status-change reactions are commented out, but the
// hooks stay wired up so re-enabling them is a local change.
const modificationHook = (m) => {
    // if (m.changed() && m.changed().includes('ul_status')) {
    //     markChunkUlStatusInCache(m.id, m.changed().ul_status);
    //     processQueue(EventTypes.CHUNK_UPLOAD_STATUS_CHANGED, m.id);
    // }
    // if (m.changed() && m.changed().includes('dl_status')) {
    //     processQueue(EventTypes.CHUNK_DOWNLOAD_STATUS_CHANGED, m.id);
    // }
};

Chunk.addHook('afterDestroy', (m) => modificationHook(m));
Chunk.addHook('afterUpdate', (m) => modificationHook(m));
Chunk.addHook('afterSave', (m) => modificationHook(m));
Chunk.addHook('afterUpsert', (m) => modificationHook(m[0]));

module.exports = { Chunk };
--------------------------------------------------------------------------------
/backend/src/client/background/assignmentQueue.js:
--------------------------------------------------------------------------------
1 | const { Assignment, Placement } = require('../../db/models');
2 | const { placementQueue } = require('./placementQueue');
3 | const announcements = require('./providerAnnouncements');
4 | const Sequelize = require('sequelize');
5 | const { BackgroundQueue } = require('../../utils/backgroundQueue');
6 | const config = require('../../config');
7 |
/**
 * Check whether a provider's announced terms can satisfy an assignment.
 * Announcement values come from parsed JSON and may arrive as strings, so
 * every compared operand is coerced with Number() to guarantee numeric
 * (never lexicographic) comparison. Declared with `const` — it is never
 * reassigned.
 *
 * @param {object} provider - normalized announcement entry (address, storageCapacity, ...)
 * @param {object} assignment - Assignment row being placed
 * @returns {boolean} true if the provider meets all constraints
 */
const verifyProviderConstraints = (provider, assignment) => {
    // Storage Capacity
    const storageCapacity = Number(provider.storageCapacity);
    const assignmentSize = Number(assignment.size);
    if (assignmentSize > storageCapacity) {
        console.log('Assignment size is greater than provider storage capacity');
        return false;
    }

    // Max Storage Duration
    const maxStorageDuration = Number(provider.maxStorageDuration);
    const desiredStorageDuration = Number(assignment.desired_storage_duration);
    if (desiredStorageDuration > maxStorageDuration) {
        console.log('Desired storage duration is greater than max storage duration by provider');
        return false;
    }

    // Min Storage Duration
    const minStorageDuration = Number(provider.minStorageDuration);
    if (desiredStorageDuration < minStorageDuration) {
        console.log('Desired storage duration is less than min storage duration by provider');
        return false;
    }

    // Challenge Duration
    const providerMinChallengeDuration = Number(provider.minChallengeDuration);
    const clientMaxChallengeDuration = config.client.defaultChallengeDuration; // allow user to adjust
    if (providerMinChallengeDuration > clientMaxChallengeDuration) {
        console.log('Provider min challenge duration is greater than client max challenge duration');
        return false;
    }

    // Storage Price
    const storagePriceDeal = provider.storagePriceDeal;
    const storagePriceUploadKBSec = provider.storagePriceUploadKBSec;
    // todo: negotiate on prices

    // todo: sanity check the values

    return true;
};
49 |
// Background queue: for every active assignment whose achieved redundancy is
// below the desired redundancy, try to create placements with additional
// providers.
let assignmentQueue = new BackgroundQueue({
    REBOOT_INTERVAL: 5 * 1000,
    // Candidates: active assignments that still need more replicas.
    addCandidates: async () => {
        const candidates = await Assignment.findAll({
            where: {
                is_active: true,
                achieved_redundancy: {
                    [Sequelize.Op.lt]: Sequelize.col('desired_redundancy')
                }
            }
        });
        const ids = candidates.map(c => c.id);
        return ids;
    },
    processCandidate: async (assignment_id) => {
        console.log('Processing assignment: ', assignment_id);

        const assignment = await Assignment.findOrFail(assignment_id);
        // console.log('Assignment: ', assignment);

        if (assignment.desired_redundancy > assignment.achieved_redundancy) {
            // try to find a matching provider
            console.log(`Redundancy for ${assignment_id} not achieved (${assignment.achieved_redundancy}/${assignment.desired_redundancy}), trying to find a matching provider`);

            // NOTE(review): sort() with a random comparator is not a uniform
            // shuffle; acceptable only while this is a placeholder ordering.
            let providersToConnect = announcements.getProvidersToConnect()
                .sort(() => Math.random() - 0.5); // todo: instead of random shuffle, order based on price, connectivity, reputation etc.

            if (providersToConnect.length === 0) {
                console.log('No providers to connect');
                return;
            }

            for (const provider of providersToConnect) {
                // Make sure we didn't try this one already
                // (placement ids are deterministic: "<assignment_id>_<provider_address>")
                const count = await Placement.count({
                    where: {
                        id: assignment.id + '_' + provider.address
                    }
                });

                // console.log(await Placement.allBy('id', assignment.id + '_' + provider.address));

                console.log('Count: ', count);
                if (count > 0) {
                    // update connection strings
                    const placement = await Placement.findOneByOrFail('id', assignment.id + '_' + provider.address);
                    placement.provider_connection_strings = (provider.connectionStrings || '').split('|');
                    await placement.save();

                    // console.log('Already tried this provider');
                    // todo: retry after some time
                    continue;
                }

                // Verify the constraints
                const valid = verifyProviderConstraints(provider, assignment);
                if (!valid) {
                    console.log('Provider constraints not met');
                    continue;
                }

                // Create the link
                const placement = await Placement.create({
                    id: assignment.id + '_' + provider.address,
                    assignment_id: assignment.id,
                    provider_id: provider.address,
                    provider_connection_strings: (provider.connectionStrings || '').split('|'),
                });

                placementQueue.add(placement.id);
            }
        }
    }
}, 'assignment-queue');
124 |
125 | module.exports = assignmentQueue;
126 |
--------------------------------------------------------------------------------
/lua/libs/sha256.lua:
--------------------------------------------------------------------------------
1 | -- From http://pastebin.com/gsFrNjbt linked from http://www.computercraft.info/forums2/index.php?/topic/8169-sha-256-in-pure-lua/
2 |
3 | --
4 | -- Adaptation of the Secure Hashing Algorithm (SHA-244/256)
5 | -- Found Here: http://lua-users.org/wiki/SecureHashAlgorithm
6 | --
7 | -- Using an adapted version of the bit library
8 | -- Found Here: https://bitbucket.org/Boolsheet/bslf/src/1ee664885805/bit.lua
9 | --
10 |
11 | local MOD = 2^32
12 | local MODM = MOD-1
13 |
-- Return a table that lazily computes f(k) on first access and caches the
-- result for subsequent lookups.
local function memoize(f)
    return setmetatable({}, {
        __index = function(self, k)
            local v = f(k)
            self[k] = v
            return v
        end,
    })
end
24 |
-- Build a bitwise operator from truth table `t`, consuming the operands
-- `m` "digits" at a time (m is the table's radix).
local function make_bitop_uncached(t, m)
    local function bitop(a, b)
        local res,p = 0,1
        while a ~= 0 and b ~= 0 do
            local am, bm = a % m, b % m
            res = res + t[am][bm] * p
            a = (a - am) / m
            b = (b - bm) / m
            p = p*m
        end
        -- whatever remains of the longer operand is carried through as-is
        -- (valid for XOR-style tables where op(x, 0) == x)
        res = res + (a + b) * p
        return res
    end
    return bitop
end
40 |
-- Wrap the 1-bit table op in a memoized, wider-radix version: op2 caches
-- per-operand-pair results, and the outer bitop then consumes 2^(t.n) bits
-- per step instead of 1.
local function make_bitop(t)
    local op1 = make_bitop_uncached(t,2^1)
    local op2 = memoize(function(a) return memoize(function(b) return op1(a, b) end) end)
    return make_bitop_uncached(op2, 2 ^ (t.n or 1))
end

-- 32-bit XOR built from the 1-bit XOR truth table, 4 bits per step
local bxor1 = make_bitop({[0] = {[0] = 0,[1] = 1}, [1] = {[0] = 1, [1] = 0}, n = 4})
48 |
-- Variadic 32-bit XOR built on bxor1; reduces the arguments left to right.
local function bxor(a, b, c, ...)
    local z = nil
    if b then
        a = a % MOD
        b = b % MOD
        z = bxor1(a, b)
        if c then z = bxor(z, c, ...) end
        return z
    elseif a then return a % MOD
    else return 0 end
end
60 |
-- Variadic 32-bit AND, using the identity a AND b = ((a + b) - (a XOR b)) / 2.
-- With no arguments returns the all-ones mask (AND identity element).
local function band(a, b, c, ...)
    local z
    if b then
        a = a % MOD
        b = b % MOD
        z = ((a + b) - bxor1(a,b)) / 2
        -- Bug fix: the 3+-argument case called the undefined global
        -- `bit32_band` (a runtime error); recurse into this function instead.
        if c then z = band(z, c, ...) end
        return z
    elseif a then return a % MOD
    else return MODM end
end
72 |
-- Bitwise NOT within 32 bits.
local function bnot(x) return (-1 - x) % MOD end

-- Forward declarations: the three shift helpers call each other for
-- negative displacements. Bug fix: the original `rshift1` referenced
-- `lshift` before its `local` declaration existed, so the name resolved to
-- a nil global at call time and a negative displacement raised an error.
local rshift1, rshift, lshift

-- Right shift without range clamping (assumes |disp| sensible).
rshift1 = function(a, disp)
    if disp < 0 then return lshift(a,-disp) end
    return math.floor(a % 2 ^ 32 / 2 ^ disp)
end

-- Right shift clamped to 32-bit semantics: shifts of more than 31 yield 0.
rshift = function(x, disp)
    if disp > 31 or disp < -31 then return 0 end
    return rshift1(x % MOD, disp)
end

-- Left shift within 32 bits; negative displacement shifts right.
lshift = function(a, disp)
    if disp < 0 then return rshift(a,-disp) end
    return (a * 2 ^ disp) % 2 ^ 32
end
89 |
-- Rotate the 32-bit value x right by disp bits: the low `disp` bits wrap
-- around to the top.
local function rrotate(x, disp)
    x = x % MOD
    disp = disp % 32
    local low = band(x, 2 ^ disp - 1)
    return rshift(x, disp) + lshift(low, 32 - disp)
end
96 |
-- SHA-256 round constants (FIPS 180-4): the first 32 bits of the fractional
-- parts of the cube roots of the first 64 primes.
local k = {
    0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5,
    0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
    0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3,
    0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
    0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc,
    0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
    0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7,
    0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
    0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13,
    0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
    0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3,
    0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
    0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5,
    0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
    0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208,
    0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2,
}
115 |
-- Convert a binary string to its lowercase hex representation.
local function str2hexa(s)
    local out = {}
    for i = 1, #s do
        out[i] = string.format("%02x", string.byte(s, i))
    end
    return table.concat(out)
end
119 |
-- Serialize the integer l as an n-byte big-endian binary string.
local function num2s(l, n)
  local bytes = {}
  for pos = n, 1, -1 do
    local rem = l % 256
    bytes[pos] = string.char(rem)
    l = (l - rem) / 256
  end
  return table.concat(bytes)
end
129 |
-- Read a big-endian 32-bit integer from s starting at byte offset i.
local function s232num(s, i)
  local n = 0
  for pos = i, i + 3 do
    n = n * 256 + string.byte(s, pos)
  end
  return n
end
135 |
-- SHA-2 message padding: append the 0x80 marker, zero bytes up to 56 mod 64,
-- then the original length in bits as an 8-byte big-endian integer.
local function preproc(msg, len)
  -- `-(len + 9) % 64` is 0 when len+9 is already a multiple of 64. The
  -- previous formula `64 - ((len + 9) % 64)` returned 64 in that case,
  -- inserting a full spurious zero block and producing digests that disagree
  -- with standard SHA-256 for messages of length ≡ 55 (mod 64).
  local extra = -(len + 9) % 64
  len = num2s(8 * len, 8)
  msg = msg .. "\128" .. string.rep("\0", extra) .. len
  assert(#msg % 64 == 0)
  return msg
end
143 |
-- Fill H with the SHA-256 initial hash values (FIPS 180-4 §5.3.3) and
-- return it.
local function initH256(H)
  local iv = {
    0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
    0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
  }
  for i = 1, 8 do H[i] = iv[i] end
  return H
end
155 |
-- SHA-256 compression function: process the 64-byte block of msg starting at
-- offset i, updating the state table H in place.
local function digestblock(msg, i, H)
  local w = {}
  -- Load the 16 big-endian message words, then extend to 64 words.
  for j = 1, 16 do w[j] = s232num(msg, i + (j - 1)*4) end
  for j = 17, 64 do
    local v = w[j - 15]
    local s0 = bxor(rrotate(v, 7), rrotate(v, 18), rshift(v, 3))
    v = w[j - 2]
    w[j] = w[j - 16] + s0 + w[j - 7] + bxor(rrotate(v, 17), rrotate(v, 19), rshift(v, 10))
  end

  local a, b, c, d, e, f, g, h = H[1], H[2], H[3], H[4], H[5], H[6], H[7], H[8]
  -- 64 rounds. Intermediate sums may exceed 32 bits; the band() calls below
  -- fold everything back mod 2^32 when the state is stored.
  for i = 1, 64 do
    local s0 = bxor(rrotate(a, 2), rrotate(a, 13), rrotate(a, 22))
    local maj = bxor(band(a, b), band(a, c), band(b, c))
    local t2 = s0 + maj
    local s1 = bxor(rrotate(e, 6), rrotate(e, 11), rrotate(e, 25))
    local ch = bxor (band(e, f), band(bnot(e), g))
    local t1 = h + s1 + ch + k[i] + w[i]
    h, g, f, e, d, c, b, a = g, f, e, d + t1, c, b, a, t1 + t2
  end

  -- Add the round output into the chaining state; band() with a single
  -- argument reduces it mod 2^32.
  H[1] = band(H[1] + a)
  H[2] = band(H[2] + b)
  H[3] = band(H[3] + c)
  H[4] = band(H[4] + d)
  H[5] = band(H[5] + e)
  H[6] = band(H[6] + f)
  H[7] = band(H[7] + g)
  H[8] = band(H[8] + h)
end
186 |
-- Made this global
-- Compute the SHA-256 digest of msg and return it as a lowercase hex string.
function sha256(msg)
  msg = preproc(msg, #msg)
  local H = initH256({})
  for i = 1, #msg, 64 do
    digestblock(msg, i, H)
  end
  local parts = {}
  for j = 1, 8 do parts[j] = num2s(H[j], 4) end
  return str2hexa(table.concat(parts))
end
--------------------------------------------------------------------------------
/backend/src/client/deployer.js:
--------------------------------------------------------------------------------
1 | const fs = require("fs");
2 | const nodepath = require("path");
3 | const utils = require("../utils");
4 | const config = require("../config");
5 | const CHUNK_SIZE = config.chunkSize;
6 | const { Assignment, AssignmentChunk } = require("../db/models");
7 | const assignmentQueue = require("./background/assignmentQueue");
8 |
/**
 * Split a buffer into CHUNK_SIZE-sized slices; the last slice may be shorter.
 * Slices are views into the original buffer (no copying).
 * @param {Buffer} buf
 * @returns {Buffer[]} at least one chunk (the whole buffer if it fits)
 * @throws {Error} if buf is not a Buffer
 */
const chunkify = (buf) => {
    if (!Buffer.isBuffer(buf)) {
        throw new Error("Expected a buffer");
    }
    const totalChunks = Math.ceil(buf.length / CHUNK_SIZE);
    if (totalChunks <= 1) {
        return [buf];
    }
    const chunkBufs = [];
    for (let start = 0; start < buf.length; start += CHUNK_SIZE) {
        chunkBufs.push(buf.subarray(start, start + CHUNK_SIZE));
    }
    return chunkBufs;
};
24 |
/**
 * Total byte length represented by a chunk list produced by chunkify():
 * every chunk except the last is exactly CHUNK_SIZE bytes, so only the last
 * chunk's real length matters.
 * @param {Buffer[]} chunks
 * @returns {number} 0 for an empty list (previously threw on [])
 */
const chunksToTotalLength = (chunks) =>
    chunks.length === 0
        ? 0
        : (chunks.length - 1) * CHUNK_SIZE + chunks[chunks.length - 1].byteLength;
27 |
/** Hash every buffer with the project hash function, preserving order. */
const buffersToHashes = (bufs) => bufs.map((b) => utils.hashFn(b));
30 |
/**
 * Chunk a file buffer, persist each chunk under its hash in local storage,
 * and return a content descriptor ({type, hash, size}). Every stored chunk
 * hash is appended to chunkQueue (mutated by this function).
 * @param {Buffer} buf - file contents
 * @param {string[]} chunkQueue - accumulator of hex chunk hashes
 * @returns {Promise<{type: string, hash: string, size: number}>}
 */
const processFile = async (buf, chunkQueue) => {
    const chunks = chunkify(buf);
    const chunkHashes = buffersToHashes(chunks);
    const chunkHashesHex = chunkHashes.map(hash => hash.toString('hex'));
    const merkleTree = utils.merkle(chunkHashes, utils.hashFn);
    const filesize = chunksToTotalLength(chunks);

    // Let's save chunks into our storage
    for (let i = 0; i < chunks.length; i++) {
        const chunk = chunks[i];
        const chunkHash = chunkHashesHex[i];
        const chunkPath = AssignmentChunk.getPath(chunkHash);
        // Chunks are content-addressed, so an existing file is already correct.
        if (!fs.existsSync(chunkPath)) {
            utils.mkdirp(nodepath.dirname(chunkPath));
            fs.writeFileSync(chunkPath, chunk, null);
        }

        chunkQueue.push(chunkHash);
    }

    let ret;
    if (chunks.length > 1) {
        // Multi-chunk file: the merkle root (last element of the flattened
        // tree) identifies the content, and a JSON "chunkinfo" index listing
        // all chunk hashes is itself stored recursively as a file.
        const merkleRoot = merkleTree[merkleTree.length - 1];

        const fileIndex = config.chunkinfoPrologue + JSON.stringify({"type": "file", "hash": merkleRoot.toString('hex'), "size": filesize, "chunks": chunkHashesHex});
        const fileIndexRaw = Buffer.from(fileIndex, 'utf-8');
        const fileIndexResult = await processFile(fileIndexRaw, chunkQueue);
        // if fileIndexRaw is less than CHUNK_SIZE, we will just get its hash

        // NOTE(review): the recursive call above already pushed the index's
        // chunk hash(es) onto chunkQueue, so when the index fits in one chunk
        // this pushes the same hash a second time — confirm this duplication
        // is intentional before relying on chunkQueue.length.
        chunkQueue.push(fileIndexResult.hash);

        console.log({fileIndexResult, filesize, chunkHashesHex, merkleRoot, CHUNK_SIZE})

        ret = {
            "type": "fileptr",
            "hash": fileIndexResult.hash,
            "size": fileIndexRaw.length + filesize,
        };
    } else {
        // Single-chunk file: the chunk hash doubles as the file hash.
        ret = {
            "type": "file",
            "hash": chunkHashesHex[0],
            "size": filesize,
        };
    }

    console.log({ret});

    return ret;
};
81 |
/**
 * Recursively store a directory: each entry is stored via storePath/
 * processFile, then a JSON directory index mapping names to descriptors is
 * stored as its own file. Returns a {type: "dirptr", hash, size} descriptor.
 * @param {string} path - directory on the local filesystem
 * @param {string[]} chunkQueue - accumulator of hex chunk hashes (mutated)
 * @returns {Promise<{type: string, hash: string, size: number}>}
 * @throws {Error} for entries that are neither files nor directories
 */
const processDirectory = async (path, chunkQueue) => {
    const container = {};

    const files = fs.readdirSync(path);
    for (const file of files) {
        const filePath = nodepath.join(path, file);
        const subStats = fs.statSync(filePath);
        if (subStats.isDirectory()) {
            container[file] = await storePath(filePath, chunkQueue);
        } else if (subStats.isFile()) {
            const buf = fs.readFileSync(filePath, null);
            container[file] = await processFile(buf, chunkQueue);
        } else {
            throw new Error("Unknown file type: " + filePath);
        }
    }

    // Directory size is the sum of its entries' (already recursive) sizes.
    let size = 0;
    for (const key in container) {
        size += container[key].size;
    }

    const directoryIndex = {
        "type": "directory",
        "size": size,
        "files": container
    };
    const directoryIndexRaw = Buffer.from(config.directoryPrologue + JSON.stringify(directoryIndex), 'utf-8');

    const dir = await processFile(directoryIndexRaw, chunkQueue);
    // if directoryIndexRaw is less than CHUNK_SIZE, we will just get its hash

    const ret = {
        "type": "dirptr",
        "hash": dir.hash,
        "size": dir.size + size,
    }

    console.log({ret});

    return ret;
};
124 |
/**
 * Store whatever lives at `path`: regular files go through processFile,
 * anything else (directories) through processDirectory.
 * @param {string} path
 * @param {string[]} chunkQueue - accumulator of hex chunk hashes (mutated)
 * @returns {Promise<object>} descriptor with type/hash/size
 */
const storePath = async (path, chunkQueue) => {
    const stats = fs.statSync(path);
    if (stats.isFile()) {
        return processFile(fs.readFileSync(path, null), chunkQueue);
    }
    return processDirectory(path, chunkQueue);
};
134 |
/**
 * Top-level entry point: store a file or directory tree, then register an
 * Assignment row (keyed by the root hash) plus one AssignmentChunk row per
 * stored chunk, and hand the assignment to the background queue.
 * @param {string} path - local filesystem path to store
 * @returns {Promise<object>} root descriptor ({type, hash, size})
 */
const store = async (path) => {
    let chunkQueue = [];
    const storeInfo = await storePath(path, chunkQueue);

    // At this point, we have chunkQueue, and storeInfo returned hash to us that is the root directory/file
    // Let's create the assignment
    const assignmentId = storeInfo.hash;
    const assignment = await Assignment.findByIdOrCreate(assignmentId);
    assignment.root_hash = storeInfo.hash;
    assignment.size = storeInfo.size;
    assignment.chunk_count = chunkQueue.length;
    assignment.desired_redundancy = config.client.defaultDesiredRedundancy; // todo: allow user to adjust
    assignment.achieved_redundancy = 0;
    assignment.desired_storage_duration = config.client.defaultDesiredStorageDuration; // todo: allow user to adjust
    // Kept inactive until all chunk rows are inserted, then flipped below.
    assignment.is_active = false;
    await assignment.save();

    // Now add chunks
    const chunkEntries = [];
    let pos = 0;
    for (const chunkId of Object.values(chunkQueue)) { // don't use .map, it will skip -1 index
        chunkEntries.push({
            id: assignmentId + '_' + pos,
            assignment_id: assignmentId,
            pos,
            chunk_id: chunkId,
        });

        pos++;
    }
    await AssignmentChunk.bulkCreate(chunkEntries, {ignoreDuplicates: true});

    // Activate only after all chunk rows exist, so consumers never see a
    // half-populated assignment.
    await Assignment.update({ is_active: true }, {
        where: { id: assignmentId }
    });

    // trigger assignment manager
    assignmentQueue.add(assignmentId);

    console.log(storeInfo);
    console.log({chunkQueue});
    return storeInfo;
};
178 |
// Public API: store(path) chunks the file/directory at `path`, persists the
// chunks locally, and registers an Assignment for replication.
module.exports = {
    store
};
182 |
--------------------------------------------------------------------------------
/backend/src/index.js:
--------------------------------------------------------------------------------
1 | const { Command } = require('commander');
2 | const fs = require('fs');
3 | const utils = require('./utils');
4 | const readline = require('readline');
5 | const { checkPasses, hasPass } = require('./arweave/passes');
6 |
(async() => {
    const program = new Command();
    // Expose parsed options directly on `program` (used as program.datadir below).
    program.storeOptionsAsProperties();

    const app = require('../package.json');
    program.version(app.version || 'N/A');

    program.description("Arfleet\nhttps://arfleet.io\n\nLicensed under MIT license.");

    // The '<path>'/'<address>' argument placeholders were missing from these
    // declarations, so commander never captured the positional values the
    // .action() handlers expect. Restored here.
    program.option('-d, --datadir <path>', 'path to the data directory');
    program.option('-v, --verbose', 'force the logger to show debug level messages', false);

    const clientCommand = program.command('client');
    clientCommand
        .description('start as a client')
        .action(() => { process.env.MODE = 'client'; });

    clientCommand.command('store <path>')
        .description('store a file/folder')
        .action((path) => {
            process.env.MODE = 'client';
            process.env.SUBMODE = 'store';
            process.env.STORE_PATH = path;
        });
    clientCommand.command('makemigration')
        .description('[for developers] create a migration file from models')
        .action(() => {
            process.env.MODE = 'client';
            process.env.SUBMODE = 'makemigration';
        });
    clientCommand.command('migrate')
        .description('[for developers] migrate the database')
        .action(() => {
            process.env.MODE = 'client';
            process.env.SUBMODE = 'migrate';
        });
    clientCommand.command('transferpass <address>')
        .description('transfer the pass to the given address')
        .action((address) => {
            process.env.MODE = 'client';
            process.env.SUBMODE = 'transferpass';
            process.env.TRANSFERPASS_ADDRESS = address;
        });

    const providerCommand = program.command('provider');
    providerCommand
        .description('start as a provider')
        .action(() => { process.env.MODE = 'provider'; });
    providerCommand.command('makemigration')
        .description('[for developers] create a migration file from models')
        .action(() => {
            process.env.MODE = 'provider';
            process.env.SUBMODE = 'makemigration';
        });
    providerCommand.command('migrate')
        .description('[for developers] migrate the database')
        .action(() => {
            process.env.MODE = 'provider';
            process.env.SUBMODE = 'migrate';
        });
    providerCommand.command('transferpass <address>')
        .description('transfer the pass to the given address')
        .action((address) => {
            process.env.MODE = 'provider';
            process.env.SUBMODE = 'transferpass';
            process.env.TRANSFERPASS_ADDRESS = address;
        });

    program.parse(process.argv);

    // Print version
    console.log("ArFleet v" + app.version);

    // Load config
    const config = require('./config');

    // Set datadir: CLI flag wins, otherwise the current mode's default.
    if (program.datadir) {
        process.env.DATADIR = utils.resolveHome(program.datadir);
    } else {
        process.env.DATADIR = utils.resolveHome(config[process.env.MODE].defaultDatadir);
    }
    // Create if doesn't exist
    utils.mkdirp(process.env.DATADIR);

    // Make migration mode
    if (process.env.SUBMODE === 'makemigration') {
        const { makeMigration } = require('./db/makemigration');
        makeMigration();
        process.exit(0);
    }

    // Migrate mode
    if (process.env.SUBMODE === 'migrate') {
        const { migrate } = require('./db/migrate');
        await migrate();
        console.log("Migration done.");
        process.exit(0);
    }

    // Migrate every time anyway
    const { migrate } = require('./db/migrate');
    await migrate();

    // One-shot store mode exits before wallet/API startup.
    if (process.env.MODE === 'client' && process.env.SUBMODE === 'store') {
        const cmd = require('./cmd');
        await cmd.client_store(process.env.STORE_PATH);
        process.exit(0);
    }

    // Init wallet
    const { initWallet } = require('./wallet');
    const wallet = await initWallet();

    // Transfer pass mode: interactive confirmation, then transfer via AO.
    if (process.env.SUBMODE === 'transferpass') {
        try {
            const { getAoInstance } = require('./arweave/ao');
            const ao = getAoInstance({ wallet });

            if (!process.env.TRANSFERPASS_ADDRESS || !process.env.TRANSFERPASS_ADDRESS.length) {
                throw new Error("address is not given for transferpass");
            }

            await checkPasses(true);
            const ourAddress = await wallet.getAddress();
            const has = await hasPass(ourAddress);
            if (!has) {
                throw new Error("You don't have a pass to transfer");
            }

            console.log("This will transfer pass to", process.env.TRANSFERPASS_ADDRESS);
            console.log("Please confirm by typing 'yes'");

            const rl = readline.createInterface({
                input: process.stdin,
                output: process.stdout
            });

            rl.question('Confirm transfer: ', async (answer) => {
                if (answer === 'yes') {
                    const result = await ao.transferPass(process.env.TRANSFERPASS_ADDRESS);
                    console.log("Transfer result:", result);
                } else {
                    console.log("Transfer cancelled.");
                }
                rl.close();
                process.exit(0);
            });

            // Keep the process alive for the readline callback above.
            return;

        } catch(e) {
            console.error(e);
            process.exit(1);
        }
    }

    // Start API
    const { startApi } = require('./api');
    await startApi();

    // Start client/provider (cases braced so the const declarations don't
    // share one switch-wide scope).
    switch(process.env.MODE) {
        case 'client': {
            const getClientInstance = require('./client');
            getClientInstance({ wallet });
            break;
        }

        case 'provider': {
            const getProviderInstance = require('./provider');
            getProviderInstance({ wallet });
            break;
        }
    }
})();
--------------------------------------------------------------------------------
/backend/src/arweave/ao.js:
--------------------------------------------------------------------------------
const config = require('../config');

// Scheduler address used when spawning new AO processes.
const AOScheduler = config.aoScheduler;

const { connect, createDataItemSigner, result } = require("@permaweb/aoconnect");
const axios = require('axios');

// Shared aoconnect client, configured once at module load.
const connection = connect(config.aoConfig);

// Maximum retries for a failed send/result-fetch before the error propagates.
const MAX_ATTEMPTS = 100;
11 |
/**
 * Wrapper around @permaweb/aoconnect for sending signed messages to AO
 * processes and reading back their results. Constructed once through
 * getAoInstance() with { wallet }.
 */
class AOClient {
    constructor({ wallet }) {
        this.wallet = wallet;
        // Data-item signer built from the wallet's JWK private key.
        this.signer = createDataItemSigner(JSON.parse(wallet.readPrivateKey()));
    }

    /**
     * Fetch the result of a previously sent message. Retries immediately
     * (no backoff) on any error, up to MAX_ATTEMPTS times, then rethrows.
     */
    async getResult(process_id, message, attempt = 0) {
        try {
            if (!attempt) attempt = 0;

            const resdata = await result({
                process: process_id,
                message: message,
            });
            return resdata;
        } catch (e) {
            if (attempt > MAX_ATTEMPTS) {
                throw e;
            } else {
                console.log("Retrying...");
                return this.getResult(process_id, message, attempt + 1);
            }
        }
    }

    /** Convenience wrapper: JSON-encode `data` and delegate to sendAction. */
    async sendActionJSON(process_id, action, data, tags = {}, attempt = 0) {
        return await this.sendAction(process_id, action, JSON.stringify(data), tags, attempt);
    }

    /**
     * Send a tagged message to an AO process and extract its reply.
     * Returns the first message's Data when present, otherwise falls back to
     * Output.json / Output.data.output, otherwise null. Retries the whole
     * send+read up to MAX_ATTEMPTS times on error.
     */
    async sendAction(process_id, action, data, tags = {}, attempt = 0) {
        try {
            if (!attempt) attempt = 0;

            console.log("sendAction", { action, data, tags });

            let t = [
                { name: "Action", value: action },
                { name: "Target", value: process_id }
            ];

            for (let key in tags) {
                t.push({ name: key, value: tags[key] });
            }

            const res = await connection.message({
                process: process_id,
                signer: this.signer,
                tags: t,
                data: data,
            });

            console.log({ res });

            const resdata = await this.getResult(process_id, res);
            console.log(resdata);

            if (resdata["Messages"] && resdata["Messages"].length > 0 && resdata["Messages"][0].Data) {
                const result = resdata["Messages"][0].Data;
                return result;
            } else {
                // Try Output.data
                if (resdata.Output && resdata.Output.data) {
                    if (resdata.Output.json && resdata.Output.json !== 'undefined') {
                        return JSON.parse(resdata.Output.json);
                    } else {
                        return resdata.Output.data.output;
                    }
                }
                console.log("Returning null!!!");
                console.log("resdata", resdata);
                return null;
            }

            // return resdata;
        } catch (e) {
            if (attempt > MAX_ATTEMPTS) {
                throw e;
            } else {
                console.error(e);
                console.log("Retrying action...");
                return this.sendAction(process_id, action, data, tags, attempt + 1);
            }
        }
    }

    // NOTE(review): this looks broken — sendAction's signature is
    // (process_id, action, data, ...), so "Eval" is used as the process id
    // and "Inbox" as the action with no data; and sendAction never returns
    // the raw result object, so `.Output` below would be undefined. Confirm
    // whether this method is dead code before relying on it.
    async getInbox() {
        const resdata = await this.sendAction("Eval", "Inbox");
        const inbox = resdata.Output.data;
        // return inbox;
        const json = inbox.json;
        console.log({ json });
        return json;
    }

    /**
     * Spawn a new AO process from the configured aos module, then Eval the
     * given Lua source inside it. Returns the new process id.
     */
    async spawn(source_lua, tags=[]) {
        const res = await connection.spawn({
            module: config.aosModule,
            scheduler: AOScheduler,
            signer: this.signer,
            tags,
        });

        // - source code
        await this.sendAction(res, "Eval", source_lua);

        return res;
    }

    /** Transfer `amount` of `token` to address `to` (Quantity sent as string). */
    async sendToken(token, to, amount) {
        const res = await this.sendAction(token, "Transfer", "", { Recipient: to, Quantity: amount.toString() });
        return res;
    }

    /** Fetch and JSON-parse a process's state via its GetState handler. */
    async getState(process_id) {
        const ret = await this.sendAction(process_id, "GetState", "");
        try {
            return JSON.parse(ret);
        } catch(e) {
            console.error("Error parsing state", e);
            console.error("ret", ret);
            throw e;
        }
    }

    /**
     * Read-only call through the compute unit's /dry-run endpoint (no message
     * is persisted, nothing is signed — Id/Owner are dummy placeholders).
     * Returns the JSON-parsed Data of the first reply message.
     */
    async dryRun(process_id, action, data = "{}", tags = {}) {
        const url = `${config.aoConfig.CU_URL}/dry-run?process-id=${process_id}`;

        const tagsToSend = [];
        for (const key in tags) {
            tagsToSend.push({ name: key, value: tags[key] });
        }
        tagsToSend.push({ name: "Action", value: action });
        tagsToSend.push({ name: "Data-Protocol", value: "ao" });
        tagsToSend.push({ name: "Type", value: "Message" });
        tagsToSend.push({ name: "Variant", value: "ao.TN.1" });

        const body = {
            Id: "1234",
            Target: process_id,
            Owner: "1234",
            Anchor: "0",
            Data: data,
            Tags: tagsToSend
        };

        const response = await axios.post(url, body, {
            headers: {
                "accept": "*/*",
                "accept-language": "en-US,en;q=0.9,ru;q=0.8",
                "content-type": "application/json",
                "priority": "u=1, i",
                "sec-ch-ua": "\"Chromium\";v=\"128\", \"Not;A=Brand\";v=\"24\", \"Google Chrome\";v=\"128\"",
                "sec-ch-ua-mobile": "?0",
                "sec-ch-ua-platform": "\"macOS\"",
                "sec-fetch-dest": "empty",
                "sec-fetch-mode": "cors",
                "sec-fetch-site": "cross-site",
                "Referer": "https://bazar.arweave.dev/",
                "Referrer-Policy": "strict-origin-when-cross-origin"
            }
        });

        const returned = JSON.parse(response.data.Messages[0].Data);
        // console.log({ returned });
        return returned;
    }

    /**
     * Query a token balance via dry-run and scale it down by `decimals`.
     * Throws if the process doesn't reply with a number.
     */
    async getTokenBalance(token, decimals, recipient) {
        const res = await this.dryRun(token, "Balance", "{}", {
            "Recipient": recipient,
        });

        // if res is a Number
        if (typeof res === 'number') {
            return res / Math.pow(10, decimals);
        } else {
            throw new Error("Invalid response from dryRun");
        }
    }

    /** Transfer one pass token (config.passes.address) to `address`. */
    async transferPass(address) {
        const res = await this.sendAction(config.passes.address, "Transfer", "{}", {
            "Quantity": "1",
            "Recipient": address,
        });
        return res;
    }
}
200 |
// Module-level singleton holding the one shared AOClient.
let aoInstance;

/**
 * Lazily create and return the shared AOClient. The first call must supply
 * the constructor state ({ wallet }); subsequent calls may omit it.
 * @param {object|null} initialState
 * @returns {AOClient}
 * @throws {Error} when called before initialization without a state
 */
function getAoInstance(initialState = null) {
    if (aoInstance) {
        return aoInstance;
    }
    if (!initialState) {
        throw new Error("AOClient is not initialized with a state");
    }
    aoInstance = new AOClient(initialState);
    return aoInstance;
}
211 |
212 | module.exports = { getAoInstance };
--------------------------------------------------------------------------------
/lua/libs/base64.lua:
--------------------------------------------------------------------------------
1 | --[[
2 |
3 | base64 -- v1.5.3 public domain Lua base64 encoder/decoder
4 | no warranty implied; use at your own risk
5 |
6 | Needs bit32.extract function. If not present it's implemented using BitOp
7 | or Lua 5.3 native bit operators. For Lua 5.1 fallbacks to pure Lua
8 | implementation inspired by Rici Lake's post:
9 | http://ricilake.blogspot.co.uk/2007/10/iterating-bits-in-lua.html
10 |
11 | author: Ilya Kolbin (iskolbin@gmail.com)
12 | url: github.com/iskolbin/lbase64
13 |
14 | COMPATIBILITY
15 |
16 | Lua 5.1+, LuaJIT
17 |
18 | LICENSE
19 |
20 | See end of file for license information.
21 |
22 | --]]
23 |
24 |
local base64 = {}

-- bit32.extract(v, from, width): return `width` bits of v starting at bit
-- `from`. Native on Lua 5.2 (or 5.3 compatibility mode); otherwise pick the
-- best available polyfill below.
local extract = _G.bit32 and _G.bit32.extract -- Lua 5.2/Lua 5.3 in compatibility mode
if not extract then
	if _G.bit then -- LuaJIT
		local shl, shr, band = _G.bit.lshift, _G.bit.rshift, _G.bit.band
		extract = function( v, from, width )
			return band( shr( v, from ), shl( 1, width ) - 1 )
		end
	elseif _G._VERSION == "Lua 5.1" then
		-- Pure-Lua fallback: test each bit in range via modular arithmetic.
		extract = function( v, from, width )
			local w = 0
			local flag = 2^from
			for i = 0, width-1 do
				local flag2 = flag + flag
				if v % flag2 >= flag then
					w = w + 2^i
				end
				flag = flag2
			end
			return w
		end
	else -- Lua 5.3+
		-- Compiled through load() so the native bitwise operators don't
		-- break parsing on interpreters that lack them.
		extract = load[[return function( v, from, width )
			return ( v >> from ) & ((1 << width) - 1)
		end]]()
	end
end
53 |
54 |
-- Build a code(0..64) -> byte table for the 64 base64 digits plus padding.
-- s62/s63/spad override '+', '/', '=' for URL-safe or custom alphabets.
function base64.makeencoder( s62, s63, spad )
	local alphabet = {[0]='A','B','C','D','E','F','G','H','I','J',
		'K','L','M','N','O','P','Q','R','S','T','U','V','W','X','Y',
		'Z','a','b','c','d','e','f','g','h','i','j','k','l','m','n',
		'o','p','q','r','s','t','u','v','w','x','y','z','0','1','2',
		'3','4','5','6','7','8','9',s62 or '+',s63 or'/',spad or'='}
	local encoder = {}
	for code, ch in pairs( alphabet ) do
		encoder[code] = ch:byte()
	end
	return encoder
end
66 |
-- Build the inverse byte -> code table by flipping makeencoder's output.
function base64.makedecoder( s62, s63, spad )
	local encoder = base64.makeencoder( s62, s63, spad )
	local decoder = {}
	for code, byte in pairs( encoder ) do
		decoder[byte] = code
	end
	return decoder
end
74 |
-- Standard-alphabet tables, built once and reused as defaults.
local DEFAULT_ENCODER = base64.makeencoder()
local DEFAULT_DECODER = base64.makedecoder()

-- Hot-loop locals to avoid repeated global/table lookups.
local char, concat = string.char, table.concat
79 |
-- Encode str to base64. `encoder` defaults to the standard alphabet;
-- `usecaching` memoizes encoded 3-byte groups (useful for repetitive input).
function base64.encode( str, encoder, usecaching )
	encoder = encoder or DEFAULT_ENCODER
	local t, k, n = {}, 1, #str
	local lastn = n % 3
	local cache = {}
	-- Main loop: pack 3 bytes into a 24-bit value, emit four 6-bit digits.
	for i = 1, n-lastn, 3 do
		local a, b, c = str:byte( i, i+2 )
		local v = a*0x10000 + b*0x100 + c
		local s
		if usecaching then
			s = cache[v]
			if not s then
				s = char(encoder[extract(v,18,6)], encoder[extract(v,12,6)], encoder[extract(v,6,6)], encoder[extract(v,0,6)])
				cache[v] = s
			end
		else
			s = char(encoder[extract(v,18,6)], encoder[extract(v,12,6)], encoder[extract(v,6,6)], encoder[extract(v,0,6)])
		end
		t[k] = s
		k = k + 1
	end
	-- Tail: 1 or 2 leftover bytes are completed with padding (encoder[64]).
	if lastn == 2 then
		local a, b = str:byte( n-1, n )
		local v = a*0x10000 + b*0x100
		t[k] = char(encoder[extract(v,18,6)], encoder[extract(v,12,6)], encoder[extract(v,6,6)], encoder[64])
	elseif lastn == 1 then
		local v = str:byte( n )*0x10000
		t[k] = char(encoder[extract(v,18,6)], encoder[extract(v,12,6)], encoder[64], encoder[64])
	end
	return concat( t )
end
111 |
-- Decode a base64 string. Characters outside the decoder's alphabet are
-- stripped first; `usecaching` memoizes decoded 4-char groups.
function base64.decode( b64, decoder, usecaching )
	decoder = decoder or DEFAULT_DECODER
	local pattern = '[^%w%+%/%=]'
	-- NOTE(review): after the default assignment above, `decoder` is always
	-- truthy, so this branch always runs and rebuilds the strip pattern from
	-- the decoder's own 62nd/63rd characters.
	if decoder then
		local s62, s63
		for charcode, b64code in pairs( decoder ) do
			if b64code == 62 then s62 = charcode
			elseif b64code == 63 then s63 = charcode
			end
		end
		pattern = ('[^%%w%%%s%%%s%%=]'):format( char(s62), char(s63) )
	end
	b64 = b64:gsub( pattern, '' )
	local cache = usecaching and {}
	local t, k = {}, 1
	local n = #b64
	local padding = b64:sub(-2) == '==' and 2 or b64:sub(-1) == '=' and 1 or 0
	-- Main loop: four 6-bit digits -> one 24-bit value -> three bytes.
	-- The final (padded) group, if any, is handled separately below.
	for i = 1, padding > 0 and n-4 or n, 4 do
		local a, b, c, d = b64:byte( i, i+3 )
		local s
		if usecaching then
			local v0 = a*0x1000000 + b*0x10000 + c*0x100 + d
			s = cache[v0]
			if not s then
				local v = decoder[a]*0x40000 + decoder[b]*0x1000 + decoder[c]*0x40 + decoder[d]
				s = char( extract(v,16,8), extract(v,8,8), extract(v,0,8))
				cache[v0] = s
			end
		else
			local v = decoder[a]*0x40000 + decoder[b]*0x1000 + decoder[c]*0x40 + decoder[d]
			s = char( extract(v,16,8), extract(v,8,8), extract(v,0,8))
		end
		t[k] = s
		k = k + 1
	end
	-- Padded tail: '=' yields two bytes, '==' yields one.
	if padding == 1 then
		local a, b, c = b64:byte( n-3, n-1 )
		local v = decoder[a]*0x40000 + decoder[b]*0x1000 + decoder[c]*0x40
		t[k] = char( extract(v,16,8), extract(v,8,8))
	elseif padding == 2 then
		local a, b = b64:byte( n-3, n-2 )
		local v = decoder[a]*0x40000 + decoder[b]*0x1000
		t[k] = char( extract(v,16,8))
	end
	return concat( t )
end
158 |
159 | --[[
160 | ------------------------------------------------------------------------------
161 | This software is available under 2 licenses -- choose whichever you prefer.
162 | ------------------------------------------------------------------------------
163 | ALTERNATIVE A - MIT License
164 | Copyright (c) 2018 Ilya Kolbin
165 | Permission is hereby granted, free of charge, to any person obtaining a copy of
166 | this software and associated documentation files (the "Software"), to deal in
167 | the Software without restriction, including without limitation the rights to
168 | use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
169 | of the Software, and to permit persons to whom the Software is furnished to do
170 | so, subject to the following conditions:
171 | The above copyright notice and this permission notice shall be included in all
172 | copies or substantial portions of the Software.
173 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
174 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
175 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
176 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
177 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
178 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
179 | SOFTWARE.
180 | ------------------------------------------------------------------------------
181 | ALTERNATIVE B - Public Domain (www.unlicense.org)
182 | This is free and unencumbered software released into the public domain.
183 | Anyone is free to copy, modify, publish, use, compile, sell, or distribute this
184 | software, either in source code form or as a compiled binary, for any purpose,
185 | commercial or non-commercial, and by any means.
186 | In jurisdictions that recognize copyright laws, the author or authors of this
187 | software dedicate any and all copyright interest in the software to the public
188 | domain. We make this dedication for the benefit of the public at large and to
189 | the detriment of our heirs and successors. We intend this dedication to be an
190 | overt act of relinquishment in perpetuity of all present and future rights to
191 | this software under copyright law.
192 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
193 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
194 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
195 | AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
196 | ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
197 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
198 | ------------------------------------------------------------------------------
199 | --]]
--------------------------------------------------------------------------------
/backend/src/utils/index.js:
--------------------------------------------------------------------------------
1 | const nodepath = require('path');
2 | const os = require('os');
3 | const config = require('../config');
4 | const { color } = require('./color');
5 |
6 | module.exports = {
    // Override the process-wide data directory, expanding a leading '~'.
    setDataDir: function(path) {
        process.env.DATADIR = this.resolveHome(path);
    },
10 | getDatadir: function(path) {
11 | const datadir = process.env.DATADIR;
12 | return path ? nodepath.join(datadir, path) : datadir;
13 | },
    // Current run mode ('client' or 'provider'), as set by the CLI.
    getMode: function() {
        return process.env.MODE;
    },
17 | resolveHome: function(filepath) {
18 | if (filepath[0] === '~') {
19 | return nodepath.join(process.env.HOME || os.homedir(), filepath.slice(1));
20 | }
21 | return filepath;
22 | },
    // Repository root, derived from the main entry script's location
    // (assumes the entry point lives two levels below the root — e.g.
    // backend/src/index.js; TODO confirm for other launch paths).
    getProjectDir: function() {
        return nodepath.join(nodepath.dirname(require.main.filename), '..', '..');
    },
    // <project root>/backend/resources
    getResourcesDir: function() {
        return nodepath.join(this.getProjectDir(), 'backend', 'resources');
    },
    // <project root>/backend/resources/public
    getPublicDir: function() {
        return nodepath.join(this.getResourcesDir(), 'public');
    },
32 | getModeConfig: function() {
33 | const mode = this.getMode();
34 | return config[mode];
35 | },
36 | hashFn: function(buf) {
37 | if (!Buffer.isBuffer(buf)) {
38 | throw new Error('Expected a buffer');
39 | }
40 |
41 | const crypto = require('crypto');
42 | const hash = crypto.createHash('sha256');
43 | hash.update(buf);
44 | return hash.digest();
45 | },
    // SHA-256 digest of a Buffer as a lowercase hex string.
    hashFnHex: function(buf) {
        return this.hashFn(buf).toString('hex');
    },
49 | merkleDerive: function(values, digestFn, initial_iteration) {
50 | // This is a modified version of https://www.npmjs.com/package/merkle-lib
51 | // Modified to defend merkle trees from second preimage attack
52 | const length = values.length;
53 | const results = [];
54 |
55 | for (let i = 0; i < length; i += 2) {
56 | const left = values[i];
57 | const right = i + 1 === length ? left : values[i + 1];
58 | const data = initial_iteration
59 | ? Buffer.concat([Buffer.from([0x00]), left, right])
60 | : Buffer.concat([left, right]);
61 |
62 | results.push(digestFn(data));
63 | }
64 |
65 | return results;
66 | },
67 | merkle: function(values, digestFn) {
68 | if (!Array.isArray(values)) throw TypeError('Expected values Array');
69 | if (typeof digestFn !== 'function') throw TypeError('Expected digest Function');
70 |
71 | // if (values.length === 1) return values.concat() // We don't do this because we would mess up format length
72 |
73 | const levels = [values];
74 | let level = values;
75 | let initial_iteration = true;
76 |
77 | do {
78 | level = this.merkleDerive(level, digestFn, initial_iteration);
79 | console.log('level', level);
80 | levels.push(level);
81 | initial_iteration = false;
82 | } while (level.length > 1);
83 |
84 | return [...levels].flat();
85 | },
86 | merkleDeriveFull: function(values, digestFn, initial_iteration) {
87 | // This is a modified version of https://www.npmjs.com/package/merkle-lib
88 | // Modified to defend merkle trees from second preimage attack
89 | const length = values.length;
90 | const results = [];
91 |
92 | for (let i = 0; i < length; i += 2) {
93 | const left = values[i];
94 | const right = i + 1 === length ? left : values[i + 1];
95 | const data = initial_iteration
96 | ? Buffer.concat([Buffer.from([0x00]), left.value, right.value])
97 | : Buffer.concat([left.value, right.value]);
98 |
99 | const node = {
100 | "value": digestFn(data),
101 | "left": left,
102 | "right": right
103 | }
104 |
105 | results.push(node);
106 | }
107 |
108 | return results;
109 | },
110 | merkleFull: function(valuesBin, digestFn) {
111 | if (!Array.isArray(valuesBin)) throw TypeError('Expected values Array');
112 | if (typeof digestFn !== 'function') throw TypeError('Expected digest Function');
113 |
114 | // if (values.length === 1) return values.concat() // We don't do this because we would mess up format length
115 |
116 | let values = [];
117 | for (let i = 0; i < valuesBin.length; i++) {
118 | values.push({"value": valuesBin[i], "left": null, "right": null});
119 | }
120 |
121 | const levels = [values];
122 | let level = values;
123 | let initial_iteration = true;
124 |
125 | do {
126 | level = this.merkleDeriveFull(level, digestFn, initial_iteration);
127 | // console.log('level', level);
128 | levels.push(level);
129 | initial_iteration = false;
130 | } while (level.length > 1);
131 |
132 | // verify that only one is left
133 | if (level.length !== 1) {
134 | throw new Error('Merkle tree is not valid');
135 | }
136 |
137 | return level[0];
138 | },
139 | merkleFullBinToHex: function(node) {
140 | return {
141 | "value": node.value.toString('hex'),
142 | "left": node.left ? this.merkleFullBinToHex(node.left) : null,
143 | "right": node.right ? this.merkleFullBinToHex(node.right) : null
144 | }
145 | },
146 | printTree: function(tree, level=0) {
147 | let result = "";
148 | for (let i = 0; i < level; i++) {
149 | result += " ";
150 | }
151 | result += tree.value + "\n";
152 | if (tree.left) {
153 | result += this.printTree(tree.left, level + 1);
154 | } else {
155 | for (let i = 0; i < level; i++) {
156 | result += " ";
157 | }
158 | result += " null\n";
159 | }
160 | if (tree.right) {
161 | result += this.printTree(tree.right, level + 1);
162 | } else {
163 | for (let i = 0; i < level; i++) {
164 | result += " ";
165 | }
166 | result += " null\n";
167 | }
168 | return result;
169 | },
170 | normalizeHeaders(headers) {
171 | const normalized = {};
172 | for (const key in headers) {
173 | normalized[key.toLowerCase()] = headers[key];
174 | }
175 | return normalized;
176 | },
177 | mkdirp: function(path) {
178 | const fs = require('fs');
179 | if (!fs.existsSync(path)) {
180 | fs.mkdirSync(path, { recursive: true });
181 | }
182 | },
183 | myExternalIP: async function() {
184 | const services = ['https://ifconfig.me', 'https://api.ipify.org', 'https://ipinfo.io/ip'];
185 | let lastError = null;
186 | for (let service of services) {
187 | const axios = require('axios');
188 | try {
189 | const response = await axios.get(service);
190 | return response.data;
191 | } catch (e) {
192 | lastError = e;
193 | continue;
194 | }
195 | }
196 | throw lastError;
197 | },
198 | xorBuffersInPlace: function(a, b) {
199 | var length = Math.min(a.length, b.length);
200 | for (var i = 0; i < length; ++i) {
201 | a[i] = a[i] ^ b[i];
202 | }
203 | return a;
204 | },
    outputWalletAddressAndBalance: async function(ao, address, token, decimals, symbol) {
        // Prints the wallet address and its balance of `token` to the console,
        // plus a red warning when the balance is zero or negative.
        // `ao` must expose getTokenBalance(token, decimals, address).
        // NOTE(review): `color` is not defined in this block -- presumably the
        // module-level import of utils/color; confirm it is in scope.
        console.log(color("Wallet address: " + address, "cyan"));
        const balance = await ao.getTokenBalance(token, decimals, address);
        console.log(color("Balance (Token "+token+"): " + balance + " " + symbol, "cyan"));

        if (balance <= 0) {
            console.log("");
            console.log(color("WARNING: You don't have any balance in your wallet. Please fund your wallet with some "+symbol+" to be able to create deals.", "red"));
            console.log("");
        }
    }
216 | }
--------------------------------------------------------------------------------
/lua/ArFleetDeal.lua:
--------------------------------------------------------------------------------
-- ArFleet Deal Blueprint
-- Version: Deal-0.0.2

local json = require("json")

-- Lifecycle states of a deal process.
StatusEnum = {Created = "Created", Activated = "Activated", Cancelled = "Cancelled", Expired = "Expired"}

-- Global deal state. The client patches these fields via Eval right after
-- spawning the process (see the client-side placement queue).
State = {
    Version = "Deal-0.0.2",

    Status = StatusEnum.Created,
    MerkleRoot = "",   -- hex merkle root of the stored (encrypted) data
    Client = "",       -- client address (pays the reward)
    Provider = "",     -- provider address (posts collateral, stores data)
    CreatedAt = 0,     -- unix seconds
    ExpiresAt = 0,     -- unix seconds

    RequiredReward = 0,   -- reward the client must deposit to activate
    ReceivedReward = 0,

    RequiredCollateral = 0,   -- collateral the provider must deposit to activate
    ReceivedCollateral = 0,
    SlashedCollateral = 0,    -- cumulative amount confiscated so far
    RemainingCollateral = 0,  -- collateral still at stake
    SlashedTimes = 0,

    VerificationEveryPeriod = 0,     -- seconds between verification windows
    VerificationResponsePeriod = 0,  -- seconds the provider has to respond
    Token = "",                      -- token process id used for payments

    NextVerification = 0,  -- unix seconds when the next window opens

    Challenge = "",  -- outstanding binary challenge ("" = none issued)

    Logs = {}
}
37 |
-- Minimal logging shim; currently just prints to the process output.
function Log(msg)
    print(msg)
end
41 |
-- Returns a string of `length` random '0'/'1' characters
-- (used as the merkle-path challenge).
function generate_random_binary_string(length)
    local bits = {}

    for i = 1, length do
        bits[i] = tostring(math.random(0, 1))
    end

    return table.concat(bits)
end
52 |
-- Schedules the next verification window and clears any pending challenge.
-- A `currentTimestamp` <= 0 means "no timing info" (used by penalty slashes
-- that are not time-based) and leaves the schedule untouched.
-- If the next window would start at/after the deal's expiry, the deal is
-- marked Expired and the provider is paid out reward + remaining collateral.
-- Returns true when the deal expired; nil otherwise.
function AdvanceNextVerification(currentTimestamp)
    if State.Status ~= StatusEnum.Activated then
        return
    end

    if currentTimestamp > 0 then
        State.NextVerification = currentTimestamp + State.VerificationEveryPeriod
        State.Challenge = ""

        -- Check expiration
        if State.NextVerification >= State.ExpiresAt then
            State.Status = StatusEnum.Expired

            -- Withdraw rewards + collateral
            Send({ Target = State.Token, Action = "Transfer", Recipient = State.Provider, Quantity = State.ReceivedReward + State.RemainingCollateral })

            return true
        end
    end
end
73 |
-- The Handle function must be defined before we use it
-- Registers an AO handler for messages whose "Action" tag equals `type`.
-- `fn` receives (msg, Data), where Data is msg.Data JSON-decoded, or nil
-- when decoding fails. A non-nil return value is replied back to the sender.
function Handle(type, fn)
    Handlers.add(
        type,
        Handlers.utils.hasMatchingTag("Action", type),
        function(msg)
            local Data = nil

            -- Best-effort JSON decode of the message body.
            local success, res = pcall(json.decode, msg.Data)
            if success then
                Data = res
            else
                -- error, leave it nil
            end

            local Result = fn(msg, Data)

            if Result == nil then
                return
            end
            Handlers.utils.reply(Result)(msg)
        end
    )
end
98 |
-- Token deposit notification. Accumulates the client's reward and the
-- provider's collateral; when both required amounts are met, the deal
-- becomes Activated and the first verification window is scheduled.
Handle("Credit-Notice", function(msg, Data)
    -- State.Logs[#State.Logs + 1] = json.encode(msg)

    -- Validate token
    if msg.From ~= State.Token then
        return
    end

    -- Ignore after it was already activated or cancelled
    if State.Status ~= StatusEnum.Created then
        return
    end

    -- todo: verify Target == ao.id

    -- Process the message based on the sender
    if msg.Sender == State.Client then
        -- NOTE(review): msg.Quantity may arrive as a string; Lua coerces it
        -- in arithmetic -- confirm against the token process's notice format.
        State.ReceivedReward = State.ReceivedReward + msg.Quantity
    elseif msg.Sender == State.Provider then
        State.ReceivedCollateral = State.ReceivedCollateral + msg.Quantity
    else
        return
    end

    -- Check if both collateral and reward conditions are met to activate
    if State.ReceivedCollateral >= State.RequiredCollateral and State.ReceivedReward >= State.RequiredReward then
        State.Status = StatusEnum.Activated
        State.RemainingCollateral = State.ReceivedCollateral
        -- Timestamp is divided by 1000 throughout this file, so presumably ms.
        State.NextVerification = (msg.Timestamp // 1000) + State.VerificationEveryPeriod
    end
end)
130 |
131 |
-- Client-initiated cancellation, only possible before activation.
-- Refunds whatever reward/collateral has been deposited so far.
Handle("Cancel", function(msg, Data)
    -- Verify that it's from the Client
    if msg.From ~= State.Client then
        return
    end

    -- Only in inactive state
    if State.Status ~= StatusEnum.Created then
        return
    end

    -- Send the funds back to the client
    if State.ReceivedReward > 0 then
        Send({ Target = State.Token, Action = "Transfer", Recipient = State.Client, Quantity = State.ReceivedReward })
    end

    -- Send the collateral back to the provider
    if State.ReceivedCollateral > 0 then
        Send({ Target = State.Token, Action = "Transfer", Recipient = State.Provider, Quantity = State.ReceivedCollateral })
    end

    -- Set the status to cancelled
    State.Status = StatusEnum.Cancelled
end)
156 |
-- Confiscates part of the remaining collateral as a penalty.
--
-- `currentTimestamp`: unix seconds of the observed violation, or a negative
-- sentinel (the handlers call Slash(-1) for an invalid proof), meaning
-- "slash exactly once, independent of timing".
--
-- Fix: with the sentinel, the old formula produced a large NEGATIVE
-- SlashTimes -- the slashing loop never ran (no collateral confiscated)
-- and State.SlashedTimes was silently DECREASED.
function Slash(currentTimestamp)
    local SlashTimes
    if currentTimestamp < 0 then
        -- Sentinel: a failed/invalid proof is a single slashing offence.
        SlashTimes = 1
    else
        -- Calculate how many times we should slash based on the delay
        SlashTimes = math.ceil((currentTimestamp - State.NextVerification) / (State.VerificationEveryPeriod + State.VerificationResponsePeriod))
        -- Never slash a negative number of times.
        if SlashTimes < 0 then
            SlashTimes = 0
        end
    end

    -- Iteratively slash half of the remaining collateral for each time it should be slashed
    local Slashed = 0
    for i = 1, SlashTimes do
        local ThisSlash = math.floor(State.RemainingCollateral / 2)
        State.RemainingCollateral = State.RemainingCollateral - ThisSlash
        Slashed = Slashed + ThisSlash
    end

    State.SlashedCollateral = State.SlashedCollateral + Slashed
    State.SlashedTimes = State.SlashedTimes + SlashTimes

    -- Move the challenge if it's a time-based slash
    AdvanceNextVerification(currentTimestamp)
end
175 |
176 |
-- Permissionless: anyone may trigger a time-based slash once the provider
-- has missed its verification window.
Handle("Slash", function(msg, Data)
    -- Anyone can send

    -- Only in active state
    if State.Status ~= StatusEnum.Activated then
        return
    end

    local currentTimestamp = (msg.Timestamp//1000)

    -- Too early?
    if currentTimestamp < State.NextVerification then
        return
    end

    -- Too late? (response window missed -> slash)
    if currentTimestamp > State.NextVerification + State.VerificationResponsePeriod then
        Slash(currentTimestamp)
    end
end)
197 |
198 |
-- Provider requests the current challenge inside its verification window.
-- Generates a fresh 256-bit binary challenge if none is outstanding;
-- repeated calls within the same window return the same challenge.
Handle("GetChallenge", function(msg, Data)
    -- Verify that it's from the Provider
    if msg.From ~= State.Provider then
        return
    end

    -- Only in active state
    if State.Status ~= StatusEnum.Activated then
        return "Error: Not activated"
    end

    local currentTimestamp = (msg.Timestamp//1000)

    -- Too early?
    if currentTimestamp < State.NextVerification then
        return "Error: Too early"
    end

    -- Too late? (asking after the window closed also costs collateral)
    if currentTimestamp > State.NextVerification + State.VerificationResponsePeriod then
        Slash(currentTimestamp)
        return "Error: Too late " .. State.NextVerification + State.VerificationResponsePeriod .. " // " .. currentTimestamp
    end

    -- Idempotent: return the outstanding challenge if one was already issued.
    if State.Challenge ~= "" then
        return State.Challenge
    end

    -- Let's generate the challenge
    State.Challenge = generate_random_binary_string(256)

    return State.Challenge
end)
232 |
-- Verifies the provider's merkle-path proof for the current challenge.
-- Data is expected to contain:
--   Challenge: echo of the challenge string being answered
--   Path:      array of {nodeHash, leftHash, rightHash} from root towards leaf
--   Leaf:      base64-encoded leaf data
-- An invalid proof triggers Slash(-1); a valid one advances the schedule.
-- NOTE(review): if Data is nil (non-JSON body), Data["Challenge"] will raise;
-- confirm the provider always sends JSON here.
Handle("SubmitChallenge", function(msg, Data)
    -- Verify that it's from the Provider
    if msg.From ~= State.Provider then
        return
    end

    -- Only in active state
    if State.Status ~= StatusEnum.Activated then
        return "Error: Not activated"
    end

    local currentTimestamp = (msg.Timestamp//1000)

    -- Too early?
    if currentTimestamp < State.NextVerification then
        return "Error: Too early"
    end

    -- Too late?
    if currentTimestamp > State.NextVerification + State.VerificationResponsePeriod then
        Slash(currentTimestamp)
        return "Error: Too late"
    end

    local ChallengeFromProvider = Data["Challenge"]
    if ChallengeFromProvider ~= State.Challenge then
        -- don't slash, maybe it was just out of sync
        return "Error: Challenge mismatch: " .. ChallengeFromProvider .. " != " .. State.Challenge
    end

    -- State.Logs[#State.Logs + 1] = json.encode(msg)
    -- State.Logs[#State.Logs + 1] = json.encode(msg.Data)

    local Path = Data["Path"]
    -- State.Logs[#State.Logs + 1] = json.encode(Path)

    -- State.Logs[#State.Logs + 1] = "Start"

    -- Walk through all elements of the path, according to the binary string State.Challenge
    local i = 1
    local ExpectedNext = State.MerkleRoot
    while true do
        local Elem = Path[i]

        if Elem == nil then
            break
        end

        -- Each path element is a 3-tuple: node hash, left child hash, right child hash.
        local ElemValue = Elem[1]
        local ElemLeft = Elem[2]
        local ElemRight = Elem[3]

        if ElemValue == nil then
            Slash(-1)
            return "Error: Path, i=" .. i .. ", ElemValue=nil"
        end

        -- The node hash must match what the previous level committed to.
        if ExpectedNext ~= ElemValue then
            Slash(-1)
            return "Error: Path, i=" .. i .. ", ExpectedNext=" .. ExpectedNext .. ", ElemValue=" .. ElemValue
        end

        -- The i-th challenge bit selects which child to descend into.
        local Direction = string.sub(State.Challenge, i, i)

        if Direction == nil then
            return "Error: Path, i=" .. i .. ", Direction=nil, State.Challenge=" .. State.Challenge -- todo: should we slash?
        end

        State.Logs[#State.Logs + 1] = json.encode({
            ["i"] = i, ["Direction"] = Direction, ["ElemValue"] = ElemValue, ["ElemLeft"] = ElemLeft, ["ElemRight"] = ElemRight, ["State.Challenge"] = State.Challenge
        })

        if Direction == "0" then
            if ElemLeft == nil then
                Slash(-1)
                return "Error: Path, i=" .. i .. ", Direction=0, ElemLeft=nil"
            end
            ExpectedNext = ElemLeft
        elseif Direction == "1" then
            if ElemRight == nil then
                Slash(-1)
                return "Error: Path, i=" .. i .. ", Direction=1, ElemRight=nil"
            end
            ExpectedNext = ElemRight
        else
            -- string.sub past the end of the challenge returns "" (path longer than challenge)
            return "Error: Something went wrong, Direction=" .. Direction
        end

        -- Verify the hashes
        local LeftData = HexToBytes(ElemLeft)
        local RightData = HexToBytes(ElemRight)
        local ExpectedHash = ElemValue
        local HashData = LeftData .. RightData

        -- If this is the last element, prepend zero byte
        -- (mirrors the 0x00 domain-separation byte the client prepends on the
        -- first merkle iteration -- see merkleDerive on the JS side)
        if Path[i + 1] == nil then
            HashData = string.char(0) .. HashData
        end

        local Hash = sha256(HashData)
        if Hash ~= ExpectedHash then
            Slash(-1)
            return "Error: Hash, i=" .. i .. ", ExpectedHash=" .. ExpectedHash .. ", Hash=" .. Hash
        end

        i = i + 1
    end

    -- And finally, we have ExpectedValue is the hash of the leaf
    local LeafData = base64.decode(Data["Leaf"])
    local LeafHash = sha256(LeafData)
    if ExpectedNext ~= LeafHash then
        Slash(-1)
        return "Error: Leaf, ExpectedNext=" .. ExpectedNext .. ", LeafHash=" .. LeafHash
    end

    -- Challenge successfully passed!
    AdvanceNextVerification(currentTimestamp)

    return "Success"
end)
354 |
-- Returns the full deal state as JSON (readable by anyone).
Handle("GetState", function(msg)
    return json.encode(State)
end)
358 |
359 | -- todo: withdraw collateral at the end when expires by provider
360 |
361 | -- todo: withdraw rewards + slashed collateral at the end when expires by client
362 |
--------------------------------------------------------------------------------
/backend/src/client/background/placementQueue.js:
--------------------------------------------------------------------------------
1 | const axios = require('axios');
2 | const Sequelize = require('sequelize');
3 | const { PLACEMENT_STATUS } = require('../../db/models/Placement');
4 | const { Assignment, Placement, AssignmentChunk, PlacementChunk } = require('../../db/models');
5 | const { BackgroundQueue } = require('../../utils/backgroundQueue');
6 | const utils = require('../../utils');
7 | const deal = require('../../arweave/deal');
8 | const ao = () => { return require('../../arweave/ao').getAoInstance(); }
9 | const getClientInstance = require('../../client');
10 | const config = require('../../config');
11 | const fs = require('fs');
12 |
class ProviderApi {
    /**
     * Thin HTTP client for a storage provider's command endpoint.
     * @param {string} connectionString - Base URL of the provider (e.g. "http://host:port").
     */
    constructor(connectionString) {
        this.connectionString = connectionString;
    }

    /**
     * POST a command to the provider's `/cmd/<command>` endpoint.
     * @param {string} command - Command name appended to the URL.
     * @param {object} data - JSON-serializable payload.
     * @param {object} [headers] - Extra HTTP headers. Fix: this parameter was
     *     previously accepted but silently ignored; it is now merged in
     *     (the ArFleet auth headers still win on conflict).
     * @returns {Promise<*>} The response body as parsed by axios.
     */
    async cmd(command, data, headers = {}) {
        const url = `${this.connectionString}/cmd/${command}`;
        console.log('Sending request to: ', url);
        // Named `requestConfig` to avoid shadowing the module-level `config` import.
        const requestConfig = {
            headers: {
                ...headers,
                'ArFleet-Address': getClientInstance().address,
                'ArFleet-Signature': 'todo' // todo 1
            }
        };
        const response = await axios.post(url, data, requestConfig);
        return response.data;
    }
}
31 |
32 | let placementQueue = new BackgroundQueue({
33 | REBOOT_INTERVAL: 5 * 1000,
34 | addCandidates: async () => {
35 | const candidates = await Placement.findAll({
36 | where: {
37 | status: {
38 | [Sequelize.Op.notIn]: [PLACEMENT_STATUS.UNAVAILABLE, PLACEMENT_STATUS.COMPLETED]
39 | }
40 | }
41 | });
42 | const ids = candidates.map(c => c.id);
43 | return ids;
44 | },
    /**
     * Drives a single placement through its state machine. Each invocation
     * handles exactly one `placement.status` transition; the queue keeps
     * re-processing the id until a terminal status is reached.
     * Any thrown error marks the placement FAILED with `error_was` recorded.
     * @param {string|number} placement_id - Primary key of the Placement row.
     */
    processCandidate: async (placement_id) => {
        console.log('Processing placement: ', placement_id);

        const placement = await Placement.findOrFail(placement_id);
        // console.log('Placement: ', placement);

        try {
            switch(placement.status) {
                // CREATED: ping the provider, negotiate reward/collateral,
                // and kick off chunk encryption.
                case PLACEMENT_STATUS.CREATED:
                {
                    // Let's try to connect
                    console.log('Trying to connect to provider: ', placement.provider_id);

                    const connectionStrings = placement.provider_connection_strings;
                    const connectionString = placement.getConnectionString();

                    const pApi = new ProviderApi(connectionString);

                    try {
                        const result = await pApi.cmd('ping', {});

                        console.log({result});

                        const assignment = await Assignment.findOrFail(placement.assignment_id);

                        if (result === 'pong') {
                            // todo: calculate reward and collateral
                            placement.required_reward = 500; // todo
                            placement.required_collateral = 1000; // todo
                            await placement.save();

                            // available, contact about the placement
                            const placementResult = await pApi.cmd('placement', {
                                placement_id: placement.id,
                                size: assignment.size,
                                chunks: assignment.chunk_count,
                                required_reward: placement.required_reward,
                                required_collateral: placement.required_collateral,
                                provider_id: placement.provider_id
                            });

                            console.log({placementResult});

                            if (placementResult === 'OK') {
                                // mark as approved
                                placement.status = PLACEMENT_STATUS.INITIALIZED;
                                await placement.save();

                                // start the encryption
                                await placement.startEncryption();

                                const assignmentChunks = await AssignmentChunk.allBy('assignment_id', assignment.id);
                                for (const assignmentChunk of assignmentChunks) {
                                    const originalData = fs.readFileSync(AssignmentChunk.getPath(assignmentChunk.chunk_id), null);

                                    // mark as encrypting
                                    // NOTE(review): this promise is not awaited -- some rows may
                                    // not exist yet when this case returns; confirm intended.
                                    PlacementChunk.findByIdOrCreate(placement.id + '_' + assignmentChunk.pos, {
                                        placement_id: placement.id,
                                        is_encrypted: false,
                                        is_sent: false,
                                        original_chunk_id: assignmentChunk.chunk_id,
                                        original_size: originalData.byteLength,
                                        pos: assignmentChunk.pos
                                    });
                                }
                            } else {
                                // mark as failed
                                placement.status = PLACEMENT_STATUS.UNAVAILABLE;
                                await placement.save();
                            }
                        }
                    } catch(e) {
                        // mark as failed
                        console.log('Placement Connection Error: ', e);
                        placement.status = PLACEMENT_STATUS.UNAVAILABLE;
                        await placement.save();
                    }
                    break;
                }

                // INITIALIZED: wait for background encryption to finish, then
                // build the merkle tree over the encrypted chunk hashes.
                case PLACEMENT_STATUS.INITIALIZED:
                {
                    // check if all chunks are encrypted
                    const notEncryptedCount = await PlacementChunk.count({
                        where: {
                            placement_id,
                            is_encrypted: false
                        }
                    });
                    if (notEncryptedCount === 0) {
                        // get all placement chunks, ordered by pos
                        const placementChunks = await PlacementChunk.findAll({
                            where: {
                                placement_id
                            },
                            order: [
                                ['pos', 'ASC']
                            ]
                        });

                        // calculate merkle tree
                        const chunkHashes = placementChunks.map(c => c.encrypted_chunk_id);
                        const chunkHashesBin = chunkHashes.map(h => Buffer.from(h, 'hex'));
                        const merkleTree = utils.merkle(chunkHashesBin, utils.hashFn);
                        const merkleTreeHex = merkleTree.map(h => h.toString('hex'));
                        // utils.merkle returns a flat, level-by-level array with the root last
                        const merkleRootHex = merkleTree[merkleTree.length - 1].toString('hex');
                        placement.merkle_root = merkleRootHex;
                        placement.merkle_tree = merkleTreeHex;

                        // mark as encrypted
                        placement.status = PLACEMENT_STATUS.ENCRYPTED;
                        await placement.save();
                    }
                    break;
                }

                // ENCRYPTED: spawn the AO deal process, seeding its State via
                // Lua lines (field names match lua/ArFleetDeal.lua).
                case PLACEMENT_STATUS.ENCRYPTED:
                {
                    const dealDuration = 1 * 365 * 24 * 60 * 60; // todo

                    // create process
                    const createdAtTimestamp = Math.ceil(placement.created_at.getTime() / 1000);
                    const lua_lines = [
                        "State.Provider = '" + placement.provider_id + "'",
                        "State.MerkleRoot = '" + placement.merkle_root + "'",
                        "State.Client = '" + getClientInstance().address + "'",
                        "State.Token = '" + config.defaultToken + "'",
                        "State.RequiredReward = " + placement.required_reward,
                        "State.ReceivedReward = 0",
                        "State.RequiredCollateral = " + placement.required_collateral,
                        "State.ReceivedCollateral = 0",
                        "State.VerificationEveryPeriod = 1200", // todo
                        "State.VerificationResponsePeriod = 10000", // todo
                        "State.CreatedAt = " + createdAtTimestamp + "",
                        "State.ExpiresAt = " + (createdAtTimestamp + dealDuration) + "",
                        "State.Status = StatusEnum.Created",
                    ];
                    const process_id = await deal.spawnDeal(lua_lines.join("\n"));
                    console.log('Process ID: ', process_id);

                    console.log(await ao().sendAction(process_id, "Eval", "State"));

                    placement.process_id = process_id;
                    placement.status = PLACEMENT_STATUS.PROCESS_SPAWNED;
                    await placement.save();

                    break;
                }

                // PROCESS_SPAWNED: deposit the client's reward into the process.
                case PLACEMENT_STATUS.PROCESS_SPAWNED:
                {
                    // fund with the reward

                    console.log('Funding placement: ', placement.id);

                    // change the state before sending the action
                    placement.status = PLACEMENT_STATUS.FUNDED;
                    placement.is_funded = true;
                    await placement.save();

                    try {
                        await ao().sendToken(config.defaultToken, placement.process_id, placement.required_reward);
                        console.log('Token sent');
                    } catch(e) {
                        console.log('Funding Error: ', e);
                        placement.status = PLACEMENT_STATUS.FAILED; // todo: try to take the money out
                        placement.error_was = e.toString();
                        await placement.save();
                    }
                    break;
                }

                // FUNDED: ask the provider to accept the deal (merkle root,
                // chunk hash list, and the spawned process id).
                case PLACEMENT_STATUS.FUNDED:
                {
                    // Make the provider accept it
                    const pApi = new ProviderApi(placement.getConnectionString());
                    const placementChunks = await PlacementChunk.findAll({
                        where: {
                            placement_id,
                        },
                        order: [
                            ['pos', 'ASC']
                        ]
                    });
                    const chunkHashes = placementChunks.map(c => c.encrypted_chunk_id);

                    const acceptResult = await pApi.cmd('accept', {
                        placement_id: placement.id,
                        merkle_root: placement.merkle_root,
                        chunks: chunkHashes,
                        process_id: placement.process_id
                    });
                    console.log('Accept result: ', acceptResult);

                    if (acceptResult === 'OK') {
                        placement.status = PLACEMENT_STATUS.ACCEPTED;
                        await placement.save();
                    } else {
                        console.error('Accept failed: ', acceptResult);
                        placement.status = PLACEMENT_STATUS.FAILED;
                        placement.error_was = JSON.stringify(acceptResult);
                        await placement.save();
                    }
                    break;
                }

                // ACCEPTED: upload every not-yet-sent encrypted chunk; once
                // none remain, the placement is TRANSFERRED.
                case PLACEMENT_STATUS.ACCEPTED:
                {
                    const pApi = new ProviderApi(placement.getConnectionString());

                    // Transfer chunks
                    const placementChunks = await PlacementChunk.findAll({
                        where: {
                            placement_id,
                            is_sent: false
                        },
                        order: [
                            ['pos', 'ASC']
                        ]
                    });

                    if (placementChunks.length === 0) {
                        placement.status = PLACEMENT_STATUS.TRANSFERRED;
                        await placement.save();
                    }

                    for (const placementChunk of placementChunks) {
                        console.log('Transferring chunk: ', placementChunk);
                        const placementChunkPath = PlacementChunk.getPath(placementChunk.id);
                        const chunkData = fs.readFileSync(placementChunkPath);
                        const chunkDataB64 = chunkData.toString('base64'); // todo: replace with proper streaming/binary

                        const result = await pApi.cmd('transfer', {
                            placement_id: placement.id,
                            merkle_root: placement.merkle_root,
                            pos: placementChunk.pos,
                            chunk_data: chunkDataB64,
                            hash: placementChunk.encrypted_chunk_id,
                            original_size: placementChunk.original_size
                        });
                        console.log('Transfer result: ', result);

                        if (result === 'OK') {
                            placementChunk.is_sent = true;
                            await placementChunk.save();
                        } else {
                            console.error('Transfer failed: ', result);
                            placement.status = PLACEMENT_STATUS.FAILED;
                            placement.error_was = JSON.stringify(result);
                            await placement.save();
                            break;
                        }
                    }
                    break;
                }

                // TRANSFERRED: ask the provider to post collateral, then poll
                // the AO process until the deal reports Activated.
                case PLACEMENT_STATUS.TRANSFERRED:
                {
                    // Make the provider send the collateral
                    const pApi = new ProviderApi(placement.getConnectionString());
                    const result = await pApi.cmd('complete', {
                        placement_id: placement.id,
                        public_key: placement.public_key
                    });

                    if (result === 'OK') {
                        // verify
                        // todo: verify that collateral is there

                        // verify that it is now activated
                        const processState = await ao().getState(placement.process_id);
                        console.log('Process State: ', processState);

                        if (processState.Status !== 'Activated') {
                            // Allow some time to activate
                            const last_update = placement.updated_at.getTime();
                            const WAIT_FOR = 1 * 60; // seconds to keep retrying before failing
                            const now = new Date().getTime();
                            if (now - last_update > WAIT_FOR * 1000) {
                                console.error('Process not activated');
                                placement.status = PLACEMENT_STATUS.FAILED;
                                placement.error_was = 'Process not activated';
                                await placement.save();
                            } else {
                                console.log('Waiting for activation');
                                setTimeout(() => {
                                    placementQueue.add(placement.id);
                                }, 5000);
                            }
                        } else {
                            placement.status = PLACEMENT_STATUS.COMPLETED;
                            const assignment = await Assignment.findOrFail(placement.assignment_id);
                            assignment.achieved_redundancy++;
                            await placement.save();
                            await assignment.save();

                            // Print merkle tree
                            console.log(JSON.stringify(placement.merkle_tree));
                        }

                    } else {
                        console.error('Complete failed: ', result);
                        placement.status = PLACEMENT_STATUS.FAILED;
                        placement.error_was = JSON.stringify(result);
                        await placement.save();
                    }
                }
                // NOTE(review): no `break` at the end of the TRANSFERRED case --
                // it falls through to the (empty) default; harmless today, but
                // fragile if default ever gains code. Also, FAILED placements
                // are re-selected by addCandidates yet have no case here.

                default:
                    // todo
            } // end of switch(placement.status)
        } catch(e) {
            console.error('Error processing placement: ', e);
            placement.status = PLACEMENT_STATUS.FAILED;
            placement.error_was = e.toString();
            await placement.save();
        }
    }
}, 'PlacementQueue');
364 |
365 | module.exports = { placementQueue };
366 |
--------------------------------------------------------------------------------