├── .github └── workflows │ └── compile_mikupad.yml ├── .gitignore ├── LICENSE ├── README.md ├── compile.bat ├── compile.sh ├── mikupad.html ├── project └── package.json └── server ├── package.json ├── server.js ├── start.bat └── start.sh /.github/workflows/compile_mikupad.yml: -------------------------------------------------------------------------------- 1 | name: Release Mikupad 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | permissions: 9 | contents: write 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Checkout repository 17 | uses: actions/checkout@v4 18 | with: 19 | fetch-depth: 0 20 | ref: main 21 | 22 | - name: Set up Node.js 23 | uses: actions/setup-node@v4 24 | with: 25 | node-version: '20' 26 | 27 | - name: run compile.sh 28 | run: chmod +x compile.sh; ./compile.sh 29 | 30 | - name: Determine Tag and Build Names 31 | id: build_ids 32 | run: | 33 | # use number of commits as build ID 34 | 35 | BUILD_NUMBER="$(git rev-list --count HEAD)" 36 | echo "number=${BUILD_NUMBER}" >> $GITHUB_OUTPUT 37 | 38 | SHORT_HASH="$(git rev-parse --short=7 HEAD)" 39 | echo "buildName=Mikupad #${BUILD_NUMBER} [${SHORT_HASH}]" >> $GITHUB_OUTPUT 40 | 41 | echo "Build: $BUILD_NUMBER $SHORT_HASH" 42 | 43 | - name: Get Last Release Tag 44 | id: last_release 45 | run: | 46 | # find previous release to determine how many commits should be displayed in the changelog 47 | TAG=$(git tag --list 'release*' --sort=-v:refname | head -n 1) 48 | if [ -z "$TAG" ]; then 49 | echo "No release tag found" 50 | echo "tag=$(echo 'none')" >> $GITHUB_OUTPUT 51 | else 52 | TAG_NUMBER=$(echo $TAG | sed 's/release//') 53 | echo "Found release tag: $TAG" 54 | echo "tag=$TAG_NUMBER" >> $GITHUB_OUTPUT 55 | fi 56 | 57 | - name: Generate Changelog 58 | id: changelog 59 | run: | 60 | # if no previous release, don't dump the entire history 61 | if [ "${{ steps.last_release.outputs.tag }}" = "none" ]; then 62 | echo "mikudayo~" > CHANGELOG.txt 63 | else 64 | OLD_COMMIT="${{ steps.last_release.outputs.tag }}" 65 | NUM_COMMITS="${{ steps.build_ids.outputs.number }}" 66 | NEW_COMMITS="$(($NUM_COMMITS - $OLD_COMMIT))" 67 | echo "Generating changelog with $NEW_COMMITS commits starting from $OLD_COMMIT" 68 | 69 | # echo changelog 70 | echo "Generated $(date +'%Y-%m-%d %T %Z%z')" >> CHANGELOG.txt 71 | echo "\`\`\`" >> CHANGELOG.txt 72 | echo "$(git log --graph -n $NEW_COMMITS --oneline)" >> CHANGELOG.txt 73 | echo "\`\`\`" >> CHANGELOG.txt 74 | fi 75 | 76 | - name: Release 77 | uses: softprops/action-gh-release@v2 78 | with: 79 | tag_name: release${{ steps.build_ids.outputs.number }} 80 | name: ${{ steps.build_ids.outputs.buildName }} 81 | body_path: CHANGELOG.txt 82 | files: mikupad_compiled.html 83 | token: ${{ secrets.GITHUB_TOKEN }} 84 | 85 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | server/web-session-storage.db 2 | package-lock.json 3 | project/mikupad.html 4 | mikupad_compiled.html 5 | 6 | # Logs 7 | logs 8 | *.log 9 | npm-debug.log* 10 | yarn-debug.log* 11 | yarn-error.log* 12 | lerna-debug.log* 13 | .pnpm-debug.log* 14 | 15 | # Diagnostic reports (https://nodejs.org/api/report.html) 16 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 17 | 18 | # Runtime data 19 | pids 20 | *.pid 21 | *.seed 22 | *.pid.lock 23 | 24 | # Directory for instrumented libs generated by jscoverage/JSCover 25 | lib-cov 26 | 27 | # Coverage directory used by tools like istanbul 28 | 
coverage 29 | *.lcov 30 | 31 | # nyc test coverage 32 | .nyc_output 33 | 34 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 35 | .grunt 36 | 37 | # Bower dependency directory (https://bower.io/) 38 | bower_components 39 | 40 | # node-waf configuration 41 | .lock-wscript 42 | 43 | # Compiled binary addons (https://nodejs.org/api/addons.html) 44 | build/Release 45 | 46 | # Dependency directories 47 | node_modules/ 48 | jspm_packages/ 49 | 50 | # Snowpack dependency directory (https://snowpack.dev/) 51 | web_modules/ 52 | 53 | # TypeScript cache 54 | *.tsbuildinfo 55 | 56 | # Optional npm cache directory 57 | .npm 58 | 59 | # Optional eslint cache 60 | .eslintcache 61 | 62 | # Optional stylelint cache 63 | .stylelintcache 64 | 65 | # Microbundle cache 66 | .rpt2_cache/ 67 | .rts2_cache_cjs/ 68 | .rts2_cache_es/ 69 | .rts2_cache_umd/ 70 | 71 | # Optional REPL history 72 | .node_repl_history 73 | 74 | # Output of 'npm pack' 75 | *.tgz 76 | 77 | # Yarn Integrity file 78 | .yarn-integrity 79 | 80 | # dotenv environment variable files 81 | .env 82 | .env.development.local 83 | .env.test.local 84 | .env.production.local 85 | .env.local 86 | 87 | # parcel-bundler cache (https://parceljs.org/) 88 | .cache 89 | .parcel-cache 90 | 91 | # Next.js build output 92 | .next 93 | out 94 | 95 | # Nuxt.js build / generate output 96 | .nuxt 97 | dist 98 | 99 | # Gatsby files 100 | .cache/ 101 | # Comment in the public line in if your project uses Gatsby and not Next.js 102 | # https://nextjs.org/blog/next-9-1#public-directory-support 103 | # public 104 | 105 | # vuepress build output 106 | .vuepress/dist 107 | 108 | # vuepress v2.x temp and cache directory 109 | .temp 110 | .cache 111 | 112 | # Docusaurus cache and generated files 113 | .docusaurus 114 | 115 | # Serverless directories 116 | .serverless/ 117 | 118 | # FuseBox cache 119 | .fusebox/ 120 | 121 | # DynamoDB Local files 122 | .dynamodb/ 123 | 124 | # TernJS port file 125 | .tern-port 126 | 127 | # Stores VSCode versions used for testing VSCode extensions 128 | .vscode-test 129 | 130 | # yarn v2 131 | .yarn/cache 132 | .yarn/unplugged 133 | .yarn/build-state.yml 134 | .yarn/install-state.gz 135 | .pnp.* -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | CC0 1.0 Universal 2 | 3 | Statement of Purpose 4 | 5 | The laws of most jurisdictions throughout the world automatically confer 6 | exclusive Copyright and Related Rights (defined below) upon the creator and 7 | subsequent owner(s) (each and all, an "owner") of an original work of 8 | authorship and/or a database (each, a "Work"). 9 | 10 | Certain owners wish to permanently relinquish those rights to a Work for the 11 | purpose of contributing to a commons of creative, cultural and scientific 12 | works ("Commons") that the public can reliably and without fear of later 13 | claims of infringement build upon, modify, incorporate in other works, reuse 14 | and redistribute as freely as possible in any form whatsoever and for any 15 | purposes, including without limitation commercial purposes. These owners may 16 | contribute to the Commons to promote the ideal of a free culture and the 17 | further production of creative, cultural and scientific works, or to gain 18 | reputation or greater distribution for their Work in part through the use and 19 | efforts of others. 
20 | 21 | For these and/or other purposes and motivations, and without any expectation 22 | of additional consideration or compensation, the person associating CC0 with a 23 | Work (the "Affirmer"), to the extent that he or she is an owner of Copyright 24 | and Related Rights in the Work, voluntarily elects to apply CC0 to the Work 25 | and publicly distribute the Work under its terms, with knowledge of his or her 26 | Copyright and Related Rights in the Work and the meaning and intended legal 27 | effect of CC0 on those rights. 28 | 29 | 1. Copyright and Related Rights. A Work made available under CC0 may be 30 | protected by copyright and related or neighboring rights ("Copyright and 31 | Related Rights"). Copyright and Related Rights include, but are not limited 32 | to, the following: 33 | 34 | i. the right to reproduce, adapt, distribute, perform, display, communicate, 35 | and translate a Work; 36 | 37 | ii. moral rights retained by the original author(s) and/or performer(s); 38 | 39 | iii. publicity and privacy rights pertaining to a person's image or likeness 40 | depicted in a Work; 41 | 42 | iv. rights protecting against unfair competition in regards to a Work, 43 | subject to the limitations in paragraph 4(a), below; 44 | 45 | v. rights protecting the extraction, dissemination, use and reuse of data in 46 | a Work; 47 | 48 | vi. database rights (such as those arising under Directive 96/9/EC of the 49 | European Parliament and of the Council of 11 March 1996 on the legal 50 | protection of databases, and under any national implementation thereof, 51 | including any amended or successor version of such directive); and 52 | 53 | vii. other similar, equivalent or corresponding rights throughout the world 54 | based on applicable law or treaty, and any national implementations thereof. 55 | 56 | 2. Waiver. To the greatest extent permitted by, but not in contravention of, 57 | applicable law, Affirmer hereby overtly, fully, permanently, irrevocably and 58 | unconditionally waives, abandons, and surrenders all of Affirmer's Copyright 59 | and Related Rights and associated claims and causes of action, whether now 60 | known or unknown (including existing as well as future claims and causes of 61 | action), in the Work (i) in all territories worldwide, (ii) for the maximum 62 | duration provided by applicable law or treaty (including future time 63 | extensions), (iii) in any current or future medium and for any number of 64 | copies, and (iv) for any purpose whatsoever, including without limitation 65 | commercial, advertising or promotional purposes (the "Waiver"). Affirmer makes 66 | the Waiver for the benefit of each member of the public at large and to the 67 | detriment of Affirmer's heirs and successors, fully intending that such Waiver 68 | shall not be subject to revocation, rescission, cancellation, termination, or 69 | any other legal or equitable action to disrupt the quiet enjoyment of the Work 70 | by the public as contemplated by Affirmer's express Statement of Purpose. 71 | 72 | 3. Public License Fallback. Should any part of the Waiver for any reason be 73 | judged legally invalid or ineffective under applicable law, then the Waiver 74 | shall be preserved to the maximum extent permitted taking into account 75 | Affirmer's express Statement of Purpose. 
In addition, to the extent the Waiver 76 | is so judged Affirmer hereby grants to each affected person a royalty-free, 77 | non transferable, non sublicensable, non exclusive, irrevocable and 78 | unconditional license to exercise Affirmer's Copyright and Related Rights in 79 | the Work (i) in all territories worldwide, (ii) for the maximum duration 80 | provided by applicable law or treaty (including future time extensions), (iii) 81 | in any current or future medium and for any number of copies, and (iv) for any 82 | purpose whatsoever, including without limitation commercial, advertising or 83 | promotional purposes (the "License"). The License shall be deemed effective as 84 | of the date CC0 was applied by Affirmer to the Work. Should any part of the 85 | License for any reason be judged legally invalid or ineffective under 86 | applicable law, such partial invalidity or ineffectiveness shall not 87 | invalidate the remainder of the License, and in such case Affirmer hereby 88 | affirms that he or she will not (i) exercise any of his or her remaining 89 | Copyright and Related Rights in the Work or (ii) assert any associated claims 90 | and causes of action with respect to the Work, in either case contrary to 91 | Affirmer's express Statement of Purpose. 92 | 93 | 4. Limitations and Disclaimers. 94 | 95 | a. No trademark or patent rights held by Affirmer are waived, abandoned, 96 | surrendered, licensed or otherwise affected by this document. 97 | 98 | b. Affirmer offers the Work as-is and makes no representations or warranties 99 | of any kind concerning the Work, express, implied, statutory or otherwise, 100 | including without limitation warranties of title, merchantability, fitness 101 | for a particular purpose, non infringement, or the absence of latent or 102 | other defects, accuracy, or the present or absence of errors, whether or not 103 | discoverable, all to the greatest extent permissible under applicable law. 104 | 105 | c. Affirmer disclaims responsibility for clearing rights of other persons 106 | that may apply to the Work or any use thereof, including without limitation 107 | any person's Copyright and Related Rights in the Work. Further, Affirmer 108 | disclaims responsibility for obtaining any necessary consents, permissions 109 | or other rights required for any use of the Work. 110 | 111 | d. Affirmer understands and acknowledges that Creative Commons is not a 112 | party to this document and has no duty or obligation with respect to this 113 | CC0 or use of the Work. 114 | 115 | For more information, please see 116 | 117 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # mikupad 2 | 3 | **mikupad** is a user-friendly, browser-based interface for interacting with language models. It's built with ReactJS and supports various text generation backends, all within a single HTML file. 4 | 5 | ![image](https://github.com/user-attachments/assets/4c5fa8ff-5926-4a4b-807b-34e4f36a032c) 6 | 7 | ## Features 8 | 9 | * **Multiple Backends**: Supports **llama.cpp**, **koboldcpp**, **AI Horde**, and any **OpenAI Compatible** API. 10 | * **Session Persistence**: Your prompt is automatically saved and restored, allowing you to continue seamlessly across multiple sessions. Import and export sessions for sharing or maintaining backups. 
11 | * **Optional Server**: Can be hosted on a local Node.js server, enabling database access remotely or across your local network. 12 | * **Persistent Context**: 13 | * **Memory**: Seamlessly inject a text of your choice at the beginning of the context. 14 | * **Author's Note**: Seamlessly inject a text of your choice at the end of the context, with adjustable depth. 15 | * **World Info**: Dynamically include extra information in the context, triggered by specific keywords. 16 | * **Prediction Undo/Redo**: Easily experiment and refine your generated text with the ability to undo and redo predictions. 17 | * **Token Probability**: Hover over any token to reveal the top 10 most probable tokens at that point. Click on a probability to regenerate the text from that specific token. 18 | * If you're using oobabooga, make sure to use an \_HF sampler for this feature to function properly. 19 | * If you're using koboldcpp, token probabilities are only available with Token Streaming disabled. 20 | * **Logit Bias**: Fine-tune the generation process by adjusting the likelihood bias of specific tokens on-the-fly. 21 | * **Completion/Chat Modes**: 22 | * **Completion**: Have the language model directly continue your prompt. 23 | * **Chat**: Mikupad simplifies using instruct models. It automatically adds the right delimiters when you start or stop generating, based on your selected template. This also structures your prompt into messages, making it compatible with the Chat Completions API (for OpenAI-compatible backends). 24 | * **Themes**: Customize your environment by choosing from a variety of themes. 25 | 26 | ## Getting Started 27 | 28 | You can easily run **mikupad** by opening the `mikupad.html` file in your web browser. No additional installation is required. Choose your preferred backend and start generating text! 29 | 30 | ```shell 31 | git clone https://github.com/lmg-anon/mikupad.git 32 | cd mikupad 33 | open mikupad.html 34 | ``` 35 | To use **mikupad** fully offline, run the provided `compile` script or download the pre-compiled `mikupad_compiled.html` file from [Releases](https://github.com/lmg-anon/mikupad/releases/latest). 36 | 37 | You can also [try it on GitHub Pages](https://lmg-anon.github.io/mikupad/mikupad.html). 38 | 39 | ## Contributing 40 | 41 | Contributions from the open-source community are welcome. Whether it's fixing a bug, adding a feature, or improving the documentation, your contributions are greatly appreciated. To contribute to **mikupad**, follow these steps: 42 | 43 | 1. Fork the repository. 44 | 2. Create a new branch for your changes: `git checkout -b feature/your-feature-name` 45 | 3. Make your changes and commit them: `git commit -m 'Add your feature'` 46 | 4. Push your changes to your forked repository: `git push origin feature/your-feature-name` 47 | 5. Open a pull request on the main repository, explaining your changes. 48 | 49 | ## License 50 | 51 | This project is released to the public domain under the CC0 License - see the [LICENSE](LICENSE) file for details. 52 | -------------------------------------------------------------------------------- /compile.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | where node >nul 2>&1 4 | if %errorlevel% neq 0 ( 5 | echo Node.js is not installed. 6 | exit /b 1 7 | ) 8 | where npm >nul 2>&1 9 | if %errorlevel% neq 0 ( 10 | echo npm is not installed. 
11 | exit /b 1 12 | ) 13 | 14 | xcopy /y mikupad.html project >NUL 15 | 16 | cd project 17 | call npm install 18 | call npm start build 19 | copy /y .\dist\mikupad.html ..\mikupad_compiled.html 20 | cd .. -------------------------------------------------------------------------------- /compile.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if ! command -v node > /dev/null 2>&1; then 4 | echo "Node.js is not installed." 5 | exit 1 6 | fi 7 | 8 | if ! command -v npm > /dev/null 2>&1; then 9 | echo "npm is not installed." 10 | exit 1 11 | fi 12 | 13 | cp -f mikupad.html project 14 | 15 | cd project 16 | npm install 17 | npm run build 18 | cp -f ./dist/mikupad.html ../mikupad_compiled.html 19 | cd .. -------------------------------------------------------------------------------- /project/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mikupad", 3 | "source": "mikupad.html", 4 | "scripts": { 5 | "start": "parcel", 6 | "build": "parcel build" 7 | }, 8 | "dependencies": { 9 | "scrollview-resize" : "^1.0.2", 10 | "htm": "^3.1.1", 11 | "react": "^18.2.0", 12 | "react-dom": "^18.2.0" 13 | }, 14 | "devDependencies": { 15 | "@parcel/transformer-inline-string": "^2.9.3", 16 | "scrollview-resize" : "^1.0.2", 17 | "htm": "^3.1.1", 18 | "parcel": "^2.9.3", 19 | "react": "^18.2.0", 20 | "react-dom": "^18.2.0" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /server/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "server", 3 | "main": "server.js", 4 | "scripts": { 5 | "test": "echo \"Error: no test specified\" && exit 1", 6 | "start": "node server.js" 7 | }, 8 | "dependencies": { 9 | "body-parser": "^1.20.2", 10 | "cors": "^2.8.5", 11 | "express": "^4.18.2", 12 | "sqlite3": "^5.1.7", 13 | "minimist": "^1.2.8", 14 | "axios": "^1.6.8", 15 | "open": "8.4.2" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /server/server.js: -------------------------------------------------------------------------------- 1 | const express = require('express'); 2 | const cors = require('cors'); 3 | const bodyParser = require('body-parser'); 4 | const sqlite3 = require('sqlite3'); 5 | const path = require('path'); 6 | const minimist = require('minimist'); 7 | const axios = require('axios'); 8 | const open = require('open'); 9 | 10 | const app = express(); 11 | 12 | // Parse command line arguments 13 | const args = minimist(process.argv.slice(2)); 14 | // Default fallbacks: command line args -> environment variables -> static defaults 15 | const port = args.port || process.env.MIKUPAD_PORT || 3000; 16 | const host = args.host || process.env.MIKUPAD_HOST || '0.0.0.0'; 17 | const noOpen = (args.open !== undefined && !args.open) || process.env.MIKUPAD_NO_OPEN; 18 | const login = args.login || process.env.MIKUPAD_LOGIN || 'anon'; 19 | const password = args.password || process.env.MIKUPAD_PASSWORD || undefined; 20 | 21 | // Headers that shouldn't be forwarded in the proxy endpoint. 
22 | const headersToRemove = [ 23 | 'content-length', 24 | 'cdn-loop', 25 | 'cf-connecting-ip', 26 | 'cf-ipcountry', 27 | 'cf-ray', 28 | 'cf-visitor', 29 | 'x-forwarded-for', 30 | 'x-forwarded-host', 31 | 'x-forwarded-proto' 32 | ]; 33 | 34 | app.use(cors(), bodyParser.json({limit: "100mb"})); 35 | 36 | // authentication middleware 37 | app.use((req, res, next) => { 38 | if (!password) { 39 | // No password defined, access granted. 40 | return next(); 41 | } 42 | 43 | const b64auth = (req.headers.authorization || '').split(' ')[1] || ''; 44 | const [reqLogin, reqPassword] = Buffer.from(b64auth, 'base64').toString().split(':'); 45 | 46 | if (reqLogin == login && reqPassword == password) { 47 | // Access granted. 48 | return next(); 49 | } 50 | 51 | // Access denied. 52 | res.set('WWW-Authenticate', 'Basic realm="401"'); 53 | res.status(401).send('Authentication required.'); 54 | }); 55 | 56 | // Open a database connection 57 | const db = new sqlite3.Database('./web-session-storage.db', (err) => { 58 | if (err) { 59 | console.error(err.message); 60 | throw err; 61 | } else { 62 | db.run(`CREATE TABLE IF NOT EXISTS sessions ( 63 | key TEXT PRIMARY KEY, 64 | data TEXT 65 | )`); 66 | db.run(`CREATE TABLE IF NOT EXISTS templates ( 67 | key TEXT PRIMARY KEY, 68 | data TEXT 69 | )`); 70 | } 71 | }); 72 | 73 | // GET route to serve Mikupad html 74 | app.get('/', (req, res) => { 75 | res.sendFile(path.join(__dirname, '..', 'mikupad.html')); 76 | }); 77 | 78 | // GET route to get the server version 79 | app.get('/version', (req, res) => { 80 | res.json({ version: 2 }); 81 | }); 82 | 83 | // Dynamic POST proxy route 84 | app.post('/proxy/*', async (req, res) => { 85 | // Capture the part of the URL after '/proxy' 86 | const path = req.params[0]; 87 | 88 | // Target server base URL 89 | const targetBaseUrl = req.headers['x-real-url']; 90 | delete req.headers['x-real-url']; 91 | 92 | headersToRemove.forEach(header => { 93 | delete req.headers[header.toLowerCase()]; 94 | }); 95 | 96 | try { 97 | const response = await axios({ 98 | method: 'post', 99 | url: `${targetBaseUrl}/${path}`, 100 | data: req.body, 101 | headers: { 102 | ...req.headers, 103 | 'Content-Type': 'application/json', 104 | 'Host': new URL(targetBaseUrl).hostname, // Update the Host header for the target server 105 | 'Accept-Encoding': 'identity' 106 | }, 107 | responseType: 'stream' 108 | }); 109 | 110 | // Proxy the headers 111 | res.set(response.headers); 112 | 113 | // Proxy stream requests 114 | response.data.pipe(res); 115 | 116 | // Stop stream requests if the connection is aborted on the other end 117 | res.on('close', () => { 118 | response.data.destroy(); 119 | }); 120 | } catch (error) { 121 | if (error.response) { 122 | if (error.response.data.pipe !== undefined) { 123 | error.response.data.pipe(res.status(error.response.status)); 124 | } else { 125 | res.status(error.response.status).send(error.response.data); 126 | } 127 | } else if (error.request) { 128 | res.status(504).send('No response from target server.'); 129 | } else { 130 | res.status(500).send(`Error setting up request to target server: ${error.message}`); 131 | } 132 | } 133 | }); 134 | 135 | // Dynamic GET proxy route 136 | app.get('/proxy/*', async (req, res) => { 137 | // Capture the part of the URL after '/proxy' 138 | const path = req.params[0]; 139 | 140 | // Target server base URL 141 | const targetBaseUrl = req.headers['x-real-url']; 142 | delete req.headers['x-real-url']; 143 | 144 | headersToRemove.forEach(header => { 145 | delete 
req.headers[header.toLowerCase()]; 146 | }); 147 | 148 | try { 149 | const response = await axios.get(`${targetBaseUrl}/${path}`, { 150 | params: req.query, 151 | headers: { 152 | ...req.headers, 153 | 'Content-Type': 'application/json', 154 | 'Host': new URL(targetBaseUrl).hostname, // Update the Host header for the target server 155 | 'Accept-Encoding': 'identity' 156 | } 157 | }); 158 | 159 | res.send(response.data); 160 | } catch (error) { 161 | if (error.response) { 162 | res.status(error.response.status).send(error.response.data); 163 | } else if (error.request) { 164 | res.status(504).send('No response from target server.'); 165 | } else { 166 | res.status(500).send(`Error setting up request to target server: ${error.message}`); 167 | } 168 | } 169 | }); 170 | 171 | // Dynamic DELETE proxy route 172 | app.delete('/proxy/*', async (req, res) => { 173 | // Capture the part of the URL after '/proxy' 174 | const path = req.params[0]; 175 | 176 | // Target server base URL 177 | const targetBaseUrl = req.headers['x-real-url']; 178 | delete req.headers['x-real-url']; 179 | 180 | headersToRemove.forEach(header => { 181 | delete req.headers[header.toLowerCase()]; 182 | }); 183 | 184 | try { 185 | const response = await axios.delete(`${targetBaseUrl}/${path}`, { 186 | headers: { 187 | ...req.headers, 188 | 'Content-Type': 'application/json', 189 | 'Host': new URL(targetBaseUrl).hostname, // Update the Host header for the target server 190 | 'Accept-Encoding': 'identity' 191 | } 192 | }); 193 | 194 | res.send(response.data); 195 | } catch (error) { 196 | if (error.response) { 197 | res.status(error.response.status).send(error.response.data); 198 | } else if (error.request) { 199 | res.status(504).send('No response from target server.'); 200 | } else { 201 | res.status(500).send(`Error setting up request to target server: ${error.message}`); 202 | } 203 | } 204 | }); 205 | 206 | const normalizeStoreName = (storeName) => { 207 | if (!storeName) 208 | return "Sessions"; 209 | return storeName.split(' ')[0].toLowerCase(); 210 | }; 211 | 212 | // POST route to load data 213 | app.post('/load', (req, res) => { 214 | const { storeName, key } = req.body; 215 | const normStoreName = normalizeStoreName(storeName); 216 | db.get(`SELECT data FROM ${normStoreName} WHERE key = ?`, [key], (err, row) => { 217 | if (err) { 218 | res.status(500).json({ ok: false, message: 'Error querying the database' }); 219 | } else if (row) { 220 | res.json({ ok: true, result: JSON.parse(row.data) }); 221 | } else { 222 | res.status(404).json({ ok: false, message: 'Key not found' }); 223 | } 224 | }); 225 | }); 226 | 227 | // POST route to save data 228 | app.post('/save', (req, res) => { 229 | const { storeName, key, data } = req.body; 230 | const normStoreName = normalizeStoreName(storeName); 231 | db.run(`INSERT OR REPLACE INTO ${normStoreName} (key, data) VALUES (?, ?)`, [key, JSON.stringify(data)], (err) => { 232 | if (err) { 233 | res.status(500).json({ ok: false, message: 'Error writing to the database' }); 234 | } else { 235 | res.json({ ok: true, result: 'Data saved successfully' }); 236 | } 237 | }); 238 | }); 239 | 240 | // POST route to update session name 241 | app.post('/rename', (req, res) => { 242 | const { storeName, key, newName } = req.body; 243 | const normStoreName = normalizeStoreName(storeName); 244 | db.run( 245 | ` 246 | UPDATE ${normStoreName} 247 | SET data = json_set(data, '$.name', ?) 248 | WHERE key = ? 
249 | `, 250 | [newName, key], 251 | (err) => { 252 | if (err) { 253 | res.status(500).json({ ok: false, message: 'Error updating the database' }); 254 | } else { 255 | res.json({ ok: true, result: 'Session renamed successfully' }); 256 | } 257 | } 258 | ); 259 | }); 260 | 261 | // POST route to get all rows from a table 262 | app.post('/all', (req, res) => { 263 | const { storeName } = req.body; 264 | const normStoreName = normalizeStoreName(storeName); 265 | db.all(`SELECT key, data FROM ${normStoreName}`, [], (err, rows) => { 266 | if (err) { 267 | res.status(500).json({ ok: false, message: 'Error querying the database' }); 268 | } else { 269 | const all = {}; 270 | rows.forEach((row) => { 271 | all[row.key] = JSON.parse(row.data); 272 | }); 273 | res.json({ ok: true, result: all }); 274 | } 275 | }); 276 | }); 277 | 278 | // POST route to get session info 279 | app.post('/sessions', (req, res) => { 280 | const { storeName } = req.body; 281 | const normStoreName = normalizeStoreName(storeName); 282 | db.all( 283 | ` 284 | SELECT key, json_extract(data, '$.name') AS name 285 | FROM ${normStoreName} 286 | WHERE key NOT IN ('selectedSessionId', 'nextSessionId') 287 | `, 288 | [], 289 | (err, rows) => { 290 | if (err) { 291 | res.status(500).json({ ok: false, message: 'Error querying the database' }); 292 | } else { 293 | const sessions = {}; 294 | rows.forEach((row) => { 295 | sessions[row.key] = row.name; 296 | }); 297 | res.json({ ok: true, result: sessions }); 298 | } 299 | } 300 | ); 301 | }); 302 | 303 | // POST route to delete a session 304 | app.post('/delete', (req, res) => { 305 | const { storeName, key } = req.body; 306 | const normStoreName = normalizeStoreName(storeName); 307 | db.run(`DELETE FROM ${normStoreName} WHERE key = ?`, [key], (err) => { 308 | if (err) { 309 | res.status(500).json({ ok: false, message: 'Error deleting from the database' }); 310 | } else { 311 | res.json({ ok: true, result: 'Session deleted successfully' }); 312 | } 313 | }); 314 | }); 315 | 316 | // Start the server 317 | app.listen(port, host, () => { 318 | console.log(`Server listening at http://${host}:${port}`); 319 | if (!noOpen) { 320 | open(`http://127.0.0.1:${port}/`); 321 | } 322 | }); 323 | 324 | // Close db connection on server close 325 | process.on('SIGINT', () => { 326 | db.close(() => { 327 | process.exit(0); 328 | }); 329 | }); 330 | -------------------------------------------------------------------------------- /server/start.bat: -------------------------------------------------------------------------------- 1 | @echo off 2 | 3 | where node >nul 2>&1 4 | if %errorlevel% neq 0 ( 5 | echo Node.js is not installed. 6 | exit /b 1 7 | ) 8 | where npm >nul 2>&1 9 | if %errorlevel% neq 0 ( 10 | echo npm is not installed. 11 | exit /b 1 12 | ) 13 | 14 | call npm install --no-audit 15 | call node server.js %* -------------------------------------------------------------------------------- /server/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if ! command -v node &> /dev/null 4 | then 5 | echo "Node.js is not installed." 6 | exit 1 7 | fi 8 | 9 | if ! command -v npm &> /dev/null 10 | then 11 | echo "npm is not installed." 12 | exit 1 13 | fi 14 | 15 | npm install --no-audit 16 | node server.js "$@" --------------------------------------------------------------------------------
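
**Running the optional server (usage sketch).** `server/server.js` reads its configuration from command-line flags (parsed with minimist) or from `MIKUPAD_*` environment variables, falling back to port 3000, host `0.0.0.0`, login `anon`, and no password. A minimal sketch, assuming dependencies are installed inside `server/`; the port, login, and password values below are placeholders, not repository defaults:

```shell
cd server
npm install --no-audit

# Flags: custom port and host, HTTP Basic auth, and no auto-opened browser tab
# (minimist parses --no-open as { open: false }, which server.js checks).
node server.js --port 3001 --host 127.0.0.1 --login miku --password s3cret --no-open

# Equivalent configuration through environment variables:
MIKUPAD_PORT=3001 MIKUPAD_HOST=127.0.0.1 MIKUPAD_LOGIN=miku \
MIKUPAD_PASSWORD=s3cret MIKUPAD_NO_OPEN=1 node server.js
```

The server also exposes `/proxy/*` routes that forward requests to whatever backend the client names in an `x-real-url` header, after removing the Cloudflare and forwarding headers listed in `headersToRemove`. An illustration of that mechanism with curl, assuming the server runs with its defaults (no password, port 3000) and a llama.cpp-style backend listens at `http://127.0.0.1:8080`; both addresses and the request body are examples, not part of the repository:

```shell
# POST /proxy/completion is rewritten by server.js to <x-real-url>/completion.
curl http://127.0.0.1:3000/proxy/completion \
  -H 'Content-Type: application/json' \
  -H 'x-real-url: http://127.0.0.1:8080' \
  -d '{"prompt": "Hello, Miku!", "n_predict": 16}'
```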