├── .gitignore ├── .env.example ├── package.json ├── .github └── workflows │ └── main.yml ├── LICENSE ├── index.js └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | .env 2 | *.zip 3 | *.png 4 | node_modules 5 | -------------------------------------------------------------------------------- /.env.example: -------------------------------------------------------------------------------- 1 | ROAM_EMAIL="" 2 | ROAM_PASSWORD="" 3 | AWS_BUCKET_NAME="" 4 | AWS_ACCESS_KEY_ID="" 5 | AWS_ACCESS_KEY_SECRET="" -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "roam-backup", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "dependencies": { 7 | "aws-sdk": "^2.633.0", 8 | "dotenv": "^8.2.0", 9 | "fs": "0.0.1-security", 10 | "glob": "^7.1.6", 11 | "puppeteer": "^2.1.1" 12 | }, 13 | "devDependencies": { 14 | "rimraf": "^3.0.2" 15 | }, 16 | "scripts": { 17 | "start": "rimraf *.zip && rimraf *.png && node index.js", 18 | "test": "echo \"Error: no test specified\" && exit 1" 19 | }, 20 | "keywords": [], 21 | "author": "", 22 | "license": "ISC" 23 | } 24 | -------------------------------------------------------------------------------- /.github/workflows/main.yml: -------------------------------------------------------------------------------- 1 | name: "Roam Research backup" 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | schedule: 8 | - cron: "*/15 * * * *" 9 | 10 | jobs: 11 | backup: 12 | runs-on: ubuntu-latest 13 | name: Backup 14 | steps: 15 | - uses: actions/checkout@v2 16 | - uses: actions/setup-node@v1 17 | with: 18 | node-version: "10.x" 19 | - run: npm install 20 | - run: npm start 21 | env: 22 | ROAM_EMAIL: ${{ secrets.roamEmail }} 23 | ROAM_PASSWORD: ${{ secrets.roamPassword }} 24 | AWS_BUCKET_NAME: ${{ secrets.awsBucketName }} 25 | 
AWS_ACCESS_KEY_ID: ${{ secrets.awsAccessKeyId }} 26 | AWS_ACCESS_KEY_SECRET: ${{ secrets.awsAccessKeySecret }} 27 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Kristian Freeman 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
// /index.js ------------------------------------------------------------------

require("dotenv").config();

// Destination folder (key prefix) inside the S3 bucket for uploaded backups.
const config = {
  backupFolder: "backups"
};

const AWS = require("aws-sdk");
const s3 = new AWS.S3({
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_ACCESS_KEY_SECRET
});

const fs = require("fs");
const glob = require("glob");
const puppeteer = require("puppeteer");

/**
 * Drives a headless browser through the Roam Research UI to generate a JSON
 * export of all data, letting it download into the current working directory.
 * On any failure a screenshot is saved to error.png so CI runs can be
 * debugged; the browser is always closed before returning.
 */
const generateExport = async () => {
  const browser = await puppeteer.launch();
  const page = await browser.newPage();
  try {
    // NOTE: page._client is a private Puppeteer API (works on the pinned
    // puppeteer 2.x); it forces downloads into the current working directory.
    await page._client.send("Page.setDownloadBehavior", {
      behavior: "allow",
      downloadPath: process.cwd()
    });

    await page.goto("https://roamresearch.com/#/signin");

    console.log("Logging into Roam");

    await page.focus('[name="email"]');
    await page.keyboard.type(process.env.ROAM_EMAIL);

    await page.focus('[name="password"]');
    await page.keyboard.type(process.env.ROAM_PASSWORD);

    await page.$eval(".bp3-button", el => el.click());

    // Fixed wait: give the sign-in round-trip time to complete.
    await page.waitFor(5000);

    console.log("Successfully logged in");

    await page.waitForSelector(
      ".flex-h-box > div > .bp3-popover-wrapper > .bp3-popover-target > .bp3-small"
    );
    await page.click(
      ".flex-h-box > div > .bp3-popover-wrapper > .bp3-popover-target > .bp3-small"
    );

    console.log("Opening Export menu");

    await page.waitForSelector(
      ".bp3-popover-content > .bp3-menu > li:nth-child(3) > .bp3-menu-item > .bp3-text-overflow-ellipsis"
    );
    await page.click(
      ".bp3-popover-content > .bp3-menu > li:nth-child(3) > .bp3-menu-item > .bp3-text-overflow-ellipsis"
    );

    await page.waitForSelector(
      ".bp3-popover-wrapper > .bp3-popover-target > div > .bp3-button > .bp3-button-text"
    );
    await page.click(
      ".bp3-popover-wrapper > .bp3-popover-target > div > .bp3-button > .bp3-button-text"
    );

    console.log("Selecting JSON export");

    await page.waitForSelector(
      "div > .bp3-menu > li > .bp3-menu-item > .bp3-text-overflow-ellipsis"
    );
    await page.click(
      "div > .bp3-menu > li > .bp3-menu-item > .bp3-text-overflow-ellipsis"
    );

    console.log("Creating export");

    await page.waitForSelector(
      ".bp3-dialog-container > .bp3-dialog > div > .flex-h-box > .bp3-intent-primary"
    );
    await page.click(
      ".bp3-dialog-container > .bp3-dialog > div > .flex-h-box > .bp3-intent-primary"
    );

    console.log("Created export");

    console.log("Waiting five seconds for it to download");
    await page.waitFor(5000);
  } catch (err) {
    console.error("Something went wrong!");
    console.error(err);

    // Capture the page state so selector/login failures can be diagnosed.
    await page.screenshot({ path: "error.png" });
  }
  await browser.close();
};

/**
 * Uploads the given local file to S3 under the `config.backupFolder` prefix.
 * Upload errors are logged rather than thrown (best-effort behavior).
 * @param {string} filename - Path to the ZIP produced by generateExport.
 */
const uploadToS3 = async filename => {
  try {
    const fileContent = fs.readFileSync(filename);

    const params = {
      Bucket: process.env.AWS_BUCKET_NAME,
      // BUG FIX: the object key interpolation was corrupted (`$(unknown)`);
      // the key must include the actual downloaded filename.
      Key: `${config.backupFolder}/${filename}`,
      Body: fileContent
    };

    const data = await s3.upload(params).promise();
    console.log(`Successfully backed up Roam data to S3: ${data.Location}`);
  } catch (err) {
    console.error("Something went wrong while uploading to S3");
    console.error(err);
  }
};

// Entry point: export from Roam, find the downloaded ZIP, upload it to S3.
const main = async function() {
  await generateExport();
  const files = glob.sync("*.zip");
  const filename = files[0];
  if (!filename) {
    throw new Error("Couldn't find a file to upload, aborting");
  }
  // BUG FIX: log interpolation was corrupted (`$(unknown)`); report the
  // actual filename being uploaded.
  console.log(`Uploading ${filename} to S3`);
  await uploadToS3(filename);
};

// BUG FIX: main() was a floating promise. On Node 10 (the workflow runtime)
// an unhandled rejection only prints a warning, so a failed backup would
// still leave the GitHub Actions job green. Surface failures as a non-zero
// exit code instead.
main().catch(err => {
  console.error(err);
  process.exit(1);
});
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # [[`roam-backup`]] 2 | 3 | Automate backing up JSON copies of your [Roam Research](https://roamresearch.com) data, using GitHub Actions and AWS S3. 4 | 5 | Backups will be generated using Puppeteer and saved into the `backups` folder (`config.backupFolder`, can be changed) of an S3 bucket of your choice. 6 | 7 | **Note from the dev:** I'm using Roam more infrequently in favor of org-mode/plain text note-taking so this project is somewhat unmaintained. I'm looking for people to help test/update [this PR](https://github.com/signalnerve/roam-backup/pull/6) which fixes some outstanding issues. In addition, if you're a Roam power user and care about this stuff being maintained, I'm looking to add at least one collaborator to the project to help maintain it - [check out this ticket!](https://github.com/signalnerve/roam-backup/issues/15) 8 | 9 | ## Usage 10 | 11 | You can get `roam-backup` up and running in just a few minutes! The best part is that **you don't need to deploy anything!** 12 | 13 | Just follow these steps and you'll be on your way: 14 | 15 | #### 1. Fork this repository 16 | 17 | If you haven't done this before, you'll find the Fork button on the top right of a GitHub repository's screen. 18 | 19 | #### 2. Enable Actions on your newly forked repository 20 | 21 | This is necessary because Actions get disabled when you fork a repository. Do this by tapping on the "Actions" tab in your repository (next to "Pull Requests"), and hit the big green button. 22 | 23 | #### 3. Setup an AWS S3 bucket to store your Roam backups 24 | 25 | - Ensure you create the AWS S3 bucket manually –– this script will not create the bucket. Note the `awsBucketName` for step #4. 26 | - Create (or find an existing) user in the AWS console, and note its `awsAccessKeyId` and `awsAccessKeySecret` for step #4. 
27 | - Ensure the user has permissions to upload to an S3 bucket. To do this, you'll need to attach a policy to the user that allows uploading to S3. The simplest way to do this would be to use the existing global policy: `AmazonS3FullAccess`. 28 | - _(OPTIONAL)_ If you want to be a bit more conservative with the access policy, instead of `AmazonS3FullAccess` you can create your own custom policy and attach that to the user. Here's an example of the attributes you'd need to set: 29 | - Service: S3 30 | - Access Level: Write -> Put Object (NOTE: Do not simply select 'Write', instead click on the arrow to drill down into it and choose only 'Put Object' within all the options under Write) 31 | - Resource: Click on "Add ARN", and specify the Bucket Name, you can select "Any" for Object Name 32 | - Review and save the policy, then attach it to your user 33 | - Note the `awsAccessKeyId` and `awsAccessKeySecret` of the user for step #4. 34 | 35 | #### 4. Set your repository Secrets 36 | 37 | Go to your Github repository's Settings tab, and click on Secrets section on the left. Add the following secrets (**naming must match exactly!**), using your Roam login credentials and the AWS bucket name and user access key ID/secret from step #3: 38 | 39 | - `roamEmail` 40 | - `roamPassword` 41 | - `awsBucketName` 42 | - `awsAccessKeyId` 43 | - `awsAccessKeySecret` 44 | 45 | _Don't worry! Your Roam and AWS credentials will be secure. GitHub [Secrets](https://help.github.com/en/actions/configuring-and-managing-workflows/creating-and-storing-encrypted-secrets) are encrypted, and provide a convenient methodology for storing secure data in repositories._ 46 | 47 | #### 5. Make a commit. It can be any commit, but this will start the process and trigger workflows. 48 | 49 | Congrats! 🎉 You've successfully automated the backup of your brain 🧠. Now go write about it in today's Daily Note! 
50 | 51 | _NOTE: This is still fairly WIP, and this is my first project using Puppeteer, so it may be a little buggy._ 52 | 53 | ## Development 54 | 55 | Running this project locally should be possible using `.env` - copy `.env.example` to `.env` and fill it in with your own authentication keys. 56 | 57 | The project generates an `error.png` screenshot to capture the current page if something goes wrong, as well as ZIP folders, which are the JSON backups. Running `npm start` will clear any local screenshots and backups, and run the script as it would in the GitHub Actions workflow (`npm start`) 58 | --------------------------------------------------------------------------------