├── .gitignore
├── web-apis
│ ├── manifests
│ │ ├── external
│ │ │ ├── user-session.js
│ │ │ ├── experimental
│ │ │ │ ├── global-fetch.js
│ │ │ │ ├── capture-page.js
│ │ │ │ ├── library.js
│ │ │ │ └── dat-peers.js
│ │ │ ├── README.md
│ │ │ └── dat-archive.js
│ │ └── internal
│ │   ├── README.md
│ │   ├── watchlist.js
│ │   ├── history.js
│ │   ├── downloads.js
│ │   ├── sitedata.js
│ │   ├── bookmarks.js
│ │   ├── browser.js
│ │   └── archives.js
│ ├── bg
│ │ ├── user-session.js
│ │ ├── watchlist.js
│ │ ├── history.js
│ │ ├── experimental
│ │ │ ├── global-fetch.js
│ │ │ ├── capture-page.js
│ │ │ ├── dat-peers.js
│ │ │ └── library.js
│ │ ├── bookmarks.js
│ │ └── archives.js
│ ├── fg.js
│ ├── fg
│ │ ├── stat.js
│ │ ├── experimental.js
│ │ ├── event-target.js
│ │ ├── beaker.js
│ │ └── dat-archive.js
│ └── bg.js
├── webview.js
├── dbs
│ ├── schemas
│ │ ├── profile-data.v5.sql.js
│ │ ├── profile-data.v15.sql.js
│ │ ├── profile-data.v21.sql.js
│ │ ├── profile-data.v14.sql.js
│ │ ├── profile-data.v7.sql.js
│ │ ├── profile-data.v16.sql.js
│ │ ├── profile-data.v17.sql.js
│ │ ├── profile-data.v22.sql.js
│ │ ├── profile-data.v6.sql.js
│ │ ├── profile-data.v3.sql.js
│ │ ├── profile-data.v8.sql.js
│ │ ├── profile-data.v9.sql.js
│ │ ├── profile-data.v13.sql.js
│ │ ├── profile-data.v20.sql.js
│ │ ├── profile-data.v2.sql.js
│ │ ├── profile-data.v4.sql.js
│ │ ├── profile-data.v11.sql.js
│ │ ├── profile-data.v18.sql.js
│ │ ├── profile-data.v10.sql.js
│ │ ├── profile-data.v12.sql.js
│ │ ├── profile-data.v23.sql.js
│ │ ├── profile-data.v19.sql.js
│ │ ├── profile-data.v1.sql.js
│ │ └── profile-data.sql.js
│ ├── index.js
│ ├── templates.js
│ ├── archive-drafts.js
│ ├── watchlist.js
│ ├── profile-data-db.js
│ ├── settings.js
│ ├── bookmarks.js
│ ├── history.js
│ └── archives.js
├── dat
│ ├── index.js
│ ├── dns.js
│ ├── daemon
│ │ ├── manifest.js
│ │ ├── storage.js
│ │ ├── extensions.js
│ │ └── logging-utils.js
│ ├── debugging.js
│ ├── garbage-collector.js
│ ├── watchlist.js
│ ├── directory-listing-page.js
│ └── protocol.js
├── globals.js
├── lib
│ ├── env.js
│ ├── functions.js
│ ├── scoped-fses.js
│ ├── lock.js
│ ├── debug-logger.js
│ ├── strings.js
│ ├── const.js
│ ├── db.js
│ ├── time.js
│ ├── mime.js
│ └── error-page.js
├── LICENSE
├── index.js
├── package.json
├── README.md
└── .eslintrc.json
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 |
--------------------------------------------------------------------------------
/web-apis/manifests/external/user-session.js:
--------------------------------------------------------------------------------
// RPC manifest for the user-session API (implemented in web-apis/bg/user-session.js)
module.exports = {
  // resolves to the calling page's session data ({permissions})
  fetch: 'promise'
}
4 |
--------------------------------------------------------------------------------
/web-apis/manifests/external/experimental/global-fetch.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | fetch: 'promise'
3 | }
4 |
--------------------------------------------------------------------------------
/web-apis/manifests/external/experimental/capture-page.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | capturePage: 'promise'
3 | }
4 |
--------------------------------------------------------------------------------
/webview.js:
--------------------------------------------------------------------------------
1 | const webApis = require('./web-apis/fg')
2 |
3 | exports.setup = function ({rpcAPI}) {
4 | webApis.setup({rpcAPI})
5 | }
6 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v5.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- more default bookmarks
4 | -- REMOVED
5 |
6 | PRAGMA user_version = 5;
7 | `
8 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v15.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- more default bookmarks
4 | -- REMOVED
5 |
6 | PRAGMA user_version = 15;
7 | `
8 |
--------------------------------------------------------------------------------
/web-apis/manifests/external/README.md:
--------------------------------------------------------------------------------
1 | # External APIs
2 |
3 | These are RPC APIs which are exported to userland, and need to be treated as potentially hostile.
--------------------------------------------------------------------------------
/web-apis/manifests/internal/README.md:
--------------------------------------------------------------------------------
1 | # Internal APIs
2 |
3 | These are RPC APIs which are exported to builtin interfaces, and need to be kept away from potentially hostile codepaths.
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v21.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add size data to archives_meta
4 | ALTER TABLE archives_meta ADD COLUMN size INTEGER DEFAULT 0;
5 |
6 | PRAGMA user_version = 21;
7 | `
8 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v14.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add a non-unique index to the visits table to speed up joins
4 | CREATE INDEX visits_url ON visits (url);
5 |
6 | PRAGMA user_version = 14;
7 | `
8 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v7.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add a field to track rehost expiration (for timed rehosting)
4 | ALTER TABLE archives ADD COLUMN expiresAt INTEGER;
5 |
6 | PRAGMA user_version = 7;
7 | `
8 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v16.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add a field to track when last accessed in the library
4 | ALTER TABLE bookmarks ADD COLUMN pinOrder INTEGER DEFAULT 0;
5 |
6 | PRAGMA user_version = 16;
7 | `
8 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v17.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add a field to track the folder where an archive is being synced
4 | ALTER TABLE archives ADD COLUMN localSyncPath TEXT;
5 |
6 | PRAGMA user_version = 17;
7 | `
8 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v22.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- automatically publish changes (0) or write to local folder (1)
4 | ALTER TABLE archives ADD COLUMN previewMode INTEGER;
5 |
6 | PRAGMA user_version = 22;
7 | `
8 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v6.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add more flags to control swarming behaviors of archives
4 | ALTER TABLE archives ADD COLUMN networked INTEGER DEFAULT 1;
5 |
6 | PRAGMA user_version = 6;
7 | `
8 |
--------------------------------------------------------------------------------
/web-apis/manifests/internal/watchlist.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | add: 'promise',
3 | list: 'promise',
4 | update: 'promise',
5 | remove: 'promise',
6 |
7 | // events
8 | createEventsStream: 'readable'
9 | }
10 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v3.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add variable to track the access times of archives
4 | ALTER TABLE archives_meta ADD COLUMN lastAccessTime INTEGER DEFAULT 0;
5 |
6 | PRAGMA user_version = 3;
7 | `
8 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v8.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add tags and notes to bookmarks
4 | ALTER TABLE bookmarks ADD COLUMN tags TEXT;
5 | ALTER TABLE bookmarks ADD COLUMN notes TEXT;
6 |
7 | PRAGMA user_version = 8;
8 | `
9 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v9.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- join table to list the archive's type fields
4 | CREATE TABLE archives_meta_type (
5 | key TEXT,
6 | type TEXT
7 | );
8 |
9 | PRAGMA user_version = 9;
10 | `
11 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v13.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add a field to track when last accessed in the library
4 | ALTER TABLE archives_meta ADD COLUMN lastLibraryAccessTime INTEGER DEFAULT 0;
5 |
6 | PRAGMA user_version = 13;
7 | `
8 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v20.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- watch localSyncPath and automatically publish changes (1) or not (0)
4 | ALTER TABLE archives ADD COLUMN autoPublishLocal INTEGER DEFAULT 0;
5 |
6 | PRAGMA user_version = 20;
7 | `
8 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v2.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add variable to track the staging size less ignored files
4 | -- deprecated
5 | ALTER TABLE archives_meta ADD COLUMN stagingSizeLessIgnored INTEGER;
6 |
7 | PRAGMA user_version = 2;
8 | `
9 |
--------------------------------------------------------------------------------
/dat/index.js:
--------------------------------------------------------------------------------
// Aggregates the dat subsystem's modules behind a single require.
module.exports = {
  debug: require('./debugging'),
  dns: require('./dns'),
  garbageCollector: require('./garbage-collector'),
  // NOTE(review): ./library is not present in the visible directory listing — confirm it exists
  library: require('./library'),
  protocol: require('./protocol'),
  watchlist: require('./watchlist')
}
9 |
--------------------------------------------------------------------------------
/web-apis/manifests/external/experimental/library.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | add: 'promise',
3 | remove: 'promise',
4 | get: 'promise',
5 | list: 'promise',
6 |
7 | requestAdd: 'promise',
8 | requestRemove: 'promise',
9 |
10 | createEventStream: 'readable'
11 | }
12 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v4.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add flags to control swarming behaviors of archives
4 | ALTER TABLE archives ADD COLUMN autoDownload INTEGER DEFAULT 1;
5 | ALTER TABLE archives ADD COLUMN autoUpload INTEGER DEFAULT 1;
6 |
7 | PRAGMA user_version = 4;
8 | `
9 |
--------------------------------------------------------------------------------
/web-apis/manifests/internal/history.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | addVisit: 'promise',
3 | getVisitHistory: 'promise',
4 | getMostVisited: 'promise',
5 | search: 'promise',
6 | removeVisit: 'promise',
7 | removeAllVisits: 'promise',
8 | removeVisitsAfter: 'promise'
9 | }
10 |
--------------------------------------------------------------------------------
/web-apis/manifests/internal/downloads.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | getDownloads: 'promise',
3 | pause: 'promise',
4 | resume: 'promise',
5 | cancel: 'promise',
6 | remove: 'promise',
7 | open: 'promise',
8 | showInFolder: 'promise',
9 | createEventsStream: 'readable'
10 | }
11 |
--------------------------------------------------------------------------------
/globals.js:
--------------------------------------------------------------------------------
// Mutable bag of configuration and injected dependencies.
// NOTE(review): presumably the embedding application assigns these at
// startup (e.g. rpcAPI is consumed by webview.js / web-apis) — confirm.
module.exports = {
  // config
  userDataPath: null,
  homePath: null,
  templatesPath: null,
  disallowedSavePaths: [],

  // dependencies
  permsAPI: null,
  uiAPI: null,
  rpcAPI: null,
  downloadsWebAPI: null,
  browserWebAPI: null
}
15 |
--------------------------------------------------------------------------------
/lib/env.js:
--------------------------------------------------------------------------------
1 | exports.getEnvVar = function (name) {
2 | var ucv = process.env[name.toUpperCase()]
3 | if (typeof ucv !== 'undefined') {
4 | return ucv
5 | }
6 | var lcv = process.env[name.toLowerCase()]
7 | if (typeof lcv !== 'undefined') {
8 | return lcv
9 | }
10 | return undefined
11 | }
12 |
--------------------------------------------------------------------------------
/web-apis/manifests/external/experimental/dat-peers.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | list: 'promise',
3 | get: 'promise',
4 | broadcast: 'promise',
5 | send: 'promise',
6 | getSessionData: 'promise',
7 | setSessionData: 'promise',
8 | getOwnPeerId: 'promise',
9 | createEventStream: 'readable'
10 | }
11 |
--------------------------------------------------------------------------------
/web-apis/bg/user-session.js:
--------------------------------------------------------------------------------
const {getAppPermissions} = require('../../dbs/sitedata')

// exported api
// =

module.exports = {
  // fetch the sender's session data
  // returns {permissions} looked up for the URL of the calling page
  // NOTE(review): `this.sender` is assumed to be bound by the RPC layer
  // to the calling webContents — confirm
  async fetch () {
    return {
      permissions: await getAppPermissions(this.sender.getURL())
    }
  }
}
14 |
--------------------------------------------------------------------------------
/lib/functions.js:
--------------------------------------------------------------------------------
1 |
2 | // helper to make node-style CBs into promises
3 | // usage: cbPromise(cb => myNodeStyleMethod(cb)).then(...)
4 | exports.cbPromise = function (method, b) {
5 | return new Promise((resolve, reject) => {
6 | method((err, value) => {
7 | if (err) reject(err)
8 | else resolve(value)
9 | })
10 | })
11 | }
12 |
--------------------------------------------------------------------------------
/web-apis/manifests/internal/sitedata.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | get: 'promise',
3 | set: 'promise',
4 | getPermissions: 'promise',
5 | getPermission: 'promise',
6 | getAppPermissions: 'promise',
7 | setPermission: 'promise',
8 | setAppPermissions: 'promise',
9 | clearPermission: 'promise',
10 | clearPermissionAllOrigins: 'promise'
11 | }
12 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v11.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 | -- log of the user's app installations
3 | -- deprecated
4 | CREATE TABLE apps_log (
5 | profileId INTEGER NOT NULL,
6 | name TEXT NOT NULL,
7 | url TEXT,
8 | ts INTEGER DEFAULT (strftime('%s', 'now')),
9 |
10 | FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
11 | );
12 |
13 | PRAGMA user_version = 11;
14 | `
15 |
--------------------------------------------------------------------------------
/dbs/index.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | archives: require('./archives'),
3 | archiveDrafts: require('./archive-drafts'),
4 | bookmarks: require('./bookmarks'),
5 | history: require('./history'),
6 | profileData: require('./profile-data-db'),
7 | settings: require('./settings'),
8 | sitedata: require('./sitedata'),
9 | templates: require('./templates'),
10 | watchlist: require('./watchlist')
11 | }
12 |
--------------------------------------------------------------------------------
/lib/scoped-fses.js:
--------------------------------------------------------------------------------
1 | const ScopedFS = require('scoped-fs')
2 |
3 | // globals
4 | // =
5 |
6 | var scopedFSes = {} // map of scoped filesystems, kept in memory to reduce allocations
7 |
8 | // exported APIs
9 | // =
10 |
11 | exports.get = function (path) {
12 | if (!(path in scopedFSes)) {
13 | scopedFSes[path] = new ScopedFS(path)
14 | scopedFSes[path].isLocalFS = true
15 | }
16 | return scopedFSes[path]
17 | }
18 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v18.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add a database to track user-defined templates for new dat sites
4 | CREATE TABLE templates (
5 | profileId INTEGER,
6 | url TEXT NOT NULL,
7 | title TEXT,
8 | screenshot,
9 | createdAt INTEGER DEFAULT (strftime('%s', 'now')),
10 |
11 | PRIMARY KEY (profileId, url),
12 | FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
13 | );
14 |
15 | PRAGMA user_version = 18;
16 | `
17 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v10.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 | -- list of the user's installed apps
3 | -- deprecated
4 | CREATE TABLE apps (
5 | profileId INTEGER NOT NULL,
6 | name TEXT NOT NULL,
7 | url TEXT,
8 | updatedAt INTEGER DEFAULT (strftime('%s', 'now')),
9 | createdAt INTEGER DEFAULT (strftime('%s', 'now')),
10 |
11 | PRIMARY KEY (profileId, name),
12 | FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
13 | );
14 |
15 | PRAGMA user_version = 10;
16 | `
17 |
--------------------------------------------------------------------------------
/lib/lock.js:
--------------------------------------------------------------------------------
1 | const AwaitLock = require('await-lock')
2 |
3 | // wraps await-lock in a simpler interface, with many possible locks
4 | // usage:
5 | /*
6 | var lock = require('./lock')
7 | async function foo () {
8 | var release = await lock('bar')
9 | // ...
10 | release()
11 | }
12 | */
13 |
14 | var locks = {}
15 | module.exports = async function (key) {
16 | if (!(key in locks)) locks[key] = new AwaitLock()
17 |
18 | var lock = locks[key]
19 | await lock.acquireAsync()
20 | return lock.release.bind(lock)
21 | }
22 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v12.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- list of the active workspaces
4 | -- deprecated
5 | CREATE TABLE workspaces (
6 | profileId INTEGER NOT NULL,
7 | name TEXT NOT NULL,
8 | localFilesPath TEXT,
9 | publishTargetUrl TEXT,
10 | createdAt INTEGER DEFAULT (strftime('%s', 'now')),
11 | updatedAt INTEGER DEFAULT (strftime('%s', 'now')),
12 |
13 | PRIMARY KEY (profileId, name),
14 | FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
15 | );
16 |
17 | PRAGMA user_version = 12;
18 | `
19 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v23.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add a database for watchlist feature
4 | CREATE TABLE watchlist (
5 | profileId INTEGER NOT NULL,
6 | url TEXT NOT NULL,
7 | description TEXT NOT NULL,
8 | seedWhenResolved BOOLEAN NOT NULL,
9 | resolved BOOLEAN NOT NULL DEFAULT (0),
10 | updatedAt INTEGER DEFAULT (strftime('%s', 'now')),
11 | createdAt INTEGER DEFAULT (strftime('%s', 'now')),
12 |
13 | PRIMARY KEY (profileId, url),
14 | FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
15 | );
16 |
17 | PRAGMA user_version = 23;
18 | `
--------------------------------------------------------------------------------
/web-apis/manifests/internal/bookmarks.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | // current user
3 | getBookmark: 'promise',
4 | isBookmarked: 'promise',
5 |
6 | // public
7 | bookmarkPublic: 'promise',
8 | unbookmarkPublic: 'promise',
9 | listPublicBookmarks: 'promise',
10 |
11 | // pins
12 | setBookmarkPinned: 'promise',
13 | setBookmarkPinOrder: 'promise',
14 | listPinnedBookmarks: 'promise',
15 |
16 | // private
17 | bookmarkPrivate: 'promise',
18 | unbookmarkPrivate: 'promise',
19 | listPrivateBookmarks: 'promise',
20 |
21 | // tags
22 | listBookmarkTags: 'promise'
23 | }
24 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v19.sql.js:
--------------------------------------------------------------------------------
1 | module.exports = `
2 |
3 | -- add the 'hidden' flag to archives
4 | ALTER TABLE archives ADD COLUMN hidden INTEGER DEFAULT 0;
5 |
6 | -- add a database for tracking draft dats
7 | CREATE TABLE archive_drafts (
8 | profileId INTEGER,
9 | masterKey TEXT, -- key of the master dat
10 | draftKey TEXT, -- key of the draft dat
11 | createdAt INTEGER DEFAULT (strftime('%s', 'now')),
12 |
13 | isActive INTEGER, -- is this the active draft? (deprecated)
14 |
15 | FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
16 | );
17 |
18 | PRAGMA user_version = 19;
19 | `
20 |
--------------------------------------------------------------------------------
/web-apis/bg/watchlist.js:
--------------------------------------------------------------------------------
const datWatchlist = require('../../dat/watchlist')
// NOTE(review): datLibrary is never referenced in this module — possibly
// required only for load-time side effects; confirm before removing
const datLibrary = require('../../dat/library')

// exported api
// =

// Watchlist web API — thin wrapper around dat/watchlist. Every call is
// made with a hard-coded profileId of 0 (single-profile assumption).
module.exports = {
  // add a site to the watchlist
  async add (url, opts) {
    return datWatchlist.addSite(0, url, opts)
  },

  // list all watched sites
  async list () {
    return datWatchlist.getSites(0)
  },

  // update a watchlist record
  async update (site, opts) {
    return datWatchlist.updateWatchlist(0, site, opts)
  },

  // remove a site from the watchlist
  async remove (url) {
    return datWatchlist.removeSite(0, url)
  },

  // events
  // =

  createEventsStream () {
    return datWatchlist.createEventsStream()
  }
}
31 |
--------------------------------------------------------------------------------
/web-apis/fg.js:
--------------------------------------------------------------------------------
const { contextBridge } = require('electron')
const DatArchive = require('./fg/dat-archive')
const beaker = require('./fg/beaker')
const experimental = require('./fg/experimental')

// Install the foreground (renderer-side) web APIs, gated on the page's protocol.
exports.setup = function ({rpcAPI}) {
  // setup APIs
  // DatArchive is exposed on beaker:, dat:, https:, and http://localhost pages
  if (['beaker:', 'dat:', 'https:'].includes(window.location.protocol) ||
    (window.location.protocol === 'http:' && window.location.hostname === 'localhost')) {
    DatArchive.setupAndExpose(rpcAPI)
  }
  // the `beaker` and `experimental` namespaces are restricted to trusted protocols
  if (['beaker:', 'dat:'].includes(window.location.protocol)) {
    contextBridge.exposeInMainWorld('beaker', beaker.setup(rpcAPI))
    contextBridge.exposeInMainWorld('experimental', experimental.setup(rpcAPI))
  }
}
17 |
--------------------------------------------------------------------------------
/web-apis/bg/history.js:
--------------------------------------------------------------------------------
const historyDb = require('../../dbs/history')

// exported api
// =

// Browsing-history web API — a thin pass-through to dbs/history.
// NOTE(review): addVisit/getVisitHistory/getMostVisited prepend a hard-coded
// profileId of 0, while the remaining methods do not — presumably those db
// functions are not profile-scoped; confirm against dbs/history.js.
module.exports = {
  async addVisit (...args) {
    return historyDb.addVisit(0, ...args)
  },

  async getVisitHistory (...args) {
    return historyDb.getVisitHistory(0, ...args)
  },

  async getMostVisited (...args) {
    return historyDb.getMostVisited(0, ...args)
  },

  async search (...args) {
    return historyDb.search(...args)
  },

  async removeVisit (...args) {
    return historyDb.removeVisit(...args)
  },

  async removeAllVisits (...args) {
    return historyDb.removeAllVisits(...args)
  },

  async removeVisitsAfter (...args) {
    return historyDb.removeVisitsAfter(...args)
  }
}
35 |
--------------------------------------------------------------------------------
/web-apis/manifests/external/dat-archive.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | loadArchive: 'promise',
3 | createArchive: 'promise',
4 | forkArchive: 'promise',
5 | unlinkArchive: 'promise',
6 |
7 | getInfo: 'promise',
8 | configure: 'promise',
9 | history: 'promise',
10 |
11 | stat: 'promise',
12 | readFile: 'promise',
13 | writeFile: 'promise',
14 | unlink: 'promise',
15 | copy: 'promise',
16 | rename: 'promise',
17 | download: 'promise',
18 |
19 | readdir: 'promise',
20 | mkdir: 'promise',
21 | rmdir: 'promise',
22 |
23 | watch: 'readable',
24 | createNetworkActivityStream: 'readable',
25 |
26 | resolveName: 'promise',
27 | selectArchive: 'promise',
28 |
29 | diff: 'promise',
30 | merge: 'promise',
31 |
32 | importFromFilesystem: 'promise',
33 | exportToFilesystem: 'promise',
34 | exportToArchive: 'promise',
35 | }
36 |
--------------------------------------------------------------------------------
/dbs/templates.js:
--------------------------------------------------------------------------------
const db = require('./profile-data-db')

// exported api
// =

// fetch a template record (without its screenshot blob)
exports.get = function (profileId, url) {
  return db.get(`SELECT url, title, createdAt FROM templates WHERE profileId = ? AND url = ?`, [profileId, url])
}

// fetch only the url and screenshot of a template
exports.getScreenshot = function (profileId, url) {
  return db.get(`SELECT url, screenshot FROM templates WHERE profileId = ? AND url = ?`, [profileId, url])
}

// list all of a profile's templates, ordered by title
exports.list = function (profileId) {
  return db.all(`SELECT url, title, createdAt FROM templates WHERE profileId = ? ORDER BY title`, [profileId])
}

// create or replace a template record
exports.put = function (profileId, url, {title, screenshot}) {
  return db.run(`
    INSERT OR REPLACE
      INTO templates (profileId, url, title, screenshot)
      VALUES (?, ?, ?, ?)
  `, [profileId, url, title, screenshot])
}

// delete a template record
exports.remove = function (profileId, url) {
  return db.run(`DELETE FROM templates WHERE profileId = ? AND url = ?`, [profileId, url])
}
29 |
--------------------------------------------------------------------------------
/dat/dns.js:
--------------------------------------------------------------------------------
const {InvalidDomainName} = require('beaker-error-constants')
const sitedataDb = require('../dbs/sitedata')
const {DAT_HASH_REGEX} = require('../lib/const')

// instantiate a dns cache and export it
const datDns = require('dat-dns')({
  persistentCache: {read, write}
})
module.exports = datDns

// wrap resolveName() with a better error
// NOTE(review): the original failure is discarded — every resolution
// error surfaces as InvalidDomainName
const resolveName = datDns.resolveName
datDns.resolveName = function () {
  return resolveName.apply(datDns, arguments)
    .catch(_ => {
      throw new InvalidDomainName()
    })
}

// persistent cache methods
const sitedataDbOpts = {dontExtractOrigin: true}
// look up a cached dat key for `name`; rethrows `err` (supplied by dat-dns) on a cache miss
async function read (name, err) {
  var key = await sitedataDb.get('dat:' + name, 'dat-key', sitedataDbOpts)
  if (!key) throw err
  return key
}
// persist the resolved key for `name`
async function write (name, key) {
  if (DAT_HASH_REGEX.test(name)) return // dont write for raw urls
  await sitedataDb.set('dat:' + name, 'dat-key', key, sitedataDbOpts)
}
31 |
--------------------------------------------------------------------------------
/web-apis/manifests/internal/browser.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | createEventsStream: 'readable',
3 | getInfo: 'sync',
4 | checkForUpdates: 'promise',
5 | restartBrowser: 'sync',
6 |
7 | getSettings: 'promise',
8 | getSetting: 'promise',
9 | setSetting: 'promise',
10 | getUserSetupStatus: 'promise',
11 | setUserSetupStatus: 'promise',
12 | getDefaultLocalPath: 'promise',
13 | setStartPageBackgroundImage: 'promise',
14 | getDefaultProtocolSettings: 'promise',
15 | setAsDefaultProtocolClient: 'promise',
16 | removeAsDefaultProtocolClient: 'promise',
17 |
18 | listBuiltinFavicons: 'promise',
19 | getBuiltinFavicon: 'promise',
20 | uploadFavicon: 'promise',
21 | imageToIco: 'promise',
22 |
23 | fetchBody: 'promise',
24 | downloadURL: 'promise',
25 |
26 | getResourceContentType: 'sync',
27 |
28 | setWindowDimensions: 'promise',
29 | showOpenDialog: 'promise',
30 | showContextMenu: 'promise',
31 | openUrl: 'promise',
32 | openFolder: 'promise',
33 | doWebcontentsCmd: 'promise',
34 | doTest: 'promise',
35 | closeModal: 'sync'
36 | }
37 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Blue Link Labs
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/lib/debug-logger.js:
--------------------------------------------------------------------------------
// Simple file-backed debug logger: setup() opens the log file,
// debugLogger() creates namespaced log functions, and the getters
// expose the log's path and content.

const fs = require('fs')
const {format} = require('util')
const concat = require('concat-stream')

var logFilePath
var logFileWriteStream

// open (and truncate) the log file at path `p`
// write-stream errors are reported to the console, not thrown
exports.setup = function (p) {
  logFilePath = p
  console.log('Logfile:', logFilePath)

  logFileWriteStream = fs.createWriteStream(logFilePath, {encoding: 'utf8'})
  logFileWriteStream.write(format('Log started at %s\n', new Date()))
  logFileWriteStream.on('error', e => {
    console.log('Failed to open debug.log', e)
  })
}

// create a log function that prefixes each entry with `namespace`
// falls back to console.error if setup() has not been called yet
exports.debugLogger = function (namespace) {
  return function (...args) {
    if (logFileWriteStream) {
      logFileWriteStream.write(namespace + ' ' + format(...args) + '\n')
    } else {
      console.error(namespace + ' ' + format(...args) + '\n')
    }
  }
}

exports.getLogFilePath = function () {
  return logFilePath
}

// read a byte range of the log file (defaults to the first ~1MB)
exports.getLogFileContent = function (start, end) {
  start = start || 0
  end = end || 10e5
  return new Promise(resolve => fs.createReadStream(logFilePath, {start, end}).pipe(concat({encoding: 'string'}, resolve)))
}
38 |
--------------------------------------------------------------------------------
/web-apis/manifests/internal/archives.js:
--------------------------------------------------------------------------------
// RPC manifest for the internal `archives` web API. Each value names the
// marshalling used across the RPC boundary ('promise' for async calls,
// 'readable' for streamed results); the implementations live in web-apis/bg.
module.exports = {
  // system state
  status: 'promise',

  // local cache management and querying
  setUserSettings: 'promise',
  add: 'promise',
  remove: 'promise',
  bulkRemove: 'promise',
  delete: 'promise',
  list: 'promise',

  // folder sync
  validateLocalSyncPath: 'promise',
  setLocalSyncPath: 'promise',
  ensureLocalSyncFinished: 'promise',

  // diff & publish
  diffLocalSyncPathListing: 'promise',
  diffLocalSyncPathFile: 'promise',
  publishLocalSyncPathListing: 'promise',
  revertLocalSyncPathListing: 'promise',

  // drafts
  getDraftInfo: 'promise',
  listDrafts: 'promise',
  addDraft: 'promise',
  removeDraft: 'promise',

  // templates
  getTemplate: 'promise',
  listTemplates: 'promise',
  putTemplate: 'promise',
  removeTemplate: 'promise',

  // internal management
  touch: 'promise',
  clearFileCache: 'promise',
  clearGarbage: 'promise',
  clearDnsCache: 'promise',

  // events
  createEventStream: 'readable',
  getDebugLog: 'promise',
  createDebugStream: 'readable'
}
47 |
--------------------------------------------------------------------------------
/dbs/archive-drafts.js:
--------------------------------------------------------------------------------
1 | const db = require('./profile-data-db')
2 | const archivesDb = require('./archives')
3 |
4 | // exported api
5 | // =
6 |
7 | exports.list = async function (profileId, masterKey) {
8 | // get draft list
9 | var records = await db.all(`SELECT draftKey as key FROM archive_drafts WHERE profileId = ? AND masterKey = ? ORDER BY createdAt`, [profileId, masterKey])
10 | // fetch full info from archives db
11 | return Promise.all(records.map(async ({key}) => archivesDb.query(profileId, {key, showHidden: true})))
12 | }
13 |
// Register `draftKey` as a draft of `masterKey` for the given profile.
// INSERT OR REPLACE makes repeated registration of the same pairing idempotent.
exports.add = function (profileId, masterKey, draftKey) {
  return db.run(`
    INSERT OR REPLACE
      INTO archive_drafts (profileId, masterKey, draftKey)
      VALUES (?, ?, ?)
  `, [profileId, masterKey, draftKey])
}
21 |
// Unregister `draftKey` as a draft of `masterKey` for the given profile.
exports.remove = function (profileId, masterKey, draftKey) {
  return db.run(`DELETE FROM archive_drafts WHERE profileId = ? AND masterKey = ? AND draftKey = ?`, [profileId, masterKey, draftKey])
}
25 |
// Resolve the master archive key for `draftKey`. If the key isn't a
// registered draft, it is its own master and is returned unchanged.
exports.getMaster = async function (profileId, draftKey) {
  var record = await db.get(`SELECT masterKey as key FROM archive_drafts WHERE profileId = ? AND draftKey = ?`, [profileId, draftKey])
  if (record) return record.key
  return draftKey
}
--------------------------------------------------------------------------------
/dat/daemon/manifest.js:
--------------------------------------------------------------------------------
// RPC manifest for the dat daemon process. Each value names the marshalling
// used across the RPC boundary ('promise'/'async' for calls, 'readable'/
// 'writable' for streams).
module.exports = {
  // setup & config

  setup: 'promise',
  setBandwidthThrottle: 'promise',

  // event streams & debug

  createEventStream: 'readable',
  createDebugStream: 'readable',
  getDebugLog: 'promise',

  // archive management

  configureArchive: 'promise',
  getArchiveInfo: 'promise',
  updateSizeTracking: 'promise',
  loadArchive: 'promise',
  unloadArchive: 'promise',

  // archive methods

  callArchiveAsyncMethod: 'async',
  callArchiveReadStreamMethod: 'readable',
  callArchiveWriteStreamMethod: 'writable',
  callArchivePDAPromiseMethod: 'promise',
  callArchivePDAReadStreamMethod: 'readable',
  clearFileCache: 'promise',
  exportArchiveToArchive: 'async',

  // folder sync

  fs_assertSafePath: 'promise',
  fs_ensureSyncFinished: 'promise',
  fs_diffListing: 'promise',
  fs_diffFile: 'promise',
  // NOTE(review): 'fe_' breaks the 'fs_' prefix convention used by the rest
  // of this section — likely a typo, but renaming the key would change the
  // RPC surface; confirm against daemon callers before fixing.
  fe_queueSyncEvent: 'promise',
  fs_syncFolderToArchive: 'promise',
  fs_syncArchiveToFolder: 'promise',

  // dat extensions

  ext_listPeers: 'promise',
  ext_getPeer: 'promise',
  ext_getOwnPeerId: 'promise',
  ext_broadcastEphemeralMessage: 'promise',
  ext_sendEphemeralMessage: 'promise',
  ext_getSessionData: 'promise',
  ext_setSessionData: 'promise',
  ext_createDatPeersStream: 'readable'
}
52 |
--------------------------------------------------------------------------------
/lib/strings.js:
--------------------------------------------------------------------------------
1 | /* globals window */
2 |
3 | const URL = typeof window === 'undefined' ? require('url').URL : window.URL
4 |
5 | exports.getPermId = function (permissionToken) {
6 | return permissionToken.split(':')[0]
7 | }
8 |
9 | exports.getPermParam = function (permissionToken) {
10 | return permissionToken.split(':').slice(1).join(':')
11 | }
12 |
13 | exports.ucfirst = function (str) {
14 | return str.charAt(0).toUpperCase() + str.slice(1)
15 | }
16 |
17 | exports.pluralize = function (num, base, suffix = 's') {
18 | if (num === 1) { return base }
19 | return base + suffix
20 | }
21 |
22 | exports.shorten = function (str, n = 6) {
23 | if (str.length > (n + 3)) {
24 | return str.slice(0, n) + '...'
25 | }
26 | return str
27 | }
28 |
29 | const shortenHash = exports.shortenHash = function (str, n = 6) {
30 | if (str.startsWith('dat://')) {
31 | return 'dat://' + shortenHash(str.slice('dat://'.length).replace(/\/$/, '')) + '/'
32 | }
33 | if (str.length > (n + 5)) {
34 | return str.slice(0, n) + '..' + str.slice(-2)
35 | }
36 | return str
37 | }
38 |
39 | exports.makeSafe = function (str) {
40 | return str.replace(//g, '>').replace(/&/g, '&').replace(/"/g, '')
41 | }
42 |
43 | exports.getHostname = function (str) {
44 | try {
45 | const u = new URL(str)
46 | if (u.protocol === 'dat:' && u.hostname.length === 64) {
47 | return 'dat://' + shortenHash(u.hostname)
48 | }
49 | return u.hostname
50 | } catch (e) {
51 | return str
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/dat/daemon/storage.js:
--------------------------------------------------------------------------------
1 | const path = require('path')
2 | const fs = require('fs')
3 | const detectSparseFiles = require('supports-sparse-files')
4 | const raf = require('random-access-file')
5 | const raif = require('random-access-indexed-file')
6 |
7 | // globals
8 | // =
9 |
10 | const LARGE_FILES = ['data', 'signatures']
11 | const INDEX_BLOCK_SIZE = {
12 | data: 1024 * 1024, // 1mb
13 | signatures: 1024 // 1kb
14 | }
15 | var supportsSparseFiles = false
16 |
17 | // exported api
18 | // =
19 |
20 | exports.setup = async function () {
21 | await new Promise((resolve) => {
22 | detectSparseFiles(function (err, yes) {
23 | supportsSparseFiles = yes
24 | if (!yes) {
25 | console.log('Sparse-file support not detected. Falling back to indexed data files.')
26 | }
27 | resolve()
28 | })
29 | })
30 | }
31 |
32 | function createStorage (folder, subfolder) {
33 | return function (name) {
34 | var filepath = path.join(folder, subfolder, name)
35 | if (fs.existsSync(filepath + '.index')) {
36 | // use random-access-indexed-file because that's what has been used
37 | return raif(filepath, {blockSize: INDEX_BLOCK_SIZE[name]})
38 | }
39 | if (!supportsSparseFiles && LARGE_FILES.includes(name)) {
40 | // use random-access-indexed-file because sparse-files are not supported and this file tends to get big
41 | return raif(filepath, {blockSize: INDEX_BLOCK_SIZE[name]})
42 | }
43 | return raf(filepath)
44 | }
45 | }
46 |
47 | exports.create = function (folder) {
48 | return {
49 | metadata: createStorage(folder, 'metadata'),
50 | content: createStorage(folder, 'content')
51 | }
52 | }
--------------------------------------------------------------------------------
/web-apis/fg/stat.js:
--------------------------------------------------------------------------------
1 | // http://man7.org/linux/man-pages/man2/stat.2.html
2 | // mirrored from hyperdrive/lib/stat.js
3 |
// Stat record mirroring hyperdrive's, callable with or without `new`.
const Stat = module.exports = function Stat (data) {
  if (!(this instanceof Stat)) return new Stat(data)

  /*
  TODO- are the following attrs needed?
  this.dev = 0
  this.nlink = 1
  this.rdev = 0
  this.blksize = 0
  this.ino = 0
  this.uid = data ? data.uid : 0
  this.gid = data ? data.gid : 0 */

  // read a field off `data` when present, else use the fallback
  const get = (field, fallback) => (data ? data[field] : fallback)
  this.mode = get('mode', 0)
  this.size = get('size', 0)
  this.offset = get('offset', 0)
  this.blocks = get('blocks', 0)
  this.downloaded = get('downloaded', 0)
  this.atime = new Date(get('mtime', 0)) // we just set this to mtime ...
  this.mtime = new Date(get('mtime', 0))
  this.ctime = new Date(get('ctime', 0))

  this.linkname = get('linkname', null)
}

// mode-bit masks for the file-type predicates below
Stat.IFSOCK = 49152 // 0b1100...
Stat.IFLNK = 40960 // 0b1010...
Stat.IFREG = 32768 // 0b1000...
Stat.IFBLK = 24576 // 0b0110...
Stat.IFDIR = 16384 // 0b0100...
Stat.IFCHR = 8192 // 0b0010...
Stat.IFIFO = 4096 // 0b0001...

Stat.prototype.isSocket = check(Stat.IFSOCK)
Stat.prototype.isSymbolicLink = check(Stat.IFLNK)
Stat.prototype.isFile = check(Stat.IFREG)
Stat.prototype.isBlockDevice = check(Stat.IFBLK)
Stat.prototype.isDirectory = check(Stat.IFDIR)
Stat.prototype.isCharacterDevice = check(Stat.IFCHR)
Stat.prototype.isFIFO = check(Stat.IFIFO)

// Build a predicate testing that every bit of `mask` is set in this.mode.
function check (mask) {
  return function () {
    return (this.mode & mask) === mask
  }
}
50 |
--------------------------------------------------------------------------------
/dbs/watchlist.js:
--------------------------------------------------------------------------------
1 | const lock = require('../lib/lock')
2 | const db = require('./profile-data-db')
3 |
4 | // exported methods
5 | // =
6 |
7 | exports.addSite = async function (profileId, url, opts) {
8 | var release = await lock('watchlist-db')
9 | try {
10 | // get date for timestamp in seconds floored
11 | var ts = (Date.now() / 1000 | 0)
12 |
13 | // check if site already being watched
14 | var site = await db.get('SELECT rowid, * from watchlist WHERE profileId = ? AND url = ?', [profileId, url])
15 | if (!site) {
16 | // add site to watch list
17 | await db.run('INSERT INTO watchlist (profileId, url, description, seedWhenResolved, createdAt) VALUES (?, ?, ?, ?, ?);', [profileId, url, opts.description, opts.seedWhenResolved, ts])
18 | }
19 | } finally {
20 | release()
21 | }
22 | return db.get('SELECT rowid, * from watchlist WHERE profileId = ? AND url = ?', [profileId, url])
23 | }
24 |
// Fetch every watchlist row belonging to `profileId`.
exports.getSites = async function (profileId) {
  return db.all(`SELECT * FROM watchlist WHERE profileId = ?1`, [profileId])
}
28 |
// Merge `opts` into `site` and persist the seedWhenResolved/resolved flags.
// NOTE(review): Object.assign mutates the caller-provided `site` object —
// callers may rely on seeing the merged fields afterwards; confirm before
// switching to a non-mutating copy.
exports.updateWatchlist = async function (profileId, site, opts) {
  var combine = Object.assign(site, opts)
  // update timestamp in whole seconds
  var updatedAt = (Date.now() / 1000 | 0)

  var release = await lock('watchlist-db')
  try {
    await db.run(`UPDATE watchlist SET seedWhenResolved = ?, resolved = ?, updatedAt = ?
      WHERE profileId = ? AND url = ?`, [combine.seedWhenResolved, combine.resolved, updatedAt, profileId, combine.url])
  } finally {
    release()
  }
}
41 |
// Delete `url` from the profile's watchlist.
exports.removeSite = async function (profileId, url) {
  return db.run(`DELETE FROM watchlist WHERE profileId = ? AND url = ?`, [profileId, url])
}
45 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | const assert = require('assert')
2 | const {join} = require('path')
3 | const debugLogger = require('./lib/debug-logger')
4 | const globals = require('./globals')
5 | const {getEnvVar} = require('./lib/env')
6 | const dat = require('./dat')
7 | const dbs = require('./dbs')
8 | const webapis = require('./web-apis/bg')
9 |
module.exports = {
  getEnvVar,
  globals,
  dat,
  dbs,

  // debug-log helpers re-exported for the host app
  debugLogger: debugLogger.debugLogger,
  getLogFilePath: debugLogger.getLogFilePath,
  getLogFileContent: debugLogger.getLogFileContent,

  /**
   * Initialize beaker-core. Call once before using any other API.
   * `opts` supplies filesystem paths and the host-app integration objects;
   * every key is copied onto `globals` for the rest of the codebase to read.
   */
  async setup (opts) {
    assert(typeof opts.userDataPath === 'string', 'userDataPath must be a string')
    assert(typeof opts.homePath === 'string', 'homePath must be a string')
    assert(typeof opts.templatesPath === 'string', 'templatesPath must be a string')
    assert(!!opts.datDaemonProcess, 'must provide datDaemonProcess')
    assert(!!opts.permsAPI, 'must provide permsAPI')
    assert(!!opts.uiAPI, 'must provide uiAPI')
    assert(!!opts.rpcAPI, 'must provide rpcAPI')
    assert(!!opts.downloadsWebAPI, 'must provide downloadsWebAPI')
    assert(!!opts.browserWebAPI, 'must provide browserWebAPI')

    // make the host-app integrations available everywhere
    for (let k in opts) {
      globals[k] = opts[k]
    }

    // initiate log
    debugLogger.setup(join(opts.userDataPath, 'debug.log'))

    // setup databases
    // NOTE(review): setup() results are not awaited — the db modules appear
    // to queue async work internally (see dbs/profile-data-db setupPromise);
    // confirm before relying on readiness ordering here.
    for (let k in dbs) {
      if (dbs[k].setup) {
        dbs[k].setup(opts)
      }
    }

    // setup dat
    await dat.library.setup(opts)

    // setup watchlist
    await dat.watchlist.setup()

    // setup web apis
    webapis.setup(opts)
  }
}
55 |
--------------------------------------------------------------------------------
/dat/debugging.js:
--------------------------------------------------------------------------------
1 | const {getActiveArchives} = require('./library')
2 | const datDns = require('./dns')
3 |
// Render a plain-HTML debugging page listing every active archive: its
// keys, discovery keys, and connected replication peers.
exports.archivesDebugPage = function () {
  var archives = getActiveArchives()
  return `

      ${Object.keys(archives).map(key => {
    var a = archives[key]
    return `

${a.key.toString('hex')}

| Meta DKey | ${a.discoveryKey.toString('hex')} |
| Content DKey | ${a.content.discoveryKey.toString('hex')} |
| Meta Key | ${a.key.toString('hex')} |
| Content Key | ${a.content.key.toString('hex')} |
${a.replicationStreams.map((s, i) => `
| Peer ${i} | ${s.peerInfo.type} ${s.peerInfo.host}:${s.peerInfo.port} |
`).join('')}


`
  }).join('')}

`
}
26 |
// Render a debugging page listing every entry in the dat DNS cache
// (name -> resolved key).
exports.datDnsCachePage = function () {
  var cache = datDns.listCache()
  return `

    Dat DNS cache


      ${Object.keys(cache).map(name => {
    var key = cache[name]
    return `| ${name} | ${key} |
`
  }).join('')}



  `
}
43 |
// Client-side script for the DNS-cache page: wires the clear button to
// beaker.archives.clearDnsCache() and reloads. Returned as source text.
exports.datDnsCacheJS = function () {
  return `
    document.querySelector('button').addEventListener('click', clear)
    async function clear () {
      await beaker.archives.clearDnsCache()
      location.reload()
    }
  `
}
53 |
--------------------------------------------------------------------------------
/dat/garbage-collector.js:
--------------------------------------------------------------------------------
1 | const archivesDb = require('../dbs/archives')
2 | const datLibrary = require('./library')
3 | const {
4 | DAT_GC_FIRST_COLLECT_WAIT,
5 | DAT_GC_REGULAR_COLLECT_WAIT
6 | } = require('../lib/const')
7 | const debug = require('../lib/debug-logger').debugLogger('datgc')
8 |
9 | // globals
10 | // =
11 |
12 | var nextGCTimeout
13 |
14 | // exported API
15 | // =
16 |
// Start the GC cycle: the first collection runs DAT_GC_FIRST_COLLECT_WAIT
// after startup; collect() reschedules itself from then on.
exports.setup = function () {
  schedule(DAT_GC_FIRST_COLLECT_WAIT)
}
20 |
// Run a garbage-collection pass: unsave expired archives, then unload and
// delete unused ones. Reschedules itself and returns collection stats.
const collect = exports.collect = async function ({olderThan, isOwner} = {}) {
  // a manual run supersedes any pending scheduled run
  if (nextGCTimeout) {
    clearTimeout(nextGCTimeout)
    nextGCTimeout = null
  }

  var totalBytes = 0
  var skippedArchives = 0
  var startTime = Date.now()

  // first unsave expired archives
  var expiredArchives = await archivesDb.listExpiredArchives()
  debug('GC unsaving %d expired archives', expiredArchives.length)
  await Promise.all(expiredArchives.map(
    archive => archivesDb.setUserSettings(0, archive.key, {isSaved: false})
  ))

  // now GC old archives
  var unusedArchives = await archivesDb.listGarbageCollectableArchives({olderThan, isOwner})
  debug('GC cleaning out %d unused archives', unusedArchives.length)
  debug(unusedArchives)
  for (let archive of unusedArchives) {
    await datLibrary.unloadArchive(archive.key)
    totalBytes += await archivesDb.deleteArchive(archive.key)
  }

  debug('GC completed in %d ms', Date.now() - startTime)

  // schedule the next GC
  schedule(DAT_GC_REGULAR_COLLECT_WAIT)

  // return stats (skippedArchives is currently always 0)
  return {totalBytes, totalArchives: unusedArchives.length - skippedArchives, skippedArchives}
}
59 |
60 | // helpers
61 | // =
62 |
// Queue the next collect() run `time` ms from now.
// unref() keeps the pending timer from holding the process open.
function schedule (time) {
  nextGCTimeout = setTimeout(collect, time)
  nextGCTimeout.unref()
}
67 |
--------------------------------------------------------------------------------
/web-apis/bg/experimental/global-fetch.js:
--------------------------------------------------------------------------------
1 | const globals = require('../../../globals')
2 | const http = require('http')
3 | const https = require('https')
4 | const concat = require('concat-stream')
5 | const {URL} = require('url')
6 |
7 | // constants
8 | // =
9 |
10 | const API_DOCS_URL = 'https://beakerbrowser.com/docs/apis/experimental-globalfetch.html'
11 | const API_PERM_ID = 'experimentalGlobalFetch'
12 | const LAB_API_ID = 'globalFetch'
13 |
14 | // exported api
15 | // =
16 |
17 | module.exports = {
18 | async fetch (reqOptions, reqBody) {
19 | // parse url
20 | let urlp = new URL(reqOptions.url)
21 | reqOptions.protocol = urlp.protocol
22 | reqOptions.host = urlp.host
23 | reqOptions.path = urlp.pathname + urlp.search + urlp.hash
24 |
25 | // check perms
26 | await globals.permsAPI.checkLabsPerm({
27 | perm: API_PERM_ID + ':' + reqOptions.protocol + '//' + reqOptions.host,
28 | labApi: LAB_API_ID,
29 | apiDocsUrl: API_DOCS_URL,
30 | sender: this.sender
31 | })
32 |
33 | if (reqOptions.protocol !== 'https:' && reqOptions.protocol !== 'http:') {
34 | throw new Error('Can only send requests to http or https URLs')
35 | }
36 |
37 | return new Promise((resolve, reject) => {
38 | // start request
39 | let proto = urlp.protocol === 'https:' ? https : http
40 | let reqStream = proto.request(reqOptions, resStream => {
41 | resStream.pipe(concat(resStream, resBody => {
42 | // resolve with response
43 | resolve({
44 | status: resStream.statusCode,
45 | statusText: resStream.statusMessage,
46 | headers: resStream.headers,
47 | body: (resStream.statusCode != 204 && resStream.statusCode != 304 ? resBody : null)
48 | })
49 | }))
50 |
51 | // handle errors
52 | resStream.on('error', err => {
53 | reject(new Error('Network request failed'))
54 | })
55 | resStream.on('abort', err => {
56 | reject(new Error('Aborted'))
57 | })
58 | })
59 |
60 | // send data
61 | if (reqBody) {
62 | reqStream.send(reqBody)
63 | }
64 |
65 | reqStream.end()
66 | })
67 | }
68 | }
69 |
--------------------------------------------------------------------------------
/lib/const.js:
--------------------------------------------------------------------------------
const bytes = require('bytes')
const ms = require('ms')

// analytics ping: data file, endpoint, and checkin cadence
exports.ANALYTICS_DATA_FILE = 'analytics-ping.json'
exports.ANALYTICS_SERVER = 'analytics.beakerbrowser.com'
exports.ANALYTICS_CHECKIN_INTERVAL = ms('1w')

// 64 char hex
exports.DAT_HASH_REGEX = /^[0-9a-f]{64}$/i
exports.DAT_URL_REGEX = /^(?:dat:\/\/)?([0-9a-f]{64})/i

// url file paths
exports.DAT_VALID_PATH_REGEX = /^[a-z0-9\-._~!$&'()*+,;=:@/\s]+$/i
exports.INVALID_SAVE_FOLDER_CHAR_REGEX = /[^0-9a-zA-Z-_ ]/g

// dat settings
exports.DAT_SWARM_PORT = 3282
exports.DAT_MANIFEST_FILENAME = 'dat.json'
// write quota per archive; overridable via env var (either casing)
let quotaEnvVar = process.env.BEAKER_DAT_QUOTA_DEFAULT_BYTES_ALLOWED || process.env.beaker_dat_quota_default_bytes_allowed
exports.DAT_QUOTA_DEFAULT_BYTES_ALLOWED = bytes.parse(quotaEnvVar || '500mb')
exports.DEFAULT_DAT_DNS_TTL = ms('1h')
exports.MAX_DAT_DNS_TTL = ms('7d')
exports.DEFAULT_DAT_API_TIMEOUT = ms('5s')
exports.DAT_GC_EXPIRATION_AGE = ms('7d') // how old do archives need to be before deleting them from the cache?
exports.DAT_GC_FIRST_COLLECT_WAIT = ms('30s') // how long after process start to do first collect?
exports.DAT_GC_REGULAR_COLLECT_WAIT = ms('15m') // how long between GCs to collect?
// dat.json manifest fields which can be changed by configure()
exports.DAT_CONFIGURABLE_FIELDS = [
  'title',
  'description',
  'type',
  'links',
  'web_root',
  'fallback_page'
]
// dat.json manifest fields which should be preserved in forks
exports.DAT_PRESERVED_FIELDS_ON_FORK = [
  'web_root',
  'fallback_page',
  'links',
  'experimental',
  'content_security_policy'
]

// workspace settings
exports.WORKSPACE_VALID_NAME_REGEX = /^[a-z][a-z0-9-]*$/i

// git-url validator
exports.IS_GIT_URL_REGEX = /(?:git|ssh|https?|git@[-\w.]+):(\/\/)?(.*?)(\.git)(\/?|#[-\d\w._]+?)$/

// archive metadata
// TODO- these may not all be meaningful anymore -prf
exports.STANDARD_ARCHIVE_TYPES = [
  'application',
  'module',
  'dataset',
  'documents',
  'music',
  'photos',
  'user-profile',
  'videos',
  'website'
]
65 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.v1.sql.js:
--------------------------------------------------------------------------------
// Schema v1: the original profile-data database (profiles, archives,
// bookmarks, visit history/full-text search) plus default rows. The trailing
// PRAGMA stamps the migration level; later migrations live in
// profile-data.v2+.sql.js. This is executed verbatim — do not add SQL
// comments that would change the exported string for existing installs.
module.exports = `
CREATE TABLE profiles (
  id INTEGER PRIMARY KEY NOT NULL,
  url TEXT,
  createdAt INTEGER DEFAULT (strftime('%s', 'now'))
);

CREATE TABLE archives (
  profileId INTEGER NOT NULL,
  key TEXT NOT NULL,
  localPath TEXT, -- deprecated
  isSaved INTEGER,
  createdAt INTEGER DEFAULT (strftime('%s', 'now'))
);

CREATE TABLE archives_meta (
  key TEXT PRIMARY KEY,
  title TEXT,
  description TEXT,
  forkOf TEXT, -- deprecated
  createdByUrl TEXT, -- deprecated
  createdByTitle TEXT, -- deprecated
  mtime INTEGER,
  metaSize INTEGER, -- deprecated
  stagingSize INTEGER, -- deprecated
  isOwner INTEGER
);

CREATE TABLE bookmarks (
  profileId INTEGER,
  url TEXT NOT NULL,
  title TEXT,
  pinned INTEGER,
  createdAt INTEGER DEFAULT (strftime('%s', 'now')),

  PRIMARY KEY (profileId, url),
  FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
);

CREATE TABLE visits (
  profileId INTEGER,
  url TEXT NOT NULL,
  title TEXT NOT NULL,
  ts INTEGER NOT NULL,

  FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
);

CREATE TABLE visit_stats (
  url TEXT NOT NULL,
  num_visits INTEGER,
  last_visit_ts INTEGER
);

CREATE VIRTUAL TABLE visit_fts USING fts4 (url, title);
CREATE UNIQUE INDEX visits_stats_url ON visit_stats (url);

-- default profile
INSERT INTO profiles (id) VALUES (0);

-- default bookmarks
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Beaker Home', 'dat://beakerbrowser.com', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Dat Project', 'dat://datproject.org', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, '@BeakerBrowser', 'https://twitter.com/beakerbrowser', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Hashbase', 'https://hashbase.io', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Documentation', 'dat://beakerbrowser.com/docs', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Report an issue', 'https://github.com/beakerbrowser/beaker/issues', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Explore the P2P Web', 'dat://explore.beakerbrowser.com/', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Support Beaker', 'https://opencollective.com/beaker', 1);

PRAGMA user_version = 1;
`
73 |
--------------------------------------------------------------------------------
/dbs/profile-data-db.js:
--------------------------------------------------------------------------------
1 | const sqlite3 = require('sqlite3')
2 | const path = require('path')
3 | const fs = require('fs')
4 | const {cbPromise} = require('../lib/functions')
5 | const {setupSqliteDB} = require('../lib/db')
6 |
7 | // globals
8 | // =
9 |
10 | var db
11 | var migrations
12 | var setupPromise
13 |
14 | // exported methods
15 | // =
16 |
17 | exports.setup = function (opts) {
18 | // open database
19 | var dbPath = path.join(opts.userDataPath, 'Profiles')
20 | db = new sqlite3.Database(dbPath)
21 | setupPromise = setupSqliteDB(db, {setup: setupDb, migrations}, '[PROFILES]')
22 | }
23 |
// Promisified query helpers. Each waits for the schema setup/migrations to
// complete before touching the database, then adapts sqlite3's callback API.
const promisedMethod = (method) => async function (...args) {
  await setupPromise
  return cbPromise(cb => db[method](...args, cb))
}

exports.get = promisedMethod('get')
exports.all = promisedMethod('all')
exports.run = promisedMethod('run')
38 |
// Pass-through to sqlite3's serialize/parallelize execution-mode controls.
// NOTE: unlike the query helpers, these do not wait for setupPromise.
exports.serialize = function () {
  return db.serialize()
}

exports.parallelize = function () {
  return db.parallelize()
}
46 |
47 | // internal methods
48 | // =
49 |
// Create the full, current schema on a fresh database.
function setupDb (cb) {
  db.exec(require('./schemas/profile-data.sql'), cb)
}
// Ordered migration steps for existing databases. Each schema file bumps
// PRAGMA user_version — never reorder or remove entries.
migrations = [
  migration('profile-data.v1.sql'),
  migration('profile-data.v2.sql'),
  migration('profile-data.v3.sql'),
  migration('profile-data.v4.sql'),
  migration('profile-data.v5.sql'),
  migration('profile-data.v6.sql'),
  migration('profile-data.v7.sql'),
  migration('profile-data.v8.sql'),
  migration('profile-data.v9.sql'),
  migration('profile-data.v10.sql'),
  migration('profile-data.v11.sql'),
  migration('profile-data.v12.sql'),
  migration('profile-data.v13.sql'),
  migration('profile-data.v14.sql'),
  migration('profile-data.v15.sql'),
  migration('profile-data.v16.sql', {canFail: true}), // set canFail because we made a mistake in the rollout of this update, see https://github.com/beakerbrowser/beaker/issues/934
  migration('profile-data.v17.sql'),
  migration('profile-data.v18.sql'),
  migration('profile-data.v19.sql'),
  migration('profile-data.v20.sql'),
  migration('profile-data.v21.sql'),
  migration('profile-data.v22.sql', {canFail: true}), // canFail for the same reason as v16, ffs
  migration('profile-data.v23.sql'),
]
// Wrap a schema file in a migration step. With `canFail`, any exec error is
// deliberately swallowed (used for historical rollout mistakes above).
function migration (file, opts = {}) {
  return cb => {
    if (opts.canFail) {
      var orgCb = cb
      cb = () => orgCb() // suppress the error
    }
    db.exec(require('./schemas/' + file), cb)
  }
}
87 |
--------------------------------------------------------------------------------
/web-apis/bg/experimental/capture-page.js:
--------------------------------------------------------------------------------
1 | const globals = require('../../../globals')
2 | const {URL} = require('url')
3 |
4 | // constants
5 | // =
6 |
7 | const API_DOCS_URL = 'https://beakerbrowser.com/docs/apis/experimental-capturepage.html'
8 | const API_PERM_ID = 'experimentalCapturePage'
9 | const LAB_API_ID = 'capturePage'
10 |
11 | // exported api
12 | // =
13 |
14 | module.exports = {
15 | async capturePage (url, opts = {}) {
16 | // validate inputs
17 | if (!url && typeof url !== 'string') {
18 | throw new Error('The first argument must be a URL string')
19 | }
20 | if (opts && typeof opts !== 'object') {
21 | throw new Error('The second argument must be an options object')
22 | }
23 | if (opts) {
24 | if ('width' in opts) {
25 | if (typeof opts.width !== 'number') throw new Error('The width option must be a number')
26 | if (opts.width <= 0 || opts.width > 1600) throw new Error('The width option must between 1 and 1600')
27 | }
28 | if ('height' in opts) {
29 | if (typeof opts.height !== 'number') throw new Error('The height option must be a number')
30 | if (opts.height <= 0 || opts.height > 1200) throw new Error('The height option must between 1 and 1200')
31 | }
32 | if ('resizeTo' in opts) {
33 | if (typeof opts.resizeTo !== 'object') throw new Error('The resizeTo option must be an object')
34 | if ('width' in opts.resizeTo) {
35 | if (typeof opts.resizeTo.width !== 'number') throw new Error('The resizeTo.width option must be a number')
36 | if (opts.resizeTo.width <= 0 || opts.resizeTo.width > 1600) throw new Error('The resizeTo.width option must between 1 and 1600')
37 | }
38 | if ('height' in opts.resizeTo) {
39 | if (typeof opts.resizeTo.height !== 'number') throw new Error('The resizeTo.height option must be a number')
40 | if (opts.resizeTo.height <= 0 || opts.resizeTo.height > 1200) throw new Error('The resizeTo.height option must between 1 and 1200')
41 | }
42 | }
43 | }
44 |
45 | // parse url
46 | var urlp
47 | try { urlp = new URL(url) }
48 | catch (e) { throw new Error('The first argument must be a URL string') }
49 |
50 | if (['http:', 'https:', 'dat:'].indexOf(urlp.protocol) === -1) {
51 | throw new Error('Can only capture pages served over http, https, or dat')
52 | }
53 |
54 | // check perms
55 | await globals.permsAPI.checkLabsPerm({
56 | perm: API_PERM_ID + ':' + url,
57 | labApi: LAB_API_ID,
58 | apiDocsUrl: API_DOCS_URL,
59 | sender: this.sender
60 | })
61 |
62 | // run method
63 | return globals.uiAPI.capturePage(url, opts)
64 | }
65 | }
66 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@beaker/core",
3 | "version": "3.0.2",
4 | "description": "Beaker browser's core software",
5 | "main": "index.js",
6 | "scripts": {
7 | "lint": "eslint . --ignore-pattern 'node_modules/*' --fix",
8 | "test": "ava ./*-test.js -s -T15000"
9 | },
10 | "pre-commit": [
11 | "lint"
12 | ],
13 | "repository": {
14 | "type": "git",
15 | "url": "git+https://github.com/beakerbrowser/beaker-core.git"
16 | },
17 | "keywords": [
18 | "beaker",
19 | "dat"
20 | ],
21 | "author": "Paul Frazee ",
22 | "license": "MIT",
23 | "bugs": {
24 | "url": "https://github.com/beakerbrowser/beaker-core/issues"
25 | },
26 | "homepage": "https://github.com/beakerbrowser/beaker-core#readme",
27 | "dependencies": {
28 | "@beaker/dat-ephemeral-ext-msg": "^1.0.2",
29 | "@beaker/dat-session-data-ext-msg": "^1.1.0",
30 | "@beaker/datignore": "^1.0.0",
31 | "anymatch": "^2.0.0",
32 | "await-lock": "^1.2.1",
33 | "beaker-error-constants": "^1.4.0",
34 | "binary-extensions": "^1.13.1",
35 | "bytes": "^3.0.0",
36 | "circular-append-file": "^1.0.1",
37 | "concat-stream": "^1.6.2",
38 | "dat-dns": "^3.2.1",
39 | "dat-encoding": "^5.0.1",
40 | "datland-swarm-defaults": "^1.0.2",
41 | "diff": "^3.5.0",
42 | "diff-file-tree": "^2.3.2",
43 | "discovery-swarm": "^6.1.0",
44 | "emit-stream": "^0.1.2",
45 | "fs-jetpack": "^1.3.1",
46 | "function-queue": "0.0.12",
47 | "hypercore-protocol": "^6.12.0",
48 | "hyperdrive": "^9.16.0",
49 | "hyperdrive-network-speed": "^2.1.0",
50 | "hyperdrive-to-zip-stream": "^2.1.1",
51 | "identify-filetype": "^1.0.0",
52 | "into-stream": "^3.1.0",
53 | "lodash.debounce": "^4.0.8",
54 | "lodash.get": "^4.4.2",
55 | "lodash.isequal": "^4.5.0",
56 | "lodash.pick": "^4.4.0",
57 | "lodash.throttle": "^4.1.1",
58 | "mime": "^1.4.0",
59 | "mkdirp": "^0.5.1",
60 | "moment": "^2.24.0",
61 | "ms": "^2.1.2",
62 | "normalize-url": "^3.3.0",
63 | "once": "^1.4.0",
64 | "os-locale": "^3.1.0",
65 | "parse-dat-url": "^3.0.3",
66 | "pauls-dat-api": "^8.1.0",
67 | "pify": "^3.0.0",
68 | "pump": "^3.0.0",
69 | "random-access-file": "^2.0.1",
70 | "random-access-indexed-file": "^2.0.0",
71 | "range-parser": "^1.2.1",
72 | "scoped-fs": "^1.4.1",
73 | "semver": "^5.7.1",
74 | "slugify": "^1.4.0",
75 | "sodium-signatures": "^2.1.1",
76 | "split2": "^2.2.0",
77 | "sqlite3": "^4.1.1",
78 | "stream-throttle": "^0.1.3",
79 | "supports-sparse-files": "^1.0.2",
80 | "textextensions": "^2.6.0",
81 | "through2": "^2.0.5",
82 | "utp-native": "^2.1.7"
83 | },
84 | "devDependencies": {
85 | "eslint": "^4.19.1",
86 | "pre-commit": "^1.2.2"
87 | }
88 | }
89 |
--------------------------------------------------------------------------------
/dat/watchlist.js:
--------------------------------------------------------------------------------
1 | const EventEmitter = require('events')
2 | const emitStream = require('emit-stream')
3 |
4 | // dat modules
5 | const datLibrary = require('../dat/library')
6 | const datDns = require('../dat/dns')
7 | const watchlistDb = require('../dbs/watchlist')
8 |
9 | // globals
10 | // =
11 |
var watchlistEvents = new EventEmitter() // emits 'resolved' when a watched site's DNS resolves (see watch() below)
13 |
14 | // exported methods
15 | // =
16 |
17 | exports.setup = async function setup () {
18 | try {
19 | var watchedSites = await watchlistDb.getSites(0)
20 | for (let site of watchedSites) {
21 | watch(site)
22 | }
23 | } catch (err) {
24 | throw new Error('Failed to load the watchlist')
25 | }
26 | }
27 |
28 | exports.addSite = async function addSite (profileId, url, opts) {
29 | // validate parameters
30 | if (!url || typeof url !== 'string') {
31 | throw new Error('url must be a string')
32 | }
33 | if (!opts.description || typeof opts.description !== 'string') {
34 | throw new Error('description must be a string')
35 | }
36 | if (typeof opts.seedWhenResolved !== 'boolean') {
37 | throw new Error('seedWhenResolved must be a boolean')
38 | }
39 | if (!url.startsWith('dat://')) {
40 | url = 'dat://' + url
41 | }
42 |
43 | try {
44 | var site = await watchlistDb.addSite(profileId, url, opts)
45 | watch(site)
46 | } catch (err) {
47 | throw new Error('Failed to add to watchlist')
48 | }
49 | }
50 |
51 | exports.getSites = async function getSites (profileId) {
52 | return watchlistDb.getSites(profileId)
53 | }
54 |
// Update a watchlist entry with the given values.
// Bound to a module-level const so watch() below can call it directly.
const updateWatchlist = exports.updateWatchlist = async function (profileId, site, opts) {
  try {
    await watchlistDb.updateWatchlist(profileId, site, opts)
  } catch (e) {
    throw new Error('Failed to update the watchlist')
  }
}
62 |
63 | exports.removeSite = async function removeSite (profileId, url) {
64 | // validate parameters
65 | if (!url || typeof url !== 'string') {
66 | throw new Error('url must be a string')
67 | }
68 | return watchlistDb.removeSite(profileId, url)
69 | }
70 |
71 | // events
72 |
// Returns a readable stream of watchlist events (e.g. 'resolved'),
// suitable for piping over RPC to the frontend.
exports.createEventsStream = function createEventsStream () {
  return emitStream(watchlistEvents)
}
76 |
77 | // internal methods
78 | // =
79 |
// Resolve a watched site's DNS, load + download its archive, and mark the
// entry resolved. Retries DNS resolution every 30s until it succeeds.
async function watch (site) {
  // resolve DNS
  var key
  try {
    key = await datDns.resolveName(site.url)
  } catch (e) {}
  if (!key) {
    // try again in 30s
    // BUGFIX: must re-pass `site`; the old bare setTimeout(watch, 30e3)
    // retried with site === undefined, whose thrown TypeError was swallowed
    // by the catch above — an infinite no-op retry loop
    setTimeout(() => watch(site), 30e3)
    return
  }

  // load archive
  var archive = await datLibrary.loadArchive(key)
  if (site.resolved === 0) {
    watchlistEvents.emit('resolved', site)
  }
  archive.pda.download('/').catch(e => { /* ignore cancels */ }) // download the site to make sure it's available
  await updateWatchlist(0, site, {resolved: 1})
}
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Beaker Core
2 |
3 | [Beaker browser's](https://github.com/beakerbrowser/beaker) core software. Factored out so that we can build extensions from the same codebase.
4 |
5 | **Work in progress! Not ready to use.**
6 |
7 | Here's how we use it in electron (the browser):
8 |
9 | ```js
10 | import {app, protocol} from 'electron'
11 | import beakerCore from '@beaker/core'
12 |
13 | const DISALLOWED_SAVE_PATH_NAMES = [
14 | 'home',
15 | 'desktop',
16 | 'documents',
17 | 'downloads',
18 | 'music',
19 | 'pictures',
20 | 'videos'
21 | ]
22 |
23 | // setup beaker-core
24 | await beakerCore.setup({
25 | // config
26 | userDataPath: app.getPath('userData'),
27 | homePath: app.getPath('home'),
28 | templatesPath: path.join(__dirname, 'assets', 'templates'),
29 | disallowedSavePaths: DISALLOWED_SAVE_PATH_NAMES.map(path => app.getPath(path)),
30 |
31 | // APIs
32 | permsAPI: {
33 | async checkLabsPerm({perm, labApi, apiDocsUrl, sender}) {/*...*/},
34 | async queryPermission(perm, sender) {/*...*/},
35 | async requestPermission(perm, sender) {/*...*/},
36 | async grantPermission(perm, senderURL) {/*...*/}
37 | },
38 | uiAPI: {
39 | async showModal(sender, modalName, opts) {/*...*/},
40 | async capturePage(url, opts) {/*...*/}
41 | },
42 | rpcAPI: {
43 | exportAPI(apiName, apiManifest, apiImpl, [guardFn])
44 | },
45 | downloadsWebAPI: {...},
46 | browserWebAPI: {...}
47 | })
48 |
49 | // setup the protocol handler
50 | protocol.registerStreamProtocol('dat', beakerCore.dat.protocol.electronHandler, err => {
51 | if (err) throw ProtocolSetupError(err, 'Failed to create protocol: dat')
52 | })
53 | ```
54 |
55 | In the webview preload:
56 |
57 | ```js
58 | import beakerCoreWebview from '@beaker/core/webview'
59 |
60 | beakerCoreWebview.setup({
61 | // APIs
62 | rpcAPI: {
63 | importAPI(apiName, apiManifest, opts)
64 | }
65 | })
66 | ```
67 |
68 | ## API (@beaker/core)
69 |
70 | ### `setup()`
71 |
72 | ### `getEnvVar()`
73 |
74 | ### `debugLogger(name)`
75 |
76 | ```js
77 | import {debugLogger} from '@beaker/core'
78 | const debug = debugLogger('dat')
79 |
80 | // write to the debug log under 'dat'
81 | debug('dat-related stuff')
82 | ```
83 |
84 | ### `getLogFilePath()`
85 |
86 | ### `getLogFileContent(start, end)`
87 |
88 | ### `globals`
89 |
90 | ### `dbs`
91 |
92 | ### `dbs.archives`
93 |
94 | ### `dbs.bookmarks`
95 |
96 | ### `dbs.history`
97 |
98 | ### `dbs.settings`
99 |
100 | ### `dbs.sitedata`
101 |
102 | ### `dbs.templates`
103 |
104 | ### `dat`
105 |
106 | ### `dat.library`
107 |
108 | ### `dat.dns`
109 |
110 | ### `dat.folderSync`
111 |
112 | ### `dat.garbageCollector`
113 |
114 | ### `dat.protocol`
115 |
116 | ### `dat.debug`
117 |
118 | ## API (@beaker/core/webview)
119 |
120 | ### `setup()`
--------------------------------------------------------------------------------
/lib/db.js:
--------------------------------------------------------------------------------
1 | const debug = require('../lib/debug-logger').debugLogger('beaker-sqlite')
2 | const FnQueue = require('function-queue')
3 | const { cbPromise } = require('./functions')
4 |
5 | // transaction lock
6 | // - returns a function which enforces FIFO execution on async behaviors, via a queue
7 | // - call sig: txLock(cb => { ...; cb() })
8 | // - MUST call given cb to release the lock
const makeTxLock = exports.makeTxLock = function () {
  // each caller's cb is queued; function-queue invokes them one at a time,
  // handing each a "done" callback that releases the lock to the next waiter
  var fnQueue = FnQueue()
  return cb => fnQueue.push(cb)
}
13 |
14 | // sqlite transactor, handles common needs for sqlite queries:
15 | // 1. waits for the setupPromise
16 | // 2. provides a cb handler that returns a promise
17 | // 3. creates a transaction lock, and wraps the cb with it
18 | // NOTE:
19 | // Using the transactor does mean that the DB is locked into sequential operation.
20 | // This is slower, but necessary if the SQLite instance has any transactions that
21 | // do async work within them; eg, SELECT then UPDATE.
22 | // Why: without the tx lock around all SQLite statements, you can end up injecting
23 | // new commands into the active async transaction.
24 | // If the DB doesn't do async transactions, you don't need the transactor. At time of
25 | // writing this, only the history DB needed it.
26 | // -prf
exports.makeSqliteTransactor = function (setupPromise) {
  var txLock = makeTxLock()
  // returns: transact(fn) -> Promise; fn receives a node-style cb and MUST
  // call it exactly once, or the lock is never released and the queue stalls
  return function (fn) {
    // 1. wait for the setup promise
    return setupPromise.then(v => {
      // 2. provide a cb handler
      return cbPromise(cb => {
        // 3. create a tx lock
        txLock(endTx => {
          // 3b. wrap the cb with the lock release
          // (the lock is released the moment fn reports completion, just
          // before the outer promise settles)
          var cbWrapped = (err, res) => {
            endTx()
            cb(err, res)
          }
          // yeesh
          fn(cbWrapped)
        })
      })
    })
  }
}
48 |
49 | // runs needed migrations, returns a promise
50 | exports.setupSqliteDB = function (db, {setup, migrations}, logTag) {
51 | return new Promise((resolve, reject) => {
52 | // run migrations
53 | db.get('PRAGMA user_version;', (err, res) => {
54 | if (err) return reject(err)
55 |
56 | var version = (res && res.user_version) ? +res.user_version : 0
57 | var neededMigrations = (version === 0 && setup) ? [setup] : migrations.slice(version)
58 | if (neededMigrations.length == 0) { return resolve() }
59 |
60 | debug(logTag, 'Database at version', version, '; Running', neededMigrations.length, 'migrations')
61 | runNeededMigrations()
62 | function runNeededMigrations (err) {
63 | if (err) return reject(err)
64 |
65 | var migration = neededMigrations.shift()
66 | if (!migration) {
67 | // done
68 | resolve()
69 | return debug(logTag, 'Database migrations completed without error')
70 | }
71 |
72 | migration(runNeededMigrations)
73 | }
74 | })
75 | })
76 | }
77 |
--------------------------------------------------------------------------------
/lib/time.js:
--------------------------------------------------------------------------------
1 | const moment = require('moment')
2 | const {TimeoutError} = require('beaker-error-constants')
3 |
// render sub-minute gaps as "seconds" instead of moment's default "a few seconds"
moment.updateLocale('en', {
  relativeTime: {s: 'seconds'}
})
7 |
8 | exports.niceDate = function (ts, opts) {
9 | const endOfToday = moment().endOf('day')
10 | if (typeof ts === 'number' || ts instanceof Date) { ts = moment(ts) }
11 | if (ts.isSame(endOfToday, 'day')) {
12 | if (opts && opts.noTime) { return 'today' }
13 | return ts.fromNow()
14 | } else if (ts.isSame(endOfToday.subtract(1, 'day'), 'day')) { return 'yesterday' } else if (ts.isSame(endOfToday, 'month')) { return ts.fromNow() }
15 | return ts.format('ll')
16 | }
17 |
18 | exports.downloadTimestamp = function (ts) {
19 | if (typeof ts === 'string') {
20 | ts = moment(Number(ts))
21 | } else if (typeof ts === 'number') {
22 | ts = moment(ts)
23 | }
24 | return moment(ts).local().format('ddd MMM D, h:mma')
25 | }
26 |
27 | // this is a wrapper for any behavior that needs to maintain a timeout
28 | // you call it like this:
29 | /*
30 | timer(30e3, async (checkin, pause, resume) => {
31 | checkin('doing work')
32 | await work()
33 |
34 | checkin('doing other work')
35 | await otherWork()
36 |
37 | pause() // dont count this period against the timeout
38 | await askUserSomething()
39 | resume() // resume the timeout
40 |
41 | checkin('finishing')
42 | return finishing()
43 | })
44 | */
45 | // Rules of usage:
46 | // - Call `checkin` after a period of async work to give the timer a chance to
47 | // abort further work. If the timer has expired, checkin() will stop running.
48 | // - Give `checkin` a description of the task if you want the timeouterror to be
49 | // descriptive.
exports.timer = function (ms, fn) {
  var currentAction // last description passed to checkin(), used in the timeout message
  var isTimedOut = false

  // no timeout?
  if (!ms) return fn(noop, noop, noop)

  return new Promise((resolve, reject) => {
    var timer // active setTimeout handle
    var remaining = ms // time budget left, decremented across pause/resume cycles
    var start // Date.now() when the clock was last (re)started

    // throws TimeoutError inside fn once the deadline has passed; optionally
    // records what fn is currently doing for a descriptive error
    const checkin = action => {
      if (isTimedOut) throw new TimeoutError() // this will abort action, but the wrapping promise is already rejected
      if (action) currentAction = action
    }
    // stop the clock and bank the time spent so far
    // NOTE(review): assumes resume() ran first to set `start`; a synchronous
    // pause() inside fn before the timer starts would subtract NaN — confirm
    const pause = () => {
      clearTimeout(timer)
      remaining -= (Date.now() - start)
    }
    // restart the clock with whatever budget remains
    const resume = () => {
      if (isTimedOut) return
      clearTimeout(timer)
      start = Date.now()
      timer = setTimeout(onTimeout, remaining)
    }
    const onTimeout = () => {
      isTimedOut = true
      reject(new TimeoutError(currentAction ? `Timed out while ${currentAction}` : undefined))
    }

    // call the fn to get the promise
    var promise = fn(checkin, pause, resume)

    // start the timer
    resume()

    // wrap the promise: whichever settles first (fn or the timeout) wins
    promise.then(
      val => {
        clearTimeout(timer)
        resolve(val)
      },
      err => {
        clearTimeout(timer)
        reject(err)
      }
    )
  })
}

function noop () {}
102 |
--------------------------------------------------------------------------------
/dbs/settings.js:
--------------------------------------------------------------------------------
1 | const EventEmitter = require('events')
2 | const sqlite3 = require('sqlite3')
3 | const path = require('path')
4 | const {cbPromise} = require('../lib/functions')
5 | const {setupSqliteDB} = require('../lib/db')
6 | const {getEnvVar} = require('../lib/env')
7 |
8 | // globals
9 | // =
var db // sqlite3 Database handle, created in setup()
var migrations // schema migration fns, defined at the bottom of this file
var setupPromise // resolves once migrations have completed; gates all queries
var defaultSettings // fallback values for keys with no stored row
var events = new EventEmitter() // emits 'set' and 'set:<key>' on every write
15 |
16 | // exported methods
17 | // =
18 |
// Open the Settings sqlite database, start migrations, and define defaults.
// - opts.userDataPath: directory containing the 'Settings' db file
// - opts.homePath: user home dir (used for workspace_default_path)
exports.setup = function (opts) {
  // open database
  var dbPath = path.join(opts.userDataPath, 'Settings')
  db = new sqlite3.Database(dbPath)
  setupPromise = setupSqliteDB(db, {migrations}, '[SETTINGS]')

  // fallback values returned by get()/getAll() when no row exists
  defaultSettings = {
    auto_update_enabled: 1,
    auto_redirect_to_dat: 1,
    custom_start_page: 'blank',
    start_page_background_image: '',
    workspace_default_path: path.join(opts.homePath, 'Sites'),
    default_dat_ignore: '.git\n.dat\nnode_modules\n*.log\n**/.DS_Store\nThumbs.db\n',
    analytics_enabled: 1,
    dat_bandwidth_limit_up: 0,
    dat_bandwidth_limit_down: 0
  }
}
37 |
// re-export the emitter's subscribe methods (events: 'set', 'set:<key>')
exports.on = events.on.bind(events)
exports.once = events.once.bind(events)
40 |
41 | exports.set = function (key, value) {
42 | events.emit('set', key, value)
43 | events.emit('set:' + key, value)
44 | return setupPromise.then(v => cbPromise(cb => {
45 | db.run(`
46 | INSERT OR REPLACE
47 | INTO settings (key, value, ts)
48 | VALUES (?, ?, ?)
49 | `, [key, value, Date.now()], cb)
50 | }))
51 | }
52 |
53 | exports.get = function (key) {
54 | // env variables
55 | if (key === 'no_welcome_tab') {
56 | return (getEnvVar('BEAKER_NO_WELCOME_TAB') == 1)
57 | }
58 | // stored values
59 | return setupPromise.then(v => cbPromise(cb => {
60 | db.get(`SELECT value FROM settings WHERE key = ?`, [key], (err, row) => {
61 | if (row) { row = row.value }
62 | if (typeof row === 'undefined') { row = defaultSettings[key] }
63 | cb(err, row)
64 | })
65 | }))
66 | }
67 |
68 | exports.getAll = function () {
69 | return setupPromise.then(v => cbPromise(cb => {
70 | db.all(`SELECT key, value FROM settings`, (err, rows) => {
71 | if (err) { return cb(err) }
72 |
73 | var obj = {}
74 | rows.forEach(row => { obj[row.key] = row.value })
75 | obj = Object.assign({}, defaultSettings, obj)
76 | obj.no_welcome_tab = (getEnvVar('BEAKER_NO_WELCOME_TAB') == 1)
77 | cb(null, obj)
78 | })
79 | }))
80 | }
81 |
82 | // internal methods
83 | // =
84 |
// schema migrations, applied in order by setupSqliteDB(); each bumps
// PRAGMA user_version so it runs exactly once per database
migrations = [
  // version 1: initial schema + first default row
  function (cb) {
    db.exec(`
      CREATE TABLE settings(
        key PRIMARY KEY,
        value,
        ts
      );
      INSERT INTO settings (key, value) VALUES ('auto_update_enabled', 1);
      PRAGMA user_version = 1;
    `, cb)
  },
  // version 2: seed the start-page background image setting
  function (cb) {
    db.exec(`
      INSERT INTO settings (key, value) VALUES ('start_page_background_image', '');
      PRAGMA user_version = 2
    `, cb)
  }
]
106 |
--------------------------------------------------------------------------------
/web-apis/bg/experimental/dat-peers.js:
--------------------------------------------------------------------------------
1 | const parseDatURL = require('parse-dat-url')
2 | const {PermissionsError} = require('beaker-error-constants')
3 | const globals = require('../../../globals')
4 | const datLibrary = require('../../../dat/library')
5 | const datDns = require('../../../dat/dns')
6 | const {DAT_HASH_REGEX} = require('../../../lib/const')
7 |
8 | // constants
9 | // =
10 |
const API_DOCS_URL = 'https://beakerbrowser.com/docs/apis/experimental-datpeers.html'
const API_PERM_ID = 'experimentalDatPeers' // permission identifier
const LAB_API_ID = 'datPeers' // lab API identifier
// base argument object for checkLabsPerm(); the sender is merged in per call
const LAB_PERMS_OBJ = {perm: API_PERM_ID, labApi: LAB_API_ID, apiDocsUrl: API_DOCS_URL}
15 |
16 | // exported api
17 | // =
18 |
19 | module.exports = {
20 | async list () {
21 | await globals.permsAPI.checkLabsPerm(Object.assign({sender: this.sender}, LAB_PERMS_OBJ))
22 | var archive = await getSenderArchive(this.sender)
23 | return datLibrary.getDaemon().ext_listPeers(archive.key.toString('hex'))
24 | },
25 |
26 | async get (peerId) {
27 | await globals.permsAPI.checkLabsPerm(Object.assign({sender: this.sender}, LAB_PERMS_OBJ))
28 | var archive = await getSenderArchive(this.sender)
29 | return datLibrary.getDaemon().ext_getPeer(archive.key.toString('hex'), peerId)
30 | },
31 |
32 | async broadcast (data) {
33 | await globals.permsAPI.checkLabsPerm(Object.assign({sender: this.sender}, LAB_PERMS_OBJ))
34 | var archive = await getSenderArchive(this.sender)
35 | return datLibrary.getDaemon().ext_broadcastEphemeralMessage(archive.key.toString('hex'), data)
36 | },
37 |
38 | async send (peerId, data) {
39 | await globals.permsAPI.checkLabsPerm(Object.assign({sender: this.sender}, LAB_PERMS_OBJ))
40 | var archive = await getSenderArchive(this.sender)
41 | return datLibrary.getDaemon().ext_sendEphemeralMessage(archive.key.toString('hex'), peerId, data)
42 | },
43 |
44 | async getSessionData () {
45 | await globals.permsAPI.checkLabsPerm(Object.assign({sender: this.sender}, LAB_PERMS_OBJ))
46 | var archive = await getSenderArchive(this.sender)
47 | return datLibrary.getDaemon().ext_getSessionData(archive.key.toString('hex'))
48 | },
49 |
50 | async setSessionData (sessionData) {
51 | await globals.permsAPI.checkLabsPerm(Object.assign({sender: this.sender}, LAB_PERMS_OBJ))
52 | var archive = await getSenderArchive(this.sender)
53 | return datLibrary.getDaemon().ext_setSessionData(archive.key.toString('hex'), sessionData)
54 | },
55 |
56 | async createEventStream () {
57 | await globals.permsAPI.checkLabsPerm(Object.assign({sender: this.sender}, LAB_PERMS_OBJ))
58 | var archive = await getSenderArchive(this.sender)
59 | return datLibrary.getDaemon().ext_createDatPeersStream(archive.key.toString('hex'))
60 | },
61 |
62 | async getOwnPeerId () {
63 | await globals.permsAPI.checkLabsPerm(Object.assign({sender: this.sender}, LAB_PERMS_OBJ))
64 | return datLibrary.getDaemon().ext_getOwnPeerId()
65 | }
66 | }
67 |
68 | // internal methods
69 | // =
70 |
// Resolve the sender's dat:// URL to its loaded archive.
// Throws PermissionsError for non-dat senders; DNS-resolves shortnames.
async function getSenderArchive (sender) {
  const url = sender.getURL()
  if (!url.startsWith('dat:')) {
    throw new PermissionsError('Only dat:// sites can use the datPeers API')
  }
  const urlp = parseDatURL(url)
  const isRawKey = DAT_HASH_REGEX.test(urlp.host)
  if (!isRawKey) {
    urlp.host = await datDns.resolveName(url)
  }
  return datLibrary.getArchive(urlp.host)
}
82 |
--------------------------------------------------------------------------------
/web-apis/bg.js:
--------------------------------------------------------------------------------
1 | const globals = require('../globals')
2 |
// origins allowed to use the external/experimental APIs: beaker:, dat:, https:, and http on localhost
const SECURE_ORIGIN_REGEX = /^(beaker:|dat:|https:|http:\/\/localhost(\/|:))/i
4 |
5 | // internal manifests
6 | const archivesManifest = require('./manifests/internal/archives')
7 | const beakerBrowserManifest = require('./manifests/internal/browser')
8 | const bookmarksManifest = require('./manifests/internal/bookmarks')
9 | const downloadsManifest = require('./manifests/internal/downloads')
10 | const historyManifest = require('./manifests/internal/history')
11 | const sitedataManifest = require('./manifests/internal/sitedata')
12 | const watchlistManifest = require('./manifests/internal/watchlist')
13 |
14 | // internal apis
15 | const archivesAPI = require('./bg/archives')
16 | const bookmarksAPI = require('./bg/bookmarks')
17 | const historyAPI = require('./bg/history')
18 | const sitedataAPI = require('../dbs/sitedata').WEBAPI
19 | const watchlistAPI = require('./bg/watchlist')
20 |
21 | // external manifests
22 | const datArchiveManifest = require('./manifests/external/dat-archive')
23 |
24 | // external apis
25 | const datArchiveAPI = require('./bg/dat-archive')
26 |
27 | // experimental manifests
28 | const experimentalCapturePageManifest = require('./manifests/external/experimental/capture-page')
29 | const experimentalDatPeersManifest = require('./manifests/external/experimental/dat-peers')
30 | const experimentalGlobalFetchManifest = require('./manifests/external/experimental/global-fetch')
31 | const experimentalLibraryManifest = require('./manifests/external/experimental/library')
32 |
33 | // experimental apis
34 | const experimentalCapturePageAPI = require('./bg/experimental/capture-page')
35 | const experimentalDatPeersAPI = require('./bg/experimental/dat-peers')
36 | const experimentalGlobalFetchAPI = require('./bg/experimental/global-fetch')
37 | const experimentalLibraryAPI = require('./bg/experimental/library')
38 |
39 | // exported api
40 | // =
41 |
42 | exports.setup = function () {
43 | // internal apis
44 | globals.rpcAPI.exportAPI('archives', archivesManifest, archivesAPI, internalOnly)
45 | globals.rpcAPI.exportAPI('beaker-browser', beakerBrowserManifest, globals.browserWebAPI, internalOnly)
46 | globals.rpcAPI.exportAPI('bookmarks', bookmarksManifest, bookmarksAPI, internalOnly)
47 | globals.rpcAPI.exportAPI('downloads', downloadsManifest, globals.downloadsWebAPI, internalOnly)
48 | globals.rpcAPI.exportAPI('history', historyManifest, historyAPI, internalOnly)
49 | globals.rpcAPI.exportAPI('sitedata', sitedataManifest, sitedataAPI, internalOnly)
50 | globals.rpcAPI.exportAPI('watchlist', watchlistManifest, watchlistAPI, internalOnly)
51 |
52 | // external apis
53 | globals.rpcAPI.exportAPI('dat-archive', datArchiveManifest, datArchiveAPI, secureOnly)
54 |
55 | // experimental apis
56 | globals.rpcAPI.exportAPI('experimental-capture-page', experimentalCapturePageManifest, experimentalCapturePageAPI, secureOnly)
57 | globals.rpcAPI.exportAPI('experimental-dat-peers', experimentalDatPeersManifest, experimentalDatPeersAPI, secureOnly)
58 | globals.rpcAPI.exportAPI('experimental-global-fetch', experimentalGlobalFetchManifest, experimentalGlobalFetchAPI, secureOnly)
59 | globals.rpcAPI.exportAPI('experimental-library', experimentalLibraryManifest, experimentalLibraryAPI, secureOnly)
60 | }
61 |
// RPC guard: allow only senders whose URL is a beaker:// page
function internalOnly (event, methodName, args) {
  const sender = event && event.sender
  return sender && sender.getURL().startsWith('beaker:')
}
65 |
// RPC guard: allow beaker:, dat:, https:, and localhost http origins
function secureOnly (event, methodName, args) {
  const hasSender = Boolean(event && event.sender)
  if (!hasSender) return false
  return SECURE_ORIGIN_REGEX.test(event.sender.getURL())
}
73 |
--------------------------------------------------------------------------------
/web-apis/bg/bookmarks.js:
--------------------------------------------------------------------------------
1 | const globals = require('../../globals')
2 | const assert = require('assert')
3 | const normalizeUrl = require('normalize-url')
4 | const {PermissionsError} = require('beaker-error-constants')
5 | const bookmarksDb = require('../../dbs/bookmarks')
6 |
// normalize-url (v3) options: keep the URL as close to what was saved as
// possible — preserve fragment, www, query string, and trailing slash
const NORMALIZE_OPTS = {
  stripFragment: false,
  stripWWW: false,
  removeQueryParameters: false,
  removeTrailingSlash: false
}
13 |
14 | // exported api
15 | // =
16 |
17 | module.exports = {
18 |
19 | // current user
20 | // =
21 |
22 | // fetch bookmark data from the current user's data
23 | async getBookmark (href) {
24 | await assertPermission(this.sender, 'app:bookmarks:read')
25 | assertString(href, 'Parameter one must be a URL')
26 | href = normalizeUrl(href, NORMALIZE_OPTS)
27 | return bookmarksDb.getBookmark(0, href)
28 | },
29 |
30 | // check if bookmark exists in the current user's data
31 | async isBookmarked (href) {
32 | await assertPermission(this.sender, 'app:bookmarks:read')
33 | assertString(href, 'Parameter one must be a URL')
34 | href = normalizeUrl(href, NORMALIZE_OPTS)
35 | try {
36 | var bookmark = await bookmarksDb.getBookmark(0, href)
37 | return !!bookmark
38 | } catch (e) {
39 | return false
40 | }
41 | },
42 |
43 | // pins
44 | // =
45 |
46 | // pin a bookmark
47 | async setBookmarkPinned (href, pinned) {
48 | await assertPermission(this.sender, 'app:bookmarks:edit-private')
49 | assertString(href, 'Parameter one must be a URL')
50 | href = normalizeUrl(href, NORMALIZE_OPTS)
51 | await bookmarksDb.setBookmarkPinned(0, href, pinned)
52 | },
53 |
54 | // set the order of pinned bookmarks
55 | async setBookmarkPinOrder (urls) {
56 | await assertPermission(this.sender, 'app:bookmarks:edit-private')
57 | if (!Array.isArray(urls)) throw new Error('Parameter one must be an array of URLs')
58 | return bookmarksDb.setBookmarkPinOrder(0, urls)
59 | },
60 |
61 | // list pinned bookmarks
62 | async listPinnedBookmarks () {
63 | await assertPermission(this.sender, 'app:bookmarks:read')
64 | return bookmarksDb.listPinnedBookmarks(0)
65 | },
66 |
67 | // bookmarks
68 | // =
69 |
70 | // bookmark
71 | // - data.title: string
72 | async bookmarkPrivate (href, data = {}) {
73 | await assertPermission(this.sender, 'app:bookmarks:edit-private')
74 | assertString(href, 'Parameter one must be a URL')
75 | href = normalizeUrl(href, NORMALIZE_OPTS)
76 | await bookmarksDb.bookmark(0, href, data)
77 | },
78 |
79 | // delete bookmark
80 | async unbookmarkPrivate (href) {
81 | await assertPermission(this.sender, 'app:bookmarks:edit-private')
82 | assertString(href, 'Parameter one must be a URL')
83 | href = normalizeUrl(href, NORMALIZE_OPTS)
84 | await bookmarksDb.unbookmark(0, href)
85 | },
86 |
87 | // list bookmarks
88 | async listPrivateBookmarks (opts) {
89 | await assertPermission(this.sender, 'app:bookmarks:read')
90 | return bookmarksDb.listBookmarks(0, opts)
91 | },
92 |
93 | // TODO removeme -prf
94 | async listPublicBookmarks () {
95 | return []
96 | },
97 |
98 | // tags
99 | // =
100 |
101 | async listBookmarkTags () {
102 | await assertPermission(this.sender, 'app:bookmarks:read')
103 | return bookmarksDb.listBookmarkTags(0)
104 | }
105 | }
106 |
// Allow beaker:// pages unconditionally; otherwise require the given perm.
// Throws PermissionsError when the permission has not been granted.
async function assertPermission (sender, perm) {
  const url = sender.getURL()
  if (url.startsWith('beaker:')) return true
  const granted = await globals.permsAPI.queryPermission(perm, sender)
  if (granted) return true
  throw new PermissionsError()
}
114 |
// Throw an AssertionError carrying `msg` unless v is a non-empty string.
function assertString (v, msg) {
  const ok = typeof v === 'string' && v.length > 0
  assert(ok, msg)
}
118 |
--------------------------------------------------------------------------------
/lib/mime.js:
--------------------------------------------------------------------------------
1 | const through2 = require('through2')
2 | const identifyFiletype = require('identify-filetype')
3 | const mime = require('mime')
4 | const path = require('path')
5 | const textextensions = require('textextensions')
6 | const binextensions = require('binary-extensions')
7 | const concat = require('concat-stream')
8 |
// config default mimetype (returned by mime.lookup for unknown extensions)
mime.default_type = 'text/plain'
// mimetypes that should be served with an explicit utf-8 charset
const TEXT_TYPE_RE = /^text\/|^application\/(javascript|json)/
12 |
// Determine the mimetype for an entry, preferring content sniffing over the
// file name, and tagging text types with a utf-8 charset.
const identify = exports.identify = function (name, chunk) {
  // sniff the chunk contents first, then fall back to the entry name
  const sniffedExt = chunk ? identifyFiletype(chunk) : false
  let mimeType = sniffedExt ? mime.lookup(sniffedExt) : undefined
  if (!mimeType) {
    mimeType = mime.lookup(name)
  }

  // hackish fix: the svg test can be a bit aggressive — html pages with
  // inline svgs can be falsely sniffed as svg, so trust the name there
  if (sniffedExt === 'svg' && mime.lookup(name) === 'text/html') {
    return 'text/html; charset=utf8'
  }

  // assume utf-8 for text types
  return TEXT_TYPE_RE.test(mimeType) ? mimeType + '; charset=utf8' : mimeType
}
38 |
39 | exports.identifyStream = function (name, cb) {
40 | var first = true
41 | return through2(function (chunk, enc, cb2) {
42 | if (first) {
43 | first = false
44 | cb(identify(name, chunk))
45 | }
46 | this.push(chunk)
47 | cb2()
48 | })
49 | }
50 |
51 | exports.isFileNameBinary = function (filepath) {
52 | const ext = path.extname(filepath)
53 | if (binextensions.includes(ext)) return true
54 | if (textextensions.includes(ext)) return false
55 | // dont know
56 | }
57 |
58 | exports.isFileContentBinary = async function (fsInstance, filepath) {
59 | return new Promise((resolve, reject) => {
60 | const rs = fsInstance.createReadStream(filepath, {start: 0, end: 512})
61 | rs.on('error', reject)
62 | rs.pipe(concat(buf => resolve(isBinaryCheck(buf))))
63 | })
64 | }
65 |
// pulled from https://github.com/gjtorikian/isBinaryFile
// Heuristic binary sniffer over a sampled byte sequence. Known text BOMs
// short-circuit to "text"; a NULL byte or a >10% ratio of non-text bytes
// (after skipping well-formed UTF-8 multi-byte sequences) means "binary".
function isBinaryCheck (bytes) {
  const size = bytes.length
  if (size === 0) return false

  // byte-order marks that positively identify text encodings
  if (size >= 3 && bytes[0] == 0xEF && bytes[1] == 0xBB && bytes[2] == 0xBF) return false // UTF-8 BOM
  if (size >= 4 && bytes[0] === 0x00 && bytes[1] === 0x00 && bytes[2] == 0xFE && bytes[3] == 0xFF) return false // UTF-32 BE BOM
  if (size >= 4 && bytes[0] == 0xFF && bytes[1] == 0xFE && bytes[2] === 0x00 && bytes[3] === 0x00) return false // UTF-32 LE BOM
  if (size >= 4 && bytes[0] == 0x84 && bytes[1] == 0x31 && bytes[2] == 0x95 && bytes[3] == 0x33) return false // GB BOM

  if (size >= 5 && bytes.slice(0, 5) == '%PDF-') {
    /* PDF. This is binary. */
    return true
  }

  if (size >= 2 && bytes[0] == 0xFE && bytes[1] == 0xFF) return false // UTF-16 BE BOM
  if (size >= 2 && bytes[0] == 0xFF && bytes[1] == 0xFE) return false // UTF-16 LE BOM

  let suspicious = 0
  for (let i = 0; i < size; i++) {
    const b = bytes[i]
    if (b === 0) {
      return true // NULL byte — it's binary!
    }
    if ((b < 7 || b > 14) && (b < 32 || b > 127)) {
      // UTF-8 detection: skip over well-formed 2- and 3-byte sequences
      if (b > 193 && b < 224 && i + 1 < size) {
        i++
        if (bytes[i] > 127 && bytes[i] < 192) continue
      } else if (b > 223 && b < 240 && i + 2 < size) {
        i++
        if (bytes[i] > 127 && bytes[i] < 192 && bytes[i + 1] > 127 && bytes[i + 1] < 192) {
          i++
          continue
        }
      }
      suspicious++
      // read at least 32 bytes before making a decision
      if (i > 32 && (suspicious * 100) / size > 10) return true
    }
  }

  return (suspicious * 100) / size > 10
}
141 |
--------------------------------------------------------------------------------
/web-apis/fg/experimental.js:
--------------------------------------------------------------------------------
1 | /* globals Request Response fetch */
2 |
3 | const {EventTargetFromStream} = require('./event-target')
4 | const errors = require('beaker-error-constants')
5 |
6 | const experimentalLibraryManifest = require('../manifests/external/experimental/library')
7 | const experimentalGlobalFetchManifest = require('../manifests/external/experimental/global-fetch')
8 | const experimentalCapturePageManifest = require('../manifests/external/experimental/capture-page')
9 | const experimentalDatPeersManifest = require('../manifests/external/experimental/dat-peers')
10 |
// Construct the `experimental` web-API namespace for a foreground page.
// `rpc` is the bridge to the background process; importAPI() pairs a channel
// name with its manifest. Returns the populated namespace object.
exports.setup = function (rpc) {
  const experimental = {}
  const opts = {timeout: false, errors} // no RPC timeout; map errors via beaker-error-constants

  // dat or internal only
  if (window.location.protocol === 'beaker:' || window.location.protocol === 'dat:') {
    const libraryRPC = rpc.importAPI('experimental-library', experimentalLibraryManifest, opts)
    const globalFetchRPC = rpc.importAPI('experimental-global-fetch', experimentalGlobalFetchManifest, opts)
    const capturePageRPC = rpc.importAPI('experimental-capture-page', experimentalCapturePageManifest, opts)
    const datPeersRPC = rpc.importAPI('experimental-dat-peers', experimentalDatPeersManifest, opts)

    // experimental.library: event target plus straight method passthroughs
    let libraryEvents = ['added', 'removed', 'updated', 'folder-synced', 'network-changed']
    experimental.library = new EventTargetFromStream(libraryRPC.createEventStream.bind(libraryRPC), libraryEvents)
    experimental.library.add = libraryRPC.add
    experimental.library.remove = libraryRPC.remove
    experimental.library.get = libraryRPC.get
    experimental.library.list = libraryRPC.list
    experimental.library.requestAdd = libraryRPC.requestAdd
    experimental.library.requestRemove = libraryRPC.requestRemove

    // experimental.globalFetch: fetch() lookalike routed through the
    // background process; only HEAD and GET are supported
    experimental.globalFetch = async function globalFetch (input, init) {
      let request = new Request(input, init)
      if (request.method !== 'HEAD' && request.method !== 'GET') {
        throw new Error('Only HEAD and GET requests are currently supported by globalFetch()')
      }
      try {
        // NOTE(review): `compress` is not a standard browser Request field — confirm it is ever set
        if (request.compress) { request.headers.set('accept-encoding', 'gzip,deflate') }
        // serialize the Headers object into a plain map for RPC transport
        let headers = {}
        request.headers.forEach((val, name) => headers[name] = val)
        let responseData = await globalFetchRPC.fetch({headers,
          method: request.method,
          url: request.url
        })
        return new Response(responseData.body, responseData)
      } catch (e) {
        if (e.message === 'Can only send requests to http or https URLs' && request.url.startsWith('dat://')) {
          // we can just use `fetch` for dat:// URLs, because dat:// does not enforce CORS
          return fetch(input, init)
        }
        throw e
      }
    }

    // experimental.capturePage
    experimental.capturePage = capturePageRPC.capturePage

    // experimental.datPeers
    // wraps a raw peer record so it carries a send() method
    class DatPeer {
      constructor (id, sessionData) {
        this.id = id
        this.sessionData = sessionData
      }
      send (data) {
        datPeersRPC.send(this.id, data)
      }
    }
    // event-prep hook: replace {peerId, sessionData} in the details with a DatPeer
    function prepDatPeersEvents (event, details) {
      var peer = new DatPeer(details.peerId, details.sessionData)
      delete details.peerId
      delete details.sessionData
      details.peer = peer
      return details
    }
    const datPeersEvents = ['connect', 'message', 'session-data', 'disconnect']
    experimental.datPeers = new EventTargetFromStream(datPeersRPC.createEventStream.bind(datPeersRPC), datPeersEvents, prepDatPeersEvents)
    experimental.datPeers.list = async () => {
      var peers = await datPeersRPC.list()
      return peers.map(p => new DatPeer(p.id, p.sessionData))
    }
    experimental.datPeers.get = async (peerId) => {
      var {sessionData} = await datPeersRPC.get(peerId)
      return new DatPeer(peerId, sessionData)
    }
    experimental.datPeers.broadcast = datPeersRPC.broadcast
    experimental.datPeers.getSessionData = datPeersRPC.getSessionData
    experimental.datPeers.setSessionData = datPeersRPC.setSessionData
    experimental.datPeers.getOwnPeerId = datPeersRPC.getOwnPeerId
  }

  return experimental
}
95 |
--------------------------------------------------------------------------------
/web-apis/bg/experimental/library.js:
--------------------------------------------------------------------------------
1 | const globals = require('../../../globals')
2 | const _pick = require('lodash.pick')
3 | const through2 = require('through2')
4 | const datLibrary = require('../../../dat/library')
5 | const datDns = require('../../../dat/dns')
6 | const archivesDb = require('../../../dbs/archives')
7 | const {PermissionsError} = require('beaker-error-constants')
8 |
// constants
// =

const API_DOCS_URL = 'https://beakerbrowser.com/docs/apis/experimental-library.html'
const API_PERM_ID = 'experimentalLibrary' // blanket lab-API permission
const REQUEST_ADD_PERM_ID = 'experimentalLibraryRequestAdd' // per-archive, used by add(true)
const REQUEST_REMOVE_PERM_ID = 'experimentalLibraryRequestRemove' // per-archive, used by remove(true)
const LAB_API_ID = 'library'

// field whitelists: everything crossing the API boundary is filtered
// through _pick() with one of these lists
const QUERY_FIELDS = ['inMemory', 'isSaved', 'isNetworked', 'isOwner']
const USER_SETTINGS_FIELDS = ['key', 'isSaved', 'expiresAt']
const ARCHIVE_FIELDS = ['url', 'title', 'description', 'size', 'mtime', 'isOwner', 'userSettings', 'peers']
// per-event detail whitelists for createEventStream(); events without an entry are dropped
const EVENT_FIELDS = {
  added: ['url', 'isSaved'],
  removed: ['url', 'isSaved'],
  updated: ['url', 'title', 'description', 'size', 'mtime', 'isOwner'],
  'network-changed': ['url', 'connections']
}
27 |
28 | // exported api
29 | // =
30 |
// Factory producing the add()/requestAdd() API methods.
// With isRequest=true, the method represents an app asking the *user* to add
// an archive: it refuses user-owned archives and needs a per-archive permission.
function add (isRequest) {
  return async function (url, {duration} = {}) {
    const key = await datDns.resolveName(url)
    if (isRequest) {
      await checkIsntOwner(key)
    }
    const perm = isRequest ? `${REQUEST_ADD_PERM_ID}:${key}` : API_PERM_ID
    await globals.permsAPI.checkLabsPerm({
      perm,
      labApi: LAB_API_ID,
      apiDocsUrl: API_DOCS_URL,
      sender: this.sender
    })

    // start swarming the archive in the background (intentionally not awaited)
    datLibrary.getOrLoadArchive(key)

    // persist the user settings, with an optional expiration for temporary seeding
    const newSettings = {isSaved: true}
    if (duration && duration > 0) {
      newSettings.expiresAt = Date.now() + (duration * 60e3)
    }
    const saved = await archivesDb.setUserSettings(0, key, newSettings)
    return _pick(saved, USER_SETTINGS_FIELDS)
  }
}
54 |
// Factory producing the remove()/requestRemove() API methods.
// With isRequest=true, the method represents an app asking the *user* to
// remove an archive: it refuses user-owned archives and needs a per-archive permission.
function remove (isRequest) {
  return async function (url) {
    const key = await datDns.resolveName(url)
    if (isRequest) {
      await checkIsntOwner(key)
    }
    const perm = isRequest ? `${REQUEST_REMOVE_PERM_ID}:${key}` : API_PERM_ID
    await globals.permsAPI.checkLabsPerm({
      perm,
      labApi: LAB_API_ID,
      apiDocsUrl: API_DOCS_URL,
      sender: this.sender
    })
    const saved = await archivesDb.setUserSettings(0, key, {isSaved: false})
    return _pick(saved, USER_SETTINGS_FIELDS)
  }
}
69 |
70 | module.exports = {
71 |
72 | add: add(false),
73 | requestAdd: add(true),
74 |
75 | remove: remove(false),
76 | requestRemove: remove(true),
77 |
78 | async get (url) {
79 | await globals.permsAPI.checkLabsPerm({
80 | perm: API_PERM_ID,
81 | labApi: LAB_API_ID,
82 | apiDocsUrl: API_DOCS_URL,
83 | sender: this.sender
84 | })
85 | var key = await datDns.resolveName(url)
86 | var settings = await archivesDb.getUserSettings(0, key)
87 | return _pick(settings, USER_SETTINGS_FIELDS)
88 | },
89 |
90 | async list (query = {}) {
91 | await globals.permsAPI.checkLabsPerm({
92 | perm: API_PERM_ID,
93 | labApi: LAB_API_ID,
94 | apiDocsUrl: API_DOCS_URL,
95 | sender: this.sender
96 | })
97 | query = _pick(query, QUERY_FIELDS)
98 | var archives = await datLibrary.queryArchives(query)
99 | return archives.map(a => {
100 | a = _pick(a, ARCHIVE_FIELDS)
101 | a.userSettings = _pick(a.userSettings, USER_SETTINGS_FIELDS)
102 | return a
103 | })
104 | },
105 |
106 | async createEventStream () {
107 | await globals.permsAPI.checkLabsPerm({
108 | perm: API_PERM_ID,
109 | labApi: LAB_API_ID,
110 | apiDocsUrl: API_DOCS_URL,
111 | sender: this.sender
112 | })
113 | return datLibrary.createEventStream().pipe(through2.obj(function (event, enc, cb) {
114 | // only emit events that have a fields set
115 | var fields = EVENT_FIELDS[event[0]]
116 | if (fields) {
117 | event[1] = _pick(event[1].details, fields)
118 | this.push(event)
119 | }
120 | cb()
121 | }))
122 | }
123 | }
124 |
125 | // internal methods
126 | // =
127 |
// Reject request-style operations on archives the user authored.
async function checkIsntOwner (key) {
  const {isOwner} = await archivesDb.getMeta(key)
  if (isOwner) {
    throw new PermissionsError('Archive is owned by user')
  }
}
132 |
--------------------------------------------------------------------------------
/dbs/bookmarks.js:
--------------------------------------------------------------------------------
1 | const db = require('./profile-data-db')
2 | const normalizeUrl = require('normalize-url')
3 | const lock = require('../lib/lock')
4 |
// options for normalize-url: keep the stored URL as close to what the user
// entered as possible (preserve fragment, www, query params, trailing slash)
const NORMALIZE_OPTS = {
  stripFragment: false,
  stripWWW: false,
  removeQueryParameters: false,
  removeTrailingSlash: false
}
11 |
12 | // exported methods
13 | // =
14 |
15 | exports.bookmark = async function (profileId, url, {title, tags, notes, pinOrder}) {
16 | tags = tagsToString(tags)
17 | var release = await lock(`bookmark:${url}`)
18 | try {
19 | // read old bookmark and fallback to old values as needed
20 | var oldBookmark = await db.get(`SELECT url, title, pinned, pinOrder FROM bookmarks WHERE profileId = ? AND url = ?`, [profileId, url])
21 | oldBookmark = oldBookmark || {}
22 | const pinned = oldBookmark.pinned ? 1 : 0
23 | title = typeof title === 'undefined' ? oldBookmark.title : title
24 | tags = typeof tags === 'undefined' ? oldBookmark.tags : tags
25 | notes = typeof notes === 'undefined' ? oldBookmark.notes : notes
26 | pinOrder = typeof pinOrder === 'undefined' ? oldBookmark.pinOrder : pinOrder
27 |
28 | // update record
29 | return db.run(`
30 | INSERT OR REPLACE
31 | INTO bookmarks (profileId, url, title, tags, notes, pinned, pinOrder)
32 | VALUES (?, ?, ?, ?, ?, ?, ?)
33 | `, [profileId, url, title, tags, notes, pinned, pinOrder])
34 | } finally {
35 | release()
36 | }
37 | }
38 |
39 | exports.unbookmark = function (profileId, url) {
40 | return db.run(`DELETE FROM bookmarks WHERE profileId = ? AND url = ?`, [profileId, url])
41 | }
42 |
43 | exports.setBookmarkPinned = function (profileId, url, pinned) {
44 | return db.run(`UPDATE bookmarks SET pinned = ? WHERE profileId = ? AND url = ?`, [pinned ? 1 : 0, profileId, url])
45 | }
46 |
47 | exports.setBookmarkPinOrder = async function (profileId, urls) {
48 | var len = urls.length
49 | await Promise.all(urls.map((url, i) => (
50 | db.run(`UPDATE bookmarks SET pinOrder = ? WHERE profileId = ? AND url = ?`, [len - i, profileId, url])
51 | )))
52 | }
53 |
54 | exports.getBookmark = async function (profileId, url) {
55 | return toNewFormat(await db.get(`SELECT url, title, tags, notes, pinned, pinOrder, createdAt FROM bookmarks WHERE profileId = ? AND url = ?`, [profileId, url]))
56 | }
57 |
58 | exports.listBookmarks = async function (profileId, {tag} = {}) {
59 | var bookmarks = await db.all(`SELECT url, title, tags, notes, pinned, pinOrder, createdAt FROM bookmarks WHERE profileId = ? ORDER BY createdAt DESC`, [profileId])
60 | bookmarks = bookmarks.map(toNewFormat)
61 |
62 | // apply tag filter
63 | if (tag) {
64 | if (Array.isArray(tag)) {
65 | bookmarks = bookmarks.filter(b => {
66 | return tag.reduce((agg, t) => agg & b.tags.includes(t), true)
67 | })
68 | } else {
69 | bookmarks = bookmarks.filter(b => b.tags.includes(tag))
70 | }
71 | }
72 |
73 | return bookmarks
74 | }
75 |
76 | exports.listPinnedBookmarks = async function (profileId) {
77 | var bookmarks = await db.all(`SELECT url, title, tags, notes, pinned, pinOrder, createdAt FROM bookmarks WHERE profileId = ? AND pinned = 1 ORDER BY pinOrder DESC`, [profileId])
78 | return bookmarks.map(toNewFormat)
79 | }
80 |
81 | exports.listBookmarkTags = async function (profileId) {
82 | var tagSet = new Set()
83 | var bookmarks = await db.all(`SELECT tags FROM bookmarks WHERE profileId = ?`, [profileId])
84 | bookmarks.forEach(b => {
85 | if (b.tags) {
86 | b.tags.split(' ').forEach(t => tagSet.add(t))
87 | }
88 | })
89 | return Array.from(tagSet)
90 | }
91 |
92 | // TEMP
93 | // apply normalization to old bookmarks
94 | // (can probably remove this in 2018 or so)
95 | // -prf
96 | exports.fixOldBookmarks = async function () {
97 | var bookmarks = await db.all(`SELECT url FROM bookmarks`)
98 | bookmarks.forEach(b => {
99 | let newUrl = normalizeUrl(b.url, NORMALIZE_OPTS)
100 | db.run(`UPDATE bookmarks SET url = ? WHERE url = ?`, [newUrl, b.url])
101 | })
102 | }
103 |
// Arrays of tags are stored as a single space-delimited string;
// any other value passes through untouched.
function tagsToString (v) {
  return Array.isArray(v) ? v.join(' ') : v
}
110 |
// Convert a raw DB row into the modern bookmark-record shape.
// Falsy input (no row found) passes through unchanged.
function toNewFormat (b) {
  if (!b) return b
  const tagList = b.tags ? b.tags.split(' ').filter(Boolean) : []
  return {
    _origin: false,
    _url: false,
    private: true,
    createdAt: b.createdAt * 1e3, // stored in seconds; expose milliseconds
    href: b.url,
    title: b.title,
    tags: tagList,
    notes: b.notes,
    pinned: !!b.pinned,
    pinOrder: b.pinOrder
  }
}
126 |
--------------------------------------------------------------------------------
/dat/directory-listing-page.js:
--------------------------------------------------------------------------------
1 | const {pluralize, makeSafe} = require('../lib/strings')
2 | const {stat, readdir} = require('pauls-dat-api')
3 | const {join, relative} = require('path')
4 |
5 | const styles = ``
22 |
// Render an HTML directory-listing page for `dirPath` inside a dat archive.
// `webRoot`, when given, makes displayed paths relative to that folder.
// NOTE(review): the markup inside the template literals below appears to have
// been stripped from this copy of the file — confirm against the original source.
module.exports = async function renderDirectoryListingPage (archive, dirPath, webRoot) {
  // handle the webroot
  webRoot = webRoot || '/'
  const realPath = p => join(webRoot, p) // archive-absolute path
  const webrootPath = p => relative(webRoot, p) // path as presented to the user

  // list files (a failed readdir just renders an empty listing)
  var names = []
  try { names = await readdir(archive, realPath(dirPath)) } catch (e) {}

  // stat each file; entries that fail to stat are dropped below
  var entries = await Promise.all(names.map(async (name) => {
    var entry
    var entryPath = join(dirPath, name)
    try { entry = await stat(archive, realPath(entryPath)) } catch (e) { return false }
    entry.path = webrootPath(entryPath)
    entry.name = name
    return entry
  }))
  entries = entries.filter(Boolean)

  // sort the listing
  entries.sort((a, b) => {
    // directories on top
    if (a.isDirectory() && !b.isDirectory()) return -1
    if (!a.isDirectory() && b.isDirectory()) return 1
    // alphabetical after that
    return a.name.localeCompare(b.name)
  })

  // show the updog if path is not top
  var updog = ''
  if (['/', '', '..'].includes(webrootPath(dirPath)) === false) {
    updog = ``
  }

  // render entries
  var totalFiles = 0
  entries = entries.map(entry => {
    totalFiles++
    var url = makeSafe(entry.path)
    if (!url.startsWith('/')) url = '/' + url // all urls should have a leading slash
    if (entry.isDirectory() && !url.endsWith('/')) url += '/' // all dirs should have a trailing slash
    var type = entry.isDirectory() ? 'directory' : 'file'
    return ``
  }).join('')

  // render summary
  var summary = `${totalFiles} ${pluralize(totalFiles, 'file')}
`

  // render final
  return '' + styles + updog + entries + summary
}
76 |
--------------------------------------------------------------------------------
/web-apis/fg/event-target.js:
--------------------------------------------------------------------------------
// this emulates the implementation of event-targets by browsers

// private property keys — Symbols keep this internal state off the public API surface
const LISTENERS = Symbol() // eslint-disable-line
const CREATE_STREAM = Symbol() // eslint-disable-line
const STREAM_EVENTS = Symbol() // eslint-disable-line
const STREAM = Symbol() // eslint-disable-line
const PREP_EVENT = Symbol() // eslint-disable-line
8 |
// Minimal EventTarget shim mirroring the DOM interface.
class EventTarget {
  constructor () {
    this[LISTENERS] = {}

    // bind so the methods survive being passed around detached
    this.addEventListener = this.addEventListener.bind(this)
    this.removeEventListener = this.removeEventListener.bind(this)
    this.dispatchEvent = this.dispatchEvent.bind(this)
  }

  addEventListener (type, callback) {
    const registry = this[LISTENERS]
    if (!(type in registry)) {
      registry[type] = []
    }
    registry[type].push(callback)
  }

  removeEventListener (type, callback) {
    const stack = this[LISTENERS][type]
    if (!stack) return
    const idx = stack.indexOf(callback)
    if (idx !== -1) {
      stack.splice(idx, 1)
    }
  }

  dispatchEvent (event) {
    const stack = this[LISTENERS][event.type]
    if (!stack) return
    event.target = this
    for (const cb of stack) {
      cb.call(this, event)
    }
  }
}
45 |
// An EventTarget that lazily opens its backing RPC event-stream the first
// time anybody subscribes, then re-dispatches the stream's known events.
class EventTargetFromStream extends EventTarget {
  constructor (createStreamFn, events, eventPrepFn) {
    super()
    this[CREATE_STREAM] = createStreamFn
    this[STREAM_EVENTS] = events
    this[PREP_EVENT] = eventPrepFn
    this[STREAM] = null
  }

  addEventListener (type, callback) {
    if (!this[STREAM]) {
      // open the stream and proxy each of its known events locally
      const stream = this[STREAM] = fromEventStream(this[CREATE_STREAM]())
      for (const eventName of this[STREAM_EVENTS]) {
        stream.addEventListener(eventName, details => {
          details = details || {}
          if (this[PREP_EVENT]) {
            details = this[PREP_EVENT](eventName, details)
          }
          details.target = this
          this.dispatchEvent(new Event(eventName, details))
        })
      }
    }
    return super.addEventListener(type, callback)
  }
}
74 |
// A plain-object stand-in for the DOM Event: copies `opts` onto itself, then
// pins the standard read-only Event surface to inert values.
class Event {
  constructor (type, opts) {
    this.type = type
    for (const key in opts) {
      this[key] = opts[key]
    }
    Object.defineProperties(this, {
      bubbles: {value: false},
      cancelBubble: {value: false},
      cancelable: {value: false},
      composed: {value: false},
      currentTarget: {value: this.target},
      deepPath: {value: []},
      defaultPrevented: {value: false},
      eventPhase: {value: 2}, // Event.AT_TARGET
      timeStamp: {value: Date.now()},
      isTrusted: {value: true},
      createEvent: {value: () => undefined},
      composedPath: {value: () => []},
      initEvent: {value: () => undefined},
      preventDefault: {value: () => undefined},
      stopImmediatePropagation: {value: () => undefined},
      stopPropagation: {value: () => undefined}
    })
  }
}
99 |
// public API
exports.EventTarget = EventTarget
exports.EventTargetFromStream = EventTargetFromStream
exports.Event = Event
103 |
// Pipe a duplex event-stream's messages into an EventTarget as dispatched
// events. Each stream message is an [eventType, eventDetails] tuple.
const bindEventStream = exports.bindEventStream = function (stream, target) {
  stream.on('data', ([type, details]) => {
    const event = details || {}
    event.type = type
    target.dispatchEvent(event)
  })
}
111 |
// Wrap a duplex event-stream in an EventTarget. The returned target gains a
// close() that drops all registered listeners and closes the stream.
const fromEventStream = exports.fromEventStream = function (stream) {
  var target = new EventTarget()
  bindEventStream(stream, target)
  target.close = () => {
    // fix: the listener registry is keyed by the LISTENERS symbol; the old
    // code assigned to a plain `.listeners` property, which cleared nothing
    target[LISTENERS] = {}
    stream.close()
  }
  return target
}
121 |
122 | exports.fromAsyncEventStream = function (asyncStream) {
123 | var target = new EventTarget()
124 | asyncStream.then(
125 | stream => bindEventStream(stream, target),
126 | err => {
127 | target.dispatchEvent({type: 'error', details: err})
128 | target.close()
129 | }
130 | )
131 | target.close = () => {
132 | target.listeners = {}
133 | asyncStream.then(stream => stream.close())
134 | }
135 | return target
136 | }
137 |
138 | exports.exportEventStreamFn = function (rpcObj, fnName) {
139 | var orgFn = rpcObj[fnName]
140 | rpcObj[fnName] = (...args) => {
141 | var evts = orgFn(...args)
142 | return {
143 | on: evts.on.bind(evts),
144 | close: evts.close ? evts.close.bind(evts) : () => {}
145 | }
146 | }
147 | }
--------------------------------------------------------------------------------
/lib/error-page.js:
--------------------------------------------------------------------------------
// Stylesheet inlined into the generated error page.
// fix: the original contained LESS/SCSS-style nested rules
// (`.icon-wrapper { i {…} }` and `.description { p {…} }`) which plain CSS
// parsers discard; they are flattened into descendant selectors below.
var errorPageCSS = `
* {
  box-sizing: border-box;
}
a {
  text-decoration: none;
  color: inherit;
  cursor: pointer;
}
body {
  background: #fff;
  margin: 0;
  font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Ubuntu, Cantarell, "Oxygen Sans", "Helvetica Neue", sans-serif;
}
.btn {
  display: inline-block;
  cursor: pointer;
  color: #777;
  border-radius: 2px;
  background: #fafafa;
  border: 1px solid #ddd;
  font-size: 12px;
  font-weight: 500;
  height: 25px;
  line-height: 2;
  padding: 0 8px;
  letter-spacing: .2px;
  height: 26px;
  font-weight: 400;
}
.btn * {
  cursor: pointer;
  line-height: 25px;
  vertical-align: baseline;
  display: inline-block;
}
.btn:focus {
  outline-color: #007aff;
}
.btn:hover {
  text-decoration: none;
  background: #f0f0f0;
}
.btn.disabled,
.btn:disabled {
  cursor: default;
  color: #999999;
  border: 1px solid #ccc;
  box-shadow: none;
}
.btn.disabled .spinner,
.btn:disabled .spinner {
  color: #aaa;
}
.btn.primary {
  -webkit-font-smoothing: antialiased;
  font-weight: 800;
  background: #007aff;
  color: #fff;
  border: none;
  transition: background .1s ease;
}
.btn.primary:hover {
  background: #0074f2;
}
a.btn span {
  vertical-align: baseline;
}
.btn.big {
  font-size: 18px;
  height: auto;
  padding: 0px 12px;
}
a.link {
  color: blue;
  text-decoration: underline;
}
.right {
  float: right;
}
.icon-wrapper {
  vertical-align: top;
  width: 70px;
  font-size: 50px;
  display: inline-block;
  color: #555;
}
.icon-wrapper i {
  margin-top: -3px;
}
.error-wrapper {
  display: inline-block;
  width: 80%;
}
div.error-page-content {
  max-width: 650px;
  margin: auto;
  transform: translateX(-20px);
  margin-top: 30vh;
  color: #777;
  font-size: 14px;
}
div.error-page-content .description p {
  margin: 20px 0;
}
div.error-page-content i {
  margin-right: 5px;
}
h1 {
  margin: 0;
  color: #333;
  font-weight: 400;
  font-size: 22px;
}
p.big {
  font-size: 18px;
}
.icon {
  float: right;
}
li {
  margin-bottom: 0.5em;
}
li:last-child {
  margin: 0;
}
.footer {
  font-size: 14px;
  color: #777;
  display: flex;
  justify-content: space-between;
  align-items: flex-start;
  margin-top: 30px;
  padding-top: 10px;
  border-top: 1px solid #ddd;
}
`
142 |
// Build the HTML for an in-browser error page.
// `e` is either a string (used directly as the error description) or an object
// with {errorCode, errorDescription, errorInfo, validatedURL, title, resource}.
// Newlines are stripped from the returned markup.
// NOTE(review): the HTML inside the template literals appears to have been
// stripped from this copy of the file — confirm against the original source.
module.exports = function (e) {
  var title = 'This site can’t be reached'
  var info = ''
  var icon = 'fa-exclamation-circle'
  var button = 'Try again'
  var errorDescription
  var moreHelp = '' // NOTE(review): only referenced by the (stripped) markup, presumably
  
  if (typeof e === 'object') {
    errorDescription = e.errorDescription || ''
    info = e.errorInfo || ''
    // remove trailing slash
    // NOTE(review): assumes e.validatedURL is always provided — verify callers
    var origin = e.validatedURL.slice(0, e.validatedURL.length - 1)

    // strip protocol
    if (origin.startsWith('https://')) {
      origin = origin.slice(8)
    } else if (origin.startsWith('http://')) {
      origin = origin.slice(7)
    }

    switch (e.errorCode) {
      case -106: // no network connection
        title = 'No internet connection'
        info = `Your computer is not connected to the internet.
Try:
- Resetting your Wi-Fi connection
- Checking your router and modem.
`
        break
      case -105: // DNS resolution failed
        icon = 'fa-frown-o'
        info = `Couldn’t resolve the DNS address for ${origin}
`
        break
      case 404:
        icon = 'fa-frown-o'
        title = e.title || 'Page Not Found'
        info = `${e.errorInfo}
`
        break
      case -501: // insecure connection
        title = 'Your connection is not secure'
        info = `Beaker cannot establish a secure connection to the server for ${origin}.
`
        icon = 'fa-close warning'
        button = 'Go back'
        break
      case 504: // p2p resource not found on the network
        icon = 'fa-share-alt'
        title = `It doesn${"'"}t seem like anybody is sharing this ${e.resource} right now.`
        info = `
Would you like Beaker to watch for it to reappear?
Add to watchlist
`
        errorDescription = `The p2p ${e.resource} was not found on the network.`
        button = `Network debugger`
        break
    }
  } else {
    errorDescription = e
  }

  return `











${title}

  ${info}




  `.replace(/\n/g, '')
}
224 |
--------------------------------------------------------------------------------
/dbs/schemas/profile-data.sql.js:
--------------------------------------------------------------------------------
// Baseline profile-data schema, applied when the database is created from
// scratch. The trailing PRAGMA sets user_version = 23, which appears to match
// the newest migration file (profile-data.v23.sql.js) — keep the two in sync
// when adding migrations.
module.exports = `
CREATE TABLE profiles (
  id INTEGER PRIMARY KEY NOT NULL,
  url TEXT,
  createdAt INTEGER DEFAULT (strftime('%s', 'now'))
);

CREATE TABLE archives (
  profileId INTEGER NOT NULL,
  key TEXT NOT NULL, -- dat key

  previewMode INTEGER, -- automatically publish changes (0) or write to local folder (1)
  localSyncPath TEXT, -- custom local folder that the data is synced to

  isSaved INTEGER, -- is this archive saved to our library?
  hidden INTEGER DEFAULT 0, -- should this archive be hidden in the library or select-archive modals? (this is useful for internal dats, such as drafts)
  networked INTEGER DEFAULT 1, -- join the swarm (1) or do not swarm (0)
  autoDownload INTEGER DEFAULT 1, -- watch and download all available data (1) or sparsely download on demand (0)
  autoUpload INTEGER DEFAULT 1, -- join the swarm at startup (1) or only swarm when visiting (0)
  expiresAt INTEGER, -- change autoUpload to 0 at this time (used for temporary seeding)
  createdAt INTEGER DEFAULT (strftime('%s', 'now')),

  localPath TEXT, -- deprecated
  autoPublishLocal INTEGER DEFAULT 0 -- deprecated -- watch localSyncPath and automatically publish changes (1) or not (0)
);

CREATE TABLE archives_meta (
  key TEXT PRIMARY KEY,
  title TEXT,
  description TEXT,
  mtime INTEGER,
  size INTEGER,
  isOwner INTEGER,
  lastAccessTime INTEGER DEFAULT 0,
  lastLibraryAccessTime INTEGER DEFAULT 0,

  forkOf TEXT, -- deprecated
  createdByUrl TEXT, -- deprecated
  createdByTitle TEXT, -- deprecated
  metaSize INTEGER, -- deprecated
  stagingSize INTEGER -- deprecated
);

CREATE TABLE archives_meta_type (
  key TEXT,
  type TEXT
);

-- a list of the draft-dats for a master-dat
CREATE TABLE archive_drafts (
  profileId INTEGER,
  masterKey TEXT, -- key of the master dat
  draftKey TEXT, -- key of the draft dat
  createdAt INTEGER DEFAULT (strftime('%s', 'now')),

  isActive INTEGER, -- is this the active draft? (deprecated)

  FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
);

CREATE TABLE bookmarks (
  profileId INTEGER,
  url TEXT NOT NULL,
  title TEXT,
  pinned INTEGER,
  pinOrder INTEGER DEFAULT 0,
  createdAt INTEGER DEFAULT (strftime('%s', 'now')),
  tags TEXT,
  notes TEXT,

  PRIMARY KEY (profileId, url),
  FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
);

CREATE TABLE templates (
  profileId INTEGER,
  url TEXT NOT NULL,
  title TEXT,
  screenshot,
  createdAt INTEGER DEFAULT (strftime('%s', 'now')),

  PRIMARY KEY (profileId, url),
  FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
);

CREATE TABLE visits (
  profileId INTEGER,
  url TEXT NOT NULL,
  title TEXT NOT NULL,
  ts INTEGER NOT NULL,

  FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
);
CREATE INDEX visits_url ON visits (url);

CREATE TABLE visit_stats (
  url TEXT NOT NULL,
  num_visits INTEGER,
  last_visit_ts INTEGER
);

CREATE VIRTUAL TABLE visit_fts USING fts4 (url, title);
CREATE UNIQUE INDEX visits_stats_url ON visit_stats (url);

-- list of the user's installed apps
-- deprecated
CREATE TABLE apps (
  profileId INTEGER NOT NULL,
  name TEXT NOT NULL,
  url TEXT,
  updatedAt INTEGER DEFAULT (strftime('%s', 'now')),
  createdAt INTEGER DEFAULT (strftime('%s', 'now')),

  PRIMARY KEY (profileId, name),
  FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
);

-- log of the user's app installations
-- deprecated
CREATE TABLE apps_log (
  profileId INTEGER NOT NULL,
  name TEXT NOT NULL,
  url TEXT,
  ts INTEGER DEFAULT (strftime('%s', 'now')),

  FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
);

-- add a database for watchlist feature
CREATE TABLE watchlist (
  profileId INTEGER NOT NULL,
  url TEXT NOT NULL,
  description TEXT NOT NULL,
  seedWhenResolved BOOLEAN NOT NULL,
  resolved BOOLEAN NOT NULL DEFAULT (0),
  updatedAt INTEGER DEFAULT (strftime('%s', 'now')),
  createdAt INTEGER DEFAULT (strftime('%s', 'now')),

  PRIMARY KEY (profileId, url),
  FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
);

-- deprecated
CREATE TABLE workspaces (
  profileId INTEGER NOT NULL,
  name TEXT NOT NULL,
  localFilesPath TEXT,
  publishTargetUrl TEXT,
  createdAt INTEGER DEFAULT (strftime('%s', 'now')),
  updatedAt INTEGER DEFAULT (strftime('%s', 'now')),

  PRIMARY KEY (profileId, name),
  FOREIGN KEY (profileId) REFERENCES profiles (id) ON DELETE CASCADE
);

-- default profile
INSERT INTO profiles (id) VALUES (0);

-- default bookmarks
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Beaker Home', 'dat://beakerbrowser.com', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Dat Project', 'dat://datproject.org', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, '@BeakerBrowser', 'https://twitter.com/beakerbrowser', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Hashbase', 'https://hashbase.io', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Documentation', 'dat://beakerbrowser.com/docs', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Report an issue', 'https://github.com/beakerbrowser/beaker/issues', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Explore the p2p Web', 'dat://taravancil.com/explore-the-p2p-web.md', 1);
INSERT INTO bookmarks (profileId, title, url, pinned) VALUES (0, 'Support Beaker', 'https://opencollective.com/beaker', 1);

PRAGMA user_version = 23;
`
171 |
--------------------------------------------------------------------------------
/dbs/history.js:
--------------------------------------------------------------------------------
1 | const lock = require('../lib/lock')
2 | const db = require('./profile-data-db')
3 |
// Error thrown by the history API when a caller passes an invalid parameter.
class BadParamError extends Error {
  /**
   * @param {string} msg - human-readable description of the bad parameter
   */
  constructor (msg) {
    super(msg) // sets this.message (was assigned manually before)
    this.name = 'BadParamError'
  }
}
11 |
12 | // exported methods
13 | // =
14 |
15 | exports.addVisit = async function (profileId, {url, title}) {
16 | // validate parameters
17 | if (!url || typeof url !== 'string') {
18 | throw new BadParamError('url must be a string')
19 | }
20 | if (!title || typeof title !== 'string') {
21 | throw new BadParamError('title must be a string')
22 | }
23 |
24 | var release = await lock('history-db')
25 | try {
26 | await db.run('BEGIN TRANSACTION;')
27 |
28 | // get current stats
29 | var stats = await db.get('SELECT * FROM visit_stats WHERE url = ?;', [url])
30 | var ts = Date.now()
31 |
32 | // create or update stats
33 | if (!stats) {
34 | await db.run('INSERT INTO visit_stats (url, num_visits, last_visit_ts) VALUES (?, ?, ?);', [url, 1, ts])
35 | await db.run('INSERT INTO visit_fts (url, title) VALUES (?, ?);', [url, title])
36 | } else {
37 | let num_visits = (+stats.num_visits || 1) + 1
38 | await db.run('UPDATE visit_stats SET num_visits = ?, last_visit_ts = ? WHERE url = ?;', [num_visits, ts, url])
39 | }
40 |
41 | // visited within 1 hour?
42 | var visit = await db.get('SELECT rowid, * from visits WHERE profileId = ? AND url = ? AND ts > ? ORDER BY ts DESC LIMIT 1', [profileId, url, ts - 1000 * 60 * 60])
43 | if (visit) {
44 | // update visit ts and title
45 | await db.run('UPDATE visits SET ts = ?, title = ? WHERE rowid = ?', [ts, title, visit.rowid])
46 | } else {
47 | // log visit
48 | await db.run('INSERT INTO visits (profileId, url, title, ts) VALUES (?, ?, ?, ?);', [profileId, url, title, ts])
49 | }
50 |
51 | await db.run('COMMIT;')
52 | } finally {
53 | release()
54 | }
55 | }
56 |
57 | exports.getVisitHistory = async function (profileId, {search, offset, limit, before, after}) {
58 | var release = await lock('history-db')
59 | try {
60 | const params = [
61 | profileId,
62 | limit || 50,
63 | offset || 0
64 | ]
65 | if (search) {
66 | // prep search terms
67 | params.push(
68 | search
69 | .toLowerCase() // all lowercase. (uppercase is interpretted as a directive by sqlite.)
70 | .replace(/[:^*]/g, '') + // strip symbols that sqlite interprets.
71 | '*' // allow partial matches
72 | )
73 | return await db.all(`
74 | SELECT visits.*
75 | FROM visit_fts
76 | LEFT JOIN visits ON visits.url = visit_fts.url
77 | WHERE visits.profileId = ?1 AND visit_fts MATCH ?4
78 | ORDER BY visits.ts DESC
79 | LIMIT ?2 OFFSET ?3
80 | `, params)
81 | }
82 | let timeWhere = ''
83 | if (before && after) {
84 | timeWhere += 'AND ts <= ?4 AND ts >= ?5'
85 | params.push(before)
86 | params.push(after)
87 | } else if (before) {
88 | timeWhere += 'AND ts <= ?4'
89 | params.push(before)
90 | } else if (after) {
91 | timeWhere += 'AND ts >= ?4'
92 | params.push(after)
93 | }
94 | return await db.all(`
95 | SELECT * FROM visits
96 | WHERE profileId = ?1 ${timeWhere}
97 | ORDER BY ts DESC
98 | LIMIT ?2 OFFSET ?3
99 | `, params)
100 | } finally {
101 | release()
102 | }
103 | }
104 |
105 | exports.getMostVisited = async function (profileId, { offset, limit }) {
106 | var release = await lock('history-db')
107 | try {
108 | offset = offset || 0
109 | limit = limit || 50
110 | return await db.all(`
111 | SELECT visit_stats.*, visits.title AS title
112 | FROM visit_stats
113 | LEFT JOIN visits ON visits.url = visit_stats.url
114 | WHERE profileId = ? AND visit_stats.num_visits > 5
115 | GROUP BY visit_stats.url
116 | ORDER BY num_visits DESC, last_visit_ts DESC
117 | LIMIT ? OFFSET ?
118 | `, [profileId, limit, offset])
119 | } finally {
120 | release()
121 | }
122 | }
123 |
124 | exports.search = async function (q) {
125 | if (!q || typeof q !== 'string') {
126 | throw new BadParamError('q must be a string')
127 | }
128 |
129 | var release = await lock('history-db')
130 | try {
131 | // prep search terms
132 | q = q
133 | .toLowerCase() // all lowercase. (uppercase is interpretted as a directive by sqlite.)
134 | .replace(/[:^*]/g, '') // strip symbols that sqlite interprets
135 | .replace(/[-]/g, ' ') + // strip symbols that sqlite interprets
136 | '*' // allow partial matches
137 |
138 | // run query
139 | return await db.all(`
140 | SELECT offsets(visit_fts) as offsets, visit_fts.url, visit_fts.title, visit_stats.num_visits
141 | FROM visit_fts
142 | LEFT JOIN visit_stats ON visit_stats.url = visit_fts.url
143 | WHERE visit_fts MATCH ?
144 | ORDER BY visit_stats.num_visits DESC
145 | LIMIT 10;
146 | `, [q])
147 | } finally {
148 | release()
149 | }
150 | }
151 |
152 | exports.removeVisit = async function (url) {
153 | // validate parameters
154 | if (!url || typeof url !== 'string') {
155 | throw new BadParamError('url must be a string')
156 | }
157 |
158 | var release = await lock('history-db')
159 | try {
160 | db.serialize()
161 | db.run('BEGIN TRANSACTION;')
162 | db.run('DELETE FROM visits WHERE url = ?;', url)
163 | db.run('DELETE FROM visit_stats WHERE url = ?;', url)
164 | db.run('DELETE FROM visit_fts WHERE url = ?;', url)
165 | await db.run('COMMIT;')
166 | } finally {
167 | db.parallelize()
168 | release()
169 | }
170 | }
171 |
172 | exports.removeVisitsAfter = async function (timestamp) {
173 | var release = await lock('history-db')
174 | try {
175 | db.serialize()
176 | db.run('BEGIN TRANSACTION;')
177 | db.run('DELETE FROM visits WHERE ts >= ?;', timestamp)
178 | db.run('DELETE FROM visit_stats WHERE last_visit_ts >= ?;', timestamp)
179 | await db.run('COMMIT;')
180 | } finally {
181 | db.parallelize()
182 | release()
183 | }
184 | }
185 |
186 | exports.removeAllVisits = async function () {
187 | var release = await lock('history-db')
188 | db.run('DELETE FROM visits;')
189 | db.run('DELETE FROM visit_stats;')
190 | db.run('DELETE FROM visit_fts;')
191 | release()
192 | }
193 |
--------------------------------------------------------------------------------
/dat/daemon/extensions.js:
--------------------------------------------------------------------------------
1 | const EventEmitter = require('events')
2 | const emitStream = require('emit-stream')
3 | const {DatSessionDataExtMsg} = require('@beaker/dat-session-data-ext-msg')
4 | const {DatEphemeralExtMsg} = require('@beaker/dat-ephemeral-ext-msg')
5 |
6 | // globals
7 | // =
8 |
9 | var datSessionDataExtMsg = new DatSessionDataExtMsg()
10 | var datEphemeralExtMsg = new DatEphemeralExtMsg()
11 |
12 | // exported api
13 | // =
14 |
15 | function setup () {
16 | datEphemeralExtMsg.on('message', onEphemeralMsg)
17 | datSessionDataExtMsg.on('session-data', onSessionDataMsg)
18 | }
19 | exports.setup = setup
20 |
21 | // call this on every archive created in the library
22 | function attach (archive) {
23 | datEphemeralExtMsg.watchDat(archive)
24 | datSessionDataExtMsg.watchDat(archive)
25 | archive._datPeersEvents = new EventEmitter()
26 | archive._datPeersOnPeerAdd = (peer) => onPeerAdd(archive, peer)
27 | archive._datPeersOnPeerRemove = (peer) => onPeerRemove(archive, peer)
28 | archive.metadata.on('peer-add', archive._datPeersOnPeerAdd)
29 | archive.metadata.on('peer-remove', archive._datPeersOnPeerRemove)
30 | }
31 | exports.attach = attach
32 |
33 | // call this on every archive destroyed in the library
34 | function detach (archive) {
35 | datEphemeralExtMsg.unwatchDat(archive)
36 | datSessionDataExtMsg.unwatchDat(archive)
37 | delete archive._datPeersEvents
38 | archive.metadata.removeListener('peer-add', archive._datPeersOnPeerAdd)
39 | archive.metadata.removeListener('peer-remove', archive._datPeersOnPeerRemove)
40 | }
41 | exports.detach = detach
42 |
43 | // impl for datPeers.list()
44 | function listPeers (archive) {
45 | return archive.metadata.peers.map(internalPeerObj => createWebAPIPeerObj(archive, internalPeerObj))
46 | }
47 | exports.listPeers = listPeers
48 |
// impl for datPeers.getPeer(peerId)
// Returns the {id, sessionData} object for the connected peer with the given
// id, or null if no such peer is connected. (Previously an unknown peerId
// crashed: `find` yielded undefined and getPeerId dereferenced it.)
function getPeer (archive, peerId) {
  var internalPeerObj = archive.metadata.peers.find(p => getPeerId(p) === peerId)
  if (!internalPeerObj) return null // no matching connected peer
  return createWebAPIPeerObj(archive, internalPeerObj)
}
exports.getPeer = getPeer
55 |
56 | // impl for datPeers.broadcast(msg)
57 | function broadcastEphemeralMessage (archive, payload) {
58 | datEphemeralExtMsg.broadcast(archive, encodeEphemeralMsg(payload))
59 | }
60 | exports.broadcastEphemeralMessage =broadcastEphemeralMessage
61 |
62 | // impl for datPeers.send(peerId, msg)
63 | function sendEphemeralMessage (archive, peerId, payload) {
64 | datEphemeralExtMsg.send(archive, peerId, encodeEphemeralMsg(payload))
65 | }
66 | exports.sendEphemeralMessage = sendEphemeralMessage
67 |
68 | // impl for datPeers.getSessionData()
69 | function getSessionData (archive) {
70 | return decodeSessionData(datSessionDataExtMsg.getLocalSessionData(archive))
71 | }
72 | exports.getSessionData = getSessionData
73 |
// impl for datPeers.setSessionData(data)
function setSessionData (archive, sessionData) {
  datSessionDataExtMsg.setLocalSessionData(archive, encodeSessionData(sessionData))
}
exports.setSessionData = setSessionData
79 |
80 | function createDatPeersStream (archive) {
81 | return emitStream(archive._datPeersEvents)
82 | }
83 | exports.createDatPeersStream = createDatPeersStream
84 |
85 | // events
86 | // =
87 |
// Handles a new peer on the archive's metadata feed. If the protocol
// handshake already completed (the remote id is known) we process it
// immediately; otherwise we wait for the stream's 'handshake' event.
// NOTE(review): the 'handshake' listener is never removed if the peer
// disconnects before handshaking — confirm the stream is torn down then.
function onPeerAdd (archive, internalPeerObj) {
  if (getPeerId(internalPeerObj)) onHandshook()
  else internalPeerObj.stream.stream.on('handshake', onHandshook)

  function onHandshook () {
    var peerId = getPeerId(internalPeerObj)

    // send session data
    // (only when we have local session data to share)
    if (datSessionDataExtMsg.getLocalSessionData(archive)) {
      datSessionDataExtMsg.sendLocalSessionData(archive, peerId)
    }

    // emit event
    archive._datPeersEvents.emit('connect', {
      peerId,
      sessionData: getPeerSessionData(archive, peerId)
    })
  }
}
107 |
// Emits 'disconnect' when a handshaken peer leaves the metadata swarm.
function onPeerRemove (archive, internalPeerObj) {
  var peerId = getPeerId(internalPeerObj)
  // peers that never completed the handshake never emitted 'connect'
  if (!peerId) return
  archive._datPeersEvents.emit('disconnect', {
    peerId,
    sessionData: getPeerSessionData(archive, peerId)
  })
}
117 |
// Forwards an incoming ephemeral message to the page as a 'message' event.
function onEphemeralMsg (archive, internalPeerObj, msg) {
  var peerId = getPeerId(internalPeerObj)
  var sessionData = getPeerSessionData(archive, peerId)
  archive._datPeersEvents.emit('message', {
    peerId,
    sessionData,
    message: decodeEphemeralMsg(msg)
  })
}
126 |
// Forwards a peer's updated session data to the page as a 'session-data' event.
function onSessionDataMsg (archive, internalPeerObj, sessionData) {
  var peerId = getPeerId(internalPeerObj)
  archive._datPeersEvents.emit('session-data', {
    peerId,
    sessionData: decodeSessionData(sessionData)
  })
}
133 |
134 | // internal methods
135 | // =
136 |
// Extracts the remote peer's id (hex string) from the underlying protocol
// stream, or null if the handshake hasn't completed yet.
function getPeerId (internalPeerObj) {
  var remoteId = internalPeerObj.stream.stream.remoteId
  return remoteId ? remoteId.toString('hex') : null
}
142 |
// Fetches and decodes the session data a given peer has shared with us.
function getPeerSessionData (archive, peerId) {
  var raw = datSessionDataExtMsg.getSessionData(archive, peerId)
  return decodeSessionData(raw)
}
146 |
// Shapes an internal peer object into the {id, sessionData} form exposed
// to pages via the datPeers web API.
function createWebAPIPeerObj (archive, internalPeerObj) {
  var id = getPeerId(internalPeerObj)
  return {id, sessionData: getPeerSessionData(archive, id)}
}
152 |
// Wraps an outgoing payload with a content-type for the wire:
// Buffers pass through as octet-streams, everything else is JSON-encoded.
function encodeEphemeralMsg (payload) {
  if (Buffer.isBuffer(payload)) {
    return {contentType: 'application/octet-stream', payload}
  }
  return {
    contentType: 'application/json',
    payload: Buffer.from(JSON.stringify(payload), 'utf8')
  }
}
163 |
// Unwraps an incoming ephemeral message. JSON payloads are parsed (null on
// parse failure); any other content-type (encodeEphemeralMsg produces
// 'application/octet-stream' for Buffers) is handed through as the raw
// buffer. Previously non-JSON payloads were silently dropped (undefined).
function decodeEphemeralMsg (msg) {
  var payload
  if (msg.contentType === 'application/json') {
    try {
      payload = JSON.parse(msg.payload.toString('utf8'))
    } catch (e) {
      console.error('Failed to parse ephemeral message', e, msg)
      payload = null
    }
  } else {
    // binary (or unknown) content: deliver the raw payload
    payload = msg.payload
  }
  return payload
}
176 |
// Serializes session data to a utf8 JSON buffer for the wire.
function encodeSessionData (obj) {
  var json = JSON.stringify(obj)
  return Buffer.from(json, 'utf8')
}
180 |
// Parses session data received over the wire; null when absent or invalid.
function decodeSessionData (sessionData) {
  // absent or empty session data decodes to null
  if (!sessionData || sessionData.length === 0) {
    return null
  }
  var parsed = null
  try {
    parsed = JSON.parse(sessionData.toString('utf8'))
  } catch (e) {
    console.error('Failed to parse local session data', e, sessionData)
  }
  return parsed
}
--------------------------------------------------------------------------------
/.eslintrc.json:
--------------------------------------------------------------------------------
1 | {
2 | "parserOptions": {
3 | "ecmaVersion": 9,
4 | "ecmaFeatures": {
5 | "jsx": true
6 | },
7 | "sourceType": "module"
8 | },
9 |
10 | "env": {
11 | "es6": true,
12 | "node": true
13 | },
14 |
15 | "plugins": [
16 | ],
17 |
18 | "globals": {
19 | "document": false,
20 | "navigator": false,
21 | "window": false
22 | },
23 |
24 | "rules": {
25 | "accessor-pairs": "error",
26 | "arrow-spacing": ["error", { "before": true, "after": true }],
27 | "block-spacing": ["error", "always"],
28 | "brace-style": ["off", "1tbs", { "allowSingleLine": true }],
29 | "camelcase": "off",
30 | "comma-dangle": "off",
31 | "comma-spacing": ["error", { "before": false, "after": true }],
32 | "comma-style": ["error", "last"],
33 | "constructor-super": "error",
34 | "curly": ["error", "multi-line"],
35 | "dot-location": ["error", "property"],
36 | "eol-last": "off",
37 | "eqeqeq": "off",
38 | "func-call-spacing": ["error", "never"],
39 | "generator-star-spacing": ["error", { "before": true, "after": true }],
40 | "handle-callback-err": "off",
41 | "indent": "off",
42 | "key-spacing": ["error", { "beforeColon": false, "afterColon": true }],
43 | "keyword-spacing": ["error", { "before": true, "after": true }],
44 | "new-cap": ["error", { "newIsCap": true, "capIsNew": false }],
45 | "new-parens": "error",
46 | "no-array-constructor": "error",
47 | "no-caller": "error",
48 | "no-class-assign": "error",
49 | "no-compare-neg-zero": "error",
50 | "no-cond-assign": "error",
51 | "no-const-assign": "error",
52 | "no-constant-condition": ["error", { "checkLoops": false }],
53 | "no-control-regex": "off",
54 | "no-debugger": "error",
55 | "no-delete-var": "error",
56 | "no-dupe-args": "error",
57 | "no-dupe-class-members": "error",
58 | "no-dupe-keys": "error",
59 | "no-duplicate-case": "error",
60 | "no-empty-character-class": "error",
61 | "no-empty-pattern": "error",
62 | "no-eval": "error",
63 | "no-ex-assign": "error",
64 | "no-extend-native": "error",
65 | "no-extra-bind": "error",
66 | "no-extra-boolean-cast": "error",
67 | "no-extra-parens": ["error", "functions"],
68 | "no-fallthrough": "error",
69 | "no-floating-decimal": "error",
70 | "no-func-assign": "error",
71 | "no-global-assign": "error",
72 | "no-implied-eval": "error",
73 | "no-inner-declarations": "off",
74 | "no-invalid-regexp": "error",
75 | "no-irregular-whitespace": "error",
76 | "no-iterator": "error",
77 | "no-label-var": "error",
78 | "no-labels": ["error", { "allowLoop": false, "allowSwitch": false }],
79 | "no-lone-blocks": "error",
80 | "no-mixed-operators": ["error", {
81 | "groups": [
82 | ["==", "!=", "===", "!==", ">", ">=", "<", "<="],
83 | ["&&", "||"],
84 | ["in", "instanceof"]
85 | ],
86 | "allowSamePrecedence": true
87 | }],
88 | "no-mixed-spaces-and-tabs": "error",
89 | "no-multi-spaces": "off",
90 | "no-multi-str": "error",
91 | "no-multiple-empty-lines": ["error", { "max": 1, "maxEOF": 0 }],
92 | "no-negated-in-lhs": "error",
93 | "no-new": "off",
94 | "no-new-func": "error",
95 | "no-new-object": "error",
96 | "no-new-require": "error",
97 | "no-new-symbol": "error",
98 | "no-new-wrappers": "error",
99 | "no-obj-calls": "error",
100 | "no-octal": "error",
101 | "no-octal-escape": "error",
102 | "no-path-concat": "error",
103 | "no-proto": "error",
104 | "no-redeclare": "error",
105 | "no-regex-spaces": "error",
106 | "no-return-assign": ["error", "except-parens"],
107 | "no-return-await": "off",
108 | "no-self-assign": "error",
109 | "no-self-compare": "error",
110 | "no-sequences": "error",
111 | "no-shadow-restricted-names": "error",
112 | "no-sparse-arrays": "error",
113 | "no-tabs": "error",
114 | "no-template-curly-in-string": "error",
115 | "no-this-before-super": "error",
116 | "no-throw-literal": "error",
117 | "no-trailing-spaces": "error",
118 | "no-undef": "error",
119 | "no-undef-init": "error",
120 | "no-unexpected-multiline": "error",
121 | "no-unmodified-loop-condition": "error",
122 | "no-unneeded-ternary": ["off", { "defaultAssignment": false }],
123 | "no-unreachable": "error",
124 | "no-unsafe-finally": "error",
125 | "no-unsafe-negation": "error",
126 | "no-unused-expressions": ["error", { "allowShortCircuit": true, "allowTernary": true, "allowTaggedTemplates": true }],
127 | "no-unused-vars": ["off", { "vars": "all", "args": "none", "ignoreRestSiblings": true }],
128 | "no-use-before-define": ["error", { "functions": false, "classes": false, "variables": false }],
129 | "no-useless-call": "error",
130 | "no-useless-computed-key": "error",
131 | "no-useless-constructor": "error",
132 | "no-useless-escape": "error",
133 | "no-useless-rename": "error",
134 | "no-useless-return": "off",
135 | "no-whitespace-before-property": "error",
136 | "no-with": "error",
137 | "object-property-newline": ["error", { "allowMultiplePropertiesPerLine": true }],
138 | "one-var": ["error", { "initialized": "never" }],
139 | "operator-linebreak": ["off", "after", { "overrides": { "?": "before", ":": "before" } }],
140 | "padded-blocks": ["error", { "blocks": "never", "switches": "never", "classes": "never" }],
141 | "prefer-promise-reject-errors": "off",
142 | "quotes": ["error", "single", { "avoidEscape": true, "allowTemplateLiterals": true }],
143 | "rest-spread-spacing": ["error", "never"],
144 | "semi": ["error", "never"],
145 | "semi-spacing": ["error", { "before": false, "after": true }],
146 | "space-before-blocks": ["error", "always"],
147 | "space-before-function-paren": ["error", "always"],
148 | "space-in-parens": ["error", "never"],
149 | "space-infix-ops": "off",
150 | "space-unary-ops": ["error", { "words": true, "nonwords": false }],
151 | "spaced-comment": ["off", "always", {
152 | "line": { "markers": ["*package", "!", "/", ","] },
153 | "block": { "balanced": true, "markers": ["*package", "!", ",", ":", "::", "flow-include"], "exceptions": ["*"] }
154 | }],
155 | "symbol-description": "error",
156 | "template-curly-spacing": ["error", "never"],
157 | "template-tag-spacing": ["error", "never"],
158 | "unicode-bom": ["error", "never"],
159 | "use-isnan": "error",
160 | "valid-typeof": ["error", { "requireStringLiterals": true }],
161 | "wrap-iife": ["error", "any", { "functionPrototypeMethods": true }],
162 | "yield-star-spacing": ["error", "both"],
163 | "yoda": ["error", "never"]
164 | }
165 | }
--------------------------------------------------------------------------------
/web-apis/fg/beaker.js:
--------------------------------------------------------------------------------
1 | const { EventTarget, bindEventStream, fromEventStream } = require('./event-target')
2 | const errors = require('beaker-error-constants')
3 |
4 | const archivesManifest = require('../manifests/internal/archives')
5 | const beakerBrowserManifest = require('../manifests/internal/browser')
6 | const bookmarksManifest = require('../manifests/internal/bookmarks')
7 | const downloadsManifest = require('../manifests/internal/downloads')
8 | const historyManifest = require('../manifests/internal/history')
9 | const sitedataManifest = require('../manifests/internal/sitedata')
10 | const watchlistManifest = require('../manifests/internal/watchlist')
11 |
/**
 * Builds the `beaker.*` web API object exposed to pages.
 * The privileged sub-APIs (archives, browser, bookmarks, downloads, history,
 * sitedata, watchlist) are only wired up on beaker: pages; on all other
 * origins an empty object is returned.
 * @param {Object} rpc - the pauls-electron-rpc client used to import each API
 * @returns {Object} the beaker API namespace
 */
exports.setup = function (rpc) {
  const beaker = {}
  const opts = { timeout: false, errors }

  // internal only
  // (these RPC channels are privileged; only beaker:// pages may bind them)
  if (window.location.protocol === 'beaker:') {
    const archivesRPC = rpc.importAPI('archives', archivesManifest, opts)
    const beakerBrowserRPC = rpc.importAPI('beaker-browser', beakerBrowserManifest, opts)
    const bookmarksRPC = rpc.importAPI('bookmarks', bookmarksManifest, opts)
    const downloadsRPC = rpc.importAPI('downloads', downloadsManifest, opts)
    const historyRPC = rpc.importAPI('history', historyManifest, opts)
    const sitedataRPC = rpc.importAPI('sitedata', sitedataManifest, opts)
    const watchlistRPC = rpc.importAPI('watchlist', watchlistManifest, opts)

    // beaker.archives
    // (an EventTarget so archive events can be bound onto it below)
    beaker.archives = new EventTarget()
    beaker.archives.status = archivesRPC.status
    beaker.archives.add = archivesRPC.add
    beaker.archives.setUserSettings = archivesRPC.setUserSettings
    beaker.archives.remove = archivesRPC.remove
    beaker.archives.bulkRemove = archivesRPC.bulkRemove
    beaker.archives.delete = archivesRPC.delete
    beaker.archives.list = archivesRPC.list
    beaker.archives.validateLocalSyncPath = archivesRPC.validateLocalSyncPath
    beaker.archives.setLocalSyncPath = archivesRPC.setLocalSyncPath
    beaker.archives.ensureLocalSyncFinished = archivesRPC.ensureLocalSyncFinished
    beaker.archives.diffLocalSyncPathListing = archivesRPC.diffLocalSyncPathListing
    beaker.archives.diffLocalSyncPathFile = archivesRPC.diffLocalSyncPathFile
    beaker.archives.publishLocalSyncPathListing = archivesRPC.publishLocalSyncPathListing
    beaker.archives.revertLocalSyncPathListing = archivesRPC.revertLocalSyncPathListing
    beaker.archives.getDraftInfo = archivesRPC.getDraftInfo
    beaker.archives.listDrafts = archivesRPC.listDrafts
    beaker.archives.addDraft = archivesRPC.addDraft
    beaker.archives.removeDraft = archivesRPC.removeDraft
    beaker.archives.getTemplate = archivesRPC.getTemplate
    beaker.archives.listTemplates = archivesRPC.listTemplates
    beaker.archives.putTemplate = archivesRPC.putTemplate
    beaker.archives.removeTemplate = archivesRPC.removeTemplate
    beaker.archives.touch = archivesRPC.touch
    beaker.archives.clearFileCache = archivesRPC.clearFileCache
    beaker.archives.clearGarbage = archivesRPC.clearGarbage
    beaker.archives.clearDnsCache = archivesRPC.clearDnsCache
    beaker.archives.getDebugLog = archivesRPC.getDebugLog
    beaker.archives.createDebugStream = () => fromEventStream(archivesRPC.createDebugStream())
    // defer binding the archives event stream until page load
    window.addEventListener('load', () => {
      try {
        bindEventStream(archivesRPC.createEventStream(), beaker.archives)
      } catch (e) {
        // permissions error
      }
    })

    // beaker.browser
    beaker.browser = {}
    beaker.browser.createEventsStream = () => fromEventStream(beakerBrowserRPC.createEventsStream())
    beaker.browser.getInfo = beakerBrowserRPC.getInfo
    beaker.browser.checkForUpdates = beakerBrowserRPC.checkForUpdates
    beaker.browser.restartBrowser = beakerBrowserRPC.restartBrowser
    beaker.browser.getSetting = beakerBrowserRPC.getSetting
    beaker.browser.getSettings = beakerBrowserRPC.getSettings
    beaker.browser.setSetting = beakerBrowserRPC.setSetting
    beaker.browser.getUserSetupStatus = beakerBrowserRPC.getUserSetupStatus
    beaker.browser.setUserSetupStatus = beakerBrowserRPC.setUserSetupStatus
    beaker.browser.getDefaultLocalPath = beakerBrowserRPC.getDefaultLocalPath
    beaker.browser.setStartPageBackgroundImage = beakerBrowserRPC.setStartPageBackgroundImage
    beaker.browser.getDefaultProtocolSettings = beakerBrowserRPC.getDefaultProtocolSettings
    beaker.browser.setAsDefaultProtocolClient = beakerBrowserRPC.setAsDefaultProtocolClient
    beaker.browser.removeAsDefaultProtocolClient = beakerBrowserRPC.removeAsDefaultProtocolClient
    beaker.browser.fetchBody = beakerBrowserRPC.fetchBody
    beaker.browser.downloadURL = beakerBrowserRPC.downloadURL
    beaker.browser.getResourceContentType = beakerBrowserRPC.getResourceContentType
    beaker.browser.listBuiltinFavicons = beakerBrowserRPC.listBuiltinFavicons
    beaker.browser.getBuiltinFavicon = beakerBrowserRPC.getBuiltinFavicon
    beaker.browser.uploadFavicon = beakerBrowserRPC.uploadFavicon
    beaker.browser.imageToIco = beakerBrowserRPC.imageToIco
    beaker.browser.setWindowDimensions = beakerBrowserRPC.setWindowDimensions
    beaker.browser.showOpenDialog = beakerBrowserRPC.showOpenDialog
    beaker.browser.showContextMenu = beakerBrowserRPC.showContextMenu
    beaker.browser.openUrl = beakerBrowserRPC.openUrl
    beaker.browser.openFolder = beakerBrowserRPC.openFolder
    beaker.browser.doWebcontentsCmd = beakerBrowserRPC.doWebcontentsCmd
    beaker.browser.doTest = beakerBrowserRPC.doTest
    beaker.browser.closeModal = beakerBrowserRPC.closeModal

    // beaker.bookmarks
    beaker.bookmarks = {}
    beaker.bookmarks.getBookmark = bookmarksRPC.getBookmark
    beaker.bookmarks.isBookmarked = bookmarksRPC.isBookmarked
    beaker.bookmarks.bookmarkPublic = bookmarksRPC.bookmarkPublic
    beaker.bookmarks.unbookmarkPublic = bookmarksRPC.unbookmarkPublic
    beaker.bookmarks.listPublicBookmarks = bookmarksRPC.listPublicBookmarks
    beaker.bookmarks.setBookmarkPinned = bookmarksRPC.setBookmarkPinned
    beaker.bookmarks.setBookmarkPinOrder = bookmarksRPC.setBookmarkPinOrder
    beaker.bookmarks.listPinnedBookmarks = bookmarksRPC.listPinnedBookmarks
    beaker.bookmarks.bookmarkPrivate = bookmarksRPC.bookmarkPrivate
    beaker.bookmarks.unbookmarkPrivate = bookmarksRPC.unbookmarkPrivate
    beaker.bookmarks.listPrivateBookmarks = bookmarksRPC.listPrivateBookmarks
    beaker.bookmarks.listBookmarkTags = bookmarksRPC.listBookmarkTags

    // beaker.downloads
    beaker.downloads = {}
    beaker.downloads.getDownloads = downloadsRPC.getDownloads
    beaker.downloads.pause = downloadsRPC.pause
    beaker.downloads.resume = downloadsRPC.resume
    beaker.downloads.cancel = downloadsRPC.cancel
    beaker.downloads.remove = downloadsRPC.remove
    beaker.downloads.open = downloadsRPC.open
    beaker.downloads.showInFolder = downloadsRPC.showInFolder
    beaker.downloads.createEventsStream = () => fromEventStream(downloadsRPC.createEventsStream())

    // beaker.history
    beaker.history = {}
    beaker.history.addVisit = historyRPC.addVisit
    beaker.history.getVisitHistory = historyRPC.getVisitHistory
    beaker.history.getMostVisited = historyRPC.getMostVisited
    beaker.history.search = historyRPC.search
    beaker.history.removeVisit = historyRPC.removeVisit
    beaker.history.removeAllVisits = historyRPC.removeAllVisits
    beaker.history.removeVisitsAfter = historyRPC.removeVisitsAfter

    // beaker.sitedata
    beaker.sitedata = {}
    beaker.sitedata.get = sitedataRPC.get
    beaker.sitedata.set = sitedataRPC.set
    beaker.sitedata.getPermissions = sitedataRPC.getPermissions
    beaker.sitedata.getAppPermissions = sitedataRPC.getAppPermissions
    beaker.sitedata.getPermission = sitedataRPC.getPermission
    beaker.sitedata.setPermission = sitedataRPC.setPermission
    beaker.sitedata.setAppPermissions = sitedataRPC.setAppPermissions
    beaker.sitedata.clearPermission = sitedataRPC.clearPermission
    beaker.sitedata.clearPermissionAllOrigins = sitedataRPC.clearPermissionAllOrigins

    // beaker.watchlist
    beaker.watchlist = {}
    beaker.watchlist.add = watchlistRPC.add
    beaker.watchlist.list = watchlistRPC.list
    beaker.watchlist.update = watchlistRPC.update
    beaker.watchlist.remove = watchlistRPC.remove
    beaker.watchlist.createEventsStream = () => fromEventStream(watchlistRPC.createEventsStream())
  }

  return beaker
}
155 |
--------------------------------------------------------------------------------
/dat/daemon/logging-utils.js:
--------------------------------------------------------------------------------
1 | const datEncoding = require('dat-encoding')
2 |
// Resolves a (possibly truncated) discovery key to the full key we know.
// HACK
// discovery currently truncates keys to 40 hex chars, so a short key is
// prefix-matched against our known archives
// (this shouldnt be needed once discovery stops truncating keys)
// -prf
const findFullDiscoveryKey = exports.findFullDiscoveryKey = function (archivesByDKey, key) {
  if (Buffer.isBuffer(key)) key = key.toString('hex')
  if (key && key.length === 40) {
    var fullKey = Object.keys(archivesByDKey).find(dKey => dKey.startsWith(key))
    if (fullKey) return fullKey
  }
  return key
}
19 |
// Finds the discovery key referenced by an mDNS message, or '' if none.
// Any record named '<dkey>.dat.local' identifies the archive; the first
// match wins.
const getDNSMessageDiscoveryKey = exports.getDNSMessageDiscoveryKey = function (archivesByDKey, msg) {
  var key
  const check = obj => {
    if (!key && obj.name.endsWith('.dat.local')) {
      // strip the '.dat.local' suffix (10 chars) to get the key portion
      key = findFullDiscoveryKey(archivesByDKey, obj.name.slice(0, -10))
    }
  }
  if (msg.questions) msg.questions.forEach(check)
  if (msg.answers) msg.answers.forEach(check)
  if (msg.additionals) msg.additionals.forEach(check)
  return key || ''
}
32 |
// Returns true if `str` contains the substring `v`.
// (idiomatic String.prototype.includes over indexOf !== -1)
function has (str, v) {
  return str.includes(v)
}
36 |
// Attaches verbose debug logging to every networking event on the given
// discovery swarm. Each handler resolves the peer's (possibly truncated)
// channel/discovery key to a known archive; events for unknown archives are
// dropped. `log(key, details)` receives the archive's public key (hex) plus
// a structured description of the event.
const addArchiveSwarmLogging = exports.addArchiveSwarmLogging = function ({archivesByDKey, log, archiveSwarm}) {
  // once discovery is listening, log the raw mDNS traffic per archive
  archiveSwarm.on('listening', () => {
    archiveSwarm._discovery.dns.on('traffic', (type, details) => {
      let archive = archivesByDKey[getDNSMessageDiscoveryKey(archivesByDKey, details.message)]
      if (!archive) return
      log(datEncoding.toStr(archive.key), {
        event: 'traffic',
        trafficType: type,
        messageId: details.message.id,
        message: renderDNSTraffic(details.message),
        peer: details.peer ? `${details.peer.address || details.peer.host}:${details.peer.port}` : undefined
      })
    })
  })
  archiveSwarm.on('peer', (peer) => {
    let archive = archivesByDKey[findFullDiscoveryKey(archivesByDKey, peer.channel)]
    if (!archive) return
    log(datEncoding.toStr(archive.key), {
      event: 'peer-found',
      peer: `${peer.address || peer.host}:${peer.port}`
    })
  })
  archiveSwarm.on('peer-banned', (peer, details) => {
    let archive = archivesByDKey[findFullDiscoveryKey(archivesByDKey, peer.channel)]
    if (!archive) return
    log(datEncoding.toStr(archive.key), {
      event: 'peer-banned',
      peer: `${peer.address || peer.host}:${peer.port}`,
      message: peerBannedReason(details.reason)
    })
  })
  archiveSwarm.on('peer-rejected', (peer, details) => {
    let archive = archivesByDKey[findFullDiscoveryKey(archivesByDKey, peer.channel)]
    if (!archive) return
    log(datEncoding.toStr(archive.key), {
      event: 'peer-rejected',
      peer: `${peer.address || peer.host}:${peer.port}`,
      message: peerRejectedReason(details.reason)
    })
  })
  archiveSwarm.on('drop', (peer) => {
    let archive = archivesByDKey[findFullDiscoveryKey(archivesByDKey, peer.channel)]
    if (!archive) return
    log(datEncoding.toStr(archive.key), {
      event: 'peer-dropped',
      peer: `${peer.address || peer.host}:${peer.port}`,
      message: 'Too many failed connection attempts'
    })
  })
  archiveSwarm.on('connecting', (peer) => {
    let archive = archivesByDKey[findFullDiscoveryKey(archivesByDKey, peer.channel)]
    if (!archive) return
    log(datEncoding.toStr(archive.key), {
      event: 'connecting',
      peer: `${peer.address || peer.host}:${peer.port}`
    })
  })
  archiveSwarm.on('connect-failed', (peer, details) => {
    let archive = archivesByDKey[findFullDiscoveryKey(archivesByDKey, peer.channel)]
    if (!archive) return
    log(datEncoding.toStr(archive.key), {
      event: 'connect-failed',
      peer: `${peer.address || peer.host}:${peer.port}`,
      message: connectFailedMessage(details)
    })
  })
  archiveSwarm.on('handshaking', (conn, peer) => {
    let archive = archivesByDKey[findFullDiscoveryKey(archivesByDKey, peer.channel)]
    if (!archive) return
    log(datEncoding.toStr(archive.key), {
      event: 'handshaking',
      peer: `${peer.address || peer.host}:${peer.port}`,
      connectionId: conn._debugId,
      connectionType: peer.type,
      // NOTE(review): ts is 0 here while later events report elapsed ms from
      // conn._debugStartTime — presumably this marks the timing origin; confirm
      ts: 0
    })
  })
  archiveSwarm.on('handshake-timeout', (conn, peer) => {
    let archive = archivesByDKey[findFullDiscoveryKey(archivesByDKey, peer.channel)]
    if (!archive) return
    log(datEncoding.toStr(archive.key), {
      event: 'handshake-timeout',
      peer: `${peer.address || peer.host}:${peer.port}`,
      connectionId: conn._debugId,
      connectionType: peer.type,
      ts: Date.now() - conn._debugStartTime
    })
  })
  archiveSwarm.on('connection', (conn, peer) => {
    let archive = archivesByDKey[findFullDiscoveryKey(archivesByDKey, peer.channel)]
    if (!archive) return
    log(datEncoding.toStr(archive.key), {
      event: 'connection-established',
      peer: `${peer.address || peer.host}:${peer.port}`,
      connectionId: conn._debugId,
      connectionType: peer.type,
      ts: Date.now() - conn._debugStartTime,
      message: 'Starting replication'
    })
  })
  archiveSwarm.on('redundant-connection', (conn, peer) => {
    let archive = archivesByDKey[findFullDiscoveryKey(archivesByDKey, peer.channel)]
    if (!archive) return
    log(datEncoding.toStr(archive.key), {
      event: 'redundant-connection',
      peer: `${peer.address || peer.host}:${peer.port}`,
      connectionId: conn._debugId,
      connectionType: peer.type,
      ts: Date.now() - conn._debugStartTime
    })
  })
  archiveSwarm.on('connection-closed', (conn, peer) => {
    let archive = archivesByDKey[findFullDiscoveryKey(archivesByDKey, peer.channel)]
    if (!archive) return
    log(datEncoding.toStr(archive.key), {
      event: 'connection-closed',
      peer: `${peer.address || peer.host}:${peer.port}`,
      connectionId: conn._debugId,
      connectionType: peer.type,
      ts: Date.now() - conn._debugStartTime
    })
  })
}
160 |
// Produce a human-readable summary of a DNS message's sections for the debug log.
// Each question/answer/additional record is rendered as a short description and
// the descriptions are joined into a single comma-separated string.
const renderDNSTraffic = exports.renderDNSTraffic = function ({questions, answers, additionals}) {
  var parts = []

  // questions are only interesting when the message carries no responses
  var hasAnswers = answers && answers.length
  var hasAdditionals = additionals && additionals.length
  if (questions && !hasAnswers && !hasAdditionals) {
    for (let q of questions) {
      parts.push(q.type === 'TXT' ? 'TXT Question (requesting peers list)' : q.type + ' Question')
    }
  }

  if (answers) {
    for (let a of answers) {
      if (a.type === 'TXT' && a.data) {
        let text = a.data.toString()
        if (has(text, 'host') && has(text, 'token')) {
          parts.push('TXT Answer (heres a session token)')
        } else if (has(text, 'peers')) {
          parts.push('TXT Answer (heres a peers list)')
        } else if (has(text, 'token')) {
          parts.push('TXT Answer (no peers found)')
        } else {
          parts.push('TXT Answer')
        }
      } else {
        parts.push(a.type + ' Answer')
      }
    }
  }

  if (additionals) {
    for (let a of additionals) {
      if (a.type === 'TXT' && a.data) {
        let text = a.data.toString()
        if (has(text, 'announce')) {
          parts.push('TXT Additional (announcing self)')
        } else if (has(text, 'unannounce')) {
          parts.push('TXT Additional (unannouncing self)')
        } else if (has(text, 'subscribe')) {
          parts.push('TXT Additional (subscribing)')
        } else {
          parts.push('TXT Additional')
        }
      } else if (a.type === 'SRV' && a.data) {
        parts.push('SRV Additional (pushed announcement)')
      } else {
        parts.push(a.type + ' Additional')
      }
    }
  }

  return parts.join(', ')
}
212 |
// Map connect-failure details to a human-readable message.
// Only timeouts get a description; any other failure yields undefined.
function connectFailedMessage (details) {
  return details.timedout ? 'Timed out' : undefined
}
216 |
// Translate a peer-ban reason code into a human-readable explanation.
// Unknown codes map to the empty string.
function peerBannedReason (reason) {
  if (reason === 'detected-self') return 'Detected that the peer is this process'
  if (reason === 'application') return 'Peer was removed by the application'
  return ''
}
224 |
// Translate a peer-rejection reason code into a human-readable explanation.
// Unknown codes map to the empty string.
function peerRejectedReason (reason) {
  if (reason === 'whitelist') return 'Peer was not on the whitelist'
  if (reason === 'banned') return 'Peer is on the ban list'
  if (reason === 'duplicate') return 'Peer was a duplicate (already being handled)'
  return ''
}
233 |
--------------------------------------------------------------------------------
/web-apis/bg/archives.js:
--------------------------------------------------------------------------------
1 | const path = require('path')
2 | const mkdirp = require('mkdirp')
3 | const jetpack = require('fs-jetpack')
4 | const templatesDb = require('../../dbs/templates')
5 | const datDns = require('../../dat/dns')
6 | const datLibrary = require('../../dat/library')
7 | const datGC = require('../../dat/garbage-collector')
8 | const archivesDb = require('../../dbs/archives')
9 | const archiveDraftsDb = require('../../dbs/archive-drafts')
10 | const {cbPromise} = require('../../lib/functions')
11 | const {timer} = require('../../lib/time')
12 | const lock = require('../../lib/lock')
13 |
14 | // exported api
15 | // =
16 |
17 | module.exports = {
18 |
19 | // system state
20 | // =
21 |
22 | async status () {
23 | var status = {archives: 0, peers: 0}
24 | var archives = datLibrary.getActiveArchives()
25 | for (var k in archives) {
26 | status.archives++
27 | status.peers += archives[k].metadata.peers.length
28 | }
29 | return status
30 | },
31 |
32 | // local cache management and querying
33 | // =
34 |
35 | async setUserSettings (url, opts) {
36 | var key = datLibrary.fromURLToKey(url)
37 | return archivesDb.setUserSettings(0, key, opts)
38 | },
39 |
40 | async add (url, opts = {}) {
41 | var key = datLibrary.fromURLToKey(url)
42 |
43 | // pull metadata
44 | var archive = await datLibrary.getOrLoadArchive(key)
45 | await datLibrary.pullLatestArchiveMeta(archive)
46 |
47 | // update settings
48 | opts.isSaved = true
49 | return archivesDb.setUserSettings(0, key, opts)
50 | },
51 |
52 | async remove (url) {
53 | var key = datLibrary.fromURLToKey(url)
54 | return archivesDb.setUserSettings(0, key, {isSaved: false})
55 | },
56 |
57 | async bulkRemove (urls) {
58 | var results = []
59 |
60 | // sanity check
61 | if (!urls || !Array.isArray(urls)) {
62 | return []
63 | }
64 |
65 | for (var i = 0; i < urls.length; i++) {
66 | let key = datLibrary.fromURLToKey(urls[i])
67 |
68 | results.push(await archivesDb.setUserSettings(0, key, {isSaved: false}))
69 | }
70 | return results
71 | },
72 |
73 | async delete (url) {
74 | const key = datLibrary.fromURLToKey(url)
75 | const drafts = await archiveDraftsDb.list(0, key)
76 | const toDelete = [{key}].concat(drafts)
77 | var bytes = 0
78 | for (let archive of toDelete) {
79 | await archivesDb.setUserSettings(0, archive.key, {isSaved: false})
80 | await datLibrary.unloadArchive(archive.key)
81 | bytes += await archivesDb.deleteArchive(archive.key)
82 | }
83 | return {bytes}
84 | },
85 |
86 | async list (query = {}) {
87 | return datLibrary.queryArchives(query)
88 | },
89 |
90 | // folder sync
91 | // =
92 |
93 | async validateLocalSyncPath (key, localSyncPath) {
94 | key = datLibrary.fromURLToKey(key)
95 | localSyncPath = path.normalize(localSyncPath)
96 |
97 | // make sure the path is good
98 | try {
99 | await datLibrary.getDaemon().fs_assertSafePath(localSyncPath)
100 | } catch (e) {
101 | if (e.notFound) {
102 | return {doesNotExist: true}
103 | }
104 | throw e
105 | }
106 |
107 | // check for conflicts
108 | var archive = await datLibrary.getOrLoadArchive(key)
109 | var diff = await datLibrary.getDaemon().fs_diffListing(archive, {localSyncPath})
110 | diff = diff.filter(d => d.change === 'mod' && d.path !== '/dat.json')
111 | if (diff.length) {
112 | return {hasConflicts: true, conflicts: diff.map(d => d.path)}
113 | }
114 |
115 | return {}
116 | },
117 |
118 | async setLocalSyncPath (key, localSyncPath, opts = {}) {
119 | key = datLibrary.fromURLToKey(key)
120 | localSyncPath = localSyncPath ? path.normalize(localSyncPath) : null
121 |
122 | // disable path
123 | if (!localSyncPath) {
124 | let oldSettings = await archivesDb.getUserSettings(0, key)
125 | await archivesDb.setUserSettings(0, key, {localSyncPath: ''})
126 |
127 | if (opts.deleteSyncPath && oldSettings.localSyncPath) {
128 | try {
129 | await datLibrary.getDaemon().fs_assertSafePath(oldSettings.localSyncPath)
130 | await jetpack.removeAsync(oldSettings.localSyncPath)
131 | } catch (_) {}
132 | }
133 | return
134 | }
135 |
136 | // load the archive
137 | await timer(3e3, async (checkin) => { // put a max 3s timeout on loading the dat
138 | checkin('searching for dat')
139 | await datLibrary.getOrLoadArchive(key)
140 | })
141 |
142 | // make sure the path is good
143 | try {
144 | await datLibrary.getDaemon().fs_assertSafePath(localSyncPath)
145 | } catch (e) {
146 | if (e.notFound) {
147 | // just create the folder
148 | await cbPromise(cb => mkdirp(localSyncPath, cb))
149 | } else {
150 | throw e
151 | }
152 | }
153 |
154 | // update the record
155 | var newValues = {localSyncPath}
156 | if ('previewMode' in opts) {
157 | newValues.previewMode = opts.previewMode
158 | }
159 | await archivesDb.setUserSettings(0, key, newValues)
160 | },
161 |
162 | async ensureLocalSyncFinished (key) {
163 | key = datLibrary.fromURLToKey(key)
164 |
165 | // load the archive
166 | var archive
167 | await timer(3e3, async (checkin) => { // put a max 3s timeout on loading the dat
168 | checkin('searching for dat')
169 | archive = await datLibrary.getOrLoadArchive(key)
170 | })
171 |
172 | // ensure sync
173 | await datLibrary.getDaemon().fs_ensureSyncFinished(archive)
174 | },
175 |
176 | // diff & publish
177 | // =
178 |
179 | async diffLocalSyncPathListing (key, opts) {
180 | key = datLibrary.fromURLToKey(key)
181 |
182 | // load the archive
183 | var archive
184 | await timer(3e3, async (checkin) => { // put a max 3s timeout on loading the dat
185 | checkin('searching for dat')
186 | archive = await datLibrary.getOrLoadArchive(key)
187 | })
188 |
189 | return datLibrary.getDaemon().fs_diffListing(archive, opts)
190 | },
191 |
192 | async diffLocalSyncPathFile (key, filepath) {
193 | key = datLibrary.fromURLToKey(key)
194 |
195 | // load the archive
196 | var archive
197 | await timer(3e3, async (checkin) => { // put a max 3s timeout on loading the dat
198 | checkin('searching for dat')
199 | archive = await datLibrary.getOrLoadArchive(key)
200 | })
201 |
202 | return datLibrary.getDaemon().fs_diffFile(archive, filepath)
203 | },
204 |
205 | async publishLocalSyncPathListing (key, opts = {}) {
206 | key = datLibrary.fromURLToKey(key)
207 |
208 | // load the archive
209 | var archive
210 | await timer(3e3, async (checkin) => { // put a max 3s timeout on loading the dat
211 | checkin('searching for dat')
212 | archive = await datLibrary.getOrLoadArchive(key)
213 | })
214 |
215 | opts.shallow = false
216 | return datLibrary.getDaemon().fs_syncFolderToArchive(archive, opts)
217 | },
218 |
219 | async revertLocalSyncPathListing (key, opts = {}) {
220 | key = datLibrary.fromURLToKey(key)
221 |
222 | // load the archive
223 | var archive
224 | await timer(3e3, async (checkin) => { // put a max 3s timeout on loading the dat
225 | checkin('searching for dat')
226 | archive = await datLibrary.getOrLoadArchive(key)
227 | })
228 |
229 | opts.shallow = false
230 | return datLibrary.getDaemon().fs_syncArchiveToFolder(archive, opts)
231 | },
232 |
233 | // drafts
234 | // =
235 |
236 | async getDraftInfo (url) {
237 | var key = datLibrary.fromURLToKey(url)
238 | var masterKey = await archiveDraftsDb.getMaster(0, key)
239 | var master = await archivesDb.query(0, {key: masterKey})
240 | var drafts = await archiveDraftsDb.list(0, masterKey)
241 | return {master, drafts}
242 | },
243 |
244 | async listDrafts (masterUrl) {
245 | var masterKey = datLibrary.fromURLToKey(masterUrl)
246 | return archiveDraftsDb.list(0, masterKey)
247 | },
248 |
249 | async addDraft (masterUrl, draftUrl) {
250 | var masterKey = datLibrary.fromURLToKey(masterUrl)
251 | var draftKey = datLibrary.fromURLToKey(draftUrl)
252 |
253 | // make sure we're modifying the master
254 | masterKey = await archiveDraftsDb.getMaster(0, masterKey)
255 |
256 | return archiveDraftsDb.add(0, masterKey, draftKey)
257 | },
258 |
259 | async removeDraft (masterUrl, draftUrl) {
260 | var masterKey = datLibrary.fromURLToKey(masterUrl)
261 | var draftKey = datLibrary.fromURLToKey(draftUrl)
262 |
263 | // make sure we're modifying the master
264 | masterKey = await archiveDraftsDb.getMaster(0, masterKey)
265 |
266 | return archiveDraftsDb.remove(0, masterKey, draftKey)
267 | },
268 |
269 | // templates
270 | // =
271 |
272 | async getTemplate (url) {
273 | return templatesDb.get(0, url)
274 | },
275 |
276 | async listTemplates () {
277 | return templatesDb.list(0)
278 | },
279 |
280 | async putTemplate (url, {title, screenshot}) {
281 | return templatesDb.put(0, url, {title, screenshot})
282 | },
283 |
284 | async removeTemplate (url) {
285 | return templatesDb.remove(0, url)
286 | },
287 |
288 | // internal management
289 | // =
290 |
291 | async touch (key, timeVar, value) {
292 | return archivesDb.touch(key, timeVar, value)
293 | },
294 |
295 | async clearFileCache (url) {
296 | return datLibrary.clearFileCache(datLibrary.fromURLToKey(url))
297 | },
298 |
299 | async clearGarbage ({isOwner} = {}) {
300 | return datGC.collect({olderThan: 0, biggerThan: 0, isOwner})
301 | },
302 |
303 | clearDnsCache () {
304 | datDns.flushCache()
305 | },
306 |
307 | // events
308 | // =
309 |
310 | createEventStream () {
311 | return datLibrary.createEventStream()
312 | },
313 |
314 | getDebugLog (key) {
315 | return datLibrary.getDebugLog(key)
316 | },
317 |
318 | createDebugStream () {
319 | return datLibrary.createDebugStream()
320 | }
321 | }
322 |
--------------------------------------------------------------------------------
/dat/protocol.js:
--------------------------------------------------------------------------------
1 | const {join} = require('path')
2 | const parseDatUrl = require('parse-dat-url')
3 | const parseRange = require('range-parser')
4 | const once = require('once')
5 | const debug = require('../lib/debug-logger').debugLogger('dat-serve')
6 | const intoStream = require('into-stream')
7 | const toZipStream = require('hyperdrive-to-zip-stream')
8 | const slugify = require('slugify')
9 |
10 | const datDns = require('./dns')
11 | const datLibrary = require('./library')
12 |
13 | const directoryListingPage = require('./directory-listing-page')
14 | const errorPage = require('../lib/error-page')
15 | const mime = require('../lib/mime')
16 | const {makeSafe} = require('../lib/strings')
17 |
// HACK detect whether the native builds of some key deps are working -prf
// (a require failure here means the module's native binary is missing or
// incompatible with this build; we log it and keep running in a degraded mode)
try {
  require('utp-native')
} catch (err) {
  debug('Failed to load utp-native. Peer-to-peer connectivity may be degraded.', err.toString())
  console.error('Failed to load utp-native. Peer-to-peer connectivity may be degraded.', err)
}
try {
  require('sodium-native')
} catch (err) {
  debug('Failed to load sodium-native. Performance may be degraded.', err.toString())
  console.error('Failed to load sodium-native. Performance may be degraded.', err)
}

// constants
// =

// how long till we give up searching the network for the archive/file?
const REQUEST_TIMEOUT_MS = 30e3 // 30 seconds
38 |
39 | // exported api
40 | // =
41 |
42 | exports.electronHandler = async function (request, respond) {
43 | respond = once(respond)
44 | var respondError = (code, status, errorPageInfo) => {
45 | if (errorPageInfo) {
46 | errorPageInfo.validatedURL = request.url
47 | errorPageInfo.errorCode = code
48 | }
49 | var accept = request.headers.Accept || ''
50 | if (accept.includes('text/html')) {
51 | respond({
52 | statusCode: code,
53 | headers: {
54 | 'Content-Type': 'text/html',
55 | 'Content-Security-Policy': "default-src 'unsafe-inline' beaker:;",
56 | 'Access-Control-Allow-Origin': '*'
57 | },
58 | data: intoStream(errorPage(errorPageInfo || (code + ' ' + status)))
59 | })
60 | } else {
61 | respond({statusCode: code})
62 | }
63 | }
64 | var fileReadStream
65 | var headersSent = false
66 | var archive
67 | var cspHeader = ''
68 |
69 | // validate request
70 | var urlp = parseDatUrl(request.url, true)
71 | if (!urlp.host) {
72 | return respondError(404, 'Archive Not Found', {
73 | title: 'Archive Not Found',
74 | errorDescription: 'Invalid URL',
75 | errorInfo: `${request.url} is an invalid dat:// URL`
76 | })
77 | }
78 | if (request.method !== 'GET' && request.method !== 'HEAD') {
79 | return respondError(405, 'Method Not Supported')
80 | }
81 |
82 | // resolve the name
83 | // (if it's a hostname, do a DNS lookup)
84 | try {
85 | var archiveKey = await datDns.resolveName(urlp.host, {ignoreCachedMiss: true})
86 | } catch (err) {
87 | return respondError(404, 'No DNS record found for ' + urlp.host, {
88 | errorDescription: 'No DNS record found',
89 | errorInfo: `No DNS record found for dat://${urlp.host}`
90 | })
91 | }
92 |
93 | // setup a timeout
94 | var timeout
95 | const cleanup = () => clearTimeout(timeout)
96 | timeout = setTimeout(() => {
97 | // cleanup
98 | debug('Timed out searching for', archiveKey)
99 | if (fileReadStream) {
100 | fileReadStream.destroy()
101 | fileReadStream = null
102 | }
103 |
104 | // error page
105 | var resource = archive ? 'page' : 'site'
106 | respondError(504, `Timed out searching for ${resource}`, {
107 | resource,
108 | validatedURL: urlp.href
109 | })
110 | }, REQUEST_TIMEOUT_MS)
111 |
112 | try {
113 | // start searching the network
114 | archive = await datLibrary.getOrLoadArchive(archiveKey)
115 | } catch (err) {
116 | debug('Failed to open archive', archiveKey, err)
117 | cleanup()
118 | return respondError(500, 'Failed')
119 | }
120 |
121 | // parse path
122 | var filepath = decodeURIComponent(urlp.path)
123 | if (!filepath) filepath = '/'
124 | if (filepath.indexOf('?') !== -1) filepath = filepath.slice(0, filepath.indexOf('?')) // strip off any query params
125 | var hasTrailingSlash = filepath.endsWith('/')
126 |
127 | // checkout version if needed
128 | try {
129 | var {checkoutFS} = datLibrary.getArchiveCheckout(archive, urlp.version)
130 | } catch (err) {
131 | if (err.noPreviewMode) {
132 | let latestUrl = makeSafe(request.url.replace('+preview', ''))
133 | respondError(404, 'Cannot open preview', {
134 | title: 'Cannot open preview',
135 | errorInfo: `You are trying to open the "preview" version of this site, but no preview exists.`,
136 | errorDescription: `You can open the latest published version instead.`
137 | })
138 | } else {
139 | debug('Failed to open archive', archiveKey, err)
140 | cleanup()
141 | return respondError(500, 'Failed')
142 | }
143 | }
144 |
145 | // read the manifest (it's needed in a couple places)
146 | var manifest
147 | try { manifest = await checkoutFS.pda.readManifest() } catch (e) { manifest = null }
148 |
149 | // read manifest CSP
150 | if (manifest && manifest.content_security_policy && typeof manifest.content_security_policy === 'string') {
151 | cspHeader = manifest.content_security_policy
152 | }
153 |
154 | // handle zip download
155 | if (urlp.query.download_as === 'zip') {
156 | cleanup()
157 |
158 | // (try to) get the title from the manifest
159 | let zipname = false
160 | if (manifest) {
161 | zipname = slugify(manifest.title || '').toLowerCase()
162 | }
163 | zipname = zipname || 'archive'
164 |
165 | let headers = {
166 | 'Content-Type': 'application/zip',
167 | 'Content-Disposition': `attachment; filename="${zipname}.zip"`,
168 | 'Content-Security-Policy': cspHeader,
169 | 'Access-Control-Allow-Origin': '*'
170 | }
171 |
172 | if (request.method === 'HEAD') {
173 | // serve the headers
174 | return respond({
175 | statusCode: 204,
176 | headers,
177 | data: intoStream('')
178 | })
179 | } else {
180 | // serve the zip
181 | var zs = toZipStream(archive, filepath)
182 | zs.on('error', err => console.log('Error while producing .zip file', err))
183 | return respond({
184 | statusCode: 200,
185 | headers,
186 | data: zs
187 | })
188 | }
189 | }
190 |
191 | // lookup entry
192 | debug('Attempting to lookup', archiveKey, filepath)
193 | var statusCode = 200
194 | var headers = {}
195 | var entry
196 | const tryStat = async (path) => {
197 | // abort if we've already found it
198 | if (entry) return
199 | // apply the web_root config
200 | if (manifest && manifest.web_root && !urlp.query.disable_web_root) {
201 | if (path) {
202 | path = join(manifest.web_root, path)
203 | } else {
204 | path = manifest.web_root
205 | }
206 | }
207 | // attempt lookup
208 | try {
209 | entry = await checkoutFS.pda.stat(path)
210 | entry.path = path
211 | } catch (e) {}
212 | }
213 |
214 | // do lookup
215 | if (hasTrailingSlash) {
216 | await tryStat(filepath + 'index.html')
217 | await tryStat(filepath + 'index.md')
218 | await tryStat(filepath)
219 | } else {
220 | await tryStat(filepath)
221 | await tryStat(filepath + '.html') // fallback to .html
222 | if (entry && entry.isDirectory()) {
223 | // unexpected directory, give the .html fallback a chance
224 | let dirEntry = entry
225 | entry = null
226 | await tryStat(filepath + '.html') // fallback to .html
227 | if (dirEntry && !entry) {
228 | // no .html fallback found, stick with directory that we found
229 | entry = dirEntry
230 | }
231 | }
232 | }
233 |
234 | // handle folder
235 | if (entry && entry.isDirectory()) {
236 | cleanup()
237 |
238 | // make sure there's a trailing slash
239 | if (!hasTrailingSlash) {
240 | let url = `dat://${urlp.host}${urlp.version ? ('+' + urlp.version) : ''}${urlp.pathname || ''}/${urlp.search || ''}`
241 | return respond({
242 | statusCode: 200,
243 | headers: {'Content-Type': 'text/html'},
244 | data: intoStream(``)
245 | })
246 | }
247 |
248 | let headers = {
249 | 'Content-Type': 'text/html',
250 | 'Content-Security-Policy': cspHeader,
251 | 'Access-Control-Allow-Origin': '*'
252 | }
253 | if (request.method === 'HEAD') {
254 | return respond({statusCode: 204, headers, data: intoStream('')})
255 | } else {
256 | return respond({
257 | statusCode: 200,
258 | headers,
259 | data: intoStream(await directoryListingPage(checkoutFS, filepath, manifest && manifest.web_root))
260 | })
261 | }
262 | }
263 |
264 | // handle not found
265 | if (!entry) {
266 | debug('Entry not found:', urlp.path)
267 |
268 | // check for a fallback page
269 | if (manifest && manifest.fallback_page) {
270 | await tryStat(manifest.fallback_page)
271 | }
272 |
273 | if (!entry) {
274 | cleanup()
275 | return respondError(404, 'File Not Found', {
276 | errorDescription: 'File Not Found',
277 | errorInfo: `Beaker could not find the file ${urlp.path}`,
278 | title: 'File Not Found'
279 | })
280 | }
281 | }
282 |
283 | // TODO
284 | // Electron is being really aggressive about caching and not following the headers correctly
285 | // caching is disabled till we can figure out why
286 | // -prf
287 | // caching if-match
288 | // const ETag = (checkoutFS.isLocalFS) ? false : 'block-' + entry.offset
289 | // if (request.headers['if-none-match'] === ETag) {
290 | // return respondError(304, 'Not Modified')
291 | // }
292 |
293 | // fetch the permissions
294 | // TODO this has been disabled until we can create a better UX -prf
295 | // var origins
296 | // try {
297 | // origins = await sitedataDb.getNetworkPermissions('dat://' + archiveKey)
298 | // } catch (e) {
299 | // origins = []
300 | // }
301 |
302 | // handle range
303 | headers['Accept-Ranges'] = 'bytes'
304 | var range = request.headers.Range || request.headers.range
305 | if (range) range = parseRange(entry.size, range)
306 | if (range && range.type === 'bytes') {
307 | range = range[0] // only handle first range given
308 | statusCode = 206
309 | headers['Content-Range'] = 'bytes ' + range.start + '-' + range.end + '/' + entry.size
310 | headers['Content-Length'] = range.end - range.start + 1
311 | debug('Serving range:', range)
312 | } else {
313 | if (entry.size) {
314 | headers['Content-Length'] = entry.size
315 | }
316 | }
317 |
318 | // fetch the entry and stream the response
319 | debug('Entry found:', entry.path)
320 | fileReadStream = checkoutFS.createReadStream(entry.path, range)
321 | var dataStream = fileReadStream
322 | .pipe(mime.identifyStream(entry.path, mimeType => {
323 | // cleanup the timeout now, as bytes have begun to stream
324 | cleanup()
325 |
326 | // send headers, now that we can identify the data
327 | headersSent = true
328 | Object.assign(headers, {
329 | 'Content-Type': mimeType,
330 | 'Content-Security-Policy': cspHeader,
331 | 'Access-Control-Allow-Origin': '*',
332 | 'Cache-Control': 'no-cache'
333 | })
334 | // TODO
335 | // Electron is being really aggressive about caching and not following the headers correctly
336 | // caching is disabled till we can figure out why
337 | // -prf
338 | // if (ETag) {
339 | // Object.assign(headers, {ETag})
340 | // } else {
341 | // Object.assign(headers, {'Cache-Control': 'no-cache'})
342 | // }
343 |
344 | if (request.method === 'HEAD') {
345 | dataStream.destroy() // stop reading data
346 | respond({statusCode: 204, headers, data: intoStream('')})
347 | } else {
348 | respond({statusCode, headers, data: dataStream})
349 | }
350 | }))
351 |
352 | // handle empty files
353 | fileReadStream.once('end', () => {
354 | if (!headersSent) {
355 | cleanup()
356 | debug('Served empty file')
357 | respond({
358 | statusCode: 200,
359 | headers: {
360 | 'Content-Security-Policy': cspHeader,
361 | 'Access-Control-Allow-Origin': '*'
362 | },
363 | data: intoStream('')
364 | })
365 | }
366 | })
367 |
368 | // handle read-stream errors
369 | fileReadStream.once('error', err => {
370 | debug('Error reading file', err)
371 | if (!headersSent) respondError(500, 'Failed to read file')
372 | })
373 | }
374 |
--------------------------------------------------------------------------------
/web-apis/fg/dat-archive.js:
--------------------------------------------------------------------------------
1 | const { contextBridge, webFrame } = require('electron')
2 | const errors = require('beaker-error-constants')
3 | const datArchiveManifest = require('../manifests/external/dat-archive')
4 | const { exportEventStreamFn } = require('./event-target')
5 |
6 | exports.setupAndExpose = function (rpc) {
7 | // create the rpc apis
8 | const datRPC = rpc.importAPI('dat-archive', datArchiveManifest, { timeout: false, errors })
9 | exportEventStreamFn(datRPC, 'watch')
10 | exportEventStreamFn(datRPC, 'createNetworkActivityStream')
11 | contextBridge.exposeInMainWorld('__dat', datRPC)
12 |
13 | webFrame.executeJavaScript(`
14 | function Stat (data) {
15 | if (!(this instanceof Stat)) return new Stat(data)
16 |
17 | this.mode = data ? data.mode : 0
18 | this.size = data ? data.size : 0
19 | this.offset = data ? data.offset : 0
20 | this.blocks = data ? data.blocks : 0
21 | this.downloaded = data ? data.downloaded : 0
22 | this.atime = new Date(data ? data.mtime : 0) // we just set this to mtime ...
23 | this.mtime = new Date(data ? data.mtime : 0)
24 | this.ctime = new Date(data ? data.ctime : 0)
25 |
26 | this.linkname = data ? data.linkname : null
27 | }
28 |
29 | Stat.IFSOCK = 49152 // 0b1100...
30 | Stat.IFLNK = 40960 // 0b1010...
31 | Stat.IFREG = 32768 // 0b1000...
32 | Stat.IFBLK = 24576 // 0b0110...
33 | Stat.IFDIR = 16384 // 0b0100...
34 | Stat.IFCHR = 8192 // 0b0010...
35 | Stat.IFIFO = 4096 // 0b0001...
36 |
37 | Stat.prototype.isSocket = check(Stat.IFSOCK)
38 | Stat.prototype.isSymbolicLink = check(Stat.IFLNK)
39 | Stat.prototype.isFile = check(Stat.IFREG)
40 | Stat.prototype.isBlockDevice = check(Stat.IFBLK)
41 | Stat.prototype.isDirectory = check(Stat.IFDIR)
42 | Stat.prototype.isCharacterDevice = check(Stat.IFCHR)
43 | Stat.prototype.isFIFO = check(Stat.IFIFO)
44 |
45 | function check (mask) {
46 | return function () {
47 | return (mask & this.mode) === mask
48 | }
49 | }
50 |
51 |
52 |
53 | const LISTENERS = Symbol() // eslint-disable-line
54 | const CREATE_STREAM = Symbol() // eslint-disable-line
55 | const STREAM_EVENTS = Symbol() // eslint-disable-line
56 | const STREAM = Symbol() // eslint-disable-line
57 | const PREP_EVENT = Symbol() // eslint-disable-line
58 |
59 | class EventTarget {
60 | constructor () {
61 | this[LISTENERS] = {}
62 | }
63 |
64 | addEventListener (type, callback) {
65 | if (!(type in this[LISTENERS])) {
66 | this[LISTENERS][type] = []
67 | }
68 | this[LISTENERS][type].push(callback)
69 | }
70 |
71 | removeEventListener (type, callback) {
72 | if (!(type in this[LISTENERS])) {
73 | return
74 | }
75 | var stack = this[LISTENERS][type]
76 | var i = stack.findIndex(cb => cb === callback)
77 | if (i !== -1) {
78 | stack.splice(i, 1)
79 | }
80 | }
81 |
82 | dispatchEvent (event) {
83 | if (!(event.type in this[LISTENERS])) {
84 | return
85 | }
86 | event.target = this
87 | var stack = this[LISTENERS][event.type]
88 | stack.forEach(cb => cb.call(this, event))
89 | }
90 | }
91 |
92 | class EventTargetFromStream extends EventTarget {
93 | constructor (createStreamFn, events, eventPrepFn) {
94 | super()
95 | this[CREATE_STREAM] = createStreamFn
96 | this[STREAM_EVENTS] = events
97 | this[PREP_EVENT] = eventPrepFn
98 | this[STREAM] = null
99 | }
100 |
101 | addEventListener (type, callback) {
102 | if (!this[STREAM]) {
103 | // create the event stream
104 | let s = this[STREAM] = fromEventStream(this[CREATE_STREAM]())
105 | // proxy all events
106 | this[STREAM_EVENTS].forEach(event => {
107 | s.addEventListener(event, details => {
108 | details = details || {}
109 | if (this[PREP_EVENT]) {
110 | details = this[PREP_EVENT](event, details)
111 | }
112 | details.target = this
113 | this.dispatchEvent(new Event(event, details))
114 | })
115 | })
116 | }
117 | return super.addEventListener(type, callback)
118 | }
119 | }
120 |
121 | class Event {
122 | constructor (type, opts) {
123 | this.type = type
124 | for (var k in opts) {
125 | this[k] = opts[k]
126 | }
127 | Object.defineProperty(this, 'bubbles', {value: false})
128 | Object.defineProperty(this, 'cancelBubble', {value: false})
129 | Object.defineProperty(this, 'cancelable', {value: false})
130 | Object.defineProperty(this, 'composed', {value: false})
131 | Object.defineProperty(this, 'currentTarget', {value: this.target})
132 | Object.defineProperty(this, 'deepPath', {value: []})
133 | Object.defineProperty(this, 'defaultPrevented', {value: false})
134 | Object.defineProperty(this, 'eventPhase', {value: 2}) // Event.AT_TARGET
135 | Object.defineProperty(this, 'timeStamp', {value: Date.now()})
136 | Object.defineProperty(this, 'isTrusted', {value: true})
137 | Object.defineProperty(this, 'createEvent', {value: () => undefined})
138 | Object.defineProperty(this, 'composedPath', {value: () => []})
139 | Object.defineProperty(this, 'initEvent', {value: () => undefined})
140 | Object.defineProperty(this, 'preventDefault', {value: () => undefined})
141 | Object.defineProperty(this, 'stopImmediatePropagation', {value: () => undefined})
142 | Object.defineProperty(this, 'stopPropagation', {value: () => undefined})
143 | }
144 | }
145 |
146 | function bindEventStream (stream, target) {
147 | stream.on('data', data => {
148 | var event = data[1] || {}
149 | event.type = data[0]
150 | target.dispatchEvent(event)
151 | })
152 | }
153 |
154 | function fromEventStream (stream) {
155 | var target = new EventTarget()
156 | bindEventStream(stream, target)
157 | target.close = () => {
158 | target.listeners = {}
159 | stream.close()
160 | }
161 | return target
162 | }
163 |
164 |
165 | const SCHEME_REGEX = /[a-z]+:\\/\\//i
166 | // 1 2 3 4
167 | const VERSION_REGEX = /^(dat:\\/\\/)?([^/]+)(\\+[^/]+)(.*)$/i
168 |
169 | function parseDatURL (str, parseQS) {
170 | // prepend the scheme if it's missing
171 | if (!SCHEME_REGEX.test(str)) {
172 | str = 'dat://' + str
173 | }
174 |
175 | var parsed, version = null, match = VERSION_REGEX.exec(str)
176 | if (match) {
177 | // run typical parse with version segment removed
178 | parsed = parse((match[1] || '') + (match[2] || '') + (match[4] || ''), parseQS)
179 | version = match[3].slice(1)
180 | } else {
181 | parsed = parse(str, parseQS)
182 | }
183 | parsed.path = parsed.pathname // to match node
184 | if (!parsed.query && parsed.searchParams) {
185 | parsed.query = Object.fromEntries(parsed.searchParams) // to match node
186 | }
187 | parsed.version = version // add version segment
188 | return parsed
189 | }
190 |
191 | function parse (str) {
192 | return new URL(str)
193 | }
194 |
195 |
196 |
197 | const LOAD_PROMISE = Symbol('LOAD_PROMISE')
198 | const URL_PROMISE = Symbol('URL_PROMISE')
199 | const NETWORK_ACT_STREAM = Symbol() // eslint-disable-line
200 |
// Page-facing handle for a dat:// archive. Filesystem-style operations are
// proxied through the `__dat` bridge — presumably an RPC layer injected by
// the host application; confirm. Most methods first await URL_PROMISE so
// they operate on the DNS-resolved URL.
class DatArchive extends EventTarget {
  // url: a dat:// URL string, or window.location for the current page
  // throws if the URL is missing, not a string, or not dat://
  constructor (url) {
    super()
    // simple case: new DatArchive(window.location)
    if (url === window.location) {
      url = window.location.toString()
    }

    // basic URL validation
    if (!url || typeof url !== 'string') {
      throw new Error('Invalid dat:// URL')
    }

    // parse the URL
    const urlParsed = parseDatURL(url)
    if (!urlParsed || (urlParsed.protocol !== 'dat:')) {
      throw new Error('Invalid URL: must be a dat:// URL')
    }
    // normalize to "dat://hostname[+version]" — any path/query is dropped
    url = 'dat://' + urlParsed.hostname + (urlParsed.version ? ('+' + urlParsed.version) : '')

    // load into the 'active' (in-memory) cache
    setHidden(this, LOAD_PROMISE, __dat.loadArchive(url))

    // resolve the URL (DNS)
    const urlPromise = DatArchive.resolveName(url).then(url => {
      // re-attach the version segment, which resolveName does not carry through
      if (urlParsed.version) {
        url += '+' + urlParsed.version
      }
      return 'dat://' + url
    })
    setHidden(this, URL_PROMISE, urlPromise)

    // define this.url as a read-only property
    // (a non-writable value, not an actual getter, despite the old comment)
    Object.defineProperty(this, 'url', {
      enumerable: true,
      value: url
    })
  }

  // construct an archive and wait for both the backend load and name resolution
  static load (url) {
    const a = new DatArchive(url)
    return Promise.all([a[LOAD_PROMISE], a[URL_PROMISE]])
      .then(() => a)
  }

  // create a brand new archive via the backend, then wrap it in a handle
  static create (opts = {}) {
    return __dat.createArchive(opts)
      .then(newUrl => new DatArchive(newUrl))
  }

  // duplicate an existing archive; accepts an archive object or a url string
  static fork (url, opts = {}) {
    url = (typeof url.url === 'string') ? url.url : url
    if (!isDatURL(url)) {
      throw new Error('Invalid URL: must be a dat:// URL')
    }
    return __dat.forkArchive(url, opts)
      .then(newUrl => new DatArchive(newUrl))
  }

  // remove the archive from the backend; accepts an archive object or a url string
  static unlink (url) {
    url = (typeof url.url === 'string') ? url.url : url
    if (!isDatURL(url)) {
      throw new Error('Invalid URL: must be a dat:// URL')
    }
    return __dat.unlinkArchive(url)
  }

  // override to create the activity stream if needed
  // (network events are only produced once a listener asks for them)
  addEventListener (type, callback) {
    if (type === 'network-changed' || type === 'download' || type === 'upload' || type === 'sync') {
      createNetworkActStream(this)
    }
    super.addEventListener(type, callback)
  }

  // fetch the archive's info record from the backend
  async getInfo (opts = {}) {
    var url = await this[URL_PROMISE]
    return await __dat.getInfo(url, opts)
  }

  // update the archive's settings/manifest via the backend
  async configure (info, opts = {}) {
    var url = await this[URL_PROMISE]
    return await __dat.configure(url, info, opts)
  }

  // get a new handle pinned at the given version (omit version for latest);
  // note: any path on the current url is not carried over
  checkout (version) {
    const urlParsed = parseDatURL(this.url)
    version = version ? ('+' + version) : ''
    return new DatArchive('dat://' + urlParsed.hostname + version)
  }

  async diff (opts = {}) {
    // noop
    console.warn('The DatArchive diff() API has been deprecated.')
    return []
  }

  async commit (opts = {}) {
    // noop
    console.warn('The DatArchive commit() API has been deprecated.')
    return []
  }

  async revert (opts = {}) {
    // noop
    console.warn('The DatArchive revert() API has been deprecated.')
    return []
  }

  // list the archive's change history
  async history (opts = {}) {
    var url = await this[URL_PROMISE]
    return await __dat.history(url, opts)
  }

  // stat a path; wraps the backend's plain record in a Stat object
  async stat (path, opts = {}) {
    var url = await this[URL_PROMISE]
    return new Stat(await __dat.stat(url, path, opts))
  }

  async readFile (path, opts = {}) {
    var url = await this[URL_PROMISE]
    return await __dat.readFile(url, path, opts)
  }

  async writeFile (path, data, opts = {}) {
    var url = await this[URL_PROMISE]
    return await __dat.writeFile(url, path, data, opts)
  }

  async unlink (path, opts = {}) {
    var url = await this[URL_PROMISE]
    return await __dat.unlink(url, path, opts)
  }

  async copy (path, dstPath, opts = {}) {
    var url = await this[URL_PROMISE]
    return __dat.copy(url, path, dstPath, opts)
  }

  async rename (path, dstPath, opts = {}) {
    var url = await this[URL_PROMISE]
    return __dat.rename(url, path, dstPath, opts)
  }

  // ask the backend to fetch the given path's content from the network
  async download (path = '/', opts = {}) {
    var url = await this[URL_PROMISE]
    return await __dat.download(url, path, opts)
  }

  // list a directory; with opts.stat, each entry's .stat is wrapped in Stat
  async readdir (path = '/', opts = {}) {
    var url = await this[URL_PROMISE]
    var names = await __dat.readdir(url, path, opts)
    if (opts.stat) {
      names.forEach(name => { name.stat = new Stat(name.stat) })
    }
    return names
  }

  async mkdir (path, opts = {}) {
    var url = await this[URL_PROMISE]
    return await __dat.mkdir(url, path, opts)
  }

  async rmdir (path, opts = {}) {
    var url = await this[URL_PROMISE]
    return await __dat.rmdir(url, path, opts)
  }

  // deprecated alias for watch()
  createFileActivityStream (pathSpec = null) {
    console.warn('The DatArchive createFileActivityStream() API has been deprecated, use watch() instead.')
    return this.watch(pathSpec)
  }

  // watch for file changes; returns an EventTarget emitting 'invalidated'
  // NOTE(review): uses this.url (pre-DNS-resolution) unlike most methods — confirm intended
  watch (pathSpec = null, onInvalidated = null) {
    // usage: (onInvalidated)
    if (typeof pathSpec === 'function') {
      onInvalidated = pathSpec
      pathSpec = null
    }

    var evts = fromEventStream(__dat.watch(this.url, pathSpec))
    if (onInvalidated) {
      evts.addEventListener('invalidated', onInvalidated)
    }
    return evts
  }

  // deprecated: raw network activity stream; prefer addEventListener()
  createNetworkActivityStream () {
    console.warn('The DatArchive createNetworkActivityStream() API has been deprecated, use addEventListener() instead.')
    return fromEventStream(__dat.createNetworkActivityStream(this.url))
  }

  // resolve a dat shortname/URL to its key via the backend
  static async resolveName (name) {
    // simple case: DatArchive.resolveName(window.location)
    if (name === window.location) {
      name = window.location.toString()
    }
    return await __dat.resolveName(name)
  }

  // prompt the user to pick an archive, then wrap the selection in a handle
  static selectArchive (opts = {}) {
    return __dat.selectArchive(opts)
      .then(url => new DatArchive(url))
  }
}
406 |
// add internal methods
// (only attached when the page itself is on the privileged beaker: scheme)
if (window.location.protocol === 'beaker:') {
  DatArchive.importFromFilesystem = async function (opts = {}) {
    return await __dat.importFromFilesystem(opts)
  }

  DatArchive.exportToFilesystem = async function (opts = {}) {
    return await __dat.exportToFilesystem(opts)
  }

  DatArchive.exportToArchive = async function (opts = {}) {
    return await __dat.exportToArchive(opts)
  }

  // diff/merge accept either archive objects (with .url) or url strings
  DatArchive.diff = async function (srcUrl, dstUrl, opts = {}) {
    if (srcUrl && typeof srcUrl.url === 'string') srcUrl = srcUrl.url
    if (dstUrl && typeof dstUrl.url === 'string') dstUrl = dstUrl.url
    return __dat.diff(srcUrl, dstUrl, opts)
  }

  DatArchive.merge = async function (srcUrl, dstUrl, opts = {}) {
    if (srcUrl && typeof srcUrl.url === 'string') srcUrl = srcUrl.url
    if (dstUrl && typeof dstUrl.url === 'string') dstUrl = dstUrl.url
    return __dat.merge(srcUrl, dstUrl, opts)
  }
}
// expose the API to page scripts
window.DatArchive = DatArchive
434 |
435 | // internal methods
436 | // =
437 |
// Attach a non-enumerable property so it stays out of Object.keys/for-in
// (used to stash internal promises on DatArchive instances).
function setHidden (t, attr, value) {
  Object.defineProperty(t, attr, {value, enumerable: false})
}
441 |
// Returns true if `url` parses as a dat:// URL.
// FIX: parseDatURL throws (via the URL constructor) on unparseable input,
// which previously escaped as a raw TypeError and bypassed the friendly
// "Invalid URL: must be a dat:// URL" error in callers like fork()/unlink().
// Treat parse failures as "not a dat URL" instead.
function isDatURL (url) {
  try {
    var urlp = parseDatURL(url)
    return urlp && urlp.protocol === 'dat:'
  } catch (e) {
    return false
  }
}
446 |
// Lazily create (at most once per archive) the backend network-activity
// stream and re-emit its events on the archive object itself.
// NOTE(review): the listeners receive the event payload directly as `detail`,
// and extra fields are passed in the Event options bag — presumably the
// custom Event/EventTarget classes above support this; confirm.
function createNetworkActStream (archive) {
  if (archive[NETWORK_ACT_STREAM]) return

  var s = archive[NETWORK_ACT_STREAM] = fromEventStream(__dat.createNetworkActivityStream(archive.url))
  s.addEventListener('network-changed', detail => archive.dispatchEvent(new Event('network-changed', {target: archive, peers: detail.connections})))
  s.addEventListener('download', detail => archive.dispatchEvent(new Event('download', {target: archive, feed: detail.feed, block: detail.block, bytes: detail.bytes})))
  s.addEventListener('upload', detail => archive.dispatchEvent(new Event('upload', {target: archive, feed: detail.feed, block: detail.block, bytes: detail.bytes})))
  s.addEventListener('sync', detail => archive.dispatchEvent(new Event('sync', {target: archive, feed: detail.feed})))
}
456 | `)
457 | }
--------------------------------------------------------------------------------
/dbs/archives.js:
--------------------------------------------------------------------------------
1 | const path = require('path')
2 | const url = require('url')
3 | const mkdirp = require('mkdirp')
4 | const Events = require('events')
5 | const datEncoding = require('dat-encoding')
6 | const jetpack = require('fs-jetpack')
7 | const {InvalidArchiveKeyError} = require('beaker-error-constants')
8 | const db = require('./profile-data-db')
9 | const lock = require('../lib/lock')
10 | const {
11 | DAT_HASH_REGEX,
12 | DAT_GC_EXPIRATION_AGE
13 | } = require('../lib/const')
14 |
15 | // globals
16 | // =
17 |
var datPath // path to the dat folder (set once by setup())
var events = new Events() // emits 'update:archive-user-settings' and 'update:archive-meta'
20 |
21 | // exported methods
22 | // =
23 |
24 | exports.setup = function (opts) {
25 | // make sure the folders exist
26 | datPath = path.join(opts.userDataPath, 'Dat')
27 | mkdirp.sync(path.join(datPath, 'Archives'))
28 | }
29 |
// returns the absolute path to the dat folder (undefined until setup() runs)
exports.getDatPath = function () {
  return datPath
}
33 |
// get the path to an archive's files
// accepts either an archive object (reads .key) or a raw key
const getArchiveMetaPath = exports.getArchiveMetaPath = function (archiveOrKey) {
  const keyStr = datEncoding.toStr(archiveOrKey.key || archiveOrKey)
  // keys are sharded into subfolders by their first two characters
  const shard = keyStr.slice(0, 2)
  return path.join(datPath, 'Archives', 'Meta', shard, keyStr.slice(2))
}
39 |
// get the path to an archive's temporary local sync path
// accepts either an archive object (reads .key) or a raw key
const getInternalLocalSyncPath = exports.getInternalLocalSyncPath = function (archiveOrKey) {
  const keyStr = datEncoding.toStr(archiveOrKey.key || archiveOrKey)
  // same two-character sharding scheme as getArchiveMetaPath
  const shard = keyStr.slice(0, 2)
  return path.join(datPath, 'Archives', 'LocalCopy', shard, keyStr.slice(2))
}
45 |
46 | // delete all db entries and files for an archive
47 | exports.deleteArchive = async function (key) {
48 | const path = getArchiveMetaPath(key)
49 | const info = await jetpack.inspectTreeAsync(path)
50 | await Promise.all([
51 | db.run(`DELETE FROM archives WHERE key=?`, key),
52 | db.run(`DELETE FROM archives_meta WHERE key=?`, key),
53 | db.run(`DELETE FROM archives_meta_type WHERE key=?`, key),
54 | jetpack.removeAsync(path),
55 | jetpack.removeAsync(getInternalLocalSyncPath(key))
56 | ])
57 | return info.size
58 | }
59 |
// re-export the internal emitter's subscription methods (bound so callers
// can use them directly off the module)
exports.on = events.on.bind(events)
exports.addListener = events.addListener.bind(events)
exports.removeListener = events.removeListener.bind(events)
63 |
64 | // exported methods: archive user settings
65 | // =
66 |
67 | // get an array of saved archives
68 | // - optional `query` keys:
69 | // - `isSaved`: bool
70 | // - `isNetworked`: bool
71 | // - `isOwner`: bool, does beaker have the secret key?
72 | // - `type`: string, a type filter
73 | // - `showHidden`: bool, show hidden dats
74 | // - `key`: string, the key of the archive you want (return single result)
exports.query = async function (profileId, query) {
  query = query || {}

  // fetch archive meta
  // build WHERE clauses and their bound values in lockstep so placeholder
  // order matches the values array
  var values = []
  var WHERE = []
  if (query.isOwner === true) WHERE.push('archives_meta.isOwner = 1')
  if (query.isOwner === false) WHERE.push('archives_meta.isOwner = 0')
  if (query.isNetworked === true) WHERE.push('archives.networked = 1')
  if (query.isNetworked === false) WHERE.push('archives.networked = 0')
  if ('isSaved' in query) {
    if (query.isSaved) {
      WHERE.push('archives.profileId = ?')
      values.push(profileId)
      WHERE.push('archives.isSaved = 1')
    } else {
      // unsaved archives may have no `archives` row at all (NULL via LEFT JOIN)
      WHERE.push('(archives.isSaved = 0 OR archives.isSaved IS NULL)')
    }
  }
  if ('key' in query) {
    WHERE.push('archives_meta.key = ?')
    values.push(query.key)
  }
  if (!query.showHidden) WHERE.push('(archives.hidden = 0 OR archives.hidden IS NULL)')
  // collapse the clause array into a SQL string (WHERE is intentionally rebound)
  if (WHERE.length) WHERE = `WHERE ${WHERE.join(' AND ')}`
  else WHERE = ''

  var archives = await db.all(`
    SELECT
      archives_meta.*,
      GROUP_CONCAT(archives_meta_type.type) AS type,
      archives.isSaved,
      archives.hidden,
      archives.networked,
      archives.autoDownload,
      archives.autoUpload,
      archives.expiresAt,
      archives.localSyncPath,
      archives.previewMode
    FROM archives_meta
    LEFT JOIN archives ON archives.key = archives_meta.key
    LEFT JOIN archives_meta_type ON archives_meta_type.key = archives_meta.key
    ${WHERE}
    GROUP BY archives_meta.key
  `, values)

  // massage the output
  // convert sqlite 0/1 columns to booleans and nest the per-user settings
  archives.forEach(archive => {
    archive.url = `dat://${archive.key}`
    archive.isOwner = archive.isOwner != 0
    archive.type = archive.type ? archive.type.split(',') : []
    archive.userSettings = {
      isSaved: archive.isSaved != 0,
      hidden: archive.hidden != 0,
      networked: archive.networked != 0,
      autoDownload: archive.autoDownload != 0,
      autoUpload: archive.autoUpload != 0,
      expiresAt: archive.expiresAt,
      localSyncPath: archive.localSyncPath,
      previewMode: archive.previewMode == 1
    }

    // user settings
    // (now nested under userSettings; drop the flat copies)
    delete archive.isSaved
    delete archive.hidden
    delete archive.networked
    delete archive.autoDownload
    delete archive.autoUpload
    delete archive.expiresAt
    delete archive.localSyncPath
    delete archive.previewMode

    // deprecated attrs
    delete archive.createdByTitle
    delete archive.createdByUrl
    delete archive.forkOf
    delete archive.metaSize
    delete archive.stagingSize
    delete archive.stagingSizeLessIgnored
  })

  // apply manual filters
  // type filter requires the archive to carry EVERY requested type
  if ('type' in query) {
    let types = Array.isArray(query.type) ? query.type : [query.type]
    archives = archives.filter(a => {
      for (let type of types) {
        if (a.type.indexOf(type) === -1) {
          return false
        }
      }
      return true
    })
  }

  // single result (possibly undefined) when queried by key, else the array
  return ('key' in query) ? archives[0] : archives
}
171 |
172 | // get all archives that should be unsaved
exports.listExpiredArchives = async function () {
  // an expiresAt of 0 or NULL means "never expires"
  return db.all(`
    SELECT archives.key
    FROM archives
    WHERE
      archives.isSaved = 1
      AND archives.expiresAt != 0
      AND archives.expiresAt IS NOT NULL
      AND archives.expiresAt < ?
  `, [Date.now()])
}
184 |
185 | // get all archives that are ready for garbage collection
186 | exports.listGarbageCollectableArchives = async function ({olderThan, isOwner} = {}) {
187 | olderThan = typeof olderThan === 'number' ? olderThan : DAT_GC_EXPIRATION_AGE
188 | isOwner = typeof isOwner === 'boolean' ? `AND archives_meta.isOwner = ${isOwner ? '1' : '0'}` : ''
189 |
190 | // fetch archives
191 | var records = await db.all(`
192 | SELECT archives_meta.key
193 | FROM archives_meta
194 | LEFT JOIN archives ON archives_meta.key = archives.key
195 | WHERE
196 | (archives.isSaved != 1 OR archives.isSaved IS NULL)
197 | AND archives_meta.lastAccessTime < ?
198 | ${isOwner}
199 | `, [Date.now() - olderThan])
200 | var records2 = records.slice()
201 |
202 | // fetch any related drafts
203 | for (let record of records2) {
204 | let drafts = await db.all(`SELECT draftKey as key FROM archive_drafts WHERE masterKey = ? ORDER BY createdAt`, [record.key])
205 | records = records.concat(drafts)
206 | }
207 |
208 | return records
209 | }
210 |
211 | // upsert the last-access time
212 | exports.touch = async function (key, timeVar = 'lastAccessTime', value = -1) {
213 | var release = await lock('archives-db:meta')
214 | try {
215 | if (timeVar !== 'lastAccessTime' && timeVar !== 'lastLibraryAccessTime') {
216 | timeVar = 'lastAccessTime'
217 | }
218 | if (value === -1) value = Date.now()
219 | key = datEncoding.toStr(key)
220 | await db.run(`UPDATE archives_meta SET ${timeVar}=? WHERE key=?`, [value, key])
221 | await db.run(`INSERT OR IGNORE INTO archives_meta (key, ${timeVar}) VALUES (?, ?)`, [key, value])
222 | } finally {
223 | release()
224 | }
225 | }
226 |
// get a single archive's user settings
// - suppresses a not-found (and any lookup failure) with an empty object
const getUserSettings = exports.getUserSettings = async function (profileId, key) {
  // massage inputs
  key = datEncoding.toStr(key)

  // validate inputs
  if (!DAT_HASH_REGEX.test(key)) {
    throw new InvalidArchiveKeyError()
  }

  // fetch
  try {
    var settings = await db.get(`
      SELECT * FROM archives WHERE profileId = ? AND key = ?
    `, [profileId, key])
    if (!settings) {
      // FIX: previously this fell through to a TypeError (property access on
      // undefined) that the catch swallowed; make the not-found case explicit
      return {}
    }
    // normalize sqlite 0/1 integers to booleans
    settings.isSaved = !!settings.isSaved
    settings.hidden = !!settings.hidden
    settings.networked = !!settings.networked
    settings.autoDownload = !!settings.autoDownload
    settings.autoUpload = !!settings.autoUpload
    settings.previewMode = settings.previewMode == 1
    return settings
  } catch (e) {
    // keep the documented contract: any failure reads as "no settings"
    return {}
  }
}
254 |
// write an archive's user setting
// creates the row on first write, otherwise applies only the fields present
// (and correctly typed) in newValues; returns the resulting settings object
// and emits 'update:archive-user-settings'
exports.setUserSettings = async function (profileId, key, newValues = {}) {
  // massage inputs
  key = datEncoding.toStr(key)

  // validate inputs
  if (!DAT_HASH_REGEX.test(key)) {
    throw new InvalidArchiveKeyError()
  }

  var release = await lock('archives-db')
  try {
    // fetch current
    var value = await getUserSettings(profileId, key)

    // getUserSettings returns {} when there is no row yet
    if (!value || typeof value.key === 'undefined') {
      // create
      value = {
        profileId,
        key,
        isSaved: newValues.isSaved,
        hidden: newValues.hidden,
        networked: ('networked' in newValues) ? newValues.networked : true,
        // auto transfer defaults follow the isSaved flag
        autoDownload: ('autoDownload' in newValues) ? newValues.autoDownload : newValues.isSaved,
        autoUpload: ('autoUpload' in newValues) ? newValues.autoUpload : newValues.isSaved,
        expiresAt: newValues.expiresAt,
        localSyncPath: ('localSyncPath' in newValues) ? newValues.localSyncPath : '',
        previewMode: ('previewMode' in newValues) ? newValues.previewMode : ''
      }
      // booleans are stored as 0/1 via flag(); order must match the column
      // list in the INSERT below
      let valueArray = [
        profileId,
        key,
        flag(value.isSaved),
        flag(value.hidden),
        flag(value.networked),
        flag(value.autoDownload),
        flag(value.autoUpload),
        value.expiresAt,
        value.localSyncPath,
        flag(value.previewMode)
      ]
      await db.run(`
        INSERT INTO archives
          (
            profileId,
            key,
            isSaved,
            hidden,
            networked,
            autoDownload,
            autoUpload,
            expiresAt,
            localSyncPath,
            previewMode
          )
          VALUES (${valueArray.map(_ => '?').join(', ')})
      `, valueArray)
    } else {
      // update
      // apply only the fields that arrived with the expected type
      let { isSaved, hidden, networked, autoDownload, autoUpload, expiresAt, localSyncPath, previewMode } = newValues
      if (typeof isSaved === 'boolean') value.isSaved = isSaved
      if (typeof hidden === 'boolean') value.hidden = hidden
      if (typeof networked === 'boolean') value.networked = networked
      if (typeof autoDownload === 'boolean') value.autoDownload = autoDownload
      if (typeof autoUpload === 'boolean') value.autoUpload = autoUpload
      if (typeof expiresAt === 'number') value.expiresAt = expiresAt
      if (typeof localSyncPath === 'string') value.localSyncPath = localSyncPath
      if (typeof previewMode === 'boolean') value.previewMode = previewMode
      // order must match the SET list then the WHERE params below
      let valueArray = [
        flag(value.isSaved),
        flag(value.hidden),
        flag(value.networked),
        flag(value.autoDownload),
        flag(value.autoUpload),
        value.expiresAt,
        value.localSyncPath,
        flag(value.previewMode),
        profileId,
        key
      ]
      await db.run(`
        UPDATE archives
          SET
            isSaved = ?,
            hidden = ?,
            networked = ?,
            autoDownload = ?,
            autoUpload = ?,
            expiresAt = ?,
            localSyncPath = ?,
            previewMode = ?
          WHERE
            profileId = ? AND key = ?
      `, valueArray)
    }

    events.emit('update:archive-user-settings', key, value, newValues)
    return value
  } finally {
    release()
  }
}
357 |
358 | // exported methods: archive meta
359 | // =
360 |
// get a single archive's metadata
// - suppresses a not-found with a default metadata object (see defaultMeta)
const getMeta = exports.getMeta = async function (key) {
  // massage inputs
  key = datEncoding.toStr(key)

  // validate inputs
  if (!DAT_HASH_REGEX.test(key)) {
    throw new InvalidArchiveKeyError()
  }

  // fetch
  // types and installed app names come back comma-joined via GROUP_CONCAT
  var meta = await db.get(`
    SELECT
        archives_meta.*,
        GROUP_CONCAT(archives_meta_type.type) AS type,
        GROUP_CONCAT(apps.name) as installedNames
      FROM archives_meta
      LEFT JOIN archives_meta_type ON archives_meta_type.key = archives_meta.key
      LEFT JOIN apps ON apps.url = ('dat://' || archives_meta.key)
      WHERE archives_meta.key = ?
      GROUP BY archives_meta.key
  `, [key])
  if (!meta) {
    return defaultMeta(key)
  }

  // massage some values
  // (sqlite 0/1 -> boolean, comma-joined strings -> arrays)
  meta.isOwner = !!meta.isOwner
  meta.type = meta.type ? meta.type.split(',') : []
  meta.installedNames = meta.installedNames ? meta.installedNames.split(',') : []

  // remove old attrs
  delete meta.createdByTitle
  delete meta.createdByUrl
  delete meta.forkOf
  delete meta.metaSize
  delete meta.stagingSize
  delete meta.stagingSizeLessIgnored

  return meta
}
403 |
404 | // write an archive's metadata
405 | exports.setMeta = async function (key, value = {}) {
406 | // massage inputs
407 | key = datEncoding.toStr(key)
408 |
409 | // validate inputs
410 | if (!DAT_HASH_REGEX.test(key)) {
411 | throw new InvalidArchiveKeyError()
412 | }
413 |
414 | // extract the desired values
415 | var {title, description, type, size, mtime, isOwner} = value
416 | title = typeof title === 'string' ? title : ''
417 | description = typeof description === 'string' ? description : ''
418 | if (typeof type === 'string') type = type.split(' ')
419 | else if (Array.isArray(type)) type = type.filter(v => v && typeof v === 'string')
420 | isOwner = flag(isOwner)
421 |
422 | // write
423 | var release = await lock('archives-db:meta')
424 | var {lastAccessTime, lastLibraryAccessTime} = await getMeta(key)
425 | try {
426 | await db.run(`
427 | INSERT OR REPLACE INTO
428 | archives_meta (key, title, description, mtime, size, isOwner, lastAccessTime, lastLibraryAccessTime)
429 | VALUES (?, ?, ?, ?, ?, ?, ?, ?)
430 | `, [key, title, description, mtime, size, isOwner, lastAccessTime, lastLibraryAccessTime])
431 | await db.run(`DELETE FROM archives_meta_type WHERE key=?`, key)
432 | if (type) {
433 | await Promise.all(type.map(t => (
434 | db.run(`INSERT INTO archives_meta_type (key, type) VALUES (?, ?)`, [key, t])
435 | )))
436 | }
437 | } finally {
438 | release()
439 | }
440 | events.emit('update:archive-meta', key, value)
441 | }
442 |
443 | // find the archive currently using a given localSyncPath
444 | exports.getByLocalSyncPath = async function (profileId, localSyncPath) {
445 | try {
446 | return await db.get(`
447 | SELECT key FROM archives WHERE profileId = ? AND localSyncPath = ?
448 | `, [profileId, localSyncPath])
449 | } catch (e) {
450 | return null
451 | }
452 | }
453 |
454 | // internal methods
455 | // =
456 |
// Placeholder metadata record used when no archives_meta row exists for `key`.
// Fresh arrays are returned each call so callers can safely mutate them.
function defaultMeta (key) {
  const emptyRecord = {
    key,
    title: null,
    description: null,
    type: [],
    author: null,
    mtime: 0,
    isOwner: false,
    lastAccessTime: 0,
    installedNames: []
  }
  return emptyRecord
}
470 |
// Coerce any truthy/falsy value to a sqlite-friendly 1/0 integer.
function flag (b) {
  return Number(Boolean(b))
}
474 |
475 | exports.extractOrigin = function (originURL) {
476 | var urlp = url.parse(originURL)
477 | if (!urlp || !urlp.host || !urlp.protocol) return
478 | return (urlp.protocol + (urlp.slashes ? '//' : '') + urlp.host)
479 | }
480 |
--------------------------------------------------------------------------------