├── public
│   ├── css
│   │   └── style.css
│   ├── favicon.ico
│   └── index.html
├── .zprintrc
├── .firebaserc
├── firestore.rules
├── .clj-kondo
│   └── config.edn
├── .gitignore
├── firebase.json
├── project.clj
├── karma.conf.js
├── firestore.indexes.json
├── shadow-cljs.edn
├── .github
│   └── workflows
│       └── test.yml
├── LICENSE
├── src
│   ├── test
│   │   └── datafire
│   │       ├── cards.cljs
│   │       ├── tests.cljs
│   │       ├── offline_tests.cljs
│   │       ├── sandbox.cljs
│   │       ├── test_helpers.cljs
│   │       └── samples.cljs
│   └── main
│       └── datafire
│           └── core.cljs
├── package.json
├── README.md
├── pom.xml
└── docs
    ├── views.md
    └── sandbox.md
/public/css/style.css:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.zprintrc:
--------------------------------------------------------------------------------
1 | {:style :indent-only}
--------------------------------------------------------------------------------
/.firebaserc:
--------------------------------------------------------------------------------
1 | {
2 | "projects": {
3 | "default": "datascript-firebase"
4 | }
5 | }
6 |
--------------------------------------------------------------------------------
/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/filipesilva/datafire/HEAD/public/favicon.ico
--------------------------------------------------------------------------------
/firestore.rules:
--------------------------------------------------------------------------------
1 | service cloud.firestore {
2 | match /databases/{database}/documents {
3 | match /{document=**} {
4 | allow read, write;
5 | }
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/.clj-kondo/config.edn:
--------------------------------------------------------------------------------
1 | {:lint-as {devcards.core/defcard cljs.core/def
2 | devcards.core/defcard-rg cljs.core/def
3 | devcards.core/deftest cljs.core/def}}
4 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | target/
3 | public/js
4 | public/test
5 | out
6 | .shadow-cljs
7 | .nrepl-port
8 | .clj-kondo/.cache
9 | yarn-error.log
10 | report.html
11 | *.log
--------------------------------------------------------------------------------
/firebase.json:
--------------------------------------------------------------------------------
1 | {
2 | "firestore": {
3 | "rules": "firestore.rules",
4 | "indexes": "firestore.indexes.json"
5 | },
6 | "emulators": {
7 | "firestore": {
8 | "port": 8080
9 | }
10 | }
11 | }
--------------------------------------------------------------------------------
/project.clj:
--------------------------------------------------------------------------------
1 | (defproject datafire "0.1.1"
2 | :description "Persist Datascript databases in Firebase's Firestore."
3 | :url "https://github.com/filipesilva/datafire"
4 | :license {:name "MIT"
5 | :url "https://github.com/filipesilva/datafire/blob/master/LICENSE"}
6 | :dependencies [[org.clojure/clojurescript "1.10.520" :scope "provided"]]
7 | :source-paths ["src/main"]
8 | :repositories {"clojars" {:url "https://clojars.org/repo"
9 | :sign-releases false}})
--------------------------------------------------------------------------------
/karma.conf.js:
--------------------------------------------------------------------------------
1 | process.env.CHROME_BIN = require('puppeteer').executablePath();
2 | module.exports = function (config) {
3 | config.set({
4 | browsers: ['ChromeHeadless'],
5 | // The directory where the output file lives
6 | basePath: 'out',
7 | // The file itself
8 | files: ['run-tests.js'],
9 | frameworks: ['cljs-test'],
10 | plugins: ['karma-cljs-test', 'karma-chrome-launcher'],
11 | colors: true,
12 | logLevel: config.LOG_INFO,
13 | client: {
14 | args: ["shadow.test.karma.init"],
15 | singleRun: true
16 | }
17 | })
18 | };
--------------------------------------------------------------------------------
/public/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html>
3 |
4 | <head>
5 |   <meta charset="utf-8" />
6 |   <meta name="viewport" content="width=device-width, initial-scale=1" />
7 |   <link rel="stylesheet" href="/css/style.css" />
8 |   <title>datascript-firebase</title>
9 | </head>
10 |
11 | <body>
12 |   <script src="/js/main.js"></script>
13 | </body>
14 |
15 | </html>
--------------------------------------------------------------------------------
/firestore.indexes.json:
--------------------------------------------------------------------------------
1 | {
2 | // Example:
3 | //
4 | // "indexes": [
5 | // {
6 | // "collectionGroup": "widgets",
7 | // "queryScope": "COLLECTION",
8 | // "fields": [
9 | // { "fieldPath": "foo", "arrayConfig": "CONTAINS" },
10 | // { "fieldPath": "bar", "mode": "DESCENDING" }
11 | // ]
12 | // },
13 | //
14 | // "fieldOverrides": [
15 | // {
16 | // "collectionGroup": "widgets",
17 | // "fieldPath": "baz",
18 | // "indexes": [
19 | // { "order": "ASCENDING", "queryScope": "COLLECTION" }
20 | // ]
21 | // },
22 | // ]
23 | // ]
24 | "indexes": [],
25 | "fieldOverrides": []
26 | }
--------------------------------------------------------------------------------
/shadow-cljs.edn:
--------------------------------------------------------------------------------
1 | {:builds
2 | {:cards {:asset-path "/js"
3 | :modules {:main {:init-fn datafire.cards/main}}
4 | :compiler-options {:devcards true}
5 | :output-dir "public/js"
6 | :target :browser}
7 | :test {:target :browser-test
8 | :ns-regexp "datafire.cards"
9 | :test-dir "public/test"
10 | :devtools {:http-port 8021
11 | :http-root "public/test"}}
12 | :run-tests {:target :karma
13 | :ns-regexp "datafire.cards"
14 | :output-to "out/run-tests.js"}}
15 | :dependencies [[org.clojure/core.async "0.6.532"]
16 | [async-interop "0.1.4"]
17 | [reagent "0.8.1"]
18 | [devcards "0.2.6"]
19 | [datascript "0.18.9"]
20 | [datascript-transit "0.3.0"]]
21 | :dev-http {3000 "public"}
22 | :nrepl {:port 3333}
23 | :source-paths ["src/main" "src/test"]}
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Test
2 |
3 | on:
4 | pull_request: ~
5 | push:
6 | branches:
7 | # These are the branches that we want CI to run on direct pushes.
8 | # All other changes should go through PRs.
9 | - master
10 |
11 | jobs:
12 | test:
13 | runs-on: ubuntu-latest
14 | steps:
15 | - uses: actions/checkout@v1
16 | - name: Cache node_modules
17 | uses: actions/cache@v1
18 | with:
19 | path: node_modules
20 | key: node_modules-${{ hashFiles('yarn.lock') }}
21 | restore-keys: |
22 | node_modules-
23 | - name: Cache maven
24 | uses: actions/cache@v1
25 | with:
26 | path: ~/.m2
27 | key: m2-${{ hashFiles('shadow-cljs.edn') }}
28 | restore-keys: |
29 | m2-
30 | - name: Use Node.js 12.x
31 | uses: actions/setup-node@v1
32 | with:
33 | node-version: '12.x'
34 | - name: Install
35 | run: yarn
36 | # - name: Lint
37 | # run: yarn lint
38 | - name: Start background server
39 | run: yarn sc start
40 | - name: Test
41 | run: yarn test:ci
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2019-present, Filipe Silva.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/src/test/datafire/cards.cljs:
--------------------------------------------------------------------------------
1 | (ns datafire.cards
2 | ; devcards needs cljsjs.react and cljsjs.react.dom to be imported
3 | ; separately for shadow-cljs to add shims.
4 | (:require [cljsjs.react]
5 | [cljsjs.react.dom]
6 | ; We shouldn't need to load reagent directly, but it looks like devcards
7 | ; is importing it in such a way that it needs to be imported beforehand.
8 | [reagent.core]
9 | [devcards.core :refer [start-devcard-ui!]]
10 | ; Import all namespaces with cards here to load them.
11 | [datafire.sandbox]
12 | [datafire.tests]
13 | [datafire.offline-tests]))
14 |
15 | ; 15x the usual devcards timeout to give time for the sync tests.
16 | (set! devcards.core/test-timeout 12000)
17 |
18 | (defn ^:export main
19 | "Start the devcards UI."
20 | []
21 | ; Add a special class to the body to signal we're in devcards mode.
22 | ; We want to mostly use the same styles as the app, but might need to make
23 | ; some exceptions.
24 | (js/document.body.classList.add "using-devcards")
25 | ; Start the devcards UI.
26 | (start-devcard-ui!))
27 |
--------------------------------------------------------------------------------
/src/test/datafire/tests.cljs:
--------------------------------------------------------------------------------
1 | (ns datafire.tests
2 | (:require [cljs.test :refer [is async]]
3 | [cljs.core.async :refer [go <!]]
17 | (defn parse-fb-snapshot [query-snapshot]
18 | (map #(assoc (js->clj (.data %) :keywordize-keys true) :id (.-id %))
19 | (.-docs query-snapshot)))
20 |
21 | (defn firestore-logs-atom []
22 | (let [a (atom [])]
23 | (.onSnapshot (df/txs (link))
24 | #(reset! a (parse-fb-snapshot %)))
25 | a))
26 |
27 | (defn add-user [user]
28 | (df/transact! (link) [user]))
29 |
30 | (defn pull-2 []
31 | (print (d/pull @(conn) '[*] 2)))
32 |
33 | (defn add-ada []
34 | (let [ada {:db/id -1 :first "Ada" :last "Lovelace" :born "1815"}
35 | ada-ref {:db/id -1 :ada-ref 1}]
36 | [:<>
37 | [:div
38 | "Click to add an Ada Lovelace user "
39 | [:input {:type "button" :value "Add Ada"
40 | :on-click #(add-user ada)}]]
41 | [:div
42 | "Click to add an ada-ref to 1 "
43 | [:input {:type "button" :value "Add Ada ref"
44 | :on-click #(add-user ada-ref)}]]
45 | [:div
46 | "Click to pull on 2 "
47 | [:input {:type "button" :value "pull 2"
48 | :on-click #(pull-2)}]]
49 | [:div
50 | "Click to clear the firebase emulator database and reload "
51 | [:input {:type "button" :value "Clear"
52 | :on-click #(.then (js/fetch
53 | "http://localhost:8080/emulator/v1/projects/datafire/databases/(default)/documents"
54 | #js {:method "DELETE"})
55 | (fn [] (.reload js/window.location)))}]]
56 | [:div
57 | "Click to disable network "
58 | [:input {:type "button" :value "disable"
59 | :on-click #(.disableNetwork (fs))}]]
60 | [:div
61 | "Click to enable network "
62 | [:input {:type "button" :value "enable"
63 | :on-click #(.enableNetwork (fs))}]]]))
64 |
65 | (defcard-rg add-ada-card add-ada)
66 |
67 | (defcard ds-conn (conn))
68 |
69 | (defcard firestore-logs (firestore-logs-atom) [] {:history false})
70 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?><project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
2 |   <modelVersion>4.0.0</modelVersion>
3 |   <groupId>datafire</groupId>
4 |   <artifactId>datafire</artifactId>
5 |   <packaging>jar</packaging>
6 |   <version>0.1.1</version>
7 |   <name>datafire</name>
8 |   <description>Persist Datascript databases in Firebase's Firestore.</description>
9 |   <url>https://github.com/filipesilva/datafire</url>
10 |   <licenses>
11 |     <license>
12 |       <name>MIT</name>
13 |       <url>https://github.com/filipesilva/datafire/blob/master/LICENSE</url>
14 |     </license>
15 |   </licenses>
16 |   <scm>
17 |     <url>https://github.com/filipesilva/datascript-firebase</url>
18 |     <connection>scm:git:git://github.com/filipesilva/datascript-firebase.git</connection>
19 |     <developerConnection>scm:git:ssh://git@github.com/filipesilva/datascript-firebase.git</developerConnection>
20 |     <tag>db2e3fbc7f71a118171cf35939e27bec61a213c0</tag>
21 |   </scm>
22 |   <build>
23 |     <sourceDirectory>src\main</sourceDirectory>
24 |     <testSourceDirectory>test</testSourceDirectory>
25 |     <resources>
26 |       <resource>
27 |         <directory>resources</directory>
28 |       </resource>
29 |     </resources>
30 |     <testResources>
31 |       <testResource>
32 |         <directory>resources</directory>
33 |       </testResource>
34 |     </testResources>
35 |     <directory>target</directory>
36 |     <outputDirectory>target\classes</outputDirectory>
37 |   </build>
38 |   <repositories>
39 |     <repository>
40 |       <id>central</id>
41 |       <url>https://repo1.maven.org/maven2/</url>
42 |       <snapshots>
43 |         <enabled>false</enabled>
44 |       </snapshots>
45 |       <releases>
46 |         <enabled>true</enabled>
47 |       </releases>
48 |     </repository>
49 |     <repository>
50 |       <id>clojars</id>
51 |       <url>https://clojars.org/repo</url>
52 |       <snapshots>
53 |         <enabled>true</enabled>
54 |       </snapshots>
55 |       <releases>
56 |         <enabled>true</enabled>
57 |       </releases>
58 |     </repository>
59 |   </repositories>
60 |   <dependencies>
61 |     <dependency>
62 |       <groupId>org.clojure</groupId>
63 |       <artifactId>clojurescript</artifactId>
64 |       <version>1.10.520</version>
65 |       <scope>provided</scope>
66 |     </dependency>
67 |   </dependencies>
68 | </project>
69 |
70 | <!-- This file was autogenerated by Leiningen.
71 |   Please do not edit it directly; instead edit project.clj and regenerate it.
72 |   It should not be considered canonical data. For more information see
73 |   https://github.com/technomancy/leiningen -->
--------------------------------------------------------------------------------
/docs/views.md:
--------------------------------------------------------------------------------
1 | # Views
2 |
3 | Status: design
4 |
5 | Views are snapshots of a database state according to a query.
6 | They are stored as the resulting datoms and can be restored into a standalone Datascript database.
7 | Views are incomplete by design, as a Datascript database by itself doesn't keep retraction history.
8 |
9 | Views on a client can be kept up to date either from view updates or from log updates.
10 | In turn the view can be updated by any client with log access.
11 | Since views are derived data there will always be a delay between the log update and the view update.
12 |
13 | You can set security rules on each view, thus providing different access to different clients.
14 | But updating a view requires log access, so it doesn't make much sense to give write access to a
15 | view if the client doesn't have at least read access to the log.
16 |
17 | Views can be updated automatically or manually by setting the update policy.
18 | Automatic updates are performed by any client on log update, after a random delay and in batch.
19 | The random delay and batching help reduce view churn, and thus costs.
20 | Firebase validation rules are used to discard updates from older database states, according to tx id, ensuring only the latest snapshot is written.
21 | Views without automatic updates are updated by triggering a manual update via the API.
22 | Manual updates are useful to keep a point-in-time view.
23 |
24 | All links by default use a special view called `snapshot`.
25 | The query for this view lists all datoms in the database, and is thus a snapshot of the whole database.
26 | On link startup the connection will load the `snapshot` view and then apply newer transactions from the log.
27 | This behaviour can be disabled in order to obtain a connection atom that has gone through all
28 | transactions, including retractions, but doing so will be slower.
29 |
30 |
31 | ## Open questions & notes
32 |
33 | - View usage requires using the tx ordering field for queries and storage.
34 | This is currently a server timestamp, which is of higher fidelity than a JS date.
35 | Need to validate the server timestamp can be used client-side.
36 | - Should views also contain the result snapshot along with the datoms?
37 | Clients that only consume the view, and do not update it, might not care about the snapshot db.
38 | - Updating a snapshot requires a DB whose state is consistent with the server, meaning all
39 | transactions must have been applied in the same order as in the server.
40 | But offline transactions might locally be out of order in relation to the server.
41 | Thus for snapshot updates there needs to be a resync step where the local db is synced to the
42 | server order.
43 | - A random delay in view update would help reduce snapshot churn, and thus costs and computation.
44 | - On datom granularity it might be possible to provide view-based log query capability by keeping
45 | track of entity membership on a query.
46 | Unsure if this would be useful given snapshots already exist.
--------------------------------------------------------------------------------
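The views doc above says a view is stored as the resulting datoms and can be restored into a standalone Datascript database. A minimal ClojureScript sketch of that restore step, assuming a view document shaped as the source schema plus `[e a v]` tuples (both the document shape and `restore-view` are hypothetical, not part of the library):

```clojure
(ns example.view-restore
  (:require [datascript.core :as d]))

(defn restore-view
  "Build a standalone Datascript connection from a stored view.
  Assumes the view holds the source schema and its datoms as [e a v] tuples."
  [{:keys [schema datoms]}]
  (d/conn-from-datoms (map (fn [[e a v]] (d/datom e a v)) datoms)
                      schema))

(comment
  ;; Made-up view data for illustration.
  (def view {:schema {}
             :datoms [[1 :movie/title "Mad Max"]
                      [2 :movie/title "Alien"]]})
  ;; Query the restored database directly.
  (d/q '[:find ?t :where [_ :movie/title ?t]]
       @(restore-view view)))
```

Such a restored database is a snapshot only; as the doc notes, retraction history is not recoverable from it.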
/src/test/datafire/test_helpers.cljs:
--------------------------------------------------------------------------------
1 | (ns datafire.test-helpers
2 | (:require [cljs.core.async :refer [go]]
3 | [datascript.core :as d]
4 | [datafire.core :as df]
5 | ["firebase/app" :as firebase]
6 | ["firebase/firestore"]))
7 |
8 | (def firebase-config #js {:apiKey "AIzaSyAYJX2_LdpTbdgcaGYvSbfz9hJplqTPi7Y"
9 | :authDomain "datafire.firebasedatafire.com"
10 | :projectId "datafire"})
11 | (def emulator-settings #js {:host "localhost:8080" :ssl false})
12 |
13 | (def default-test-app (str df/default-firebase-app "-TEST"))
14 |
15 | (defn test-link
16 | ([] (test-link {}))
17 | ([{:keys [schema path name granularity]
18 | :or {schema {}
19 | path (str "tmp/rand-path-" (rand))
20 | name default-test-app
21 | granularity :tx}}]
22 | (go (let [_ (try (.app firebase name)
23 | (catch js/Error _
24 | (let [new-app (.initializeApp firebase firebase-config name)
25 | _ (.settings (.firestore new-app) emulator-settings)]
26 | new-app)))
27 | conn (d/create-conn schema)
28 | link (df/create-link conn path {:name name :granularity granularity})]
29 | (df/listen! link)
30 | [conn link path name]))))
31 |
32 | (defn query-lethal-weapon [conn]
33 | (d/q '[:find ?e .
34 | :where [?e :movie/title "Lethal Weapon"]]
35 | @conn))
36 |
37 | (defn pull-lethal-weapon [conn]
38 | (d/pull @conn '[*] (query-lethal-weapon conn)))
39 |
40 | (def pulled-lethal-weapon-snapshot
41 | {:db/id 57
42 | :movie/cast
43 | [{:db/id 13, :person/born "1956-01-03", :person/name "Mel Gibson"}
44 | {:db/id 14
45 | :person/born "1946-07-22"
46 | :person/name "Danny Glover"}
47 | {:db/id 15
48 | :person/born "1944-07-29"
49 | :person/name "Gary Busey"}]
50 | :movie/director
51 | [{:db/id 12
52 | :person/born "1930-04-24"
53 | :person/name "Richard Donner"}]
54 | :movie/sequel
55 | {:db/id 58
56 | :movie/cast
57 | [{:db/id 13
58 | :person/born "1956-01-03"
59 | :person/name "Mel Gibson"}
60 | {:db/id 14
61 | :person/born "1946-07-22"
62 | :person/name "Danny Glover"}
63 | {:db/id 37
64 | :person/born "1943-02-09"
65 | :person/name "Joe Pesci"}]
66 | :movie/director
67 | [{:db/id 12
68 | :person/born "1930-04-24"
69 | :person/name "Richard Donner"}]
70 | :movie/sequel
71 | {:db/id 64
72 | :movie/cast
73 | [{:db/id 13
74 | :person/born "1956-01-03"
75 | :person/name "Mel Gibson"}
76 | {:db/id 14
77 | :person/born "1946-07-22"
78 | :person/name "Danny Glover"}
79 | {:db/id 37
80 | :person/born "1943-02-09"
81 | :person/name "Joe Pesci"}]
82 | :movie/director
83 | [{:db/id 12
84 | :person/born "1930-04-24"
85 | :person/name "Richard Donner"}]
86 | :movie/title "Lethal Weapon 3"
87 | :movie/year 1992}
88 | :movie/title "Lethal Weapon 2"
89 | :movie/year 1989}
90 | :movie/title "Lethal Weapon"
91 | :movie/year 1987})
--------------------------------------------------------------------------------
/docs/sandbox.md:
--------------------------------------------------------------------------------
1 | # Sandbox
2 |
3 | This doc contains assorted notes and todos as I come up with them.
4 |
5 |
6 | ## Notes
7 |
8 | - For datom granularity, don't need to group by tx when just getting latest snapshot. Add retracted field and update it in a cloud function to enable short snapshot via query.
9 |
10 | - Keep only av on datom granularity transit data.
11 |
12 | - If the rules deny access to any of the specified document paths, the entire request fails. https://firebase.google.com/docs/firestore/security/get-started and https://youtu.be/eW5MdE3ZcAw?t=866 and https://firebase.google.com/docs/firestore/security/rules-query and https://firebase.google.com/docs/firestore/solutions/role-based-access.
13 |
14 | - On datom granularity, store read access info on the datom, update it on a cloud function that watches a special doc that lists access privileges. Can't just use security rule to filter because of https://stackoverflow.com/questions/56296046/firestore-access-documents-in-collection-user-has-permission-to. Test if this works before investing time in it: permissions model, semantics for adding permission, cloud function for adding permission to existing tx.
15 |
16 | - Consider making db views instead of user permissions. A view is a collection of entities. Users can have access to views. Datoms list which views they belong to. This also neatly gets around making user uids public. Views don't need to be bound to users either, they can also be used to watch a subsection of the full db.
17 |
18 | - Can use firestore events for cloud functions to update docs https://firebase.google.com/docs/functions/firestore-events. If we're updating docs, we'll need to also begin watching update events, not just "added" ones.
19 |
20 | - With entity level permissions, it's possible for a client to not see full transactions. Need to account for that on the storage schema.
21 |
22 | - That covers datom reads, but what about writes? Would need a security rule that says "user can only write txs that have this entity". Could use https://firebase.google.com/docs/firestore/security/rules-conditions#access_other_documents to check if the id exists on the access collection.
23 |
24 | - What happens to offline writes that fail security rules? Are they removed on a snapshot update?
25 |
26 | - Views are probably either pulls or queries. Pulls might be statically analyzable for relevant seids, given the schema. Then the cloud function only needs to watch for datoms with those seids to update view membership.
27 |
28 | - Overwrites and retractions might be hard. This would be an optimization over loading the pull and verifying membership though. Maybe start with actually doing the pull and improve on that.
29 |
30 | - Firestore in offline mode adds a lot of extra load per document fetched, which makes datom granularity much slower than tx granularity.
31 |
32 | ### Notes for https://tonsky.me/blog/datascript-internals/:
33 |
34 | - Try using the datom eavt array format with added bool flag, get around the need for t somehow. Make a size comparison for large transit. Consider if needed in the datom granularity.
35 |
36 | - Roam folks mentioned it was too slow to load all tx, maybe the slowness is ds loading all individual ones and adding indexes etc repeatedly? If so it might be worth it to still load individual tx but transact them as a single giant one.
37 |
38 | - The non-temp IDs seem to be called implicit IDs.
39 |
40 | - It's safe to use op vectors constructed from datoms, but TxReport is the only place where you can see datoms with added == false for datoms which were retracted.
41 |
42 | - Filtered DBs sound interesting for ACL purposes. Maybe a similar concept can be used for datom granularity df links, but the filtering is happening on fb, maybe even a fb function for more involved filters.
43 |
44 |
45 | ## TODOs
46 |
47 | - test/figure out retracts `[1 :name "Ivan" 536870918 false]`
48 | - negative tx means retract
49 | - figure out whether other ds built-ins ever appear as the op in tx-datoms (see builtin-fn?)
50 | - add spec to validate data coming in and out
51 | - really need to revisit tx/tx-data/ops names
52 | - add error-cbs to transact!
53 | - after I have tests, check if it's ok to add tempid info on fb doc
54 | - consider adding docs that transact! returns a promise with the doc (and thus seid), but that it only resolves when it hits the fb server. Offline docs say this shouldn't be relied on overall and it's better to not wait on this promise.
55 | - support tx-meta on transact!
56 | - test permissions model
57 | - save link info on metadata, validate it's the same on... listen I guess? no, on security rules.
58 | - use either `conn-from-datoms` or `conn-from-db` to create the db instead of replaying all tx, it's more performant.
59 | - this needs a tx number though, so there needs to be a `stx->tx` conversion step too.
60 | - consider keeping the non-ref attrs separately to scale better after a certain size
61 | - e.g. store large strings elsewhere
62 | - similar to datalog keeping the indexes that then lazy load content
63 | - add db sync according to last known correct tx order
--------------------------------------------------------------------------------
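Regarding the "figure out retracts" TODO and the note that a TxReport is the only place where datoms appear with added == false: the `:added` flag on report datoms is enough to rebuild `:db/add` vs `:db/retract` ops. A small sketch, using a hypothetical helper `report->ops` that is not part of datafire.core:

```clojure
(ns example.report-ops
  (:require [datascript.core :as d]))

(defn report->ops
  "Rebuild op vectors from a TxReport, using :added to tell adds from retracts."
  [report]
  (map (fn [datom]
         [(if (:added datom) :db/add :db/retract)
          (:e datom) (:a datom) (:v datom)])
       (:tx-data report)))

(comment
  (let [conn (d/create-conn {})]
    (d/transact! conn [{:db/id -1 :name "Ivan"}])
    ;; => ([:db/retract 1 :name "Ivan"])
    (report->ops (d/transact! conn [[:db/retract 1 :name "Ivan"]]))))
```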
/src/main/datafire/core.cljs:
--------------------------------------------------------------------------------
1 | (ns datafire.core
2 | (:require [datascript.core :as d]
3 | [datascript.transit :as dt]
4 | ["firebase/app" :as firebase]
5 | ["firebase/firestore"]))
6 |
7 | (def default-firebase-app "[DEFAULT]")
8 |
9 | (defn- firestore [link]
10 | (.firestore (.app firebase (:name link))))
11 |
12 | (defn- server-timestamp []
13 | (.serverTimestamp (.-FieldValue (.-firestore firebase))))
14 |
15 | (defn db [link]
16 | (.doc (firestore link) (:path link)))
17 |
18 | (defn txs [link]
19 | (.collection (db link) "txs"))
20 |
21 | (defn- new-seid [link]
22 | ; Note: this doesn't actually create a doc.
23 | (.-id (.doc (txs link))))
24 |
25 | (defn- datom->op [datom]
26 | [(if (pos? (:tx datom))
27 | :db/add
28 | :db/retract)
29 | (:e datom) (:a datom) (:v datom)])
30 |
31 | (defn- resolve-id [id local global]
32 | (or (get local id)
33 | (get global id)))
34 |
35 | (defn- throw-unresolved-id [id local global]
36 | (if-let [resolved (resolve-id id local global)]
37 | resolved
38 | (throw (str "Could not resolve eid " id))))
39 |
40 | (defn- resolve-op [op refs local global]
41 | [(op 0)
42 | (throw-unresolved-id (op 1) local global)
43 | (op 2)
44 | (if (contains? refs (op 2))
45 | (throw-unresolved-id (op 3) local global)
46 | (op 3))])
47 |
48 | (defn- save-to-firestore! [link tx-data]
49 | (let [coll (txs link)
50 | granularity (:granularity link)]
51 | (cond (= granularity :tx) (.add coll #js {:t (dt/write-transit-str tx-data)
52 | :ts (server-timestamp)})
53 | ; Firestore transactions can't be done offline, but batches can so we use that.
54 | (= granularity :datom) (let [batch (.batch (firestore link))
55 | tx-id (.-id (.doc coll))]
56 | (doseq [[idx op] (map-indexed vector tx-data)]
57 | (.set batch (.doc coll)
58 | #js {:tx tx-id
59 | :ts (server-timestamp)
60 | ; Order matters in DS, so we keep it.
61 | ; https://github.com/tonsky/datascript/issues/172
62 | :i idx
63 | :d (dt/write-transit-str op)}))
64 | (.commit batch))
65 | :else (throw (str "Unsupported granularity: " granularity)))))
66 |
67 | (defn- transact-to-datascript! [link ops seid->tempid]
68 | (let [tempids (dissoc (:tempids (d/transact! (:conn link) ops)) :db/current-tx)]
69 | (doseq [entry seid->tempid]
70 | (let [seid (key entry)
71 | eid (get tempids (val entry))]
72 | (swap! (:seid->eid link) assoc seid eid)
73 | (swap! (:eid->seid link) assoc eid seid)))))
74 |
75 | (defn- update-tempids [op refs max-tempid seid->tempid seid->eid]
76 | (loop [seids (if (contains? refs (op 2))
77 | [(op 1) (op 3)]
78 | [(op 1)])
79 | seid->tempid seid->tempid
80 | max-tempid max-tempid]
81 | (if (empty? seids)
82 | [seid->tempid max-tempid]
83 | (let [seid (first seids)
84 | existing-eid (resolve-id seid seid->tempid seid->eid)
85 | new-max-tempid (if existing-eid max-tempid (inc max-tempid))
86 | new-seid->tempid (if existing-eid
87 | seid->tempid
88 | (assoc seid->tempid seid (- new-max-tempid)))]
89 | (recur (rest seids)
90 | new-seid->tempid
91 | new-max-tempid)))))
92 |
93 | (defn- load-transaction! [link tx-data]
94 | (let [refs (:db.type/ref (:rschema @(:conn link)))
95 | seid->eid @(:seid->eid link)]
96 | (loop [input-ops tx-data
97 | output-ops []
98 | seid->tempid {}
99 | max-tempid 0]
100 | (if (empty? input-ops)
101 | (transact-to-datascript! link output-ops seid->tempid)
102 | (let [op (first input-ops)
103 | [new-seid->tempid
104 | new-max-tempid] (update-tempids op refs max-tempid seid->tempid seid->eid)]
105 | (recur (rest input-ops)
106 | (conj output-ops (resolve-op op refs new-seid->tempid seid->eid))
107 | new-seid->tempid
108 | new-max-tempid))))))
109 |
110 | (defn- snapshot->txs [link snapshot]
111 | (let [granularity (:granularity link)
112 | ; Only listen to "added" events because our transactions are
113 | ; immutable on the server.
114 | ; The server timestamp is technically an exception, since the client
115 | ; that adds the transaction will see a "modified" event when the
116 | ; timestamp is added, but other clients will only see the "added".
117 | ; This isn't a problem because the timestamp is used for ordering and
118 | ; we assume client tx happen as soon as they are committed locally.
119 | doc-changes (.filter (.docChanges snapshot) #(= (.-type %) "added"))
120 | length (.-length doc-changes)]
121 | ; On tx granularity, each doc is a transaction.
122 | (cond (= granularity :tx) (loop [idx 0
123 | txs []]
124 | (if (= idx length)
125 | txs
126 | (recur (inc idx)
127 | (conj txs
128 | (dt/read-transit-str
129 | (.-t (.data (.-doc (aget doc-changes idx)))))))))
130 | ; On datom granularity, each doc is a datom that belongs to a given transaction.
131 | (= granularity :datom) (loop [idx 0
132 | tx-ids []
133 | txs-map {}]
134 | (if (= idx length)
135 | (map #(vals (get txs-map %)) tx-ids)
136 | (let [data (.data (.-doc (aget doc-changes idx)))
137 | datom (dt/read-transit-str (.-d data))
138 | tx-id (.-tx data)
139 | tx-idx (.-i data)]
140 | (if (contains? txs-map tx-id)
141 | (recur (inc idx)
142 | tx-ids
143 | (update txs-map tx-id conj [tx-idx datom]))
144 | (recur (inc idx)
145 | (conj tx-ids tx-id)
146 | (conj txs-map
147 | [tx-id (sorted-map tx-idx datom)]))))))
148 | :else (throw (str "Unsupported granularity: " granularity)))))
149 |
150 | (defn- listen-to-firestore [link error-cb]
151 | ; Any given snapshot contains full transactions regardless of granularity.
152 | ; With :tx granularity, that's a single doc.
153 | ; With :datom granularity, there's a doc for each datom in the tx, but they are in the same
154 | ; snapshot because the writes are batched.
155 | (.onSnapshot (.orderBy (txs link) "ts")
156 | #(doseq [tx-data (snapshot->txs link %)]
157 | (load-transaction! link tx-data))
158 | error-cb))
159 |
160 | (defn transact!
161 | "Persist tx-data on the link.
162 | Returns a promise that resolves when the transaction hits the server.
163 | Since the promise won't resolve while offline, it's recommended that you never wait for it."
164 | [link tx-data]
165 | (let [report (d/with @(:conn link) tx-data)
166 | eid->seid (into {} (map #(vector (val %) (new-seid link))
167 | (dissoc (:tempids report) :db/current-tx)))
168 | resolved-ops (map #(resolve-op (datom->op %)
169 | (:db.type/ref (:rschema @(:conn link)))
170 | eid->seid
171 | @(:eid->seid link))
172 | (:tx-data report))]
173 | (save-to-firestore! link resolved-ops)))
174 |
175 | (defn create-link
176 | "Create a link between a Datascript connection and a Firestore document path."
177 | ([conn path] (create-link conn path {}))
178 | ([conn path {:keys [name granularity]
179 | :or {name default-firebase-app
180 | granularity :tx}}]
181 | (with-meta
182 | {:conn conn
183 | :path path
184 | :name name
185 | :granularity granularity
186 | :seid->eid (atom {})
187 | :eid->seid (atom {})}
188 | {:unsubscribe (atom nil)})))
189 |
190 | (defn unlisten!
191 | "Stop listening to transactions on Firebase."
192 | [link]
193 | (let [unsubscribe @(:unsubscribe (meta link))]
194 | (when unsubscribe (unsubscribe))
195 | (reset! (:unsubscribe (meta link)) nil)))
196 |
197 | (defn listen!
198 | "Start listening to transactions on the link and applies them to the Datascript connection.
199 | Previous transactions will be loaded onto the Datascript connection."
200 | ([link] (listen! link js/undefined))
201 | ([link error-cb]
202 | (unlisten! link)
203 | (reset! (:unsubscribe (meta link)) (listen-to-firestore link error-cb))))
204 |
--------------------------------------------------------------------------------
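A rough end-to-end usage sketch for the public functions in datafire.core above (create-link, listen!, transact!, unlisten!); the Firebase config values and the document path are placeholders:

```clojure
(ns example.usage
  (:require [datascript.core :as d]
            [datafire.core :as df]
            ["firebase/app" :as firebase]
            ["firebase/firestore"]))

;; Initialize the default Firebase app before creating a link (placeholder config).
(defonce app (.initializeApp firebase #js {:projectId "my-project"}))

(def conn (d/create-conn {}))

;; Link the Datascript connection to a Firestore document path.
;; :granularity can be :tx (default, one doc per transaction) or :datom.
(def link (df/create-link conn "apps/my-app" {:granularity :tx}))

;; Load previous transactions and apply new ones to conn as they arrive.
(df/listen! link #(js/console.error "listen error:" %))

;; Persist a transaction; the snapshot listener applies it to conn.
(df/transact! link [{:db/id -1 :movie/title "Mad Max"}])

;; Stop listening when done.
(df/unlisten! link)
```

Per the transact! docstring, the returned promise only resolves once the write reaches the server, so it should not normally be awaited.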
/src/test/datafire/samples.cljs:
--------------------------------------------------------------------------------
1 | (ns datafire.samples)
2 |
3 | ; Adapted from https://github.com/jonase/learndatalogtoday/blob/master/resources/db/data.edn
4 | (def schema {:movie/director {:db/valueType :db.type/ref
5 | :db/cardinality :db.cardinality/many
6 | :db/isComponent true}
7 | :movie/cast {:db/valueType :db.type/ref
8 | :db/cardinality :db.cardinality/many
9 | :db/isComponent true}
10 | :movie/sequel {:db/valueType :db.type/ref
11 | :db/isComponent true}})
12 | (def data
13 | [{:db/id -100
14 | :person/name "James Cameron"
15 | :person/born "1954-08-16"}
16 |
17 | {:db/id -101
18 | :person/name "Arnold Schwarzenegger"
19 | :person/born "1947-07-30"}
20 |
21 | {:db/id -102
22 | :person/name "Linda Hamilton"
23 | :person/born "1956-09-26"}
24 |
25 | {:db/id -103
26 | :person/name "Michael Biehn"
27 | :person/born "1956-07-31"}
28 |
29 | {:db/id -104
30 | :person/name "Ted Kotcheff"
31 | :person/born "1931-04-07"}
32 |
33 | {:db/id -105
34 | :person/name "Sylvester Stallone"
35 | :person/born "1946-07-06"}
36 |
37 | {:db/id -106
38 | :person/name "Richard Crenna"
39 | :person/born "1926-11-30"
40 | :person/death "2003-01-17"}
41 |
42 | {:db/id -107
43 | :person/name "Brian Dennehy"
44 | :person/born "1938-07-09"}
45 |
46 | {:db/id -108
47 | :person/name "John McTiernan"
48 | :person/born "1951-01-08"}
49 |
50 | {:db/id -109
51 | :person/name "Elpidia Carrillo"
52 | :person/born "1961-08-16"}
53 |
54 | {:db/id -110
55 | :person/name "Carl Weathers"
56 | :person/born "1948-01-14"}
57 |
58 | {:db/id -111
59 | :person/name "Richard Donner"
60 | :person/born "1930-04-24"}
61 |
62 | {:db/id -112
63 | :person/name "Mel Gibson"
64 | :person/born "1956-01-03"}
65 |
66 | {:db/id -113
67 | :person/name "Danny Glover"
68 | :person/born "1946-07-22"}
69 |
70 | {:db/id -114
71 | :person/name "Gary Busey"
72 | :person/born "1944-07-29"}
73 |
74 | {:db/id -115
75 | :person/name "Paul Verhoeven"
76 | :person/born "1938-07-18"}
77 |
78 | {:db/id -116
79 | :person/name "Peter Weller"
80 | :person/born "1947-06-24"}
81 |
82 | {:db/id -117
83 | :person/name "Nancy Allen"
84 | :person/born "1950-06-24"}
85 |
86 | {:db/id -118
87 | :person/name "Ronny Cox"
88 | :person/born "1938-07-23"}
89 |
90 | {:db/id -119
91 | :person/name "Mark L. Lester"
92 | :person/born "1946-11-26"}
93 |
94 | {:db/id -120
95 | :person/name "Rae Dawn Chong"
96 | :person/born "1961-02-28"}
97 |
98 | {:db/id -121
99 | :person/name "Alyssa Milano"
100 | :person/born "1972-12-19"}
101 |
102 | {:db/id -122
103 | :person/name "Bruce Willis"
104 | :person/born "1955-03-19"}
105 |
106 | {:db/id -123
107 | :person/name "Alan Rickman"
108 | :person/born "1946-02-21"}
109 |
110 | {:db/id -124
111 | :person/name "Alexander Godunov"
112 | :person/born "1949-11-28"
113 | :person/death "1995-05-18"}
114 |
115 | {:db/id -125
116 | :person/name "Robert Patrick"
117 | :person/born "1958-11-05"}
118 |
119 | {:db/id -126
120 | :person/name "Edward Furlong"
121 | :person/born "1977-08-02"}
122 |
123 | {:db/id -127
124 | :person/name "Jonathan Mostow"
125 | :person/born "1961-11-28"}
126 |
127 | {:db/id -128
128 | :person/name "Nick Stahl"
129 | :person/born "1979-12-05"}
130 |
131 | {:db/id -129
132 | :person/name "Claire Danes"
133 | :person/born "1979-04-12"}
134 |
135 | {:db/id -130
136 | :person/name "George P. Cosmatos"
137 | :person/born "1941-01-04"
138 | :person/death "2005-04-19"}
139 |
140 | {:db/id -131
141 | :person/name "Charles Napier"
142 | :person/born "1936-04-12"
143 | :person/death "2011-10-05"}
144 |
145 | {:db/id -132
146 | :person/name "Peter MacDonald"}
147 |
148 | {:db/id -133
149 | :person/name "Marc de Jonge"
150 | :person/born "1949-02-16"
151 | :person/death "1996-06-06"}
152 |
153 | {:db/id -134
154 | :person/name "Stephen Hopkins"}
155 |
156 | {:db/id -135
157 | :person/name "Ruben Blades"
158 | :person/born "1948-07-16"}
159 |
160 | {:db/id -136
161 | :person/name "Joe Pesci"
162 | :person/born "1943-02-09"}
163 |
164 | {:db/id -137
165 | :person/name "Ridley Scott"
166 | :person/born "1937-11-30"}
167 |
168 | {:db/id -138
169 | :person/name "Tom Skerritt"
170 | :person/born "1933-08-25"}
171 |
172 | {:db/id -139
173 | :person/name "Sigourney Weaver"
174 | :person/born "1949-10-08"}
175 |
176 | {:db/id -140
177 | :person/name "Veronica Cartwright"
178 | :person/born "1949-04-20"}
179 |
180 | {:db/id -141
181 | :person/name "Carrie Henn"}
182 |
183 | {:db/id -142
184 | :person/name "George Miller"
185 | :person/born "1945-03-03"}
186 |
187 | {:db/id -143
188 | :person/name "Steve Bisley"
189 | :person/born "1951-12-26"}
190 |
191 | {:db/id -144
192 | :person/name "Joanne Samuel"}
193 |
194 | {:db/id -145
195 | :person/name "Michael Preston"
196 | :person/born "1938-05-14"}
197 |
198 | {:db/id -146
199 | :person/name "Bruce Spence"
200 | :person/born "1945-09-17"}
201 |
202 | {:db/id -147
203 | :person/name "George Ogilvie"
204 | :person/born "1931-03-05"}
205 |
206 | {:db/id -148
207 | :person/name "Tina Turner"
208 | :person/born "1939-11-26"}
209 |
210 | {:db/id -149
211 | :person/name "Sophie Marceau"
212 | :person/born "1966-11-17"}
213 |
214 | {:db/id -200
215 | :movie/title "The Terminator"
216 | :movie/year 1984
217 | :movie/director -100
218 | :movie/cast [-101
219 | -102
220 | -103]
221 | :movie/sequel -207}
222 |
223 | {:db/id -201
224 | :movie/title "First Blood"
225 | :movie/year 1982
226 | :movie/director -104
227 | :movie/cast [-105
228 | -106
229 | -107]
230 | :movie/sequel -209}
231 |
232 | {:db/id -202
233 | :movie/title "Predator"
234 | :movie/year 1987
235 | :movie/director -108
236 | :movie/cast [-101
237 | -109
238 | -110]
239 | :movie/sequel -211}
240 |
241 | {:db/id -203
242 | :movie/title "Lethal Weapon"
243 | :movie/year 1987
244 | :movie/director -111
245 | :movie/cast [-112
246 | -113
247 | -114]
248 | :movie/sequel -212}
249 |
250 | {:db/id -204
251 | :movie/title "RoboCop"
252 | :movie/year 1987
253 | :movie/director -115
254 | :movie/cast [-116
255 | -117
256 | -118]}
257 |
258 | {:db/id -205
259 | :movie/title "Commando"
260 | :movie/year 1985
261 | :movie/director -119
262 | :movie/cast [-101
263 | -120
264 | -121]
265 | :trivia "In 1986, a sequel was written with an eye to having
266 | John McTiernan direct. Schwarzenegger wasn't interested in reprising
267 | the role. The script was then reworked with a new central character,
268 | eventually played by Bruce Willis, and became Die Hard"}
269 |
270 | {:db/id -206
271 | :movie/title "Die Hard"
272 | :movie/year 1988
273 | :movie/director -108
274 | :movie/cast [-122
275 | -123
276 | -124]}
277 |
278 | {:db/id -207
279 | :movie/title "Terminator 2: Judgment Day"
280 | :movie/year 1991
281 | :movie/director -100
282 | :movie/cast [-101
283 | -102
284 | -125
285 | -126]
286 | :movie/sequel -208}
287 |
288 | {:db/id -208
289 | :movie/title "Terminator 3: Rise of the Machines"
290 | :movie/year 2003
291 | :movie/director -127
292 | :movie/cast [-101
293 | -128
294 | -129]}
295 |
296 | {:db/id -209
297 | :movie/title "Rambo: First Blood Part II"
298 | :movie/year 1985
299 | :movie/director -130
300 | :movie/cast [-105
301 | -106
302 | -131]
303 | :movie/sequel -210}
304 |
305 | {:db/id -210
306 | :movie/title "Rambo III"
307 | :movie/year 1988
308 | :movie/director -132
309 | :movie/cast [-105
310 | -106
311 | -133]}
312 |
313 | {:db/id -211
314 | :movie/title "Predator 2"
315 | :movie/year 1990
316 | :movie/director -134
317 | :movie/cast [-113
318 | -114
319 | -135]}
320 |
321 | {:db/id -212
322 | :movie/title "Lethal Weapon 2"
323 | :movie/year 1989
324 | :movie/director -111
325 | :movie/cast [-112
326 | -113
327 | -136]
328 | :movie/sequel -213}
329 |
330 | {:db/id -213
331 | :movie/title "Lethal Weapon 3"
332 | :movie/year 1992
333 | :movie/director -111
334 | :movie/cast [-112
335 | -113
336 | -136]}
337 |
338 | {:db/id -214
339 | :movie/title "Alien"
340 | :movie/year 1979
341 | :movie/director -137
342 | :movie/cast [-138
343 | -139
344 | -140]
345 | :movie/sequel -215}
346 |
347 | {:db/id -215
348 | :movie/title "Aliens"
349 | :movie/year 1986
350 | :movie/director -100
351 | :movie/cast [-139
352 | -141
353 | -103]}
354 |
355 | {:db/id -216
356 | :movie/title "Mad Max"
357 | :movie/year 1979
358 | :movie/director -142
359 | :movie/cast [-112
360 | -143
361 | -144]
362 | :movie/sequel -217}
363 |
364 | {:db/id -217
365 | :movie/title "Mad Max 2"
366 | :movie/year 1981
367 | :movie/director -142
368 | :movie/cast [-112
369 | -145
370 | -146]
371 | :movie/sequel -218}
372 |
373 | {:db/id -218
374 | :movie/title "Mad Max Beyond Thunderdome"
375 | :movie/year 1985
376 | :movie/director [-142
377 | -147]
378 | :movie/cast [-112
379 | -148]}
380 |
381 | {:db/id -219
382 | :movie/title "Braveheart"
383 | :movie/year 1995
384 | :movie/director [-112]
385 | :movie/cast [-112
386 | -149]}
387 |
388 | ])
--------------------------------------------------------------------------------