├── .VERSION_PREFIX
├── .dir-locals.el
├── .gitignore
├── CHANGELOG.md
├── LICENSE.txt
├── README.md
├── bb.edn
├── bin
├── kaocha
├── launchpad
└── proj
├── deps.edn
├── dev
└── user.clj
├── examples
├── mbrainz-duckdb
│ ├── README.md
│ ├── deps.edn
│ ├── repl_sessions
│ │ └── db_insert.clj
│ └── src
│ │ └── lambdaisland
│ │ └── mbrainz.clj
├── mbrainz-postgres
│ ├── README.md
│ ├── deps.edn
│ └── src
│ │ └── lambdaisland
│ │ └── mbrainz.clj
└── v0.4.50
│ └── mbrainz-postgres
│ ├── README.md
│ ├── deps.edn
│ └── src
│ └── lambdaisland
│ └── mbrainz.clj
├── pom.xml
├── repl_sessions
├── etl.clj
├── find_prev_tx.clj
├── first_class_tx.clj
├── jdbc_poke.clj
└── plenish_first_spike.clj
├── src
├── .gitkeep
└── lambdaisland
│ ├── plenish.clj
│ └── plenish
│ ├── adapters
│ ├── duckdb.clj
│ └── postgres.clj
│ └── protocols.clj
├── test
├── .gitkeep
└── lambdaisland
│ ├── duckdb
│ └── plenish_test.clj
│ ├── plenish
│ └── factories.clj
│ └── postgres
│ └── plenish_test.clj
└── tests.edn
/.VERSION_PREFIX:
--------------------------------------------------------------------------------
1 | 0.6
--------------------------------------------------------------------------------
/.dir-locals.el:
--------------------------------------------------------------------------------
1 | ((nil . ((cider-clojure-cli-global-options . "-A:dev:test:jdbc:datomic-pro:postgresql"))))
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .cpcache
2 | .nrepl-port
3 | target
4 | repl
5 | scratch.clj
6 | .shadow-cljs
7 | target
8 | yarn.lock
9 | node_modules/
10 | .DS_Store
11 | resources/public/ui
12 | .store
13 | out
14 | .#*
15 | *.local.*
16 | *.local
17 | musicbrainz
18 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Unreleased
2 |
3 | ## Added
4 |
5 | ## Fixed
6 |
7 | ## Changed
8 |
9 | # 0.6.99 (2025-02-26 / 2295083)
10 |
11 | ## Added
12 |
13 | - DuckDB adapter and PostgreSQL adapter.
14 |
15 | ## Changed
16 |
17 | - Change `initial-ctx` and `sync-to-latest` to accept a db-adapter parameter
18 |
19 | # 0.4.50 (2023-04-24 / 1d5eca9)
20 |
21 | ## Added
22 |
23 | - First public release
24 | - Convenience function `sync-to-latest`
25 |
26 | ## Fixed
27 |
28 | - Fixed issue where multiple cardinality-many attributes would lead to clashing constraint names
29 |
30 | # 0.3.45 (2022-12-23 / b87cb3a)
31 |
32 | ## Added
33 |
34 | - Added a `find-max-t` helper function, for picking up work where it was left off
35 |
36 | ## Changed
37 |
38 | - Throw when trying to reprocess an earlier transaction
39 |
40 | # 0.2.37 (2022-09-14 / 5b770a2)
41 |
42 | ## Fixed
43 |
44 | - Fix updates and retractions
45 |
46 | # 0.1.23 (2022-07-25 / cf784ed)
47 |
48 | - First proof of concept
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Mozilla Public License Version 2.0
2 | ==================================
3 |
4 | 1. Definitions
5 | --------------
6 |
7 | 1.1. "Contributor"
8 | means each individual or legal entity that creates, contributes to
9 | the creation of, or owns Covered Software.
10 |
11 | 1.2. "Contributor Version"
12 | means the combination of the Contributions of others (if any) used
13 | by a Contributor and that particular Contributor's Contribution.
14 |
15 | 1.3. "Contribution"
16 | means Covered Software of a particular Contributor.
17 |
18 | 1.4. "Covered Software"
19 | means Source Code Form to which the initial Contributor has attached
20 | the notice in Exhibit A, the Executable Form of such Source Code
21 | Form, and Modifications of such Source Code Form, in each case
22 | including portions thereof.
23 |
24 | 1.5. "Incompatible With Secondary Licenses"
25 | means
26 |
27 | (a) that the initial Contributor has attached the notice described
28 | in Exhibit B to the Covered Software; or
29 |
30 | (b) that the Covered Software was made available under the terms of
31 | version 1.1 or earlier of the License, but not also under the
32 | terms of a Secondary License.
33 |
34 | 1.6. "Executable Form"
35 | means any form of the work other than Source Code Form.
36 |
37 | 1.7. "Larger Work"
38 | means a work that combines Covered Software with other material, in
39 | a separate file or files, that is not Covered Software.
40 |
41 | 1.8. "License"
42 | means this document.
43 |
44 | 1.9. "Licensable"
45 | means having the right to grant, to the maximum extent possible,
46 | whether at the time of the initial grant or subsequently, any and
47 | all of the rights conveyed by this License.
48 |
49 | 1.10. "Modifications"
50 | means any of the following:
51 |
52 | (a) any file in Source Code Form that results from an addition to,
53 | deletion from, or modification of the contents of Covered
54 | Software; or
55 |
56 | (b) any new file in Source Code Form that contains any Covered
57 | Software.
58 |
59 | 1.11. "Patent Claims" of a Contributor
60 | means any patent claim(s), including without limitation, method,
61 | process, and apparatus claims, in any patent Licensable by such
62 | Contributor that would be infringed, but for the grant of the
63 | License, by the making, using, selling, offering for sale, having
64 | made, import, or transfer of either its Contributions or its
65 | Contributor Version.
66 |
67 | 1.12. "Secondary License"
68 | means either the GNU General Public License, Version 2.0, the GNU
69 | Lesser General Public License, Version 2.1, the GNU Affero General
70 | Public License, Version 3.0, or any later versions of those
71 | licenses.
72 |
73 | 1.13. "Source Code Form"
74 | means the form of the work preferred for making modifications.
75 |
76 | 1.14. "You" (or "Your")
77 | means an individual or a legal entity exercising rights under this
78 | License. For legal entities, "You" includes any entity that
79 | controls, is controlled by, or is under common control with You. For
80 | purposes of this definition, "control" means (a) the power, direct
81 | or indirect, to cause the direction or management of such entity,
82 | whether by contract or otherwise, or (b) ownership of more than
83 | fifty percent (50%) of the outstanding shares or beneficial
84 | ownership of such entity.
85 |
86 | 2. License Grants and Conditions
87 | --------------------------------
88 |
89 | 2.1. Grants
90 |
91 | Each Contributor hereby grants You a world-wide, royalty-free,
92 | non-exclusive license:
93 |
94 | (a) under intellectual property rights (other than patent or trademark)
95 | Licensable by such Contributor to use, reproduce, make available,
96 | modify, display, perform, distribute, and otherwise exploit its
97 | Contributions, either on an unmodified basis, with Modifications, or
98 | as part of a Larger Work; and
99 |
100 | (b) under Patent Claims of such Contributor to make, use, sell, offer
101 | for sale, have made, import, and otherwise transfer either its
102 | Contributions or its Contributor Version.
103 |
104 | 2.2. Effective Date
105 |
106 | The licenses granted in Section 2.1 with respect to any Contribution
107 | become effective for each Contribution on the date the Contributor first
108 | distributes such Contribution.
109 |
110 | 2.3. Limitations on Grant Scope
111 |
112 | The licenses granted in this Section 2 are the only rights granted under
113 | this License. No additional rights or licenses will be implied from the
114 | distribution or licensing of Covered Software under this License.
115 | Notwithstanding Section 2.1(b) above, no patent license is granted by a
116 | Contributor:
117 |
118 | (a) for any code that a Contributor has removed from Covered Software;
119 | or
120 |
121 | (b) for infringements caused by: (i) Your and any other third party's
122 | modifications of Covered Software, or (ii) the combination of its
123 | Contributions with other software (except as part of its Contributor
124 | Version); or
125 |
126 | (c) under Patent Claims infringed by Covered Software in the absence of
127 | its Contributions.
128 |
129 | This License does not grant any rights in the trademarks, service marks,
130 | or logos of any Contributor (except as may be necessary to comply with
131 | the notice requirements in Section 3.4).
132 |
133 | 2.4. Subsequent Licenses
134 |
135 | No Contributor makes additional grants as a result of Your choice to
136 | distribute the Covered Software under a subsequent version of this
137 | License (see Section 10.2) or under the terms of a Secondary License (if
138 | permitted under the terms of Section 3.3).
139 |
140 | 2.5. Representation
141 |
142 | Each Contributor represents that the Contributor believes its
143 | Contributions are its original creation(s) or it has sufficient rights
144 | to grant the rights to its Contributions conveyed by this License.
145 |
146 | 2.6. Fair Use
147 |
148 | This License is not intended to limit any rights You have under
149 | applicable copyright doctrines of fair use, fair dealing, or other
150 | equivalents.
151 |
152 | 2.7. Conditions
153 |
154 | Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
155 | in Section 2.1.
156 |
157 | 3. Responsibilities
158 | -------------------
159 |
160 | 3.1. Distribution of Source Form
161 |
162 | All distribution of Covered Software in Source Code Form, including any
163 | Modifications that You create or to which You contribute, must be under
164 | the terms of this License. You must inform recipients that the Source
165 | Code Form of the Covered Software is governed by the terms of this
166 | License, and how they can obtain a copy of this License. You may not
167 | attempt to alter or restrict the recipients' rights in the Source Code
168 | Form.
169 |
170 | 3.2. Distribution of Executable Form
171 |
172 | If You distribute Covered Software in Executable Form then:
173 |
174 | (a) such Covered Software must also be made available in Source Code
175 | Form, as described in Section 3.1, and You must inform recipients of
176 | the Executable Form how they can obtain a copy of such Source Code
177 | Form by reasonable means in a timely manner, at a charge no more
178 | than the cost of distribution to the recipient; and
179 |
180 | (b) You may distribute such Executable Form under the terms of this
181 | License, or sublicense it under different terms, provided that the
182 | license for the Executable Form does not attempt to limit or alter
183 | the recipients' rights in the Source Code Form under this License.
184 |
185 | 3.3. Distribution of a Larger Work
186 |
187 | You may create and distribute a Larger Work under terms of Your choice,
188 | provided that You also comply with the requirements of this License for
189 | the Covered Software. If the Larger Work is a combination of Covered
190 | Software with a work governed by one or more Secondary Licenses, and the
191 | Covered Software is not Incompatible With Secondary Licenses, this
192 | License permits You to additionally distribute such Covered Software
193 | under the terms of such Secondary License(s), so that the recipient of
194 | the Larger Work may, at their option, further distribute the Covered
195 | Software under the terms of either this License or such Secondary
196 | License(s).
197 |
198 | 3.4. Notices
199 |
200 | You may not remove or alter the substance of any license notices
201 | (including copyright notices, patent notices, disclaimers of warranty,
202 | or limitations of liability) contained within the Source Code Form of
203 | the Covered Software, except that You may alter any license notices to
204 | the extent required to remedy known factual inaccuracies.
205 |
206 | 3.5. Application of Additional Terms
207 |
208 | You may choose to offer, and to charge a fee for, warranty, support,
209 | indemnity or liability obligations to one or more recipients of Covered
210 | Software. However, You may do so only on Your own behalf, and not on
211 | behalf of any Contributor. You must make it absolutely clear that any
212 | such warranty, support, indemnity, or liability obligation is offered by
213 | You alone, and You hereby agree to indemnify every Contributor for any
214 | liability incurred by such Contributor as a result of warranty, support,
215 | indemnity or liability terms You offer. You may include additional
216 | disclaimers of warranty and limitations of liability specific to any
217 | jurisdiction.
218 |
219 | 4. Inability to Comply Due to Statute or Regulation
220 | ---------------------------------------------------
221 |
222 | If it is impossible for You to comply with any of the terms of this
223 | License with respect to some or all of the Covered Software due to
224 | statute, judicial order, or regulation then You must: (a) comply with
225 | the terms of this License to the maximum extent possible; and (b)
226 | describe the limitations and the code they affect. Such description must
227 | be placed in a text file included with all distributions of the Covered
228 | Software under this License. Except to the extent prohibited by statute
229 | or regulation, such description must be sufficiently detailed for a
230 | recipient of ordinary skill to be able to understand it.
231 |
232 | 5. Termination
233 | --------------
234 |
235 | 5.1. The rights granted under this License will terminate automatically
236 | if You fail to comply with any of its terms. However, if You become
237 | compliant, then the rights granted under this License from a particular
238 | Contributor are reinstated (a) provisionally, unless and until such
239 | Contributor explicitly and finally terminates Your grants, and (b) on an
240 | ongoing basis, if such Contributor fails to notify You of the
241 | non-compliance by some reasonable means prior to 60 days after You have
242 | come back into compliance. Moreover, Your grants from a particular
243 | Contributor are reinstated on an ongoing basis if such Contributor
244 | notifies You of the non-compliance by some reasonable means, this is the
245 | first time You have received notice of non-compliance with this License
246 | from such Contributor, and You become compliant prior to 30 days after
247 | Your receipt of the notice.
248 |
249 | 5.2. If You initiate litigation against any entity by asserting a patent
250 | infringement claim (excluding declaratory judgment actions,
251 | counter-claims, and cross-claims) alleging that a Contributor Version
252 | directly or indirectly infringes any patent, then the rights granted to
253 | You by any and all Contributors for the Covered Software under Section
254 | 2.1 of this License shall terminate.
255 |
256 | 5.3. In the event of termination under Sections 5.1 or 5.2 above, all
257 | end user license agreements (excluding distributors and resellers) which
258 | have been validly granted by You or Your distributors under this License
259 | prior to termination shall survive termination.
260 |
261 | ************************************************************************
262 | * *
263 | * 6. Disclaimer of Warranty *
264 | * ------------------------- *
265 | * *
266 | * Covered Software is provided under this License on an "as is" *
267 | * basis, without warranty of any kind, either expressed, implied, or *
268 | * statutory, including, without limitation, warranties that the *
269 | * Covered Software is free of defects, merchantable, fit for a *
270 | * particular purpose or non-infringing. The entire risk as to the *
271 | * quality and performance of the Covered Software is with You. *
272 | * Should any Covered Software prove defective in any respect, You *
273 | * (not any Contributor) assume the cost of any necessary servicing, *
274 | * repair, or correction. This disclaimer of warranty constitutes an *
275 | * essential part of this License. No use of any Covered Software is *
276 | * authorized under this License except under this disclaimer. *
277 | * *
278 | ************************************************************************
279 |
280 | ************************************************************************
281 | * *
282 | * 7. Limitation of Liability *
283 | * -------------------------- *
284 | * *
285 | * Under no circumstances and under no legal theory, whether tort *
286 | * (including negligence), contract, or otherwise, shall any *
287 | * Contributor, or anyone who distributes Covered Software as *
288 | * permitted above, be liable to You for any direct, indirect, *
289 | * special, incidental, or consequential damages of any character *
290 | * including, without limitation, damages for lost profits, loss of *
291 | * goodwill, work stoppage, computer failure or malfunction, or any *
292 | * and all other commercial damages or losses, even if such party *
293 | * shall have been informed of the possibility of such damages. This *
294 | * limitation of liability shall not apply to liability for death or *
295 | * personal injury resulting from such party's negligence to the *
296 | * extent applicable law prohibits such limitation. Some *
297 | * jurisdictions do not allow the exclusion or limitation of *
298 | * incidental or consequential damages, so this exclusion and *
299 | * limitation may not apply to You. *
300 | * *
301 | ************************************************************************
302 |
303 | 8. Litigation
304 | -------------
305 |
306 | Any litigation relating to this License may be brought only in the
307 | courts of a jurisdiction where the defendant maintains its principal
308 | place of business and such litigation shall be governed by laws of that
309 | jurisdiction, without reference to its conflict-of-law provisions.
310 | Nothing in this Section shall prevent a party's ability to bring
311 | cross-claims or counter-claims.
312 |
313 | 9. Miscellaneous
314 | ----------------
315 |
316 | This License represents the complete agreement concerning the subject
317 | matter hereof. If any provision of this License is held to be
318 | unenforceable, such provision shall be reformed only to the extent
319 | necessary to make it enforceable. Any law or regulation which provides
320 | that the language of a contract shall be construed against the drafter
321 | shall not be used to construe this License against a Contributor.
322 |
323 | 10. Versions of the License
324 | ---------------------------
325 |
326 | 10.1. New Versions
327 |
328 | Mozilla Foundation is the license steward. Except as provided in Section
329 | 10.3, no one other than the license steward has the right to modify or
330 | publish new versions of this License. Each version will be given a
331 | distinguishing version number.
332 |
333 | 10.2. Effect of New Versions
334 |
335 | You may distribute the Covered Software under the terms of the version
336 | of the License under which You originally received the Covered Software,
337 | or under the terms of any subsequent version published by the license
338 | steward.
339 |
340 | 10.3. Modified Versions
341 |
342 | If you create software not governed by this License, and you want to
343 | create a new license for such software, you may create and use a
344 | modified version of this License if you rename the license and remove
345 | any references to the name of the license steward (except to note that
346 | such modified license differs from this License).
347 |
348 | 10.4. Distributing Source Code Form that is Incompatible With Secondary
349 | Licenses
350 |
351 | If You choose to distribute Source Code Form that is Incompatible With
352 | Secondary Licenses under the terms of this version of the License, the
353 | notice described in Exhibit B of this License must be attached.
354 |
355 | Exhibit A - Source Code Form License Notice
356 | -------------------------------------------
357 |
358 | This Source Code Form is subject to the terms of the Mozilla Public
359 | License, v. 2.0. If a copy of the MPL was not distributed with this
360 | file, You can obtain one at http://mozilla.org/MPL/2.0/.
361 |
362 | If it is not possible or desirable to put the notice in a particular
363 | file, then You may include the notice in a location (such as a LICENSE
364 | file in a relevant directory) where a recipient would be likely to look
365 | for such a notice.
366 |
367 | You may add additional accurate notices of copyright ownership.
368 |
369 | Exhibit B - "Incompatible With Secondary Licenses" Notice
370 | ---------------------------------------------------------
371 |
372 | This Source Code Form is "Incompatible With Secondary Licenses", as
373 | defined by the Mozilla Public License, v. 2.0.
374 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Plenish
2 |
3 | Restock your warehouse.
4 |
5 | Sync Datomic to an RDBMS.
6 |
7 |
8 | [](https://cljdoc.org/d/com.lambdaisland/plenish) [](https://clojars.org/com.lambdaisland/plenish)
9 |
10 |
11 | ## Requirements
12 |
13 | Plenish requires Clojure version >= 1.11.2.
14 |
15 | ## Usage
16 |
17 | For the most common use case, copying all transactions that haven't been copied
18 | yet, this is what you need:
19 |
20 | ```clj
21 | (def datomic-conn (d/connect "datomic:..."))
22 | (def pg-conn (jdbc/get-datasource "jdbc:pgsql://..."))
23 |
24 | (def metaschema
25 | {:tables {:user/name {}}})
26 |
27 | (def db-adapter (postgres/db-adapter))
28 |
29 | (plenish/sync-to-latest datomic-conn pg-conn metaschema db-adapter)
30 | ```
31 |
32 | There are more fine-grained functions if you want to have greater control over
33 | the process.
34 |
35 | ```clj
36 | (let [;; find the most recent transaction that has been copied, or `nil` if this
37 | ;; is the first run
38 | max-t (plenish/find-max-t pg-conn)
39 |
40 | ;; query the current datomic schema. plenish will track schema changes as
41 |       ;; it processes transactions, but it needs to know what the schema looks
42 | ;; like so far.
43 | ctx (plenish/initial-ctx datomic-conn metaschema db-adapter max-t)
44 |
45 | ;; grab the datomic transactions you want plenish to process. this grabs
46 | ;; all transactions that haven't been processed yet.
47 | txs (d/tx-range (d/log datomic-conn) (when max-t (inc max-t)) nil)]
48 |
49 | ;; get to work
50 | (plenish/import-tx-range ctx datomic-conn pg-conn txs))
51 | ```
52 |
53 | Note that Plenish will ensure that a transaction is never processed twice
54 | (through a PostgreSQL uniqueness constraint on the transactions table), but it
55 | won't check if you are skipping transactions. This is not a problem if you are
56 | using `find-max-t` as shown above, but if you are piping the tx-report-queue
57 | into Plenish then you will have to build in your own guarantees to make sure you
58 | don't lose any transactions.
59 |
60 | ## Configuration
61 |
62 | Plenish takes a Metaschema, a map with (currently) a single key, `:tables`, its
63 | value being a map. Each map entry creates a table, where the map entry is the
64 | membership attribute that determines whether an entity becomes a row in that
65 | table. The value is a map of configuration keys for that table.
66 |
67 | - `:name` Name of the table, optional, defaults to the namespace name of the membership attribute
68 | - `:rename` Alternative names for specific columns
69 | - `:rename-many-table` Alternative names for join tables created for has-many attributes
70 |
71 | ```clj
72 | {:tables
73 | {:user/name {:name "users"
74 |               :rename {:user/profile "profile_url"}}
75 |   :user-group/name {:rename-many-table {:user-group/users "group_members"}}}}
76 | ```
77 |
78 | The above configuration will result in three tables, `users`, `user-group`, and
79 | `group_members`. Had the `:rename-many-table` been omitted, the last would be
80 | called `user_group_x_user`.
81 |
82 | The columns in each table are determined by which attributes coincide with the
83 | membership attributes. The column names are the attribute names without
84 | namespace. You can use `:rename` to set them explicitly.
85 |
86 | ## Running tests
87 |
88 | Requires PostgreSQL to be running. To not have to mess around with permissions we run it like so:
89 |
90 | ```
91 | docker run -e POSTGRES_HOST_AUTH_METHOD=trust -p 5432:5432 postgres
92 | ```
93 |
94 | Now you can
95 |
96 | ```
97 | bin/kaocha
98 | ```
99 |
100 | as usual.
101 |
102 | ## Adding new SQL database adapter
103 |
104 | To add a new adapter, one should consider doing the following steps:
105 |
106 | 1. Create an adapter inside the `src/lambdaisland/plenish/adapters` directory, reifying the `IDatomicEncoder` protocol.
107 | 2. Add necessary test in file `test/lambdaisland/$ADAPTER/plenish_test.clj`
108 |
109 | When trying to make the tests pass, consider temporarily removing the `jdbc/with-transaction` in `plenish.clj`, so that writes to the database happen one command at a time instead of as a single series of commands.
110 |
111 |
112 | ## Lambda Island Open Source
113 |
114 | Thank you! plenish is made possible thanks to our generous backers. [Become a
115 | backer on OpenCollective](https://opencollective.com/lambda-island) so that we
116 | can continue to make plenish better.
117 |
118 |
119 |
120 |
121 |
122 |
123 |
124 |
125 |
126 | plenish is part of a growing collection of quality Clojure libraries created and maintained
127 | by the fine folks at [Gaiwan](https://gaiwan.co).
128 |
129 | Pay it forward by [becoming a backer on our OpenCollective](http://opencollective.com/lambda-island),
130 | so that we continue to enjoy a thriving Clojure ecosystem.
131 |
132 | You can find an overview of all our different projects at [lambdaisland/open-source](https://github.com/lambdaisland/open-source).
133 |
134 |
135 |
136 |
137 |
138 |
139 |
140 |
141 |
142 | ## License
143 |
144 | Copyright © 2023 Arne Brasseur and Contributors
145 |
146 | Licensed under the term of the Mozilla Public License 2.0, see LICENSE.
147 |
148 |
--------------------------------------------------------------------------------
/bb.edn:
--------------------------------------------------------------------------------
1 | {:deps
2 | {com.lambdaisland/launchpad {:mvn/version "0.37.162-alpha"}
3 | lambdaisland/open-source {:git/url "https://github.com/lambdaisland/open-source"
4 | :git/sha "99b33741ea499e1c58e5ab6c2b785ba18eca84d2"
5 | #_#_:local/root "../open-source"}}}
6 |
--------------------------------------------------------------------------------
/bin/kaocha:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | exec clojure -M:test:dev:postgresql:duckdb:datomic-pro -m kaocha.runner "$@"
4 |
--------------------------------------------------------------------------------
/bin/launchpad:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bb
2 |
3 | (require '[lambdaisland.launchpad :as launchpad])
4 |
5 | (launchpad/main {})
6 |
7 | ;; (launchpad/main {:steps (into [(partial launchpad/ensure-java-version 17)]
8 | ;; launchpad/default-steps)})
9 |
--------------------------------------------------------------------------------
/bin/proj:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bb
2 |
3 | (ns proj
4 | (:require [lioss.main :as lioss]))
5 |
6 | (lioss/main
7 | {:license :mpl
8 | :inception-year 2022
9 | :description ""
10 | :group-id "com.lambdaisland"})
11 |
12 | ;; Local Variables:
13 | ;; mode:clojure
14 | ;; End:
15 |
--------------------------------------------------------------------------------
/deps.edn:
--------------------------------------------------------------------------------
1 | {:paths ["src" "resources"]
2 |
3 | :deps
4 | {org.clojure/clojure {:mvn/version "1.11.2"}
5 | com.cnuernber/charred {:mvn/version "1.028"}
6 | com.github.seancorfield/next.jdbc {:mvn/version "1.3.874"}
7 | com.github.seancorfield/honeysql {:mvn/version "2.4.1026"}}
8 |
9 | :aliases
10 | {:dev
11 | {:extra-paths ["dev"]
12 | :extra-deps {com.lambdaisland/facai {:mvn/version "0.7.59-alpha"}
13 | djblue/portal {:mvn/version "RELEASE"}}}
14 |
15 | :test
16 | {:extra-paths ["test"]
17 | :extra-deps {lambdaisland/kaocha {:mvn/version "1.82.1306"}}}
18 |
19 | :postgresql
20 | {:extra-deps {;; org.postgresql/postgresql {:mvn/version "42.4.0"} ; "classic" pg jdbc driver
21 | com.impossibl.pgjdbc-ng/pgjdbc-ng {:mvn/version "0.8.9"} ; new generation driver
22 | }}
23 |
24 | :duckdb
25 | {:extra-deps {org.duckdb/duckdb_jdbc {:mvn/version "1.0.0"}}}
26 |
27 | :datomic-pro
28 | {:extra-deps {com.datomic/peer {:mvn/version "1.0.7277"}}}
29 |
30 | :datomic-cloud
31 | {:extra-deps {com.datomic/client-cloud {:mvn/version "1.0.123"}}}}}
32 |
--------------------------------------------------------------------------------
/dev/user.clj:
--------------------------------------------------------------------------------
1 | (ns user)
2 |
3 | (defmacro jit [sym]
4 | `(requiring-resolve '~sym))
5 |
6 | (defn browse []
7 | ((jit clojure.java.browse/browse-url) "http://localhost:8000"))
8 |
9 | (def portal-instance (atom nil))
10 |
11 | (defn portal
12 | "Open a Portal window and register a tap handler for it. The result can be
13 | treated like an atom."
14 | []
15 | ;; Portal is both an IPersistentMap and an IDeref, which confuses pprint.
16 | (prefer-method @(jit clojure.pprint/simple-dispatch) clojure.lang.IPersistentMap clojure.lang.IDeref)
17 | (let [p ((jit portal.api/open) @portal-instance)]
18 | (reset! portal-instance p)
19 | (add-tap (jit portal.api/submit))
20 | p))
21 |
--------------------------------------------------------------------------------
/examples/mbrainz-duckdb/README.md:
--------------------------------------------------------------------------------
1 | # About the mbrainz example
2 |
3 | This example demonstrates how the mbrainz Datomic database works with
4 | Plenish. To use this example, we need to prepare the Datomic database
5 | and an empty DuckDB database.
6 |
7 | ## Prepare the Datomic database
8 | ### Getting Datomic
9 |
10 | Follow the directions [here](https://docs.datomic.com/setup/pro-setup.html#get-datomic) to download it.
11 |
12 | Start the transactor:
13 |
14 | cd datomic-pro-$VERSION
15 | bin/transactor config/samples/dev-transactor-template.properties
16 |
17 | ### Getting the Data
18 |
19 | Next download the
20 | [subset of the mbrainz database](https://s3.amazonaws.com/mbrainz/datomic-mbrainz-1968-1973-backup-2017-07-20.tar)
21 | covering the period 1968-1973 (which the Datomic team has
22 | scientifically determined as being the most important period in the
23 | history of recorded music):
24 |
25 | wget https://s3.amazonaws.com/mbrainz/datomic-mbrainz-1968-1973-backup-2017-07-20.tar -O mbrainz.tar
26 | tar -xvf mbrainz.tar
27 |
28 | From the `datomic-pro-$VERSION` directory, [restore the backup](http://docs.datomic.com/on-prem/operation/backup.html#restoring):
29 |
30 | # prints progress -- ~1,000 segments in restore
31 | bin/datomic restore-db file://path/to/backup/mbrainz-1968-1973 datomic:dev://localhost:4334/mbrainz-1968-1973
32 |
33 | ### Trouble shooting in getting data
34 |
35 | If encountering the error:
36 |
37 | ```
38 | java.lang.IllegalArgumentException: :storage/invalid-uri Unsupported protocol:
39 | ```
40 |
41 | Check again that the first argument starts with `file://`
42 |
43 | ## Prepare the empty duckdb database
44 |
45 | 1. Install duckdb CLI. Homebrew: `brew install duckdb`
46 | 2. check DuckDB version with `duckdb --version`
47 | 3. rm -rf /tmp/mbrainz
48 |
49 | ## Init the nREPL with Debug flag
50 |
51 | ```
52 | export PLENISH_DEBUG=true && clj -M:dev:cider:duckdb:datomic-pro
53 | ```
54 |
55 | ## Results
56 |
57 | After running the commands in `src/lambdaisland/mbrainz.clj`, the REPL output should look like
58 |
59 | ```
60 | ; eval (current-form): (def datomic-conn (d/connect "...
61 | #'lambdaisland.mbrainz/datomic-conn
62 | ; --------------------------------------------------------------------------------
63 | ; eval (current-form): (def duck-conn (jdbc/get-datas...
64 | #'lambdaisland.mbrainz/duck-conn
65 | ; --------------------------------------------------------------------------------
66 | ; eval (current-form): (def metaschema {:tables {:rel...
67 | #'lambdaisland.mbrainz/metaschema
68 | ; --------------------------------------------------------------------------------
69 | ; eval (current-form): (def initial-ctx (plenish/init...
70 | #'lambdaisland.mbrainz/initial-ctx
71 | ; --------------------------------------------------------------------------------
72 | ; eval (current-form): (def new-ctx (plenish/import-t...
73 | ; (out) ..........
74 | ; (out) 2025-02-22T12:14:12.989453Z
75 | ; (out) ...
76 | #'lambdaisland.mbrainz/new-ctx
77 | ```
78 |
79 | ### Checking the results with DuckDB CLI
80 |
81 | - Use DuckDB CLI to check the DB data file.
82 |
83 | ```
84 | duckdb /tmp/mbrainz
85 | ```
86 |
87 | - In CLI,
88 |
89 | ```
90 | D select schema_name, table_name from duckdb_tables;
91 | ┌─────────────┬────────────────────┐
92 | │ schema_name │ table_name │
93 | │ varchar │ varchar │
94 | ├─────────────┼────────────────────┤
95 | │ main │ artist │
96 | │ main │ idents │
97 | │ main │ idents_x_partition │
98 | │ main │ release │
99 | │ main │ release_x_artists │
100 | │ main │ release_x_labels │
101 | │ main │ release_x_media │
102 | │ main │ transactions │
103 | └─────────────┴────────────────────┘
104 |
105 | D select count(*) from transactions;
106 | ┌──────────────┐
107 | │ count_star() │
108 | │ int64 │
109 | ├──────────────┤
110 | │ 1318 │
111 | └──────────────┘
112 | ```
--------------------------------------------------------------------------------
/examples/mbrainz-duckdb/deps.edn:
--------------------------------------------------------------------------------
1 | {:paths ["src" "resources"]
2 |
3 | :deps
4 | {org.clojure/clojure {:mvn/version "1.11.2"}
5 | com.cnuernber/charred {:mvn/version "1.028"}
6 | com.github.seancorfield/next.jdbc {:mvn/version "1.3.874"}
7 | com.github.seancorfield/honeysql {:mvn/version "2.4.1026"}
8 | com.lambdaisland/plenish {:local/root "../../"}}
9 |
10 | :mvn/repos
11 | {"my.datomic.com" {:url "https://my.datomic.com/repo"}}
12 |
13 | :aliases
14 | {:dev
15 | {:extra-paths ["dev"]
16 | :extra-deps {com.lambdaisland/facai {:mvn/version "0.7.59-alpha"}
17 | djblue/portal {:mvn/version "RELEASE"}}}
18 |
19 | :test
20 | {:extra-paths ["test"]
21 | :extra-deps {lambdaisland/kaocha {:mvn/version "1.82.1306"}}}
22 |
23 | :duckdb
24 | {:extra-deps {org.duckdb/duckdb_jdbc {:mvn/version "1.0.0"}}}
25 |
26 | :datomic-pro
27 | {:extra-deps {com.datomic/peer {:mvn/version "1.0.7277"}}}
28 |
29 | :datomic-cloud
30 | {:extra-deps {com.datomic/client-cloud {:mvn/version "1.0.123"}}}
31 |
32 | :datomic-free
33 | {:extra-deps {com.datomic/datomic-free {:mvn/version "0.9.5703.21"}}}}}
34 |
--------------------------------------------------------------------------------
/examples/mbrainz-duckdb/repl_sessions/db_insert.clj:
--------------------------------------------------------------------------------
1 | (ns db-insert
2 |   (:require
3 |    [next.jdbc :as jdbc]
4 |    [next.jdbc.result-set :as rs]))
5 | ;; REPL session: hand-written INSERT statements against the DuckDB file that the mbrainz example produces.
6 | (def duck-conn (jdbc/get-datasource "jdbc:duckdb:/tmp/mbrainz")) ; datasource for the local DuckDB file
7 | 
8 | (def cmd-a ; upsert: ON CONFLICT on the db__id primary key rewrites the existing row's columns
9 |   ["INSERT INTO \"artist\" (\"db__id\", \"sortName\", \"name\", \"type\", \"country\", \"gid\", \"startYear\") VALUES (?, ?, ?, ?, ?, ?, ?) ON CONFLICT (\"db__id\") DO UPDATE SET \"sortName\" = EXCLUDED.\"sortName\", \"name\" = EXCLUDED.\"name\", \"type\" = EXCLUDED.\"type\", \"country\" = EXCLUDED.\"country\", \"gid\" = EXCLUDED.\"gid\", \"startYear\" = EXCLUDED.\"startYear\"" 778454232474826 "Deuter" "Deuter" 17592186045423 17592186045657 "c4e7031f-a5f0-476a-b1f0-1f3e8c573f4b" 1945])
10 | 
11 | (def cmd-b ; plain insert — fails if a row with this db__id already exists
12 |   ["INSERT INTO \"artist\" (\"db__id\", \"sortName\", \"name\", \"type\", \"country\", \"gid\", \"startYear\") VALUES (?, ?, ?, ?, ?, ?, ?)"
13 |    778454232474825 "Deuter" "Deuter" 17592186045423 17592186045657 "f81d4fae-7dec-11d0-a765-00a0c91e6bf6" 1945])
14 | 
15 | (jdbc/with-transaction [jdbc-tx duck-conn] ; run the upsert inside a transaction
16 |   (jdbc/execute! jdbc-tx cmd-a))
17 |
--------------------------------------------------------------------------------
/examples/mbrainz-duckdb/src/lambdaisland/mbrainz.clj:
--------------------------------------------------------------------------------
1 | (ns lambdaisland.mbrainz
2 |   (:require
3 |    [datomic.api :as d]
4 |    [lambdaisland.plenish :as plenish]
5 |    [lambdaisland.plenish.adapters.duckdb :as duckdb]
6 |    [next.jdbc :as jdbc]
7 |    [next.jdbc.result-set :as rs]))
8 | ;; Example: replicate the mbrainz Datomic database into DuckDB (see README for setup).
9 | (def datomic-conn (d/connect "datomic:dev://localhost:4334/mbrainz-1968-1973")) ; source Datomic database
10 | (def duck-conn (jdbc/get-datasource "jdbc:duckdb:/tmp/mbrainz")) ; target DuckDB file
11 | 
12 | (def metaschema ; membership attributes: entities carrying these each get a SQL table
13 |   {:tables {:release/name {}
14 |             :artist/name {}}})
15 | 
16 | (def db-adapter (duckdb/db-adapter)) ; DuckDB-specific SQL generation
17 | 
18 | (def initial-ctx (plenish/initial-ctx datomic-conn metaschema db-adapter))
19 | ;; Replay the full transaction log (nil to nil = everything) into DuckDB.
20 | (def new-ctx (plenish/import-tx-range
21 |               initial-ctx datomic-conn duck-conn
22 |               (d/tx-range (d/log datomic-conn) nil nil)))
23 |
--------------------------------------------------------------------------------
/examples/mbrainz-postgres/README.md:
--------------------------------------------------------------------------------
1 | # About the mbrainz example
2 |
3 | This example demonstrates how the mbrainz Datomic database works with
4 | plenish. To use this example, we need to prepare the Datomic database
5 | and an empty Postgres database.
6 |
7 | ## Prepare the Datomic database
8 | ### Getting Datomic
9 |
10 | Follow the directions [here](https://docs.datomic.com/setup/pro-setup.html#get-datomic) to download it.
11 |
12 | Start the transactor:
13 |
14 | cd datomic-pro-$VERSION
15 | bin/transactor config/samples/dev-transactor-template.properties
16 |
17 | ### Getting the Data
18 |
19 | Next download the
20 | [subset of the mbrainz database](https://s3.amazonaws.com/mbrainz/datomic-mbrainz-1968-1973-backup-2017-07-20.tar)
21 | covering the period 1968-1973 (which the Datomic team has
22 | scientifically determined as being the most important period in the
23 | history of recorded music):
24 |
25 | wget https://s3.amazonaws.com/mbrainz/datomic-mbrainz-1968-1973-backup-2017-07-20.tar -O mbrainz.tar
26 | tar -xvf mbrainz.tar
27 |
28 | From the `datomic-pro-$VERSION` directory, [restore the backup](http://docs.datomic.com/on-prem/operation/backup.html#restoring):
29 |
30 | # prints progress -- ~1,000 segments in restore
31 | bin/datomic restore-db file://path/to/backup/mbrainz-1968-1973 datomic:dev://localhost:4334/mbrainz-1968-1973
32 |
33 | ### Troubleshooting getting the data
34 |
35 | If encountering the error:
36 |
37 | ```
38 | java.lang.IllegalArgumentException: :storage/invalid-uri Unsupported protocol:
39 | ```
40 |
41 | Check again that the first argument starts with `file://`.
42 |
43 | ## Prepare the empty Postgres database
44 |
45 | 1. Run `psql` to connect to the Postgres database
46 | 2. Inside the psql session, run the following commands to create user and empty database.
47 |
48 | ```
49 | CREATE DATABASE mbrainz;
50 | CREATE ROLE plenish WITH LOGIN PASSWORD 'plenish';
51 | GRANT ALL ON DATABASE mbrainz TO plenish;
52 | ```
53 |
54 | ## Init the nREPL
55 |
56 | ```
57 | clj -M:dev:cider:postgresql:datomic-pro
58 | ```
59 |
60 | ## Results
61 |
62 | After running the commands in `src/lambdaisland/mbrainz.clj`, the tables in the Postgres database are:
63 |
64 | - Login to the result DB:
65 |
66 | ```
67 | psql mbrainz
68 | ```
69 |
70 | - Check the content of the DB:
71 |
72 | ```
73 | mbrainz# \d
74 | List of relations
75 | Schema │ Name │ Type │ Owner
76 | ────────┼────────────────────┼───────┼─────────
77 | public │ artist │ table │ plenish
78 | public │ idents │ table │ plenish
79 | public │ idents_x_partition │ table │ plenish
80 | public │ release │ table │ plenish
81 | public │ release_x_artists │ table │ plenish
82 | public │ release_x_labels │ table │ plenish
83 | public │ release_x_media │ table │ plenish
84 | public │ transactions │ table │ plenish
85 | (8 rows)
86 |
87 | mbrainz# select count(*) from transactions;
88 | count
89 | ───────
90 | 1318
91 | (1 row)
92 | ```
93 |
--------------------------------------------------------------------------------
/examples/mbrainz-postgres/deps.edn:
--------------------------------------------------------------------------------
1 | {:paths ["src" "resources"]
2 |
3 | :deps
4 | {org.clojure/clojure {:mvn/version "1.11.2"}
5 | com.cnuernber/charred {:mvn/version "1.028"}
6 | com.github.seancorfield/next.jdbc {:mvn/version "1.3.874"}
7 | com.github.seancorfield/honeysql {:mvn/version "2.4.1026"}
8 | com.lambdaisland/plenish {:local/root "../../"}}
9 |
10 | :mvn/repos
11 | {"my.datomic.com" {:url "https://my.datomic.com/repo"}}
12 |
13 | :aliases
14 | {:dev
15 | {:extra-paths ["dev"]
16 | :extra-deps {com.lambdaisland/facai {:mvn/version "0.7.59-alpha"}
17 | djblue/portal {:mvn/version "RELEASE"}}}
18 |
19 | :test
20 | {:extra-paths ["test"]
21 | :extra-deps {lambdaisland/kaocha {:mvn/version "1.82.1306"}}}
22 |
23 | :postgresql
24 | {:extra-deps {;; org.postgresql/postgresql {:mvn/version "42.4.0"} ; "classic" pg jdbc driver
25 | com.impossibl.pgjdbc-ng/pgjdbc-ng {:mvn/version "0.8.9"} ; new generation driver
26 | }}
27 |
28 | :datomic-pro
29 | {:extra-deps {com.datomic/peer {:mvn/version "1.0.7277"}}}
30 |
31 | :datomic-cloud
32 | {:extra-deps {com.datomic/client-cloud {:mvn/version "1.0.123"}}}
33 |
34 | :datomic-free
35 | {:extra-deps {com.datomic/datomic-free {:mvn/version "0.9.5703.21"}}}}}
36 |
--------------------------------------------------------------------------------
/examples/mbrainz-postgres/src/lambdaisland/mbrainz.clj:
--------------------------------------------------------------------------------
1 | (ns lambdaisland.mbrainz
2 |   (:require
3 |    [datomic.api :as d]
4 |    [lambdaisland.plenish :as plenish]
5 |    [lambdaisland.plenish.adapters.postgres :as postgres]
6 |    [next.jdbc :as jdbc]
7 |    [next.jdbc.result-set :as rs]))
8 | ;; Example: replicate the mbrainz Datomic database into Postgres (see README for setup).
9 | (def datomic-conn (d/connect "datomic:dev://localhost:4334/mbrainz-1968-1973")) ; source Datomic database
10 | (def pg-conn (jdbc/get-datasource "jdbc:pgsql://localhost:5432/mbrainz?user=plenish&password=plenish")) ; target Postgres (pgjdbc-ng URL scheme)
11 | 
12 | (def metaschema ; membership attributes: entities carrying these each get a SQL table
13 |   {:tables {:release/name {}
14 |             :artist/name {}}})
15 | 
16 | (def db-adapter (postgres/db-adapter)) ; Postgres-specific SQL generation
17 | 
18 | (def initial-ctx (plenish/initial-ctx datomic-conn metaschema db-adapter))
19 | ;; Replay the full transaction log (nil to nil = everything) into Postgres.
20 | (def new-ctx (plenish/import-tx-range
21 |               initial-ctx datomic-conn pg-conn
22 |               (d/tx-range (d/log datomic-conn) nil nil)))
23 |
--------------------------------------------------------------------------------
/examples/v0.4.50/mbrainz-postgres/README.md:
--------------------------------------------------------------------------------
1 | # About the mbrainz example
2 |
3 | This example demonstrates how the mbrainz Datomic database works with
4 | plenish. To use this example, we need to prepare the Datomic database
5 | and an empty Postgres database.
6 |
7 | ## Prepare the Datomic database
8 | ### Getting Datomic
9 |
10 | Follow the directions in your [My Datomic](http://my.datomic.com) account to
11 | download a [Datomic distribution](http://www.datomic.com/get-datomic.html) and
12 | unzip it somewhere convenient.
13 |
14 | Update `config/samples/dev-transactor-template.properties` with your license key
15 | where you see `license=`.
16 |
17 | Start the transactor:
18 |
19 | cd datomic-pro-$VERSION
20 | bin/transactor config/samples/dev-transactor-template.properties
21 |
22 | ### Getting the Data
23 |
24 | Next download the
25 | [subset of the mbrainz database](https://s3.amazonaws.com/mbrainz/datomic-mbrainz-1968-1973-backup-2017-07-20.tar)
26 | covering the period 1968-1973 (which the Datomic team has
27 | scientifically determined as being the most important period in the
28 | history of recorded music):
29 |
30 | wget https://s3.amazonaws.com/mbrainz/datomic-mbrainz-1968-1973-backup-2017-07-20.tar -O mbrainz.tar
31 | tar -xvf mbrainz.tar
32 |
33 | From the `datomic-pro-$VERSION` directory, [restore the backup](http://docs.datomic.com/on-prem/operation/backup.html#restoring):
34 |
35 | # prints progress -- ~1,000 segments in restore
36 | bin/datomic restore-db file://path/to/backup/mbrainz-1968-1973 datomic:dev://localhost:4334/mbrainz-1968-1973
37 |
38 | ## Prepare the empty Postgres database
39 |
40 | 1. Run `psql` to connect to the Postgres database
41 | 2. Inside the psql session, run the following commands to create user and empty database.
42 |
43 | CREATE DATABASE mbrainz;
44 | CREATE ROLE plenish WITH LOGIN PASSWORD 'plenish';
45 | GRANT ALL ON DATABASE mbrainz TO plenish;
46 |
47 | ## Init the nREPL
48 |
49 | ```
50 | clj -M:dev:cider:postgresql:datomic-pro
51 | ```
52 |
53 | ## Results
54 |
55 | After running the commands in `src/lambdaisland/mbrainz.clj`, the tables in the Postgres database are:
56 |
57 | ```
58 | mbrainz# \d
59 | List of relations
60 | Schema │ Name │ Type │ Owner
61 | ────────┼────────────────────┼───────┼─────────
62 | public │ artist │ table │ plenish
63 | public │ idents │ table │ plenish
64 | public │ idents_x_partition │ table │ plenish
65 | public │ release │ table │ plenish
66 | public │ release_x_artists │ table │ plenish
67 | public │ release_x_labels │ table │ plenish
68 | public │ release_x_media │ table │ plenish
69 | public │ transactions │ table │ plenish
70 | (8 rows)
71 | ```
72 |
--------------------------------------------------------------------------------
/examples/v0.4.50/mbrainz-postgres/deps.edn:
--------------------------------------------------------------------------------
1 | {:paths ["src" "resources"]
2 |
3 | :deps
4 | {org.clojure/clojure {:mvn/version "1.11.2"}
5 | com.cnuernber/charred {:mvn/version "1.028"}
6 | com.github.seancorfield/next.jdbc {:mvn/version "1.3.874"}
7 | com.github.seancorfield/honeysql {:mvn/version "2.4.1026"}
8 | com.lambdaisland/plenish {:mvn/version "0.4.50"}}
9 |
10 | :mvn/repos
11 | {"my.datomic.com" {:url "https://my.datomic.com/repo"}}
12 |
13 | :aliases
14 | {:dev
15 | {:extra-paths ["dev"]
16 | :extra-deps {com.lambdaisland/facai {:mvn/version "0.7.59-alpha"}
17 | djblue/portal {:mvn/version "RELEASE"}}}
18 |
19 | :test
20 | {:extra-paths ["test"]
21 | :extra-deps {lambdaisland/kaocha {:mvn/version "1.82.1306"}}}
22 |
23 | :postgresql
24 | {:extra-deps {;; org.postgresql/postgresql {:mvn/version "42.4.0"} ; "classic" pg jdbc driver
25 | com.impossibl.pgjdbc-ng/pgjdbc-ng {:mvn/version "0.8.9"} ; new generation driver
26 | }}
27 |
28 | :datomic-pro
29 | {:extra-deps {com.datomic/datomic-pro {:mvn/version "1.0.6202"}}}
30 |
31 | :datomic-cloud
32 | {:extra-deps {com.datomic/client-cloud {:mvn/version "1.0.123"}}}
33 |
34 | :datomic-free
35 | {:extra-deps {com.datomic/datomic-free {:mvn/version "0.9.5703.21"}}}}}
36 |
--------------------------------------------------------------------------------
/examples/v0.4.50/mbrainz-postgres/src/lambdaisland/mbrainz.clj:
--------------------------------------------------------------------------------
1 | (ns lambdaisland.mbrainz
2 |   (:require
3 |    [datomic.api :as d]
4 |    [lambdaisland.plenish :as plenish]
5 |    [next.jdbc :as jdbc]
6 |    [next.jdbc.result-set :as rs]))
7 | ;; Example pinned to plenish 0.4.50: in this version initial-ctx takes no db-adapter argument.
8 | (def datomic-conn (d/connect "datomic:dev://localhost:4334/mbrainz-1968-1973")) ; source Datomic database
9 | (def pg-conn (jdbc/get-datasource "jdbc:pgsql://localhost:5432/mbrainz?user=plenish&password=plenish")) ; target Postgres (pgjdbc-ng URL scheme)
10 | 
11 | (def metaschema ; membership attributes: entities carrying these each get a SQL table
12 |   {:tables {:release/name {}
13 |             :artist/name {}}})
14 | 
15 | (def initial-ctx (plenish/initial-ctx datomic-conn metaschema))
16 | ;; Replay the full transaction log (nil to nil = everything) into Postgres.
17 | (def new-ctx (plenish/import-tx-range
18 |               initial-ctx datomic-conn pg-conn
19 |               (d/tx-range (d/log datomic-conn) nil nil)))
20 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
3 | 4.0.0
4 | com.lambdaisland
5 | plenish
6 | 0.6.99
7 | plenish
8 |
9 | https://github.com/lambdaisland/plenish
10 | 2022
11 |
12 | Lambda Island
13 | https://lambdaisland.com
14 |
15 |
16 | UTF-8
17 |
18 |
19 |
20 | MPL-2.0
21 | https://www.mozilla.org/media/MPL/2.0/index.txt
22 |
23 |
24 |
25 | https://github.com/lambdaisland/plenish
26 | scm:git:git://github.com/lambdaisland/plenish.git
27 | scm:git:ssh://git@github.com/lambdaisland/plenish.git
28 | 6380bddb4cbd78e3ff8bfdb402221380a4878b6d
29 |
30 |
31 |
32 | org.clojure
33 | clojure
34 | 1.11.2
35 |
36 |
37 | com.cnuernber
38 | charred
39 | 1.028
40 |
41 |
42 | com.github.seancorfield
43 | next.jdbc
44 | 1.3.874
45 |
46 |
47 | com.github.seancorfield
48 | honeysql
49 | 2.4.1026
50 |
51 |
52 |
53 | src
54 |
55 |
56 | src
57 |
58 |
59 | resources
60 |
61 |
62 |
63 |
64 | org.apache.maven.plugins
65 | maven-compiler-plugin
66 | 3.8.1
67 |
68 | 1.8
69 | 1.8
70 |
71 |
72 |
73 | org.apache.maven.plugins
74 | maven-jar-plugin
75 | 3.2.0
76 |
77 |
78 |
79 | 6380bddb4cbd78e3ff8bfdb402221380a4878b6d
80 |
81 |
82 |
83 |
84 |
85 | org.apache.maven.plugins
86 | maven-gpg-plugin
87 | 1.6
88 |
89 |
90 | sign-artifacts
91 | verify
92 |
93 | sign
94 |
95 |
96 |
97 |
98 |
99 |
100 |
101 |
102 | clojars
103 | https://repo.clojars.org/
104 |
105 |
106 |
107 |
108 | clojars
109 | Clojars repository
110 | https://clojars.org/repo
111 |
112 |
113 |
--------------------------------------------------------------------------------
/repl_sessions/etl.clj:
--------------------------------------------------------------------------------
1 | (ns repl-sessions.etl
2 |   (:require [lambdaisland.plenish :as plenish]
3 |             [datomic.api :as d]
4 |             [next.jdbc :as jdbc]))
5 | ;; REPL session: import a Datomic database into Postgres and poke at the result.
6 | ;; CREATE DATABASE enzo;
7 | ;; CREATE ROLE plenish WITH LOGIN PASSWORD 'plenish';
8 | ;; GRANT ALL ON DATABASE enzo TO plenish;
9 | 
10 | (def metaschema ; plenish metaschema read from a local EDN file — machine-specific path
11 |   (read-string (slurp "/home/arne/Eleven/runeleven/melvn/presto-etc/datomic/accounting.edn")))
12 | 
13 | (defn recreate-db! [name] ; drop + recreate a Postgres database. NOTE(review): SQL built by string concatenation — only safe for trusted, literal names
14 |   (let [ds (jdbc/get-datasource "jdbc:pgsql://localhost:5432/postgres?user=postgres")]
15 |     (jdbc/execute! ds [(str "DROP DATABASE IF EXISTS " name)])
16 |     (jdbc/execute! ds [(str "CREATE DATABASE " name)])))
17 | 
18 | (defn import! [datomic-url jdbc-url] ; replay the entire Datomic transaction log into the SQL target
19 |   (let [conn (d/connect datomic-url)
20 |         ds (jdbc/get-datasource jdbc-url)
21 |         txs (d/tx-range (d/log conn) nil nil)]
22 |     (plenish/import-tx-range
23 |      (plenish/initial-ctx conn metaschema)
24 |      conn
25 |      ds
26 |      txs)))
27 | 
28 | (recreate-db! "enzo")
29 | 
30 | (import! "datomic:dev://localhost:4334/example-tenant---enzo-gardening-service-limited-demo-"
31 |          "jdbc:pgsql://localhost:5432/enzo?user=postgres")
32 | 
33 | (def conn
34 |   (d/connect
35 |    "datomic:dev://localhost:4334/example-tenant---enzo-gardening-service-limited-demo-"
36 |    #_
37 |    "datomic:dev://localhost:4334/example-tenant---camelot-global-trust-pte-ltd")
38 |   )
39 | 
40 | (d/q ; entities touching any of these accounting attributes
41 |  '[:find (pull ?e [*])
42 |    :where (or
43 |            [?e :accounting.fiscal-year/closing-description]
44 |            [?e :accounting.account/bank-account-name]
45 |            [?e :accounting.account/bank-account-number]
46 |            [?e :accounting.account/description])]
47 |  (d/db conn))
48 | 
49 | 
50 | (seq ; all idents in the database
51 |  (d/q
52 |   '[:find [?e ...]
53 |     :where [_ :db/ident ?e]]
54 |   (d/db conn)))
55 | 
56 | ;; datomic:dev://localhost:4334/tnt
57 | ;; datomic:dev://localhost:4334/onze
58 | ;; datomic:dev://localhost:4334/example-tenant---camelot-global-trust-pte-ltd-
59 | ;; datomic:dev://localhost:4334/example-tenant---enzo-gardening-service-limited-demo-
60 | 
61 | :accounting.fiscal-year/closing-description
62 | :accounting.account/bank-account-name
63 | :accounting.account/bank-account-number
64 | :accounting.account/description
65 | 
66 | (d/entid (d/db conn) :accounting.account/description);; => 183
67 | (d/entid (d/db conn) :accounting.account/fiscal-year-ref+number);; => 232
68 | (d/entid (d/db conn) :accounting.account/bank-account-number);; => 181
69 | 
70 | (map :t ; t values of transactions that touched attribute 181 (bank-account-number)
71 |      (filter
72 |       (fn [tx]
73 |         (some #(#{181} (.-a %)) (:data tx)))
74 |       (seq
75 |        (d/tx-range
76 |         (d/log conn)
77 |         nil
78 |         nil))))
79 | 
80 | (rand-nth (seq ; grab a random transaction from the log
81 |            (d/tx-range
82 |             (d/log conn)
83 |             nil
84 |             nil)))
85 | 
86 | 
87 | (d/pull (d/db conn) '[*] 17592186045433 #_13194139534819)
88 | (map ; t values of transactions that asserted :accounting.account/description
89 |  d/tx->t
90 |  (d/q
91 |   '[:find [?tx ...]
92 |     :where [_ :accounting.account/description _ ?tx]]
93 |   (d/db conn)))
94 |
--------------------------------------------------------------------------------
/repl_sessions/find_prev_tx.clj:
--------------------------------------------------------------------------------
1 | (ns repl-sessions.find-prev-tx
2 |   (:require [lambdaisland.plenish :as plenish]
3 |             [lambdaisland.plenish.factories :as factories]
4 |             [lambdaisland.facai.datomic-peer :as fd]
5 |             [datomic.api :as d]))
6 | ;; REPL session: can we find the t of the transaction preceding a given one?
7 | (set! *print-namespace-maps* false) ; less noisy map printing at the REPL
8 | 
9 | (def conn ; fresh in-memory Datomic database
10 |   (let [url (str "datomic:mem://" (gensym "dev"))]
11 |     (d/create-database url)
12 |     (d/connect url)))
13 | 
14 | @(d/transact conn factories/schema) ; install the test schema
15 | 
16 | (fd/create! conn factories/cart) ; insert fixture data via facai factories
17 | 
18 | (seq ; inspect a slice of the transaction log
19 |  (d/tx-range
20 |   (d/log conn)
21 |   1000
22 |   9010))
23 | 
24 | ;; Given we are currently processing a certain transaction (say t=1012), can we
25 | ;; find the t value of the previous transaction? This could be useful to add an
26 | ;; additional guarantee that transactions are processed exactly in order, by
27 | ;; adding a postgresql trigger that validates that transactions form an unbroken
28 | ;; chain.
29 | 
30 | (let [max-t 1012 ; approach: query an as-of view just before max-t for the highest tx t
31 |       db (d/as-of (d/db conn) (dec max-t))]
32 |   (d/q '[:find (max ?t) .
33 |          :where
34 |          [?i :db/txInstant]
35 |          [(datomic.api/tx->t ?i) ?t]]
36 |        db))
37 |
--------------------------------------------------------------------------------
/repl_sessions/first_class_tx.clj:
--------------------------------------------------------------------------------
1 | (ns repl-sessions.first-class-tx
2 |   (:require [lambdaisland.plenish :as plenish]
3 |             [datomic.api :as d]
4 |             [next.jdbc :as jdbc]))
5 | ;; REPL session exploring importing with transactions as first-class data.
6 | (def metaschema ; plenish metaschema from a local EDN file — machine-specific path
7 |   (read-string (slurp "/home/arne/Eleven/runeleven/melvn/presto-etc/datomic/accounting.edn")))
8 | 
9 | (def conn ; throwaway in-memory connection (re-def'd to a dev database below)
10 |   (let [url (str "datomic:mem://" (gensym "dev"))]
11 |     (d/create-database url)
12 |     (d/connect url)))
13 | 
14 | (defn pg-url [db-name] ; JDBC URL for a local Postgres database (pgjdbc-ng scheme)
15 |   (str "jdbc:pgsql://localhost:5432/" db-name "?user=postgres"))
16 | 
17 | (defn recreate-db! [name] ; drop + recreate. NOTE(review): name is concatenated into SQL — trusted literals only
18 |   (let [ds (jdbc/get-datasource (pg-url "postgres"))]
19 |     (jdbc/execute! ds [(str "DROP DATABASE IF EXISTS " name)])
20 |     (jdbc/execute! ds [(str "CREATE DATABASE " name)])))
21 | 
22 | (defn import! [metaschema datomic-url jdbc-url] ; replay the full Datomic tx log into the SQL target
23 |   (let [conn (d/connect datomic-url)
24 |         ds (jdbc/get-datasource jdbc-url)
25 |         txs (d/tx-range (d/log conn) nil nil)]
26 |     (plenish/import-tx-range
27 |      (plenish/initial-ctx conn metaschema)
28 |      conn
29 |      ds
30 |      txs)))
31 | 
32 | (def conn (d/connect "datomic:dev://localhost:4334/example-tenant---import-company-20210901T110746698441277")) ; re-def: replaces the mem:// conn above
33 | 
34 | (plenish/ctx-valueType (plenish/initial-ctx conn metaschema)
35 |                        50)
36 | 
37 | (get-in (plenish/initial-ctx conn metaschema) [:idents 50 :db/valueType] )
38 | (map :db/valueType (vals (:idents (plenish/initial-ctx conn metaschema))))
39 | 
40 | (recreate-db! "foo4")
41 | 
42 | (import!
43 |  metaschema
44 |  "datomic:dev://localhost:4334/example-tenant---import-company-20210901T110746698441277"
45 |  (pg-url "foo4"))
46 | 
47 | (#'plenish/pull-idents (d/as-of (d/db conn) 999)) ; call the private var directly
48 | 
49 | ;; (d/entid (d/db conn) :db/txInstant)
50 | 
51 | (d/pull (d/db conn) '[*] 25)
52 | 
53 | (def ctx (plenish/initial-ctx conn metaschema))
54 | 
55 | (let [[t1000 t1009] (seq ; first two transactions in the 1000-1010 window
56 |                      (d/tx-range
57 |                       (d/log conn)
58 |                       1000
59 |                       1010))]
60 |   (into #{} (map plenish/-e) (:data t1009))
61 |   #_(-> ctx
62 |         (plenish/process-tx conn t1000)
63 |         (plenish/process-tx conn t1009)))
64 | (seq
65 |  (d/tx-range
66 |   (d/log conn)
67 |   1026
68 |   1027))
69 | 
70 | ;; datomic:dev://localhost:4334/example-tenant---import-company-20210901T105828757108918
71 | ;; datomic:dev://localhost:4334/example-tenant---import-company-20210901T110147221074714
72 | ;; datomic:dev://localhost:4334/example-tenant---import-company-20210901T110526995513023
73 | ;; datomic:dev://localhost:4334/example-tenant---import-company-20210901T110623353849568
74 | ;; datomic:dev://localhost:4334/example-tenant---import-company-20210901T110713392203667
75 | ;; datomic:dev://localhost:4334/example-tenant---import-company-20210901T110746698441277
76 |
--------------------------------------------------------------------------------
/repl_sessions/jdbc_poke.clj:
--------------------------------------------------------------------------------
1 | (ns repl-sessions.jdbc-poke
2 |   (:require [next.jdbc :as jdbc]
3 |             [next.jdbc.result-set :as rs]
4 |             [honey.sql :as honey]
5 |             [honey.sql.helpers :as hh]))
6 | ;; REPL session poking at next.jdbc + honeysql against a local Postgres.
7 | (def ds ; datasource for the "replica" database (pgjdbc-ng URL scheme)
8 |   (jdbc/get-datasource
9 |    "jdbc:pgsql://localhost:5432/replica?user=plenish&password=plenish"
10 |    ))
11 | 
12 | ;; (jdbc/) ; FIXME: incomplete form — unreadable by the Clojure reader; commented out so the file can be loaded
13 | 
14 | (jdbc/execute! ; CREATE TABLE IF NOT EXISTS foo (db__id bigint PRIMARY KEY)
15 |  ds
16 |  (honey/format
17 |   (-> (hh/create-table "foo" :if-not-exists)
18 |       (hh/with-columns [[:db__id :bigint [:primary-key]]]))))
19 | 
20 | 
21 | (jdbc/on-connection ; list the columns of table "test" via JDBC metadata
22 |  [con ds]
23 |  (-> (.getMetaData con) ; produces java.sql.DatabaseMetaData
24 |      #_ (.getTables nil nil nil (into-array ["TABLE" "VIEW"]))
25 |      (.getColumns nil nil "test" nil)
26 |      (rs/datafiable-result-set ds nil)))
27 |
--------------------------------------------------------------------------------
/repl_sessions/plenish_first_spike.clj:
--------------------------------------------------------------------------------
1 | (ns repl-sessions.plenish-first-spike
2 |   (:require [datomic.api :as d]
3 |             [honey.sql :as honey]
4 |             [honey.sql.helpers :as hh]
5 |             [lambdaisland.facai :as f]
6 |             [lambdaisland.facai.datomic-peer :as fd]
7 |             [next.jdbc :as jdbc]
8 |             [next.jdbc.result-set :as rs]
9 |             [clojure.string :as str]
10 |             [charred.api :as charred]))
11 | ;; First spike of plenish: replicate Datomic transaction data into Postgres tables.
12 | (d/create-database "datomic:mem://foo")
13 | (def conn (d/connect "datomic:mem://foo"))
14 | (def conn (d/connect "datomic:dev://localhost:4334/example-tenant---import-company-20210901T110746698441277")) ; re-def: use the dev database instead of mem://
15 | (def ds ; target Postgres datasource (pgjdbc-ng URL scheme)
16 |   (jdbc/get-datasource
17 |    "jdbc:pgsql://localhost:5432/replica?user=plenish&password=plenish"
18 |    ))
19 |
20 | ;; docker run -e POSTGRES_HOST_AUTH_METHOD=trust -p 5432:5432 postgres
21 | ;; docker ps
22 | ;; docker exec -it -u postgres psql
23 | ;;
24 | ;; Setup SQL to paste into psql (commented out: bare SQL is read as unresolvable Clojure symbols):
25 | ;; CREATE DATABASE replica;
26 | ;; CREATE ROLE plenish WITH LOGIN PASSWORD 'plenish';
27 | ;; GRANT ALL ON DATABASE replica TO plenish;
27 |
28 | (f/defactory line-item ; facai factory: a single cart line
29 |   {:line-item/description "Widgets"
30 |    :line-item/quantity 5
31 |    :line-item/price 1.0})
32 | 
33 | (f/defactory cart ; facai factory: a cart holding two line items
34 |   {:cart/created-at #(java.util.Date.)
35 |    :cart/line-items [line-item line-item]})
36 |
37 | (def metaschema ; table config: membership attribute -> table options (inline toy version kept, reader-ignored, for reference)
38 |   #_
39 |   {:tables {:line-item/price {:name "line_items"}
40 |             :cart/created-at {:name "cart"}
41 |             #_#_:db/txInstant {:name "transactions"}
42 |             #_#_:db/ident {:name "idents"}}}
43 |   (read-string (slurp "/home/arne/Eleven/runeleven/melvn/presto-etc/datomic/accounting.edn")))
44 |
45 | (def schema ; Datomic schema for the toy cart/line-item domain
46 |   [{:db/ident :line-item/description,
47 |     :db/valueType :db.type/string,
48 |     :db/cardinality :db.cardinality/one}
49 |    {:db/ident :line-item/quantity,
50 |     :db/valueType :db.type/long,
51 |     :db/cardinality :db.cardinality/one}
52 |    {:db/ident :line-item/price,
53 |     :db/valueType :db.type/double,
54 |     :db/cardinality :db.cardinality/one}
55 |    {:db/ident :cart/created-at,
56 |     :db/valueType :db.type/instant,
57 |     :db/cardinality :db.cardinality/one}
58 |    {:db/ident :cart/line-items,
59 |     :db/valueType :db.type/ref,
60 |     :db/cardinality :db.cardinality/many}])
61 |
62 | (def pg-type ; mapping from Datomic :db/valueType to a Postgres column type
63 |   {:db.type/ref :bigint
64 |    :db.type/keyword :text
65 |    :db.type/long :bigint
66 |    :db.type/string :text
67 |    :db.type/boolean :boolean
68 |    :db.type/uuid :uuid
69 |    :db.type/instant :timestamp ;; no time zone information in java.util.Date
70 |    :db.type/double [:float 53]
71 |    ;; :db.type/fn
72 |    :db.type/float [:float 24]
73 |    :db.type/bytes :bytea
74 |    :db.type/uri :text
75 |    :db.type/bigint :numeric
76 |    :db.type/bigdec :numeric
77 |    :db.type/tuple :jsonb})
78 |
79 | (set! *warn-on-reflection* true)
80 | 
81 | (defn has-attr? [db eid attr] ; true when entity eid has at least one datom for attr
82 |   (-> db
83 |       ^Iterable (d/datoms :eavt eid attr)
84 |       .iterator
85 |       .hasNext))
86 | 
87 | (defn pull-idents [db] ; pull the full entity map of everything that has a :db/ident
88 |   (d/q
89 |    '[:find [(pull ?e [*]) ...]
90 |      :where [?e :db/ident]]
91 |    db))
92 |
93 | ;; Datom helpers — field accessors on datomic.Datom
94 | (def -e (memfn ^datomic.Datom e)) ; entity id
95 | (def -a (memfn ^datomic.Datom a)) ; attribute id
96 | (def -v (memfn ^datomic.Datom v)) ; value
97 | (def -t (memfn ^datomic.Datom tx)) ; transaction id
98 | (def -added? (memfn ^datomic.Datom added)) ; assertion (true) vs retraction (false)
99 | 
100 | ;; Context helpers — look up ident/entid/type/cardinality info cached in ctx
101 | (defn ctx-ident [ctx eid] (get-in ctx [:idents eid :db/ident]))
102 | (defn ctx-valueType [ctx attr-id] (ctx-ident ctx (get-in ctx [:idents attr-id :db/valueType])))
103 | (defn ctx-entid [ctx ident] (get-in ctx [:entids ident]))
104 | (defn ctx-cardinality [ctx attr-id] (ctx-ident ctx (get-in ctx [:idents attr-id :db/cardinality])))
105 | (defn ctx-card-many? [ctx attr-id] (= :db.cardinality/many
106 |                                       (ctx-ident ctx (get-in ctx [:idents attr-id :db/cardinality]))))
107 |
108 | (defn dash->underscore [s] ; SQL identifiers: kebab-case -> snake_case
109 |   (str/replace s #"-" "_"))
110 | 
111 | (defn table-name [ctx mem-attr] ; table for a membership attr; override via [:tables attr :name], default is the attr's namespace
112 |   (get-in ctx
113 |           [:tables mem-attr :name]
114 |           (dash->underscore (namespace mem-attr))))
115 | 
116 | (defn join-table-name [ctx mem-attr val-attr] ; join-table for a cardinality-many attr; default "<table>_x_<attr>"
117 |   (get-in ctx
118 |           [:tables mem-attr :rename-many-table val-attr]
119 |           (dash->underscore (str (table-name ctx mem-attr) "_x_" (name val-attr)))))
120 | 
121 | (defn column-name [ctx mem-attr col-attr] ; column for an attr; override via [:tables attr :rename]
122 |   (get-in ctx
123 |           [:tables mem-attr :rename col-attr]
124 |           (dash->underscore (name col-attr))))
125 |
126 | (defn attr-db-type [ctx attr-id] ; SQL column type for the attr's :db/valueType (removed a no-op `doto` wrapper left over from debugging)
127 |   (get-in ctx [:db-types (ctx-valueType ctx attr-id)]))
128 |
129 | ;; Transaction processing logic
130 | (defn track-idents [ctx tx-data] ; maintain ctx's :entids (ident->eid) and :idents (eid->entity map) caches from one tx's datoms
131 |   ;; Keep `:entids` and `:idents` up to date based on tx-data, this has some
132 |   ;; shortcomings, but not sure yet about the general approach so will punt on
133 |   ;; those.
134 |   ;;
135 |   ;; - cardinality/many is not properly handled, additional values simply
136 |   ;;   replace the previous value(s) -> might not be a big issue because the
137 |   ;;   attributes we care about are all cardinality/one
138 |   ;;
139 |   (let [db-ident (get-in ctx [:entids :db/ident]) ; entity id of the :db/ident attribute itself
140 |         tx-idents (filter #(= db-ident (-a %)) tx-data) ; datoms asserting/retracting :db/ident
141 |         tx-rest (remove #(= db-ident (-a %)) tx-data)] ; all other datoms in the tx
142 |     (as-> ctx ctx
143 |       (reduce (fn [ctx datom] ; pass 1: register/unregister ident <-> eid mappings
144 |                 (let [e (-e datom)
145 |                       a (-a datom) ; NOTE(review): unused binding
146 |                       ident (-v datom)]
147 |                   (if (-added? datom)
148 |                     (-> ctx
149 |                         (update :entids assoc ident e)
150 |                         (update-in [:idents e] assoc
151 |                                    :db/id e
152 |                                    :db/ident ident))
153 |                     (-> ctx
154 |                         (update :entids dissoc ident)
155 |                         (update :idents dissoc e)))))
156 |               ctx
157 |               tx-idents)
158 |       (reduce (fn [ctx datom] ; pass 2: for entities known to be idents, track their other attributes
159 |                 (let [e (-e datom)
160 |                       a (-a datom)] ; NOTE(review): unused binding
161 |                   (if (get-in ctx [:idents e])
162 |                     (-> ctx
163 |                         (update-in
164 |                          [:idents e]
165 |                          (fn [m]
166 |                            (let [attr (ctx-ident ctx (-a datom))]
167 |                              (if (-added? datom)
168 |                                (assoc m attr (-v datom))
169 |                                (dissoc m attr))))))
170 |                     ctx)))
171 |               ctx
172 |               tx-rest))))
173 |
174 | (defn encode-value [ctx type value] ; encode a Datomic value as something honeysql/JDBC can write
175 |   (case type
176 |     :db.type/ref (if (keyword? value) ; refs to idents arrive as keywords -> resolve to the entity id
177 |                    (ctx-entid ctx value)
178 |                    value)
179 |     :db.type/tuple [:raw (str \' (str/replace (str (charred/write-json-str value)) "'" "''") \' "::jsonb")] ; JSON-encode, escape single quotes, cast to jsonb
180 |     :db.type/keyword (str (when (qualified-ident? value) ; store keywords as "ns/name" text
181 |                             (str (namespace value) "/"))
182 |                           (name value))
183 |     :db.type/instant [:raw (format "to_timestamp(%.3f)" (double (/ (.getTime ^java.util.Date value) 1000)))] ; epoch millis -> to_timestamp with ms precision
184 |     value)) ; everything else passes through unchanged
185 |
186 | (defn card-one-entity-ops [{:keys [tables] :as ctx} mem-attr eid datoms] ; queue SQL ops for one entity's cardinality-one datoms
187 |   (let [missing-cols (sequence ; columns referenced by these datoms that the table doesn't have yet
188 |                       (comp
189 |                        (remove (fn [d]
190 |                                  (get-in ctx [:tables mem-attr :columns (ctx-ident ctx (-a d))])))
191 |                        (map -a)
192 |                        (map (fn [attr-id]
193 |                               (let [attr (ctx-ident ctx attr-id)]
194 |                                 [attr
195 |                                  {:name (column-name ctx mem-attr attr)
196 |                                   :type (attr-db-type ctx attr-id)}]))))
197 |                       datoms)
198 |         retracted? (some (fn [d]
199 |                            ;; Datom with membership attribute was retracted,
200 |                            ;; remove from table
201 |                            (and (not (-added? d))
202 |                                 (= mem-attr (ctx-ident ctx (-a d)))))
203 |                          datoms)]
204 |     (cond-> ctx
205 |       (seq missing-cols) ; schema drift: add the new columns and remember them in ctx
206 |       (-> (update :ops
207 |                   (fnil conj [])
208 |                   [:ensure-columns
209 |                    {:table (table-name ctx mem-attr)
210 |                     :columns (into {} missing-cols)}])
211 |           (update-in [:tables mem-attr :columns] (fnil into {}) missing-cols))
212 |       :-> ; always-truthy test: the following update runs unconditionally
213 |       (update :ops (fnil conj [])
214 |               (if retracted?
215 |                 [:delete ; membership attribute retracted -> drop the whole row
216 |                  {:table (table-name ctx mem-attr)
217 |                   :values {:db/id eid}}]
218 |                 [:upsert ; otherwise upsert the touched columns (retractions become nil values)
219 |                  {:table (table-name ctx mem-attr)
220 |                   :values (into {"db__id" eid}
221 |                                 (map (juxt #(column-name ctx mem-attr (ctx-ident ctx (-a %)))
222 |                                            #(when (-added? %)
223 |                                               (encode-value ctx
224 |                                                             (ctx-valueType ctx (-a %))
225 |                                                             (-v %)))))
226 |                                 datoms)}])))))
227 |
(defn card-many-entity-ops
  "Add `:ops` to the context for the cardinality/many datoms of a single
  entity; each such attribute is stored in its own two-column join table."
  [{:keys [tables] :as ctx} mem-attr eid datoms]
  (let [;; Join tables not yet known to exist (heuristic; creation is idempotent).
        missing-joins (sequence
                       (comp
                        (remove #(get-in ctx [:tables mem-attr :join-tables (ctx-ident ctx (-a %))]))
                        (map -a)
                        (distinct)
                        (map (fn [attr-id]
                               (let [attr (ctx-ident ctx attr-id)]
                                 [attr
                                  {:name (column-name ctx mem-attr attr)
                                   :type (attr-db-type ctx attr-id)}]))))
                       datoms)]
    (cond-> ctx
      (seq missing-joins)
      (-> (update :ops
                  (fnil into [])
                  (for [[val-attr join-opts] missing-joins]
                    [:ensure-join
                     ;; NOTE(review): this op uses `:table-name` while the
                     ;; upsert/delete ops below use `:table` — confirm the
                     ;; op->sql side matches both spellings.
                     {:table-name (join-table-name ctx mem-attr val-attr)
                      :fk-table (table-name ctx mem-attr)
                      :val-attr val-attr
                      :val-type (:type join-opts)} ]))
          (update-in [:tables mem-attr :join-tables] (fnil into {}) missing-joins))
      ;; `:->` is always truthy, so this clause is applied unconditionally.
      :->
      (update :ops
              (fnil into [])
              (for [d datoms]
                (let [attr-id (-a d)
                      attr (ctx-ident ctx attr-id)
                      value (-v d)]
                  [(if (-added? d) :upsert :delete)
                   {:table (join-table-name ctx mem-attr attr)
                    :values {"db__id" eid
                             (column-name ctx mem-attr attr)
                             (encode-value ctx (ctx-valueType ctx attr-id) value)}}]))))))
263 |
(def ignore-idents
  "Idents that should never be propagated to the target database."
  #{:db/fn :db/ensure})
265 |
(defn process-entity
  "Process the datoms of a single entity within one transaction: for every
  configured table whose membership attribute the entity has, emit schema and
  data ops."
  [{:keys [tables] :as ctx} db eid datoms]
  (reduce
   (fn [ctx [mem-attr table-opts]]
     (if (has-attr? db eid mem-attr)
       ;; Cardinality/one and cardinality/many are handled separately, since
       ;; the latter go into join tables.
       (let [datoms (remove (fn [d] (contains? ignore-idents (ctx-ident ctx (-a d)))) datoms)
             card-one-datoms (remove (fn [d] (ctx-card-many? ctx (-a d))) datoms)
             card-many-datoms (filter (fn [d] (ctx-card-many? ctx (-a d))) datoms)]
         (-> ctx
             (card-one-entity-ops mem-attr eid card-one-datoms)
             (card-many-entity-ops mem-attr eid card-many-datoms)))
       ctx))
   ctx
   tables))
279 |
(defn process-tx
  "Process one transaction from the log: update ident tracking, then process
  each touched entity against the database value as of this transaction."
  [ctx conn {:keys [t data]}]
  (let [ctx (track-idents ctx data)
        db (d/as-of (d/db conn) t)
        entities (group-by -e data)]
    (reduce (fn [ctx [eid datoms]]
              (process-entity ctx db eid datoms))
            ctx
            entities)))
288 |
(defmulti op->sql
  "Convert one abstract op (e.g. `[:upsert ...]`) into a sequence of HoneySQL
  maps, dispatching on the op tag."
  first)
(defmethod op->sql :ensure-columns [[_ {:keys [table columns]}]]
  ;; Idempotently create the table (with its bigint primary key), then
  ;; conditionally add each requested column.
  (into
   [{:create-table [table :if-not-exists],
     :with-columns [[:db__id [:raw "bigint"] [:primary-key]]]}]
   (map (fn [[_ {:keys [name type]}]]
          {:alter-table [table]
           :add-column [(keyword name)
                        (if (keyword? type)
                          ;; Annoyingly this is needed because we use `:quote true`, and honey tries to quote the type
                          [:raw (clojure.core/name type)]
                          type)
                        :if-not-exists]}))
   columns))
303 |
(defmethod op->sql :upsert [[_ {:keys [table values]}]]
  ;; Guard against an upsert carrying no columns besides the primary key: an
  ;; empty DO UPDATE SET is invalid SQL, so fall back to DO NOTHING.
  (let [attrs (dissoc values "db__id")
        op {:insert-into [(keyword table)]
            :values [values]
            :on-conflict [:db__id]}]
    [(if (seq attrs)
       (assoc op :do-update-set (keys attrs))
       (assoc op :do-nothing []))]))
309 |
;; Scratch: sanity-check how HoneySQL renders a conditional ADD COLUMN.
(honey/format
 (-> (hh/alter-table :foo)
     (hh/add-column :skin :text :if-not-exists)))
313 |
314 | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
315 |
316 | @(d/transact conn schema)
317 |
318 | @(d/transact conn [{:db/ident :foo/baz
319 | :db/valueType :db.type/ref
320 | :db/cardinality :db.cardinality/one}])
321 |
322 | @(d/transact conn [[:db/retract :foo/bar :db/ident :foo/bar]])
323 |
324 | (fd/create! conn cart)
325 |
326 | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
327 |
(def txs
  "The full transaction log, realized into a vector so the experiments below
  can be re-run cheaply."
  (into []
        (d/tx-range (d/log conn) nil nil)))
331 |
(def ctx
  "Initial context for the experiments below. Depends on `conn`, `metaschema`
  and `pg-type` being defined earlier in this scratch session."
  ;; Bootstrap, make sure we have info about idents that datomic creates itself
  ;; at db creation time. d/as-of t=999 is basically an empty database with only
  ;; metaschema attributes (:db/txInstant etc), since the first "real"
  ;; transaction is given t=1000. Interesting to note that Datomic seems to
  ;; bootstrap in pieces: t=0 most basic idents, t=57 add double, t=63 add
  ;; docstrings, ...
  (let [idents (pull-idents (d/as-of (d/db conn) 999))]
    {:entids (into {} (map (juxt :db/ident :db/id)) idents)
     :idents (into {} (map (juxt :db/id identity)) idents)
     :tables (:tables metaschema)
     :db-types pg-type}))
344 |
;; Scratch: time a full pass over the tx log, realizing all generated ops.
(time
 (doall
  (:ops
   (reduce #(process-tx %1 conn %2) ctx txs))))

;; Scratch: full end-to-end run — process each tx, render its ops to SQL, and
;; execute them against the JDBC datasource `ds`, dropping `:ops` between txs.
(time
 (let [txs (d/tx-range (d/log conn) nil nil)]
   (loop [ctx ctx
          [tx & txs] txs]
     (when tx
       (let [ctx (process-tx ctx conn tx)
             queries (eduction
                      (comp
                       (mapcat op->sql)
                       (map #(honey/format % {:quoted true})))
                      (:ops ctx))]
         (run! #(jdbc/execute! ds %) queries)
         (recur (dissoc ctx :ops) txs)
         )))))
;; Scratch: look at the generated SQL data without executing it.
(time
 (map op->sql
      (:ops
       (reduce #(process-tx %1 conn %2) ctx txs))))

;; Scratch: resolve eid 21 after replaying ident tracking over the whole log.
(ctx-ident
 (reduce track-idents ctx (map :data txs))
 21)

;; Scratch: check how HoneySQL renders CREATE TABLE with a float column.
(honey/format
 (-> (hh/create-table "foo" :if-not-exists)
     (hh/with-columns [[:db__id [:float 32] [:primary-key]]]))
 {:quoted true})

;; NOTE(review): a table name cannot be bound as a `?` parameter in SQL — this
;; statement will most likely fail at the driver level; confirm before reusing.
(jdbc/execute! ds
               ["INSERT INTO ? (\"db__id\", \"document_type_ref\", \"account_type_ref\", \"document_type_ref+account_type_ref\") VALUES (?, ?, ?, ?) ON CONFLICT (\"db__id\") DO UPDATE SET \"document_type_ref\" = EXCLUDED.\"document_type_ref\", \"account_type_ref\" = EXCLUDED.\"account_type_ref\", \"document_type_ref+account_type_ref\" = EXCLUDED.\"document_type_ref+account_type_ref\""
                "journal_entry_document_type"
                17592186045844
                17592186045736
                17592186045537
                "[17592186045537, 17592186045736]"])

;; Scratch: inspect JDBC metadata for table "test" via DatabaseMetaData.
(jdbc/on-connection
 [con ds]
 (-> (.getMetaData con) ; produces java.sql.DatabaseMetaData
     #_ (.getTables nil nil nil (into-array ["TABLE" "VIEW"]))
     (.getColumns nil nil "test" nil)
     (rs/datafiable-result-set ds nil)))
392 |
--------------------------------------------------------------------------------
/src/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lambdaisland/plenish/2a0e0af9cfd604055abdbe528717cabdd49b00fc/src/.gitkeep
--------------------------------------------------------------------------------
/src/lambdaisland/plenish.clj:
--------------------------------------------------------------------------------
1 | (ns lambdaisland.plenish
2 | "Transfer datomic data into a relational target database, transaction per
3 | transaction."
4 | (:require
5 | [charred.api :as charred]
6 | [clojure.string :as str]
7 | [datomic.api :as d]
8 | [honey.sql :as honey]
9 | [honey.sql.helpers :as hh]
10 | [next.jdbc :as jdbc]
11 | [next.jdbc.result-set :as rs]
12 | [lambdaisland.plenish.protocols :as proto]))
13 |
;; Reflective interop would hurt throughput on the datom accessors below, so
;; make any accidental reflection visible at compile time.
(set! *warn-on-reflection* true)

;; Verbose tracing, enabled via the PLENISH_DEBUG environment variable.
;; Dynamic so it can also be rebound from a REPL or per-thread.
(def ^:dynamic *debug* (= (System/getenv "PLENISH_DEBUG") "true"))
(defn dbg
  "When [[*debug*]] is enabled, `prn` the given arguments as a single list.
  Returns nil either way."
  [& args]
  (if *debug*
    (prn args)))
18 |
19 | ;; Basic helpers
20 |
(defn has-attr?
  "Does the entity `eid` have the attribute `attr` in the database `db`.

  Uses direct index access so we can cheaply check if a given entity should be
  added to a given table, based on a membership attribute."
  [db eid attr]
  (-> db
      ;; Narrow :eavt slice; we only need to know whether at least one datom
      ;; exists, so probe the iterator instead of realizing a seq.
      ^Iterable (d/datoms :eavt eid attr)
      .iterator
      .hasNext))
31 |
32 | ;;;;;;;;;;;;;;;;
33 | ;; Datom helpers
(def -e "Get the entity id of a datom" (fn [^datomic.Datom d] (.e d)))
(def -a "Get the attribute of a datom" (fn [^datomic.Datom d] (.a d)))
(def -v "Get the value of a datom" (fn [^datomic.Datom d] (.v d)))
(def -t "Get the transaction number of a datom" (fn [^datomic.Datom d] (.tx d)))
(def -added? "Has this datom been added or retracted?" (fn [^datomic.Datom d] (.added d)))
39 |
40 | ;;;;;;;;;;;;;;;;;;;
41 | ;; Context helpers
42 |
43 | ;; All through the process we pass around a `ctx` map, which encapsulates the
44 | ;; state of the export/import process.
45 |
46 | ;; - `:idents` map from eid (entity id) to entity/value map
47 | ;; - `:entids` map from ident to eid (reverse lookup)
48 | ;; - `:tables` map from membership attribute to table config, as per Datomic
49 | ;; Analytics metaschema We also additionally store a `:columns` map for each
50 | ;; table, to track which columns already exist in the target db.
51 | ;; - `:db-types` mapping from datomic type to target DB type
52 | ;; - `:ops` vector of "operations" that need to be propagated, `[:upsert ...]`, `[:delete ...]`, etc.
53 |
(defn ctx-ident
  "Find an ident (keyword) by eid"
  [ctx eid]
  (-> ctx :idents (get eid) :db/ident))
58 |
(defn ctx-entid
  "Find the numeric eid for a given ident (keyword)"
  [ctx ident]
  (-> ctx :entids (get ident)))
63 |
(defn ctx-valueType
  "Find the valueType (keyword, e.g. `:db.type/string`) for a given attribute (as eid)"
  [ctx attr-id]
  (->> (get-in ctx [:idents attr-id :db/valueType])
       (ctx-ident ctx)))
68 |
(defn ctx-cardinality
  "Find the cardinality (`:db.cardinality/one` or `:db.cardinality/many`) for a
  given attribute (as eid)"
  [ctx attr-id]
  (->> (get-in ctx [:idents attr-id :db/cardinality])
       (ctx-ident ctx)))
74 |
(defn ctx-card-many?
  "Returns true if the given attribute (given as eid) has a cardinality of `:db.cardinality/many`"
  [ctx attr-id]
  (let [cardinality (ctx-cardinality ctx attr-id)]
    (= cardinality :db.cardinality/many)))
79 |
(defn dash->underscore
  "Replace dashes with underscores in string s"
  [s]
  ;; A plain string match is sufficient here, no regex needed.
  (str/replace s "-" "_"))
84 |
85 | ;; A note on naming, PostgreSQL (our primary target) does not have the same
86 | ;; limitations on names that Presto has. We can use e.g. dashes just fine,
87 | ;; assuming names are properly quoted. We've still opted to use underscores by
88 | ;; default, to make ad-hoc querying easier (quoting will often not be
89 | ;; necessary), and to have largely the same table structure as datomic
90 | ;; analytics, to make querying easier.
91 | ;;
92 | ;; That said we don't munge anything else (like a trailing `?` and `!`), and do
93 | ;; rely on PostgreSQL quoting to handle these.
94 |
(defn table-name
  "Find a table name for a given table based on its membership attribute, either
  as configured in the metaschema (`:name`), or derived from the namespace of
  the membership attribute."
  [ctx mem-attr]
  ;; The fallback is computed eagerly, exactly like `get-in`'s default argument.
  (let [derived (dash->underscore (namespace mem-attr))]
    (get-in ctx [:tables mem-attr :name] derived)))
103 |
(defn join-table-name
  "Find a table name for a join table, i.e. a table that is created because of a
  cardinality/many attribute, given the membership attribute of the base table,
  and the cardinality/many attribute. Either returns a name as configured in the
  metaschema under `:rename-many-table`, or derives a name as
  `namespace_mem_attr_x_name_val_attr`."
  [ctx mem-attr val-attr]
  (let [derived (dash->underscore (str (table-name ctx mem-attr) "_x_" (name val-attr)))]
    (get-in ctx [:tables mem-attr :rename-many-table val-attr] derived)))
114 |
(defn column-name
  "Find a name for a column based on the table membership attribute, and the
  attribute whose values are to be stored in the column."
  [ctx mem-attr col-attr]
  (let [derived (dash->underscore (name col-attr))]
    (get-in ctx [:tables mem-attr :rename col-attr] derived)))
122 |
(defn attr-db-type
  "Get the target db type for the given attribute."
  [ctx attr-id]
  (let [value-type (ctx-valueType ctx attr-id)]
    (get (:db-types ctx) value-type)))
127 |
128 | ;; Transaction processing logic
129 |
(defn track-idents
  "Keep `:entids` and `:idents` up to date based on tx-data. This allows us to
  incrementally track schema changes, so we always have the right metadata at
  hand."
  ;; This has a notable shortcoming in that we currently don't treat
  ;; `cardinality/many` in any special way. In the initial `pull-idents` these will
  ;; come through as collections, but any later additions will replace these with
  ;; single values.
  ;;
  ;; However, the attributes we care about so far (`:db/ident`,
  ;; `:db/cardinality`, `:db/valueType`) are all of cardinality/one, so for the
  ;; moment this is not a great issue.
  [ctx tx-data]
  (let [db-ident (get-in ctx [:entids :db/ident])
        tx-idents (filter #(= db-ident (-a %)) tx-data)
        tx-rest (remove #(= db-ident (-a %)) tx-data)]
    (as-> ctx ctx
      ;; Handle `[_ :db/ident _]` datoms first. We can't rely on any ordering of
      ;; datoms within a transaction. If a transaction adds both `:db/ident` and
      ;; other attributes, then we need to process the `:db/ident` datom first,
      ;; so that when we process the rest of the datoms we can see that this eid
      ;; is an ident.
      (reduce (fn [ctx datom]
                (let [e (-e datom)
                      ident (-v datom)]
                  (if (-added? datom)
                    (-> ctx
                        (update :entids assoc ident e)
                        (update-in [:idents e] assoc
                                   :db/id e
                                   :db/ident ident))
                    ;; Retraction of :db/ident drops the entity from both caches.
                    (-> ctx
                        (update :entids dissoc ident)
                        (update :idents dissoc e)))))
              ctx
              tx-idents)
      ;; Handle non `[_ :db/ident _]` datoms: when the entity is a known ident,
      ;; keep its attribute map current.
      (reduce (fn [ctx datom]
                (let [e (-e datom)
                      a (-a datom)]
                  (if (get-in ctx [:idents e])
                    (update-in ctx [:idents e]
                               (fn [m]
                                 (let [attr (ctx-ident ctx a)]
                                   (if (-added? datom)
                                     (assoc m attr (-v datom))
                                     (dissoc m attr)))))
                    ctx)))
              ctx
              tx-rest))))
183 |
184 | ;; The functions below are the heart of the process
185 |
186 | ;; process-tx - process all datoms in a single transaction
187 | ;; \___ process-entity - process datoms within a transaction with the same entity id
188 | ;; \____ card-one-entity-ops - process datoms for all cardinality/one attributes of a single entity
189 | ;; \____ card-many-entity-ops - process datoms for all cardinality/many attributes of a single entity
190 |
(defn card-one-entity-ops
  "Add operations `:ops` to the context for all the `cardinality/one` datoms in a
  single transaction and a single entity/table, identified by its membership
  attribute."
  [{:keys [tables db-adapter] :as ctx} mem-attr eid datoms]
  {:pre [(every? #(= eid (-e %)) datoms)]}
  (let [;; An update of an attribute will manifest as two datoms in the same
        ;; transaction, one with added=true, and one with added=false. In this
        ;; case we can ignore the added=false datom.
        datoms (remove (fn [d]
                         (and (not (-added? d))
                              (some #(and (= (-a d) (-a %))
                                          (-added? %)) datoms)))
                       datoms)
        ;; Figure out which columns don't exist yet in the target database. This
        ;; may find columns that actually do already exist, depending on the
        ;; state of the context. This is fine, we'll process these schema
        ;; changes in an idempotent way, we just want to prevent us from having
        ;; to attempt schema changes for every single transaction.
        missing-cols (sequence
                      (comp
                       (remove (fn [d]
                                 ;; If there's already a `:columns` entry in the
                                 ;; context, then this column already exists in
                                 ;; the target DB, if not it needs to be
                                 ;; created. This is heuristic, we do a
                                 ;; conditional alter table, so no biggie if it
                                 ;; already exists.
                                 (get-in ctx [:tables mem-attr :columns (ctx-ident ctx (-a d))])))
                       (map -a)
                       (map (fn [attr-id]
                              (let [attr (ctx-ident ctx attr-id)]
                                [attr
                                 {:name (column-name ctx mem-attr attr)
                                  :type (attr-db-type ctx attr-id)}]))))
                      datoms)
        ;; Do we need to delete the row corresponding with this entity.
        retracted? (and (some (fn [d]
                                ;; Datom with membership attribute was retracted,
                                ;; remove from table
                                (and (not (-added? d))
                                     (= mem-attr (ctx-ident ctx (-a d)))))
                              datoms)
                        (not (some (fn [d]
                                     ;; Unless the same transaction
                                     ;; immediately adds a new datom with the
                                     ;; membership attribute
                                     (and (-added? d)
                                          (= mem-attr (ctx-ident ctx (-a d)))))
                                   datoms)))]
    ;;(clojure.pprint/pprint ['card-one-entity-ops mem-attr eid datoms retracted?])
    (cond-> ctx
      ;; Evolve the schema
      (seq missing-cols)
      (-> (update :ops
                  (fnil conj [])
                  [:ensure-columns
                   {:table (table-name ctx mem-attr)
                    :columns (into {} missing-cols)}])
          (update-in [:tables mem-attr :columns] (fnil into {}) missing-cols))
      ;; Delete/insert values. `:->` is always truthy, so this cond-> clause is
      ;; applied unconditionally.
      :->
      (update :ops (fnil conj [])
              (if retracted?
                [:delete
                 {:table (table-name ctx mem-attr)
                  :values {"db__id" eid}}]
                ;; The "transactions" table gets plain inserts, presumably
                ;; because transaction rows are written exactly once — confirm.
                (let [table (table-name ctx mem-attr)]
                  [(if (= "transactions" table)
                     :insert
                     :upsert)
                   {:table table
                    :by #{"db__id"}
                    :values (into (cond-> {"db__id" eid}
                                    ;; Bit of manual fudgery to also get the "t"
                                    ;; value of each transaction into
                                    ;; our "transactions" table.
                                    (= :db/txInstant mem-attr)
                                    (assoc "t" (d/tx->t (-t (first datoms)))))
                                  (map (juxt #(column-name ctx mem-attr (ctx-ident ctx (-a %)))
                                             ;; Retractions encode as nil column values.
                                             #(when (-added? %)
                                                (proto/encode-value db-adapter
                                                                    ctx
                                                                    (ctx-valueType ctx (-a %))
                                                                    (-v %)))))
                                  datoms)}]))))))
277 |
(defn card-many-entity-ops
  "Add operations `:ops` to the context for all the `cardinality/many` datoms in a
  single transaction and a single table, identified by its membership attribute.
  Each `:db.cardinality/many` attribute results in a separate two-column join
  table."
  [{:keys [tables db-adapter] :as ctx} mem-attr eid datoms]
  (let [;; Join tables not yet known to exist in the target database
        ;; (heuristic, like missing-cols in card-one-entity-ops — creation is
        ;; idempotent).
        missing-joins (sequence
                       (comp
                        (remove #(get-in ctx [:tables mem-attr :join-tables (ctx-ident ctx (-a %))]))
                        (map -a)
                        (distinct)
                        (map (fn [attr-id]
                               (let [attr (ctx-ident ctx attr-id)]
                                 [attr
                                  {:name (column-name ctx mem-attr attr)
                                   :type (attr-db-type ctx attr-id)}]))))
                       datoms)]
    (cond-> ctx
      (seq missing-joins)
      (-> (update :ops
                  (fnil into [])
                  (for [[val-attr join-opts] missing-joins]
                    [:ensure-join
                     {:table (join-table-name ctx mem-attr val-attr)
                      :fk-table (table-name ctx mem-attr)
                      :val-attr val-attr
                      :val-col (column-name ctx mem-attr val-attr)
                      :val-type (:type join-opts)}]))
          (update-in [:tables mem-attr :join-tables] (fnil into {}) missing-joins))
      ;; `:->` is always truthy, so this cond-> clause is applied unconditionally.
      :->
      (update :ops
              (fnil into [])
              (for [d datoms]
                (let [attr-id (-a d)
                      attr (ctx-ident ctx attr-id)
                      value (-v d)
                      sql-table (join-table-name ctx mem-attr attr)
                      sql-col (column-name ctx mem-attr attr)
                      sql-val (proto/encode-value db-adapter ctx (ctx-valueType ctx attr-id) value)]
                  (if (-added? d)
                    [:upsert
                     {:table sql-table
                      :by #{"db__id" sql-col}
                      :values {"db__id" eid
                               sql-col sql-val}}]
                    ;; Retraction removes exactly the [eid value] row.
                    [:delete
                     {:table sql-table
                      :values {"db__id" eid
                               sql-col sql-val}}])))))))
327 |
(def ignore-idents
  "Built-in idents that are never propagated to the target database: entity
  specs, install instructions, predicates, and transaction functions."
  #{:db/ensure
    :db/fn
    :db.install/valueType
    :db.install/attribute
    :db.install/function
    :db.entity/attrs
    :db.entity/preds
    :db.attr/preds})
336 |
(defn process-entity
  "Process the datoms within a transaction for a single entity. This checks all
  tables to see if the entity contains the membership attribute, if so
  operations get added under `:ops` to evolve the schema and insert the data."
  [{:keys [tables] :as ctx} prev-db db eid datoms]
  ;;(clojure.pprint/pprint ['process-entity eid datoms])
  (reduce
   (fn [ctx [mem-attr table-opts]]
     ;; Consider the entity when it has the membership attribute either before
     ;; or after this transaction (the latter covers retraction handling).
     (if (or (has-attr? prev-db eid mem-attr)
             (has-attr? db eid mem-attr))
       ;; Handle cardinality/one separate from cardinality/many
       (let [datoms (if (not (has-attr? prev-db eid mem-attr))
                      ;; If after the previous transaction the
                      ;; membership attribute wasn't there yet, then
                      ;; it's added in this tx. In that case pull in
                      ;; all pre-existing datoms for the entities,
                      ;; they need to make it across as well.
                      (concat datoms (d/datoms prev-db :eavt eid))
                      datoms)
             datoms (remove (fn [d] (contains? ignore-idents (ctx-ident ctx (-a d)))) datoms)
             card-one-datoms (remove (fn [d] (ctx-card-many? ctx (-a d))) datoms)
             card-many-datoms (filter (fn [d] (ctx-card-many? ctx (-a d))) datoms)]
         (cond-> ctx
           (seq card-one-datoms)
           (card-one-entity-ops mem-attr eid card-one-datoms)

           (seq card-many-datoms)
           (card-many-entity-ops mem-attr eid card-many-datoms)))
       ctx))
   ctx
   tables))
368 |
(defn process-tx
  "Handle a single datomic transaction: append the operations needed to
  propagate the update to `:ops`, while keeping the rest of the context up to
  date — in particular by tracking schema changes and other `:db/ident`
  changes."
  [ctx conn {:keys [t data]}]
  (let [ctx (track-idents ctx data)
        prev-db (d/as-of (d/db conn) (dec t))
        db (d/as-of (d/db conn) t)]
    ;; Datoms are handled entity by entity within the transaction.
    (reduce-kv (fn [ctx eid datoms]
                 (process-entity ctx prev-db db eid datoms))
               ctx
               (group-by -e data))))
383 |
384 | ;; Up to here we've only dealt with extracting information from datomic
385 | ;; transactions, and turning them into
386 | ;; abstract "ops" (:ensure-columns, :upsert, :delete, etc). So this is all
387 | ;; target-database agnostic, what's left is to turn this into SQL and sending it
388 | ;; to the target.
389 |
390 | ;; Converting ops to SQL
391 |
(def pg-type
  "Mapping from datomic value type to PostgreSQL column type.
  NOTE(review): duplicated in `lambdaisland.plenish.adapters.postgres` —
  keep the two in sync."
  {:db.type/ref :bigint
   :db.type/keyword :text
   :db.type/long :bigint
   :db.type/string :text
   :db.type/boolean :boolean
   :db.type/uuid :uuid
   :db.type/instant :timestamp ;; no time zone information in java.util.Date
   :db.type/double [:float 53]
   ;; :db.type/fn
   :db.type/float [:float 24]
   :db.type/bytes :bytea
   :db.type/uri :text
   :db.type/bigint :numeric
   :db.type/bigdec :numeric
   :db.type/tuple :jsonb})
408 |
(defmulti op->sql
  "Convert a single operation (two element vector), into a sequence of HoneySQL
  maps."
  ;; Dispatch on the op tag, e.g. :upsert / :delete / :ensure-columns.
  (fn [[op-type _]] op-type))
413 |
(defmethod op->sql :ensure-columns [[_ {:keys [table columns]}]]
  ;; Idempotently create the table (bigint primary key), then conditionally
  ;; add each requested column.
  (let [create-stmt {:create-table [table :if-not-exists]
                     :with-columns [[:db__id [:raw "bigint"] [:primary-key]]]}
        add-column (fn [{:keys [name type]}]
                     {:alter-table [table]
                      :add-column [(keyword name)
                                   ;; We format with `:quoted true`, which would
                                   ;; also quote a keyword type, so emit raw SQL.
                                   (if (keyword? type)
                                     [:raw (clojure.core/name type)]
                                     type)
                                   :if-not-exists]})]
    (into [create-stmt] (map (comp add-column val)) columns)))
427 |
(defmethod op->sql :insert [[_ {:keys [table values]}]]
  ;; Plain insert without conflict handling, used for the "transactions"
  ;; table. (Removed the unused `by` binding from the destructuring.)
  [{:insert-into [(keyword table)]
    :values [values]}])
431 |
(defmethod op->sql :upsert [[_ {:keys [table by values]}]]
  ;; Insert with ON CONFLICT over the `by` columns. When only the conflict
  ;; columns are present there is nothing to update, and an empty
  ;; DO UPDATE SET is invalid SQL, so fall back to DO NOTHING.
  (let [conflict-cols (map keyword by)
        update-cols (keys (apply dissoc values by))
        stmt {:insert-into [(keyword table)]
              :values [values]
              :on-conflict conflict-cols}]
    [(if (seq update-cols)
       (assoc stmt :do-update-set update-cols)
       (assoc stmt :do-nothing []))]))
440 |
(defmethod op->sql :delete [[_ {:keys [table values]}]]
  ;; Delete rows matching every key/value pair in `values` (ANDed together).
  (let [conditions (for [[col v] values]
                     [:= (keyword col) v])]
    [{:delete-from (keyword table)
      :where (into [:and] conditions)}]))
447 |
(defmethod op->sql :ensure-join [[_ {:keys [table val-col val-type]}]]
  (let [col-kw (keyword val-col)
        ;; We format with `:quoted true`, which would also quote a keyword
        ;; type, so emit raw SQL for keyword types.
        col-type (if (keyword? val-type)
                   [:raw (name val-type)]
                   val-type)]
    [{:create-table [table :if-not-exists]
      :with-columns [[:db__id [:raw "bigint"]]
                     [col-kw col-type]]}
     ;; cardinality/many attributes are not multi-set: a given triplet can only
     ;; be asserted once, so a given [eid value] pair for a given attribute has
     ;; to be unique.
     {::create-index {:on table
                      :name (str "unique_attr_" table "_" val-col)
                      :unique? true
                      :if-not-exists? true
                      :columns [:db__id col-kw]}}]))
462 |
;; HoneySQL does not support CREATE INDEX. It does support adding indexes
;; through ALTER TABLE, but that doesn't seem to give us a convenient way to
;; sneak in the IF NOT EXISTS. Namespacing this because it's a pretty
;; bespoke/specific implementation which we don't want to leak into application
;; code.
(honey/register-clause!
 ::create-index
 (fn [clause {:keys [on name unique? if-not-exists? columns]}]
   [(str "CREATE " (when unique? "UNIQUE ") "INDEX "
         (when if-not-exists? "IF NOT EXISTS ")
         (when name (str (honey/format-entity name) " "))
         "ON " (honey/format-entity on)
         " (" (str/join ", " (map honey/format-entity columns)) ")")])
 ;; Clause priority: format this clause in the position where :alter-table
 ;; would be formatted.
 :alter-table)
477 |
478 | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
479 | ;; Top level process
480 |
(defn- pull-idents
  "Do a full `(pull [*])` of all database entities that have a `:db/ident`, used
  to bootstrap the context. We track all idents and their
  metadata (`:db/valueType`, `:db/cardinality` etc.) in memory inside a `ctx`
  map."
  [db]
  (let [flatten-ref (fn [v]
                      ;; Pulled ref values come back as `{:db/id 123}` maps;
                      ;; keep only the eid.
                      (or (when (map? v) (:db/id v))
                          v))]
    (map #(update-vals % flatten-ref)
         (d/q
          '[:find [(pull ?e [*]) ...]
            :where [?e :db/ident]]
          db))))
500 |
(defn initial-ctx
  "Create the context map that gets passed around all through the process,
  contains both caches for quick lookup of datomic schema information,
  configuration regarding tables and target db, and eventually `:ops` that need
  to be processed.

  The optional `t` is the basis-t at which to snapshot the datomic schema;
  when omitted it defaults to 999 (see below)."
  ([conn metaschema db-adapter]
   (initial-ctx conn metaschema db-adapter nil))
  ([conn metaschema db-adapter t]
   ;; Bootstrap, make sure we have info about idents that datomic creates itself
   ;; at db creation time. d/as-of t=999 is basically an empty database with only
   ;; metaschema attributes (:db/txInstant etc), since the first "real"
   ;; transaction is given t=1000. Interesting to note that Datomic seems to
   ;; bootstrap in pieces: t=0 most basic idents, t=57 add double, t=63 add
   ;; docstrings, ...
   (let [idents (pull-idents (d/as-of (d/db conn) (or t 999)))]
     {;; Track datomic schema
      :entids (into {} (map (juxt :db/ident :db/id)) idents)
      :idents (into {} (map (juxt :db/id identity)) idents)
      ;; Configure/track relational schema
      :tables (-> metaschema
                  :tables
                  (update :db/txInstant assoc :name "transactions")
                  (update :db/ident assoc :name "idents"))
      :db-adapter db-adapter
      ;; Mapping from datomic to relational type
      :db-types (proto/db-type db-adapter)
      ;; Create two columns that don't have an attribute as such in datomic, but
      ;; which we still want to track
      :ops [[:ensure-columns
             {:table "idents"
              :columns {:db/id {:name "db__id"
                                :type :bigint}}}]
            [:ensure-columns
             {:table "transactions"
              :columns {:t {:name "t"
                            :type :bigint}}}]]})))
537 |
(defn import-tx-range
  "Import a range of transactions (e.g. from [[d/tx-range]]) into the target
  database. Takes a `ctx` as per [[initial-ctx]], a datomic connection `conn`,
  and a JDBC datasource `ds`. Returns the final context when the range is
  exhausted."
  [ctx conn ds tx-range]
  (loop [ctx ctx
         [tx & txs] tx-range
         cnt 1]
    ;; Lightweight progress reporting: a dot every 100 transactions, a
    ;; timestamp every 1000.
    (when (= (mod cnt 100) 0)
      (print ".") (flush))
    (when (= (mod cnt 1000) 0)
      (println (str "\n" (java.time.Instant/now))) (flush))
    (if tx
      (let [ctx (process-tx ctx conn tx)
            queries (eduction
                     (comp
                      (mapcat op->sql)
                      (map #(honey/format % {:quoted true})))
                     (:ops ctx))]
        ;; Each datomic transaction gets committed within a separate JDBC
        ;; transaction, and this includes adding an entry to the "transactions"
        ;; table. This allows us to see exactly which transactions have been
        ;; imported, and to resume work from there.
        (dbg 't '--> (:t tx))
        (jdbc/with-transaction [jdbc-tx ds]
          (run! dbg (:ops ctx))
          (run! #(do (dbg %)
                     (jdbc/execute! jdbc-tx %)) queries))
        ;; The `:ops` have been applied; drop them before the next tx.
        (recur (dissoc ctx :ops) txs
               (inc cnt)))
      ctx)))
569 |
(defn find-max-t
  "Find the highest value in the transactions table in postgresql. The sync should
  continue from `(inc (find-max-t ds))`. Returns `nil` when the table does not
  exist yet (i.e. nothing has been imported so far)."
  [ds]
  (:max
   ;; Rename whatever key the driver/builder returns for the single result
   ;; column to `:max` — presumably because the exact (possibly namespaced)
   ;; key varies; confirm against the row builder in use.
   (update-keys
    (first
     (try
       (jdbc/execute! ds ["SELECT max(t) FROM transactions"])
       (catch Exception e
         ;; If the transactions table doesn't yet exist, return `nil`, so we start
         ;; from the beginning of the log
         nil)))
    (constantly :max))))
584 |
(defn sync-to-latest
  "Convenience function that combines the ingredients above for the common case
  of processing all new transactions up to the latest one."
  [datomic-conn pg-conn metaschema db-adapter]
  (let [;; Most recent transaction already copied, nil on a first run.
        max-t (find-max-t pg-conn)
        ;; Bootstrap the context from the datomic schema as of `max-t`; plenish
        ;; tracks subsequent schema changes itself while processing.
        ctx (initial-ctx datomic-conn metaschema db-adapter max-t)
        ;; All transactions that haven't been processed yet.
        txs (d/tx-range (d/log datomic-conn) (when max-t (inc max-t)) nil)]
    (import-tx-range ctx datomic-conn pg-conn txs)))
604 |
--------------------------------------------------------------------------------
/src/lambdaisland/plenish/adapters/duckdb.clj:
--------------------------------------------------------------------------------
1 | (ns lambdaisland.plenish.adapters.duckdb
2 | "The DuckDB adapter"
3 | (:require [charred.api :as charred]
4 | [clojure.string :as str]
5 | [lambdaisland.plenish.protocols :as proto]
6 | [lambdaisland.plenish :as plenish])
7 | (:import [java.sql Timestamp]))
8 |
(defn date->timestamp
  "Convert a java.util.Date into a java.sql.Timestamp (millisecond precision)."
  [^java.util.Date date]
  (-> date .toInstant Timestamp/from))
11 |
(defn db-adapter
  "Create the DuckDB implementation of [[proto/IDatomicEncoder]]: value
  encoding plus the datomic-type → DuckDB column-type mapping."
  []
  (reify proto/IDatomicEncoder
    (encode-value [_ ctx type value]
      (case type
        ;; Idents used as ref values are resolved to their numeric eid.
        :db.type/ref (if (keyword? value)
                       (plenish/ctx-entid ctx value)
                       value)
        ;; JSON-encode tuples; single quotes are doubled for SQL string-literal
        ;; escaping, then cast to json.
        :db.type/tuple [:raw (str \' (str/replace (str (charred/write-json-str value)) "'" "''") \' "::json")]
        :db.type/keyword (str (when (qualified-ident? value)
                                (str (namespace value) "/"))
                              (name value))
        :db.type/instant (date->timestamp value)
        ;; URIs and UUIDs are stored in their string form (see db-type below).
        :db.type/uri (str value)
        :db.type/uuid (str value)
        ;; All other types pass through unchanged.
        value))
    (db-type [_]
      {:db.type/ref :bigint
       :db.type/keyword :text
       :db.type/long :bigint
       :db.type/string :text
       :db.type/boolean :boolean
       :db.type/uuid :uuid
       :db.type/instant :timestamp ;; no time zone information in java.util.Date
       :db.type/double :double
       ;; :db.type/fn
       :db.type/float :float
       :db.type/bytes :bytea
       :db.type/uri :text
       :db.type/bigint :numeric
       :db.type/bigdec :numeric
       :db.type/tuple :json})))
43 |
--------------------------------------------------------------------------------
/src/lambdaisland/plenish/adapters/postgres.clj:
--------------------------------------------------------------------------------
1 | (ns lambdaisland.plenish.adapters.postgres
2 | "The postgres adapter"
3 | (:require [charred.api :as charred]
4 | [clojure.string :as str]
5 | [lambdaisland.plenish.protocols :as proto]
6 | [lambdaisland.plenish :as plenish]))
7 |
(defn db-adapter
  "Build the Postgres implementation of `IDatomicEncoder`.

  `encode-value` converts a Datomic value into a form the Postgres JDBC
  driver accepts; `db-type` maps Datomic value types to Postgres column
  types."
  []
  (reify proto/IDatomicEncoder
    (encode-value [_ ctx type value]
      (case type
        ;; Idents used as enum values arrive as keywords; resolve them to
        ;; the ident's entity id.
        :db.type/ref (if (keyword? value)
                       (plenish/ctx-entid ctx value)
                       value)
        ;; Tuples are stored as JSONB. write-json-str already returns a
        ;; string (the extra `str` wrapper was redundant); double up single
        ;; quotes so the value can be inlined as a quoted SQL literal with a
        ;; ::jsonb cast.
        :db.type/tuple [:raw (str \' (str/replace (charred/write-json-str value) "'" "''") \' "::jsonb")]
        ;; Keywords are stored as "ns/name" (or bare "name") text.
        :db.type/keyword (str (when (qualified-ident? value)
                                (str (namespace value) "/"))
                              (name value))
        ;; Render the epoch seconds with an explicit root locale.
        ;; clojure.core/format delegates to String.format with the JVM's
        ;; default locale, so under e.g. a German locale "%.3f" would render
        ;; a decimal comma ("to_timestamp(1234,567)") and produce invalid
        ;; SQL.
        :db.type/instant [:raw (String/format java.util.Locale/ROOT
                                              "to_timestamp(%.3f)"
                                              (to-array [(double (/ (.getTime ^java.util.Date value) 1000))]))]
        value))
    (db-type [_]
      ;; Datomic value type -> Postgres column type
      {:db.type/ref :bigint
       :db.type/keyword :text
       :db.type/long :bigint
       :db.type/string :text
       :db.type/boolean :boolean
       :db.type/uuid :uuid
       :db.type/instant :timestamp ;; no time zone information in java.util.Date
       :db.type/double [:float 53]
       ;; :db.type/fn
       :db.type/float [:float 24]
       :db.type/bytes :bytea
       :db.type/uri :text
       :db.type/bigint :numeric
       :db.type/bigdec :numeric
       :db.type/tuple :jsonb})))
37 |
--------------------------------------------------------------------------------
/src/lambdaisland/plenish/protocols.clj:
--------------------------------------------------------------------------------
1 | (ns lambdaisland.plenish.protocols
2 | "Main database abstractions defined as protocols")
3 |
(defprotocol IDatomicEncoder
  "Per-target-database encoding strategy: implementations adapt Datomic
  values and value types to what a specific relational database accepts."
  (encode-value [this ctx type value]
    "Encode `value`, whose Datomic value type is `type` (e.g. :db.type/ref),
    into a form the target database driver accepts. `ctx` is the Plenish
    import context.")
  (db-type [this]
    "Return a map from Datomic value type (e.g. :db.type/long) to the target
    database's column type."))
8 |
--------------------------------------------------------------------------------
/test/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lambdaisland/plenish/2a0e0af9cfd604055abdbe528717cabdd49b00fc/test/.gitkeep
--------------------------------------------------------------------------------
/test/lambdaisland/duckdb/plenish_test.clj:
--------------------------------------------------------------------------------
1 | (ns lambdaisland.duckdb.plenish-test
2 | (:require
3 | [clojure.test :refer :all]
4 | [datomic.api :as d]
5 | [lambdaisland.facai :as f]
6 | [lambdaisland.plenish :as plenish]
7 | [lambdaisland.plenish.adapters.duckdb :as duckdb]
8 | [lambdaisland.plenish.factories :as factories]
9 | [lambdaisland.facai.datomic-peer :as fd]
10 | [clojure.instant :as inst]
11 | [clojure.java.shell :refer [sh]]
12 | [next.jdbc :as jdbc]
13 | [next.jdbc.result-set :as rs]))
14 |
;; Per-test bindings, set up by the :each fixture below.
(def ^:dynamic *conn* "Datomic connection" nil)
(def ^:dynamic *ds* "JDBC datasource" nil)

;; DuckDB is file-based; deleting the database file gives a clean replica.
(defn recreate-replica! []
  (sh "rm" "-f" "/tmp/plenish_replica"))
20 |
(use-fixtures :each
  (fn [f]
    ;; Fresh DuckDB file and fresh in-memory Datomic db for every test, so
    ;; tests cannot see each other's data. The factory schema is transacted
    ;; before the test body runs.
    (recreate-replica!)
    (binding [*conn* (d/connect (doto (str "datomic:mem://" (gensym "tst")) d/create-database))
              *ds* (jdbc/get-datasource "jdbc:duckdb:/tmp/plenish_replica")]
      @(d/transact *conn* factories/schema)
      (f))))
28 |
(def db-adapter (duckdb/db-adapter))

(defn import!
  "Run a Plenish import into the DuckDB replica. With one argument the whole
  transaction log is imported; with `t` the tx-range starts at `t`."
  ([metaschema]
   (import! metaschema nil))
  ([metaschema t]
   (let [ctx (plenish/initial-ctx *conn* metaschema db-adapter t)
         txs (d/tx-range (d/log *conn*) t nil)]
     (plenish/import-tx-range ctx *conn* *ds* txs))))
39 |
(defn transact!
  "Transact `tx` on the test connection, blocking until it completes, and
  return the tx-report."
  [tx]
  (deref (d/transact *conn* tx)))
42 |
(defn strip-namespace
  "Return the map with the namespace part dropped from every key, e.g.
  {:users/email ...} becomes {:email ...}."
  [m]
  (update-keys m #(keyword (name %))))
48 |
;; ---------------------------------------------------------------------------
;; Integration tests: create entities in an in-memory Datomic db, run the
;; Plenish import, then assert on what landed in the DuckDB replica. These
;; depend on the exact ordering of transactions and imports.

(deftest basic-create-sync-test
  (fd/create! *conn* factories/cart)

  (import! factories/metaschema)

  ;; The :user/uuid membership attribute is mapped to the "users" table in
  ;; factories/metaschema.
  (is (= (strip-namespace
          {:users/email "arne@example.com"
           :users/email_confirmed? true})
         (jdbc/execute-one!
          *ds*
          ["SELECT email, \"email_confirmed?\" FROM users;"]))))

(deftest add-membership-after-attributes
  ;; The :not-created-yet trait builds the cart without :cart/created-at (its
  ;; membership attribute); it is added in a later transaction.
  (let [fres (fd/create! *conn* factories/cart {:traits [:not-created-yet]})
        cart-id (:db/id (f/sel1 fres factories/cart))
        user-id (:db/id (f/sel1 fres factories/user))]
    (transact! [{:db/id cart-id
                 :cart/created-at #inst "2022-01-01T12:57:01.089-00:00"}])

    (import! factories/metaschema)

    (is (= (strip-namespace
            {:cart/db__id cart-id
             :cart/created_at (inst/read-instant-date "2022-01-01T12:57:01.089")
             :cart/age_ms 123.456
             :cart/user user-id})
           (jdbc/execute-one! *ds* ["SELECT * FROM cart;"])))))

(deftest retract-attribute-test
  (let [fres (fd/create! *conn* factories/cart)
        cart-id (:db/id (f/sel1 fres factories/cart))
        user-id (:db/id (f/sel1 fres factories/user))]

    (transact! [[:db/retract cart-id :cart/age-ms 123.456]])
    (import! factories/metaschema)

    ;; The retracted attribute shows up as a NULL column.
    (is (= (strip-namespace
            {:cart/db__id cart-id
             :cart/created_at (inst/read-instant-date "2022-06-23T12:57:01.089")
             :cart/user user-id
             :cart/age_ms nil})
           (jdbc/execute-one! *ds* ["SELECT * FROM cart;"])))
    ;; NOTE(review): the hard-coded entity ids rely on each test getting a
    ;; fresh in-memory Datomic db (see the fixture) so id allocation is
    ;; stable.
    (is (= (mapv strip-namespace
                 [{:cart_x_line_items/db__id 17592186045418
                   :cart_x_line_items/line_items 17592186045419}
                  {:cart_x_line_items/db__id 17592186045418
                   :cart_x_line_items/line_items 17592186045420}])
           (jdbc/execute! *ds* ["SELECT * FROM cart_x_line_items;"])))))

(deftest retract-entity-test
  (let [fres (fd/create! *conn* factories/cart)
        cart-id (:db/id (f/sel1 fres factories/cart))
        user-id (:db/id (f/sel1 fres factories/user))]

    (transact! [[:db/retractEntity cart-id]])
    (import! factories/metaschema)

    ;; Retracting the entity removes both its row and its cardinality/many
    ;; join-table rows.
    (is (= [] (jdbc/execute! *ds* ["SELECT * FROM cart;"])))
    (is (= [] (jdbc/execute! *ds* ["SELECT * FROM cart_x_line_items;"])))))

(deftest ident-enum-test
  (testing "using a ref attribute and idents as an enum-type value"
    (transact! [{:db/ident :fruit/apple}
                {:db/ident :fruit/orange}
                {:db/ident :fruit/type
                 :db/valueType :db.type/ref
                 :db/cardinality :db.cardinality/one}])
    (let [{:keys [tempids]} (transact! [{:db/id "apple"
                                         :fruit/type :fruit/apple}
                                        {:db/id "orange"
                                         :fruit/type :fruit/orange}])]
      (import! {:tables {:fruit/type {}}})

      ;; The ref column stores the ident's entity id; joining against the
      ;; idents table recovers the readable name.
      (is (= (mapv strip-namespace
                   [{:fruit/db__id (get tempids "apple")
                     :idents/ident "fruit/apple"}
                    {:fruit/db__id (get tempids "orange")
                     :idents/ident "fruit/orange"}])
             (jdbc/execute! *ds* ["SELECT fruit.db__id, idents.ident FROM fruit, idents WHERE fruit.type = idents.db__id;"]))))))

(deftest update-cardinality-one-attribute--membership
  (testing "membership attribute"
    (transact! [{:db/ident :fruit/type
                 :db/valueType :db.type/string
                 :db/cardinality :db.cardinality/one}])

    (let [tx-report (transact! [{:db/id "apple"
                                 :fruit/type "apple"}])
          {apple-id "apple"} (:tempids tx-report)]
      (transact! [{:db/id apple-id
                   :fruit/type "orange"}])

      ;; Updating a cardinality/one value must replace the row, not add one.
      (import! {:tables {:fruit/type {}}})
      (is (= [(strip-namespace
               {:fruit/db__id apple-id
                :fruit/type "orange"})]
             (jdbc/execute! *ds* ["SELECT * FROM fruit"]))))))

(deftest update-cardinality-one-attribute--regular
  (testing "regular attribute"
    (transact! [{:db/ident :veggie/type
                 :db/valueType :db.type/string
                 :db/cardinality :db.cardinality/one}
                {:db/ident :veggie/rating
                 :db/valueType :db.type/long
                 :db/cardinality :db.cardinality/one}])

    (let [tx-report (transact! [{:db/id "brocolli"
                                 :veggie/type "brocolli"
                                 :veggie/rating 4}])
          {brocolli-id "brocolli"} (:tempids tx-report)]
      (transact! [{:db/id brocolli-id
                   :veggie/rating 5}])

      ;; Only :veggie/rating changed; :veggie/type keeps its original value.
      (import! {:tables {:veggie/type {}}})
      (is (= [(strip-namespace
               {:veggie/db__id brocolli-id
                :veggie/type "brocolli"
                :veggie/rating 5})]
             (jdbc/execute! *ds* ["SELECT * FROM veggie"]))))))

(deftest update-cardinality-many-attribute
  ;; Does it make sense to have a cardinality/many attribute be the membership
  ;; attribute? Not sure. Punting on this for now.
  #_(testing "membership attribute")

  (testing "regular attribute"
    (transact! [{:db/ident :veggie/type
                 :db/valueType :db.type/string
                 :db/cardinality :db.cardinality/one}
                {:db/ident :veggie/rating
                 :db/valueType :db.type/long
                 :db/cardinality :db.cardinality/many}])

    (let [tx-report (transact! [{:db/id "brocolli"
                                 :veggie/type "brocolli"
                                 :veggie/rating 4}])
          {brocolli-id "brocolli"} (:tempids tx-report)]
      (transact! [[:db/add brocolli-id :veggie/rating 5]
                  [:db/retract brocolli-id :veggie/rating 4]])

      (import! {:tables {:veggie/type {}}})
      (is (= [(strip-namespace
               {:veggie_x_rating/db__id brocolli-id
                :veggie_x_rating/rating 5})]
             (jdbc/execute! *ds* ["SELECT * FROM veggie_x_rating"])))

      (transact! [[:db/add brocolli-id :veggie/rating 9000]])

      ;; Second, incremental import: start right after the last imported
      ;; transaction.
      (import! {:tables {:veggie/type {}}} (inc (plenish/find-max-t *ds*)))
      (is (= (mapv strip-namespace
                   [{:veggie_x_rating/db__id brocolli-id
                     :veggie_x_rating/rating 5}
                    {:veggie_x_rating/db__id brocolli-id
                     :veggie_x_rating/rating 9000}])
             (jdbc/execute! *ds* ["SELECT * FROM veggie_x_rating"]))))))

(deftest duplicate-import-throws
  (testing "Trying to import a transaction that was already processed should throw"
    (fd/create! *conn* factories/cart)
    (import! factories/metaschema)

    ;; Re-importing from max-t replays an already-processed transaction,
    ;; which the replica rejects with a SQLException.
    (let [max-t (plenish/find-max-t *ds*)]
      (is (thrown? java.sql.SQLException
                   (import! factories/metaschema max-t))))))
214 |
215 | (comment
216 | ;; REPL alternative to fixture
217 | (recreate-replica!)
218 | (def *conn* (doto (d/connect
219 | (doto (str "datomic:mem://" (gensym "tst"))
220 | d/create-database))
221 | (d/transact factories/schema)))
222 | (def ^:dynamic *ds* (jdbc/get-datasource "jdbc:duckdb:/tmp/plenish_replica"))
223 |
224 | (require 'kaocha.repl)
225 | (kaocha.repl/run `basic-create-sync-test)
226 | (kaocha.repl/run `add-membership-after-attributes)
227 | (kaocha.repl/run `retract-attribute-test)
228 | (kaocha.repl/run `retract-entity-test)
229 | (kaocha.repl/run `ident-enum-test)
230 | (kaocha.repl/run `update-cardinality-one-attribute--membership)
231 | (kaocha.repl/run `update-cardinality-one-attribute--regular)
232 | (kaocha.repl/run `update-cardinality-many-attribute)
233 | (kaocha.repl/run `duplicate-import-throws)
234 |
235 | (kaocha.repl/test-plan))
236 |
--------------------------------------------------------------------------------
/test/lambdaisland/plenish/factories.clj:
--------------------------------------------------------------------------------
1 | (ns lambdaisland.plenish.factories
2 | "Factories so we can easily populate some test dbs"
3 | (:require [lambdaisland.facai :as f]))
4 |
;; Factory for a single cart line item; covers string, long, bigdec and
;; keyword attribute types.
(f/defactory line-item
  {:line-item/description "Widgets"
   :line-item/quantity 5
   :line-item/price 1.0M
   :line-item/category :items})
10 |
;; Factory for a user entity; covers boolean, uuid, bytes and uri attribute
;; types.
(f/defactory user
  {:user/email "arne@example.com"
   :user/email-confirmed? true
   :user/uuid #uuid "f7ea3cda-9fbb-4af4-9b2f-db72a3b57781"
   :user/avatar (byte-array [1 2 3 4])
   :user/homepage (java.net.URI. "http://example.com")})
17 |
;; Factory for a cart with two line items and a user. The :not-created-yet
;; trait dissocs :cart/created-at from the built result, so the entity is
;; initially transacted without the attribute that `metaschema` below uses
;; as the cart table's membership attribute.
(f/defactory cart
  {:cart/created-at #inst "2022-06-23T12:57:01.089000000-00:00"
   :cart/age-ms 123.456
   :cart/line-items [line-item line-item]
   :cart/user user}

  :traits
  {:not-created-yet
   {:after-build
    (fn [ctx]
      (f/update-result ctx dissoc :cart/created-at))}})
29 |
(defn s
  "More concise Datomic schema notation: expand a [ident type opts?] triple
  into a full schema entity map. Cardinality defaults to
  :db.cardinality/one; anything in opts overrides the defaults."
  [[sname type opts]]
  (into {:db/ident sname
         :db/valueType (keyword "db.type" (name type))
         :db/cardinality :db.cardinality/one}
        opts))
38 |
(def schema
  ;; Datomic schema for the factories above, written in the concise
  ;; [ident type opts?] notation expanded by `s`.
  (map
   s
   [[:line-item/description :string]
    [:line-item/quantity :long]
    [:line-item/price :bigdec]
    [:line-item/category :keyword]
    [:user/email :string]
    [:user/email-confirmed? :boolean]
    [:user/uuid :uuid]
    [:user/avatar :bytes]
    [:user/homepage :uri]
    [:cart/created-at :instant]
    [:cart/age-ms :double]
    [:cart/line-items :ref {:db/cardinality :db.cardinality/many}]
    [:cart/user :ref]]))
55 |
(def metaschema
  ;; Membership attribute -> table config. :user/uuid entities go to a table
  ;; explicitly named "users"; the other tables take their name from the
  ;; attribute's namespace.
  {:tables {:line-item/price {}
            :cart/created-at {}
            :user/uuid {:name "users"}}})
60 |
--------------------------------------------------------------------------------
/test/lambdaisland/postgres/plenish_test.clj:
--------------------------------------------------------------------------------
1 | (ns lambdaisland.postgres.plenish-test
2 | (:require [clojure.test :refer :all]
3 | [datomic.api :as d]
4 | [lambdaisland.facai :as f]
5 | [lambdaisland.plenish :as plenish]
6 | [lambdaisland.plenish.adapters.postgres :as postgres]
7 | [lambdaisland.plenish.factories :as factories]
8 | [lambdaisland.facai.datomic-peer :as fd]
9 | [clojure.instant :as inst]
10 | [next.jdbc :as jdbc]
11 | [next.jdbc.result-set :as rs]))
12 |
;; Per-test bindings, set up by the :each fixture below.
(def ^:dynamic *conn* "Datomic connection" nil)
(def ^:dynamic *ds* "JDBC datasource" nil)

;; Start a throwaway Postgres for these tests with:
;; docker run -e POSTGRES_HOST_AUTH_METHOD=trust -p 5432:5432 postgres

(defn recreate-replica! []
  ;; Connect to the default "postgres" database so the "replica" database
  ;; used by the tests can be dropped and recreated.
  (let [ds (jdbc/get-datasource "jdbc:pgsql://localhost:5432/postgres?user=postgres")]
    (jdbc/execute! ds ["DROP DATABASE IF EXISTS replica;"])
    (jdbc/execute! ds ["CREATE DATABASE replica;"])))
22 |
(use-fixtures :each
  (fn [f]
    ;; Fresh "replica" database and fresh in-memory Datomic db for every
    ;; test, so tests cannot see each other's data. The factory schema is
    ;; transacted before the test body runs.
    (recreate-replica!)
    (binding [*conn* (d/connect (doto (str "datomic:mem://" (gensym "tst")) d/create-database))
              *ds* (jdbc/get-datasource "jdbc:pgsql://localhost:5432/replica?user=postgres")]
      @(d/transact *conn* factories/schema)
      (f))))
30 |
(def db-adapter (postgres/db-adapter))

(defn import!
  "Run a Plenish import into the Postgres replica. With one argument the
  whole transaction log is imported; with `t` the tx-range starts at `t`."
  ([metaschema]
   (import! metaschema nil))
  ([metaschema t]
   (let [ctx (plenish/initial-ctx *conn* metaschema db-adapter t)
         txs (d/tx-range (d/log *conn*) t nil)]
     (plenish/import-tx-range ctx *conn* *ds* txs))))
41 |
(defn transact!
  "Transact `tx` on the test connection, blocking until it completes, and
  return the tx-report."
  [tx]
  (deref (d/transact *conn* tx)))
44 |
;; ---------------------------------------------------------------------------
;; Integration tests: create entities in an in-memory Datomic db, run the
;; Plenish import, then assert on what landed in the Postgres replica. These
;; depend on the exact ordering of transactions and imports.

(deftest basic-create-sync-test
  (fd/create! *conn* factories/cart)

  (import! factories/metaschema)

  ;; The :user/uuid membership attribute is mapped to the "users" table in
  ;; factories/metaschema.
  (is (= {:users/email "arne@example.com"
          :users/email_confirmed? true}
         (jdbc/execute-one!
          *ds*
          ["SELECT email, \"email_confirmed?\" FROM users;"]))))

(deftest add-membership-after-attributes
  ;; The :not-created-yet trait builds the cart without :cart/created-at (its
  ;; membership attribute); it is added in a later transaction.
  (let [fres (fd/create! *conn* factories/cart {:traits [:not-created-yet]})
        cart-id (:db/id (f/sel1 fres factories/cart))
        user-id (:db/id (f/sel1 fres factories/user))]
    (transact! [{:db/id cart-id
                 :cart/created-at #inst "2022-01-01T12:57:01.089-00:00"}])

    (import! factories/metaschema)

    (is (= {:cart/db__id cart-id
            :cart/created_at (inst/read-instant-date "2022-01-01T12:57:01.089")
            :cart/age_ms 123.456
            :cart/user user-id}
           (jdbc/execute-one! *ds* ["SELECT * FROM cart;"])))))

(deftest retract-attribute-test
  (let [fres (fd/create! *conn* factories/cart)
        cart-id (:db/id (f/sel1 fres factories/cart))
        user-id (:db/id (f/sel1 fres factories/user))]

    (transact! [[:db/retract cart-id :cart/age-ms 123.456]])
    (import! factories/metaschema)

    ;; The retracted attribute shows up as a NULL column.
    (is (= {:cart/db__id cart-id
            :cart/created_at (inst/read-instant-date "2022-06-23T12:57:01.089")
            :cart/user user-id
            :cart/age_ms nil}
           (jdbc/execute-one! *ds* ["SELECT * FROM cart;"])))
    ;; NOTE(review): the hard-coded entity ids rely on each test getting a
    ;; fresh in-memory Datomic db (see the fixture) so id allocation is
    ;; stable.
    (is (= [{:cart_x_line_items/db__id 17592186045418
             :cart_x_line_items/line_items 17592186045419}
            {:cart_x_line_items/db__id 17592186045418
             :cart_x_line_items/line_items 17592186045420}]
           (jdbc/execute! *ds* ["SELECT * FROM cart_x_line_items;"])))))

(deftest retract-entity-test
  (let [fres (fd/create! *conn* factories/cart)
        cart-id (:db/id (f/sel1 fres factories/cart))
        user-id (:db/id (f/sel1 fres factories/user))]

    (transact! [[:db/retractEntity cart-id]])
    (import! factories/metaschema)

    ;; Retracting the entity removes both its row and its cardinality/many
    ;; join-table rows.
    (is (= [] (jdbc/execute! *ds* ["SELECT * FROM cart;"])))
    (is (= [] (jdbc/execute! *ds* ["SELECT * FROM cart_x_line_items;"])))))

(deftest ident-enum-test
  (testing "using a ref attribute and idents as an enum-type value"
    (transact! [{:db/ident :fruit/apple}
                {:db/ident :fruit/orange}
                {:db/ident :fruit/type
                 :db/valueType :db.type/ref
                 :db/cardinality :db.cardinality/one}])
    (let [{:keys [tempids]} (transact! [{:db/id "apple"
                                         :fruit/type :fruit/apple}
                                        {:db/id "orange"
                                         :fruit/type :fruit/orange}])]
      (import! {:tables {:fruit/type {}}})

      ;; The ref column stores the ident's entity id; joining against the
      ;; idents table recovers the readable name.
      (is (= [{:fruit/db__id (get tempids "apple")
               :idents/ident "fruit/apple"}
              {:fruit/db__id (get tempids "orange")
               :idents/ident "fruit/orange"}]
             (jdbc/execute! *ds* ["SELECT fruit.db__id, idents.ident FROM fruit, idents WHERE fruit.type = idents.db__id;"]))))))

(deftest update-cardinality-one-attribute--membership
  (testing "membership attribute"
    (transact! [{:db/ident :fruit/type
                 :db/valueType :db.type/string
                 :db/cardinality :db.cardinality/one}])

    (let [tx-report (transact! [{:db/id "apple"
                                 :fruit/type "apple"}])
          {apple-id "apple"} (:tempids tx-report)]
      (transact! [{:db/id apple-id
                   :fruit/type "orange"}])

      ;; Updating a cardinality/one value must replace the row, not add one.
      (import! {:tables {:fruit/type {}}})
      (is (= [{:fruit/db__id apple-id
               :fruit/type "orange"}]
             (jdbc/execute! *ds* ["SELECT * FROM fruit"]))))))

(deftest update-cardinality-one-attribute--regular
  (testing "regular attribute"
    (transact! [{:db/ident :veggie/type
                 :db/valueType :db.type/string
                 :db/cardinality :db.cardinality/one}
                {:db/ident :veggie/rating
                 :db/valueType :db.type/long
                 :db/cardinality :db.cardinality/one}])

    (let [tx-report (transact! [{:db/id "brocolli"
                                 :veggie/type "brocolli"
                                 :veggie/rating 4}])
          {brocolli-id "brocolli"} (:tempids tx-report)]
      (transact! [{:db/id brocolli-id
                   :veggie/rating 5}])

      ;; Only :veggie/rating changed; :veggie/type keeps its original value.
      (import! {:tables {:veggie/type {}}})
      (is (= [{:veggie/db__id brocolli-id
               :veggie/type "brocolli"
               :veggie/rating 5}]
             (jdbc/execute! *ds* ["SELECT * FROM veggie"]))))))

(deftest update-cardinality-many-attribute
  ;; Does it make sense to have a cardinality/many attribute be the membership
  ;; attribute? Not sure. Punting on this for now.
  #_(testing "membership attribute")

  (testing "regular attribute"
    (transact! [{:db/ident :veggie/type
                 :db/valueType :db.type/string
                 :db/cardinality :db.cardinality/one}
                {:db/ident :veggie/rating
                 :db/valueType :db.type/long
                 :db/cardinality :db.cardinality/many}])

    (let [tx-report (transact! [{:db/id "brocolli"
                                 :veggie/type "brocolli"
                                 :veggie/rating 4}])
          {brocolli-id "brocolli"} (:tempids tx-report)]
      (transact! [[:db/add brocolli-id :veggie/rating 5]
                  [:db/retract brocolli-id :veggie/rating 4]])

      (import! {:tables {:veggie/type {}}})
      (is (= [{:veggie_x_rating/db__id brocolli-id
               :veggie_x_rating/rating 5}]
             (jdbc/execute! *ds* ["SELECT * FROM veggie_x_rating"])))

      (transact! [[:db/add brocolli-id :veggie/rating 9000]])

      ;; Second, incremental import: start right after the last imported
      ;; transaction.
      (import! {:tables {:veggie/type {}}} (inc (plenish/find-max-t *ds*)))
      (is (= [{:veggie_x_rating/db__id brocolli-id
               :veggie_x_rating/rating 5}
              {:veggie_x_rating/db__id brocolli-id
               :veggie_x_rating/rating 9000}]
             (jdbc/execute! *ds* ["SELECT * FROM veggie_x_rating"]))))))

(deftest duplicate-import-throws
  (testing "Trying to import a transaction that was already processed should throw"
    (fd/create! *conn* factories/cart)
    (import! factories/metaschema)

    ;; Re-importing from max-t replays an already-processed transaction,
    ;; which the replica rejects with an integrity-constraint violation.
    (let [max-t (plenish/find-max-t *ds*)]
      (is (thrown? com.impossibl.postgres.jdbc.PGSQLIntegrityConstraintViolationException
                   (import! factories/metaschema max-t))))))
201 |
202 | (comment
203 | ;; REPL alternative to fixture
204 | (recreate-replica!)
205 | (def *conn* (doto (d/connect
206 | (doto (str "datomic:mem://" (gensym "tst"))
207 | d/create-database))
208 | (d/transact factories/schema)))
209 | (def *ds* (jdbc/get-datasource "jdbc:pgsql://localhost:5432/replica?user=postgres"))
210 |
211 | (require 'kaocha.repl)
212 | (kaocha.repl/run `update-cardinality-many-attribute)
213 |
214 | (kaocha.repl/test-plan))
215 |
--------------------------------------------------------------------------------
/tests.edn:
--------------------------------------------------------------------------------
;; Kaocha test-runner configuration
#kaocha/v1
{:plugins [:notifier :print-invocations :profiling]
 ;; print test output directly instead of capturing it
 :capture-output? false}
4 |
--------------------------------------------------------------------------------