├── test
├── test_helper.exs
└── kino_db
│ ├── connection_cell_test.exs
│ └── sql_cell_test.exs
├── .gitattributes
├── .formatter.exs
├── .editorconfig
├── guides
└── components.livemd
├── lib
├── kino_db
│ ├── application.ex
│ ├── sql_cell.ex
│ └── connection_cell.ex
├── kino_db.ex
└── assets
│ ├── sql_cell
│ ├── main.css
│ └── main.js
│ └── connection_cell
│ ├── main.css
│ └── main.js
├── .gitignore
├── README.md
├── .github
└── workflows
│ └── test.yml
├── mix.exs
├── CHANGELOG.md
├── mix.lock
└── LICENSE
/test/test_helper.exs:
--------------------------------------------------------------------------------
# Boots the ExUnit framework so `mix test` can discover and run the suite.
ExUnit.start()
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # convert all CRLF to LF
2 | * text=auto eol=lf
3 |
--------------------------------------------------------------------------------
/.formatter.exs:
--------------------------------------------------------------------------------
1 | # Used by "mix format"
2 | [
3 | inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
4 | ]
5 |
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # https://EditorConfig.org
2 |
3 | root = true
4 |
5 | [*]
6 | end_of_line = lf
7 | insert_final_newline = true
8 |
--------------------------------------------------------------------------------
/guides/components.livemd:
--------------------------------------------------------------------------------
1 | # Components
2 |
3 | ```elixir
4 | Mix.install([
5 | # {:postgrex, "~> 0.19"},
6 | # {:myxql, "~> 0.7.0"},
7 |   {:kino_db, "~> 0.4.0"}
8 | ])
9 | ```
10 |
11 | ## Smart cells
12 |
13 | The following Smart cells are available:
14 |
15 | * **Database connection** - for establishing a connection to a database
16 | * **SQL query** - for querying a database
17 |
--------------------------------------------------------------------------------
/lib/kino_db/application.ex:
--------------------------------------------------------------------------------
defmodule KinoDB.Application do
  @moduledoc false

  use Application

  @impl true
  def start(_type, _args) do
    # Register both smart cells with Kino on boot so Livebook can offer them
    # in its smart-cell picker.
    Kino.SmartCell.register(KinoDB.ConnectionCell)
    Kino.SmartCell.register(KinoDB.SQLCell)

    # No processes to supervise; an empty supervisor is started only to
    # satisfy the Application behaviour contract.
    children = []
    opts = [strategy: :one_for_one, name: KinoDB.Supervisor]
    Supervisor.start_link(children, opts)
  end
end
--------------------------------------------------------------------------------
/lib/kino_db.ex:
--------------------------------------------------------------------------------
# Result structs produced by the supported database drivers. Every driver is
# an optional dependency (see mix.exs), so any of these modules may be absent
# at compile time.
results = [
  Postgrex.Result,
  MyXQL.Result,
  Exqlite.Result,
  ReqBigQuery.Result,
  ReqAthena.Result,
  Tds.Result,
  Adbc.Result
]

# Implement Kino.Render only for the result structs whose modules are actually
# loaded, so query results render as a Livebook data table. `Code.ensure_loaded?/1`
# guards against defining an impl for a driver the user did not install.
for mod <- results, Code.ensure_loaded?(mod) do
  defimpl Kino.Render, for: mod do
    def to_livebook(result) do
      result
      |> Kino.DataTable.new(name: "Results")
      |> Kino.Render.to_livebook()
    end
  end
end
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # The directory Mix will write compiled artifacts to.
2 | /_build/
3 |
4 | # If you run "mix test --cover", coverage assets end up here.
5 | /cover/
6 |
7 | # The directory Mix downloads your dependencies sources to.
8 | /deps/
9 |
10 | # Where third-party dependencies like ExDoc output generated docs.
11 | /doc/
12 |
13 | # Ignore .fetch files in case you like to edit your project deps locally.
14 | /.fetch
15 |
16 | # If the VM crashes, it generates a dump, let's ignore it too.
17 | erl_crash.dump
18 |
19 | # Also ignore archive artifacts (built via "mix archive.build").
20 | *.ez
21 |
22 | # Ignore package tarball (built via "mix hex.build").
23 | kino_db-*.tar
24 |
25 | # Temporary files, for example, from tests.
26 | /tmp/
27 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # KinoDB
2 |
3 | [Docs](https://hexdocs.pm/kino_db)
4 | [CI](https://github.com/livebook-dev/kino_db/actions)
5 |
6 | Database integrations with [Kino](https://github.com/livebook-dev/kino)
7 | for [Livebook](https://github.com/livebook-dev/livebook).
8 |
9 | ## Installation
10 |
11 | To bring KinoDB to Livebook all you need to do is `Mix.install/2`:
12 |
13 | ```elixir
14 | Mix.install([
15 |   {:kino_db, "~> 0.4.0"}
16 | ])
17 | ```
18 |
19 | ## License
20 |
21 | Copyright (C) 2022 Dashbit
22 |
23 | Licensed under the Apache License, Version 2.0 (the "License");
24 | you may not use this file except in compliance with the License.
25 | You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)
26 |
27 | Unless required by applicable law or agreed to in writing, software
28 | distributed under the License is distributed on an "AS IS" BASIS,
29 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
30 | See the License for the specific language governing permissions and
31 | limitations under the License.
32 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Test
2 | on:
3 | pull_request:
4 | push:
5 | branches:
6 | - main
7 | jobs:
8 | main:
9 | runs-on: ubuntu-latest
10 | strategy:
11 | fail-fast: false
12 | matrix:
13 | include:
14 | - pair:
15 | elixir: "1.14.2"
16 | otp: "25.0"
17 |
18 | - pair:
19 | elixir: "1.17.3"
20 | otp: "27.1.2"
21 | lint: true
22 |
23 | steps:
24 | - uses: actions/checkout@v4
25 |
26 | - uses: erlef/setup-beam@v1
27 | with:
28 | otp-version: ${{matrix.pair.otp}}
29 | elixir-version: ${{matrix.pair.elixir}}
30 |
31 | - uses: actions/cache@v4
32 | with:
33 | path: |
34 | deps
35 | _build
36 | key: ${{ runner.os }}-mix-${{matrix.pair.elixir}}-${{matrix.pair.otp}}-${{ hashFiles('**/mix.lock') }}
37 | restore-keys: |
38 | ${{ runner.os }}-mix-
39 |
40 | - run: mix deps.get
41 |
42 | - run: mix format --check-formatted
43 | if: ${{ matrix.lint }}
44 |
45 | - run: mix deps.unlock --check-unused
46 | if: ${{ matrix.lint }}
47 |
48 | - run: mix deps.compile
49 |
50 | - run: mix compile --warnings-as-errors
51 | if: ${{ matrix.lint }}
52 |
53 | - run: mix test
54 |
--------------------------------------------------------------------------------
/mix.exs:
--------------------------------------------------------------------------------
defmodule KinoDB.MixProject do
  use Mix.Project

  @version "0.4.0"
  @description "Databases integration with Livebook"

  def project do
    [
      app: :kino_db,
      version: @version,
      description: @description,
      name: "KinoDB",
      elixir: "~> 1.14",
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      docs: docs(),
      package: package()
    ]
  end

  def application do
    [
      # KinoDB.Application registers the smart cells when the app boots.
      mod: {KinoDB.Application, []}
    ]
  end

  defp deps do
    [
      {:kino, "~> 0.13"},
      {:table, "~> 0.1"},
      # Database drivers are optional; users install only the ones they need.
      {:adbc, "~> 0.8", optional: true},
      {:db_connection, "~> 2.4.2 or ~> 2.5", optional: true},
      {:exqlite, "~> 0.11", optional: true},
      {:myxql, "~> 0.7", optional: true},
      {:postgrex, "~> 0.18 or ~> 1.0", optional: true},
      {:tds, "~> 2.3.4 or ~> 2.4", optional: true},

      # Those dependencies are new, so we use stricter versions
      {:req_athena, "~> 0.3.0", optional: true},
      {:req_ch, "~> 0.1.0", optional: true},

      # Dev only
      {:ex_doc, "~> 0.28", only: :dev, runtime: false}
    ]
  end

  defp docs do
    [
      main: "components",
      source_url: "https://github.com/livebook-dev/kino_db",
      source_ref: "v#{@version}",
      extras: ["guides/components.livemd"]
    ]
  end

  def package do
    [
      licenses: ["Apache-2.0"],
      links: %{
        "GitHub" => "https://github.com/livebook-dev/kino_db"
      }
    ]
  end
end
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this project will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7 |
8 | ## [v0.4.0](https://github.com/livebook-dev/kino_db/tree/v0.4.0) (2025-10-24)
9 |
10 | ### Changed
11 |
12 | * Use `Adbc.Result` instead of `Explorer` when possible
13 |
14 | ## [v0.3.1](https://github.com/livebook-dev/kino_db/tree/v0.3.1) (2025-08-15)
15 |
16 | ### Added
17 |
18 | * Support for Snowflake's `auth_jwt` authentication method ([#85](https://github.com/livebook-dev/kino_db/pull/85))
19 |
20 | ## [v0.3.0](https://github.com/livebook-dev/kino_db/tree/v0.3.0) (2025-01-08)
21 |
22 | ### Changed
23 |
24 | * Google BigQuery integration to use ADBC ([#84](https://github.com/livebook-dev/kino_db/pull/84))
25 | * Athena integration to use ReqAthena v0.3 ([#84](https://github.com/livebook-dev/kino_db/pull/84))
26 |
27 | ## [v0.2.14](https://github.com/livebook-dev/kino_db/tree/v0.2.14) (2024-11-29)
28 |
29 | ### Added
30 |
31 | * Support for ClickHouse ([#82](https://github.com/livebook-dev/kino_db/pull/82))
32 |
33 | ### Fixed
34 |
35 | * Trim values for connection options ([#80](https://github.com/livebook-dev/kino_db/pull/80))
36 |
37 | ## [v0.2.13](https://github.com/livebook-dev/kino_db/tree/v0.2.13) (2024-09-14)
38 |
39 | ### Changed
40 |
41 | * Use ReqAthena v0.2, which is more efficient and directly supports Explorer ([#76](https://github.com/livebook-dev/kino_db/pull/76))
42 |
43 | ## [v0.2.12](https://github.com/livebook-dev/kino_db/tree/v0.2.12) (2024-09-06)
44 |
45 | ### Changed
46 |
47 | * Allow Explorer 0.7+
48 |
49 | ## [v0.2.11](https://github.com/livebook-dev/kino_db/tree/v0.2.11) (2024-09-04)
50 |
51 | ### Changed
52 |
53 | * Allow Kino 0.13+
54 |
55 | ## [v0.2.10](https://github.com/livebook-dev/kino_db/tree/v0.2.10) (2024-08-15)
56 |
57 | ### Fixed
58 |
59 | * SQL cell to keep the selected connection when not defined ([#75](https://github.com/livebook-dev/kino_db/pull/75))
60 |
61 | ## [v0.2.9](https://github.com/livebook-dev/kino_db/tree/v0.2.9) (2024-07-26)
62 |
63 | ### Fixed
64 |
65 | * SQL cell to ignore interpolation and escapes in the generated query string ([#74](https://github.com/livebook-dev/kino_db/pull/74))
66 |
67 | ## [v0.2.8](https://github.com/livebook-dev/kino_db/tree/v0.2.8) (2024-06-17)
68 |
69 | ### Added
70 |
71 | * Support for DuckDB via ADBC ([#71](https://github.com/livebook-dev/kino_db/pull/71))
72 | * SSL Support for Postgres/MySQL ([#73](https://github.com/livebook-dev/kino_db/pull/73))
73 | * Added an option to specify a custom CA certificates file ([#73](https://github.com/livebook-dev/kino_db/pull/73))
74 |
75 | ## [v0.2.7](https://github.com/livebook-dev/kino_db/tree/v0.2.7) (2024-03-31)
76 |
77 | ### Changed
78 |
79 | * Allow adbc 0.3+
80 |
81 | ## [v0.2.6](https://github.com/livebook-dev/kino_db/tree/v0.2.6) (2024-02-08)
82 |
83 | ### Changed
84 |
85 | * Allow Explorer 0.8.0+
86 |
87 | ## [v0.2.5](https://github.com/livebook-dev/kino_db/tree/v0.2.5) (2023-10-18)
88 |
89 | ### Changed
90 |
91 | * Allow adbc 0.2.0+
92 |
93 | ## [v0.2.4](https://github.com/livebook-dev/kino_db/tree/v0.2.4) (2023-09-13)
94 |
95 | ### Changed
96 |
97 | * Correct SSQL usage within SQL Server
98 |
99 | ## [v0.2.3](https://github.com/livebook-dev/kino_db/tree/v0.2.3) (2023-09-02)
100 |
101 | ### Added
102 |
103 | * SQL Server integration ([#65](https://github.com/livebook-dev/kino_db/pull/65))
104 |
105 | ## [v0.2.2](https://github.com/livebook-dev/kino_db/tree/v0.2.2) (2023-08-31)
106 |
107 | ### Added
108 |
109 | * SSL support for Connection cell ([#60](https://github.com/livebook-dev/kino_db/pull/60))
110 | * Snowflake integration ([#61](https://github.com/livebook-dev/kino_db/pull/61))
111 |
112 | ### Changed
113 |
114 | * Settings as a dynamic header ([#62](https://github.com/livebook-dev/kino_db/pull/62))
115 |
116 | ### Fixed
117 |
118 | * Restores ipv6 config from attrs ([#58](https://github.com/livebook-dev/kino_db/pull/58))
119 |
120 | ## [v0.2.1](https://github.com/livebook-dev/kino_db/tree/v0.2.1) (2022-12-05)
121 |
122 | ### Changed
123 |
124 | * Relaxed requirement on Kino to `~> 0.7`
125 |
126 | ### Fixed
127 |
128 | * SQL cell error when there is no default connection ([#52](https://github.com/livebook-dev/kino_db/pull/52))
129 |
130 | ## [v0.2.0](https://github.com/livebook-dev/kino_db/tree/v0.2.0) (2022-10-07)
131 |
132 | ### Added
133 |
134 | * Integration with Livebook secrets for passwords and secret keys ([#32](https://github.com/livebook-dev/kino_db/pull/32) and [#43](https://github.com/livebook-dev/kino_db/pull/43))
135 |
136 | ### Changed
137 |
138 | * Made IPv6 connection an opt-in ([#46](https://github.com/livebook-dev/kino_db/pull/46))
139 |
140 | ## [v0.1.3](https://github.com/livebook-dev/kino_db/tree/v0.1.3) (2022-07-14)
141 |
142 | ### Added
143 |
144 | * Support AWS Athena new features ([#27](https://github.com/livebook-dev/kino_db/pull/27))
145 | * Support for IPv6 address ([#26](https://github.com/livebook-dev/kino_db/pull/26))
146 |
147 | ### Fixed
148 |
149 | * Scan binding for `Req.Request` connections and Req plugins versions ([#20](https://github.com/livebook-dev/kino_db/pull/20))
150 |
151 | ## [v0.1.2](https://github.com/livebook-dev/kino_db/tree/v0.1.2) (2022-06-30)
152 |
153 | ### Added
154 |
155 | * Color required inputs and block source code generation when they're empty ([#20](https://github.com/livebook-dev/kino_db/pull/20))
156 | * Support for AWS Athena ([#15](https://github.com/livebook-dev/kino_db/pull/15))
157 | * Support for Google BigQuery ([#7](https://github.com/livebook-dev/kino_db/pull/7), [#18](https://github.com/livebook-dev/kino_db/pull/18) and [#19](https://github.com/livebook-dev/kino_db/pull/19))
158 | * Support for SQLite ([#2](https://github.com/livebook-dev/kino_db/pull/2))
159 | * Warning when there's no available connection ([#11](https://github.com/livebook-dev/kino_db/pull/11))
160 |
161 | ## [v0.1.1](https://github.com/livebook-dev/kino_db/tree/v0.1.1) (2022-05-03)
162 |
163 | ### Fixed
164 |
165 | * Smart cells source synchronization before evaluation ([#3](https://github.com/livebook-dev/kino_db/pull/3))
166 |
167 | ## [v0.1.0](https://github.com/livebook-dev/kino_db/tree/v0.1.0) (2022-04-28)
168 |
169 | Initial release.
170 |
--------------------------------------------------------------------------------
/lib/assets/sql_cell/main.css:
--------------------------------------------------------------------------------
/* Root container: declares the design-token custom properties used by the
   rules below. --gray-700 (.box) and --gray-900 (.icon-button:hover,
   .box pre code, .box.box-warning) are referenced later in this file but were
   never declared here, leaving those var() lookups unresolved; they are added
   with the values used by the sibling connection_cell stylesheet / Livebook
   palette. */
.app {
  font-family: "Inter";

  box-sizing: border-box;

  --gray-50: #f8fafc;
  --gray-100: #f0f5f9;
  --gray-200: #e1e8f0;
  --gray-300: #cad5e0;
  --gray-400: #91a4b7;
  --gray-500: #61758a;
  --gray-600: #445668;
  --gray-700: #304254;
  --gray-800: #1c2a3a;
  --gray-900: #0d1829;

  --yellow-100: #fff7ec;
  --yellow-600: #ffa83f;

  --blue-100: #ecf0ff;
  --blue-600: #3e64ff;
}
21 |
22 | input,
23 | select,
24 | textarea,
25 | button {
26 | font-family: inherit;
27 | }
28 |
29 | .header {
30 | display: flex;
31 | flex-wrap: wrap;
32 | align-items: stretch;
33 | justify-content: flex-start;
34 | background-color: var(--blue-100);
35 | padding: 8px 16px;
36 | border-left: solid 1px var(--gray-300);
37 | border-top: solid 1px var(--gray-300);
38 | border-right: solid 1px var(--gray-300);
39 | border-bottom: solid 1px var(--gray-200);
40 | border-radius: 0.5rem 0.5rem 0 0;
41 | gap: 16px;
42 | }
43 |
44 | .input {
45 | padding: 8px 12px;
46 | background-color: var(--gray-50);
47 | font-size: 0.875rem;
48 | border: 1px solid var(--gray-200);
49 | border-radius: 0.5rem;
50 | color: var(--gray-600);
51 | }
52 |
53 | .input::placeholder {
54 | color: var(--gray-400);
55 | }
56 |
57 | .input:focus {
58 | outline: none;
59 | }
60 |
61 | .input--md {
62 | width: auto;
63 | min-width: 150px;
64 | }
65 |
66 | .input--xs {
67 | width: auto;
68 | max-width: 100px;
69 | }
70 |
71 | .input--text {
72 | max-width: 50%;
73 | }
74 |
75 | .input-label {
76 | display: block;
77 | margin-bottom: 2px;
78 | font-size: 0.875rem;
79 | color: var(--gray-800);
80 | font-weight: 500;
81 | }
82 |
83 | .inline-input-label {
84 | display: block;
85 | margin-bottom: 2px;
86 | color: var(--gray-600);
87 | font-weight: 500;
88 | padding-right: 6px;
89 | font-size: 0.875rem;
90 | text-transform: uppercase;
91 | position: relative;
92 | top: 2px;
93 | }
94 |
95 | .input-container {
96 | flex-grow: 1;
97 | display: flex;
98 | align-items: center;
99 | }
100 |
101 | .field {
102 | display: flex;
103 | flex-direction: column;
104 | }
105 |
106 | .inline-field {
107 | display: flex;
108 | flex-direction: row;
109 | align-items: center;
110 | }
111 |
112 | .nonexistent {
113 | opacity: 0.6;
114 | }
115 |
116 | .section {
117 | border: solid 1px var(--gray-300);
118 | border-top: 0;
119 | border-bottom: 0;
120 | }
121 |
122 | .help-box {
123 | padding: 16px;
124 | font-weight: 500;
125 | font-size: 0.875rem;
126 | background-color: var(--gray-100);
127 | color: var(--gray-500);
128 | }
129 |
130 | .settings-box {
131 | padding: 16px;
132 | }
133 |
134 | .hidden {
135 | display: none;
136 | }
137 |
138 | .icon-button {
139 | background: none;
140 | border: none;
141 | cursor: pointer;
142 | padding: 4px;
143 | display: flex;
144 | align-items: center;
145 | justify-content: center;
146 | border-radius: 100%;
147 | color: var(--gray-500);
148 | line-height: 1;
149 | }
150 |
151 | .icon-button:hover {
152 | color: var(--gray-900);
153 | }
154 |
155 | .icon-button:focus {
156 | outline: none;
157 | }
158 |
159 | .icon-button:disabled {
160 | color: var(--gray-300);
161 | cursor: default;
162 | }
163 |
164 | .ri {
165 | font-size: 1.25rem;
166 | vertical-align: middle;
167 | line-height: 1;
168 | }
169 |
170 | .grow {
171 | flex-grow: 1;
172 | }
173 |
174 | select.input {
175 | appearance: none;
176 | background-image: url("data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTIiIGhlaWdodD0iOCIgdmlld0JveD0iMCAwIDEyIDgiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CjxwYXRoIGQ9Ik01Ljk5OTg5IDQuOTc2NzFMMTAuMTI0OSAwLjg1MTcwOEwxMS4zMDMyIDIuMDMwMDRMNS45OTk4OSA3LjMzMzM3TDAuNjk2NTU1IDIuMDMwMDRMMS44NzQ4OSAwLjg1MTcwOEw1Ljk5OTg5IDQuOTc2NzFaIiBmaWxsPSIjNjE3NThBIi8+Cjwvc3ZnPgo=");
177 | background-repeat: no-repeat;
178 | background-position: center right 10px;
179 | background-size: 10px;
180 | padding-right: 28px;
181 | }
182 |
183 | .info-box {
184 | margin-bottom: 24px;
185 | padding: 16px;
186 | border-radius: 0.5rem;
187 | font-weight: 500;
188 | font-size: 0.875rem;
189 | background-color: var(--gray-100);
190 | color: var(--gray-500);
191 | }
192 |
193 | .info-box p {
194 | margin: 0;
195 | padding: 0.3em 0;
196 | }
197 |
198 | .info-box p:first-child {
199 | padding-top: 0;
200 | }
201 |
202 | .box {
203 | margin-bottom: 24px;
204 | padding: 16px;
205 | border-radius: 0.5rem;
206 | font-size: 0.875rem;
207 | color: var(--gray-700);
208 | border: solid 1px var(--gray-300);
209 | }
210 |
211 | .box > *:not(:first-child) {
212 | margin-top: 1rem;
213 | }
214 |
215 | .box pre code {
216 | font-family: "JetBrains Mono", monospace;
217 | font-size: 14px;
218 | color: var(--gray-900);
219 | }
220 |
221 | .box.box-warning {
222 | color: var(--gray-900);
223 | background-color: var(--yellow-100);
224 | border-color: var(--yellow-600);
225 | }
226 |
227 | .strong {
228 | color: var(--gray-600);
229 | padding-left: 0;
230 | }
231 |
232 | .row {
233 | display: flex;
234 | align-items: center;
235 | padding: 8px 16px;
236 | gap: 8px;
237 | }
238 |
239 | .help-box .row {
240 | display: flex;
241 | align-items: stretch;
242 | justify-content: flex-start;
243 | padding: 0px;
244 | gap: 16px;
245 | }
246 |
247 | @media only screen and (max-width: 750px) {
248 | .mixed-row .field {
249 | max-width: 32%;
250 | }
251 | }
252 |
253 | /* Switch */
254 |
255 | .switch-button {
256 | display: inline-block;
257 | position: relative;
258 | width: 56px;
259 | height: 28px;
260 | user-select: none;
261 | }
262 |
263 | .switch-button[disabled] {
264 | pointer-events: none;
265 | opacity: 0.5;
266 | }
267 |
268 | .switch-button-checkbox {
269 | outline: none;
270 | appearance: none;
271 | position: absolute;
272 | display: block;
273 | width: 28px;
274 | height: 28px;
275 | margin: 0;
276 | border-radius: 9999px;
277 | background-color: var(--gray-400);
278 | border: 5px solid var(--gray-100);
279 | cursor: pointer;
280 | transition-property: all;
281 | transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1);
282 | transition-duration: 300ms;
283 | }
284 |
285 | .switch-button-bg {
286 | display: block;
287 | height: 100%;
288 | width: 100%;
289 | border-radius: 9999px;
290 | background-color: var(--gray-100);
291 | cursor: pointer;
292 | transition-property: all;
293 | transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1);
294 | transition-duration: 300ms;
295 | }
296 |
297 | .switch-button-checkbox:checked {
298 | background: white;
299 | border-color: var(--blue-600);
300 | transform: translateX(100%);
301 | }
302 |
303 | .switch-button-checkbox:checked + .switch-button-bg {
304 | background-color: var(--blue-600);
305 | }
306 |
--------------------------------------------------------------------------------
/lib/assets/connection_cell/main.css:
--------------------------------------------------------------------------------
/* Root container: declares the design-token custom properties used by the
   rules below. --gray-700 is referenced by .box later in this file but was
   never declared, leaving that var() lookup unresolved; it is added with the
   Livebook palette value. */
.app {
  font-family: "Inter";

  box-sizing: border-box;

  --gray-50: #f8fafc;
  --gray-100: #f0f5f9;
  --gray-200: #e1e8f0;
  --gray-300: #cad5e0;
  --gray-400: #91a4b7;
  --gray-500: #61758a;
  --gray-600: #445668;
  --gray-700: #304254;
  --gray-800: #1c2a3a;
  --gray-900: #0d1829;

  --blue-100: #ecf0ff;
  --blue-600: #3e64ff;

  --yellow-100: #fff7ec;
  --yellow-600: #ffa83f;

  --red-300: #f1a3a6;
}
24 |
25 | input,
26 | select,
27 | textarea,
28 | button {
29 | font-family: inherit;
30 | }
31 |
32 | p,
33 | pre {
34 | margin: 0;
35 | }
36 |
37 | .container {
38 | border: solid 1px var(--gray-300);
39 | border-radius: 0.5rem;
40 | background-color: rgba(248, 250, 252, 0.3);
41 | padding-bottom: 8px;
42 | }
43 |
44 | .row {
45 | display: flex;
46 | flex-wrap: wrap;
47 | padding: 8px 16px;
48 | gap: 8px;
49 | }
50 |
51 | .draggable {
52 | width: 100%;
53 | justify-content: center;
54 | text-align: center;
55 | background-color: var(--gray-50);
56 | font-size: 0.875rem;
57 | border: 1px solid var(--gray-200);
58 | border-radius: 0.5rem;
59 | color: var(--gray-400);
60 | padding: 10px;
61 | }
62 |
63 | .header {
64 | display: flex;
65 | justify-content: flex-start;
66 | background-color: var(--blue-100);
67 | padding: 8px 16px;
68 | margin-bottom: 12px;
69 | border-radius: 0.5rem 0.5rem 0 0;
70 | border-bottom: solid 1px var(--gray-200);
71 | gap: 16px;
72 | }
73 |
74 | .input {
75 | padding: 8px 12px;
76 | background-color: var(--gray-50);
77 | font-size: 0.875rem;
78 | border: 1px solid var(--gray-200);
79 | border-radius: 0.5rem;
80 | color: var(--gray-600);
81 | }
82 |
83 | input[type="file"] {
84 | display: none;
85 | }
86 |
87 | input[type="number"] {
88 | appearance: textfield;
89 | }
90 |
91 | input[required].empty {
92 | border: 1px solid var(--red-300);
93 | }
94 |
95 | .input::placeholder {
96 | color: var(--gray-400);
97 | }
98 |
99 | .input:focus {
100 | outline: none;
101 | border: 1px solid var(--gray-300);
102 | }
103 |
104 | .input--sm {
105 | width: auto;
106 | min-width: 300px;
107 | }
108 |
109 | .input--xs {
110 | width: auto;
111 | min-width: 150px;
112 | }
113 |
114 | .input--text {
115 | max-width: 50%;
116 | }
117 |
118 | .input-label {
119 | display: block;
120 | margin-bottom: 2px;
121 | font-size: 0.875rem;
122 | color: var(--gray-800);
123 | font-weight: 500;
124 | }
125 |
126 | .inline-input-label {
127 | display: block;
128 | margin-bottom: 2px;
129 | color: var(--gray-600);
130 | font-weight: 500;
131 | padding-right: 6px;
132 | font-size: 0.875rem;
133 | text-transform: uppercase;
134 | }
135 |
136 | .input-container {
137 | flex-grow: 1;
138 | display: flex;
139 | align-items: center;
140 | }
141 |
142 | .field {
143 | display: flex;
144 | flex-direction: column;
145 | }
146 |
147 | .inline-field {
148 | display: flex;
149 | flex-direction: row;
150 | align-items: baseline;
151 | }
152 |
153 | .grow {
154 | flex: 1;
155 | }
156 |
157 | .help-box {
158 | padding: 8px 16px;
159 | font-weight: 500;
160 | color: var(--gray-500);
161 | display: inherit;
162 | }
163 |
164 | .help-box a {
165 | color: var(--gray-500);
166 | }
167 |
168 | .box {
169 | margin-bottom: 24px;
170 | padding: 16px;
171 | border-radius: 0.5rem;
172 | font-size: 0.875rem;
173 | color: var(--gray-700);
174 | border: solid 1px var(--gray-300);
175 | }
176 |
177 | .box > *:not(:first-child) {
178 | margin-top: 1rem;
179 | }
180 |
181 | .box pre code {
182 | font-family: "JetBrains Mono", monospace;
183 | font-size: 14px;
184 | color: var(--gray-900);
185 | }
186 |
187 | .box.box-warning {
188 | color: var(--gray-900);
189 | background-color: var(--yellow-100);
190 | border-color: var(--yellow-600);
191 | }
192 |
193 | .hidden {
194 | display: none;
195 | }
196 |
197 | select.input {
198 | appearance: none;
199 | background-image: url("data:image/svg+xml;base64,PHN2ZyB3aWR0aD0iMTIiIGhlaWdodD0iOCIgdmlld0JveD0iMCAwIDEyIDgiIGZpbGw9Im5vbmUiIHhtbG5zPSJodHRwOi8vd3d3LnczLm9yZy8yMDAwL3N2ZyI+CjxwYXRoIGQ9Ik01Ljk5OTg5IDQuOTc2NzFMMTAuMTI0OSAwLjg1MTcwOEwxMS4zMDMyIDIuMDMwMDRMNS45OTk4OSA3LjMzMzM3TDAuNjk2NTU1IDIuMDMwMDRMMS44NzQ4OSAwLjg1MTcwOEw1Ljk5OTg5IDQuOTc2NzFaIiBmaWxsPSIjNjE3NThBIi8+Cjwvc3ZnPgo=");
200 | background-repeat: no-repeat;
201 | background-position: center right 10px;
202 | background-size: 10px;
203 | padding-right: 28px;
204 | }
205 |
206 | input::-webkit-outer-spin-button,
207 | input::-webkit-inner-spin-button {
208 | -webkit-appearance: none;
209 | margin: 0;
210 | }
211 |
212 | .unavailable {
213 | color: rgba(0, 0, 0, 0.5);
214 | }
215 |
216 | .button--sm {
217 | margin: 0;
218 | padding: 5px;
219 | background: transparent;
220 | position: relative;
221 | top: 14px;
222 | }
223 |
224 | .button--sm:hover {
225 | background: transparent;
226 | cursor: pointer;
227 | }
228 |
229 | .button-svg {
230 | transition-property: all;
231 | transition-duration: 200ms;
232 | color: var(--gray-600);
233 | }
234 |
235 | .hidden-checkbox-input {
236 | display: none;
237 | }
238 |
239 | .input-icon {
240 | border: 1px solid var(--gray-200);
241 | outline: none;
242 | padding: 8px 12px 8px 42px;
243 | }
244 |
245 | .input-icon:hover {
246 | cursor: pointer;
247 | }
248 |
249 | .input-icon.unavailable {
250 | border-color: var(--red-300);
251 | }
252 |
253 | .input-icon-container {
254 | position: relative;
255 | }
256 |
257 | .icon-container {
258 | position: absolute;
259 | top: 20px;
260 | padding: 5px 6px;
261 | border-right: 1px solid var(--gray-200);
262 | width: 22px;
263 | height: 23px;
264 | background-color: var(--gray-200);
265 | border-radius: 0.5rem;
266 | border-top-right-radius: 0;
267 | border-bottom-right-radius: 0;
268 | margin-left: 1px;
269 | }
270 |
271 | .icon-container:hover {
272 | cursor: pointer;
273 | background-color: var(--gray-300);
274 | }
275 |
276 | .input-icon-text {
277 | border: 1px solid var(--gray-200);
278 | outline: none;
279 | padding: 8px 12px 8px 42px;
280 | }
281 |
282 | .hidden-checkbox:hover {
283 | cursor: pointer;
284 | }
285 |
286 | /* Switch */
287 |
288 | .switch-button {
289 | display: inline-block;
290 | position: relative;
291 | width: 56px;
292 | height: 28px;
293 | user-select: none;
294 | }
295 |
296 | .switch-button[disabled] {
297 | pointer-events: none;
298 | opacity: 0.5;
299 | }
300 |
301 | .switch-button-checkbox {
302 | outline: none;
303 | appearance: none;
304 | position: absolute;
305 | display: block;
306 | width: 28px;
307 | height: 28px;
308 | margin: 0;
309 | border-radius: 9999px;
310 | background-color: white;
311 | border: 5px solid var(--gray-200);
312 | cursor: pointer;
313 | transition-property: all;
314 | transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1);
315 | transition-duration: 300ms;
316 | }
317 |
318 | .switch-button-bg {
319 | display: block;
320 | height: 100%;
321 | width: 100%;
322 | border-radius: 9999px;
323 | background-color: var(--gray-200);
324 | cursor: pointer;
325 | transition-property: all;
326 | transition-timing-function: cubic-bezier(0.4, 0, 0.2, 1);
327 | transition-duration: 300ms;
328 | }
329 |
330 | .switch-button-checkbox:checked {
331 | background: white;
332 | border-color: var(--blue-600);
333 | transform: translateX(100%);
334 | }
335 |
336 | .switch-button-checkbox:checked + .switch-button-bg {
337 | background-color: var(--blue-600);
338 | }
339 |
340 | /* Fix icon position - Safari 11+ */
341 | @media not all and (min-resolution: 0.001dpcm) {
342 | @supports (-webkit-appearance: none) and (stroke-color: transparent) {
343 | .icon-container {
344 | top: 22px;
345 | }
346 | }
347 | }
348 |
349 | /* Fix icon border - Firefox */
350 | @-moz-document url-prefix() {
351 | .icon-container {
352 | height: 24px;
353 | }
354 | }
355 |
--------------------------------------------------------------------------------
/mix.lock:
--------------------------------------------------------------------------------
1 | %{
2 | "adbc": {:hex, :adbc, "0.8.0", "3c8a264b436312795a3e6f5c833fd752d787877f03b5c048f12dd7434c2a9c56", [:make, :mix], [{:cc_precompiler, "~> 0.1.8 or ~> 0.2", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:decimal, "~> 2.1", [hex: :decimal, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.8", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:table, "~> 0.1.2", [hex: :table, repo: "hexpm", optional: false]}], "hexpm", "924a6714b2adfea77087a95d270b4c05ae58bbf5da15b73eb170a3242df2b5e0"},
3 | "cc_precompiler": {:hex, :cc_precompiler, "0.1.11", "8c844d0b9fb98a3edea067f94f616b3f6b29b959b6b3bf25fee94ffe34364768", [:mix], [{:elixir_make, "~> 0.7", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "3427232caf0835f94680e5bcf082408a70b48ad68a5f5c0b02a3bea9f3a075b9"},
4 | "db_connection": {:hex, :db_connection, "2.6.0", "77d835c472b5b67fc4f29556dee74bf511bbafecdcaf98c27d27fa5918152086", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "c2f992d15725e721ec7fbc1189d4ecdb8afef76648c746a8e1cad35e3b8a35f3"},
5 | "decimal": {:hex, :decimal, "2.3.0", "3ad6255aa77b4a3c4f818171b12d237500e63525c2fd056699967a3e7ea20f62", [:mix], [], "hexpm", "a4d66355cb29cb47c3cf30e71329e58361cfcb37c34235ef3bf1d7bf3773aeac"},
6 | "earmark_parser": {:hex, :earmark_parser, "1.4.44", "f20830dd6b5c77afe2b063777ddbbff09f9759396500cdbe7523efd58d7a339c", [:mix], [], "hexpm", "4778ac752b4701a5599215f7030989c989ffdc4f6df457c5f36938cc2d2a2750"},
7 | "elixir_make": {:hex, :elixir_make, "0.9.0", "6484b3cd8c0cee58f09f05ecaf1a140a8c97670671a6a0e7ab4dc326c3109726", [:mix], [], "hexpm", "db23d4fd8b757462ad02f8aa73431a426fe6671c80b200d9710caf3d1dd0ffdb"},
8 | "ex_doc": {:hex, :ex_doc, "0.39.1", "e19d356a1ba1e8f8cfc79ce1c3f83884b6abfcb79329d435d4bbb3e97ccc286e", [:mix], [{:earmark_parser, "~> 1.4.44", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.0", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14 or ~> 1.0", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1 or ~> 1.0", [hex: :makeup_erlang, repo: "hexpm", optional: false]}, {:makeup_html, ">= 0.1.0", [hex: :makeup_html, repo: "hexpm", optional: true]}], "hexpm", "8abf0ed3e3ca87c0847dfc4168ceab5bedfe881692f1b7c45f4a11b232806865"},
9 | "exqlite": {:hex, :exqlite, "0.20.0", "99b711eb1a3309b380ff54901d3d7db8e7afaf4b68a34398a69e1fa1b9b2054e", [:make, :mix], [{:cc_precompiler, "~> 0.1", [hex: :cc_precompiler, repo: "hexpm", optional: false]}, {:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:elixir_make, "~> 0.8", [hex: :elixir_make, repo: "hexpm", optional: false]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "385ed37b8317101b7f9b58333910798ebe395e77ee6ca261be74a1a06b3d61f6"},
10 | "finch": {:hex, :finch, "0.20.0", "5330aefb6b010f424dcbbc4615d914e9e3deae40095e73ab0c1bb0968933cadf", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:mint, "~> 1.6.2 or ~> 1.7", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4 or ~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 1.1", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "2658131a74d051aabfcba936093c903b8e89da9a1b63e430bee62045fa9b2ee2"},
11 | "fss": {:hex, :fss, "0.1.1", "9db2344dbbb5d555ce442ac7c2f82dd975b605b50d169314a20f08ed21e08642", [:mix], [], "hexpm", "78ad5955c7919c3764065b21144913df7515d52e228c09427a004afe9c1a16b0"},
12 | "hpax": {:hex, :hpax, "1.0.3", "ed67ef51ad4df91e75cc6a1494f851850c0bd98ebc0be6e81b026e765ee535aa", [:mix], [], "hexpm", "8eab6e1cfa8d5918c2ce4ba43588e894af35dbd8e91e6e55c817bca5847df34a"},
13 | "jason": {:hex, :jason, "1.4.4", "b9226785a9aa77b6857ca22832cffa5d5011a667207eb2a0ad56adb5db443b8a", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "c5eb0cab91f094599f94d55bc63409236a8ec69a21a67814529e8d5f6cc90b3b"},
14 | "kino": {:hex, :kino, "0.14.0", "d0e18f8bcbf81de41dbafdad9c7b9f0607f5e87c2d881cd46e567477abf0fd37", [:mix], [{:fss, "~> 0.1.0", [hex: :fss, repo: "hexpm", optional: false]}, {:nx, "~> 0.1", [hex: :nx, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}, {:table, "~> 0.1.2", [hex: :table, repo: "hexpm", optional: false]}], "hexpm", "91bcc77cc58a6ed69697d6e2bdf5f577b03f93476a26270dea66b1a770b1fc76"},
15 | "makeup": {:hex, :makeup, "1.2.1", "e90ac1c65589ef354378def3ba19d401e739ee7ee06fb47f94c687016e3713d1", [:mix], [{:nimble_parsec, "~> 1.4", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "d36484867b0bae0fea568d10131197a4c2e47056a6fbe84922bf6ba71c8d17ce"},
16 | "makeup_elixir": {:hex, :makeup_elixir, "1.0.1", "e928a4f984e795e41e3abd27bfc09f51db16ab8ba1aebdba2b3a575437efafc2", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "7284900d412a3e5cfd97fdaed4f5ed389b8f2b4cb49efc0eb3bd10e2febf9507"},
17 | "makeup_erlang": {:hex, :makeup_erlang, "1.0.2", "03e1804074b3aa64d5fad7aa64601ed0fb395337b982d9bcf04029d68d51b6a7", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "af33ff7ef368d5893e4a267933e7744e46ce3cf1f61e2dccf53a111ed3aa3727"},
18 | "mime": {:hex, :mime, "2.0.7", "b8d739037be7cd402aee1ba0306edfdef982687ee7e9859bee6198c1e7e2f128", [:mix], [], "hexpm", "6171188e399ee16023ffc5b76ce445eb6d9672e2e241d2df6050f3c771e80ccd"},
19 | "mint": {:hex, :mint, "1.7.1", "113fdb2b2f3b59e47c7955971854641c61f378549d73e829e1768de90fc1abf1", [:mix], [{:castore, "~> 0.1.0 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1 or ~> 0.2.0 or ~> 1.0", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "fceba0a4d0f24301ddee3024ae116df1c3f4bb7a563a731f45fdfeb9d39a231b"},
20 | "myxql": {:hex, :myxql, "0.7.0", "3382f139b0b0da977a8fc33c8cded125e20df2e400f8d7b7e674fa62a7e077dd", [:mix], [{:db_connection, "~> 2.4.1 or ~> 2.5", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.6 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:geo, "~> 3.4", [hex: :geo, repo: "hexpm", optional: true]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "40e4b7ad4973c8b895e86a3de04ff7a79c2cf72b9f2bddef7717afb4ab36d8c0"},
21 | "nimble_options": {:hex, :nimble_options, "1.1.1", "e3a492d54d85fc3fd7c5baf411d9d2852922f66e69476317787a7b2bb000a61b", [:mix], [], "hexpm", "821b2470ca9442c4b6984882fe9bb0389371b8ddec4d45a9504f00a66f650b44"},
22 | "nimble_parsec": {:hex, :nimble_parsec, "1.4.2", "8efba0122db06df95bfaa78f791344a89352ba04baedd3849593bfce4d0dc1c6", [:mix], [], "hexpm", "4b21398942dda052b403bbe1da991ccd03a053668d147d53fb8c4e0efe09c973"},
23 | "nimble_pool": {:hex, :nimble_pool, "1.1.0", "bf9c29fbdcba3564a8b800d1eeb5a3c58f36e1e11d7b7fb2e084a643f645f06b", [:mix], [], "hexpm", "af2e4e6b34197db81f7aad230c1118eac993acc0dae6bc83bac0126d4ae0813a"},
24 | "postgrex": {:hex, :postgrex, "0.18.0", "f34664101eaca11ff24481ed4c378492fed2ff416cd9b06c399e90f321867d7e", [:mix], [{:db_connection, "~> 2.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.5 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "a042989ba1bc1cca7383ebb9e461398e3f89f868c92ce6671feb7ef132a252d1"},
25 | "req": {:hex, :req, "0.5.15", "662020efb6ea60b9f0e0fac9be88cd7558b53fe51155a2d9899de594f9906ba9", [:mix], [{:brotli, "~> 0.3.1", [hex: :brotli, repo: "hexpm", optional: true]}, {:ezstd, "~> 1.0", [hex: :ezstd, repo: "hexpm", optional: true]}, {:finch, "~> 0.17", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}, {:mime, "~> 2.0.6 or ~> 2.1", [hex: :mime, repo: "hexpm", optional: false]}, {:nimble_csv, "~> 1.0", [hex: :nimble_csv, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: true]}], "hexpm", "a6513a35fad65467893ced9785457e91693352c70b58bbc045b47e5eb2ef0c53"},
26 | "req_athena": {:hex, :req_athena, "0.3.0", "aa063489fd9a4506f85b74b42e1b609b3525ad3c9bcd2e41f78763e14e867086", [:mix], [{:aws_credentials, "~> 0.2", [hex: :aws_credentials, repo: "hexpm", optional: true]}, {:explorer, "~> 0.9", [hex: :explorer, repo: "hexpm", optional: true]}, {:req, "~> 0.5.0", [hex: :req, repo: "hexpm", optional: false]}, {:req_s3, "~> 0.2", [hex: :req_s3, repo: "hexpm", optional: false]}], "hexpm", "646e4c659e4ad3532d8bd209651abb5f7c2db4c9e06aede1f0ee7bc04feb8b82"},
27 | "req_ch": {:hex, :req_ch, "0.1.0", "a8c8710b2fc51ff6bf43d3842bdaff8269688f62ae94250dcfa92d2d37ca4279", [:mix], [{:explorer, "~> 0.10", [hex: :explorer, repo: "hexpm", optional: true]}, {:req, "~> 0.5", [hex: :req, repo: "hexpm", optional: false]}], "hexpm", "7aad3e3f4492647b431294e1fbb915c7280af8628ec6104506029b376b2b4150"},
28 | "req_s3": {:hex, :req_s3, "0.2.3", "ede5f4c792cf39995379307733ff4593032a876f38da29d9d7ea03881b498b51", [:mix], [{:req, "~> 0.5.6", [hex: :req, repo: "hexpm", optional: false]}], "hexpm", "31b5d52490495c8aeea7e3c5cbcec82f49035e11bdaf41f0e58ab716fefe44ca"},
29 | "table": {:hex, :table, "0.1.2", "87ad1125f5b70c5dea0307aa633194083eb5182ec537efc94e96af08937e14a8", [:mix], [], "hexpm", "7e99bc7efef806315c7e65640724bf165c3061cdc5d854060f74468367065029"},
30 | "tds": {:hex, :tds, "2.3.4", "534749dd9ef61af960fcafa9cbb7186d6d7b9f92ea0133fb25da07b121c8295c", [:mix], [{:db_connection, "~> 2.0", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.9 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:table, "~> 0.1.0", [hex: :table, repo: "hexpm", optional: true]}], "hexpm", "bb9a53d4688a85fd566f342f76b50d39adfc4b410062886ef908365ead24ba3f"},
31 | "telemetry": {:hex, :telemetry, "1.3.0", "fedebbae410d715cf8e7062c96a1ef32ec22e764197f70cda73d82778d61e7a2", [:rebar3], [], "hexpm", "7015fc8919dbe63764f4b4b87a95b7c0996bd539e0d499be6ec9d7f3875b79e6"},
32 | }
33 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
--------------------------------------------------------------------------------
/lib/assets/sql_cell/main.js:
--------------------------------------------------------------------------------
1 | import * as Vue from "https://cdn.jsdelivr.net/npm/vue@3.2.26/dist/vue.esm-browser.prod.js";
2 |
3 | export function init(ctx, payload) {
4 | ctx.importCSS("main.css");
5 | ctx.importCSS(
6 | "https://fonts.googleapis.com/css2?family=Inter:wght@400;500&display=swap"
7 | );
8 | ctx.importCSS(
9 | "https://cdn.jsdelivr.net/npm/remixicon@2.5.0/fonts/remixicon.min.css"
10 | );
11 |
12 | const BaseSelect = {
13 | name: "BaseSelect",
14 |
15 | props: {
16 | label: {
17 | type: String,
18 | default: "",
19 | },
20 | selectClass: {
21 | type: String,
22 | default: "input",
23 | },
24 | modelValue: {
25 | type: String,
26 | default: "",
27 | },
28 | options: {
29 | type: Array,
30 | default: [],
31 | required: true,
32 | },
33 | required: {
34 | type: Boolean,
35 | default: false,
36 | },
37 | inline: {
38 | type: Boolean,
39 | default: false,
40 | },
41 | existent: {
42 | type: Boolean,
43 | default: false,
44 | },
45 | disabled: {
46 | type: Boolean,
47 | default: false,
48 | },
49 | },
50 |
51 | template: `
52 |
53 |
56 |
70 |
71 | `,
72 | };
73 |
74 | const BaseInput = {
75 | name: "BaseInput",
76 |
77 | props: {
78 | label: {
79 | type: String,
80 | default: "",
81 | },
82 | inputClass: {
83 | type: String,
84 | default: "input",
85 | },
86 | modelValue: {
87 | type: [String, Number],
88 | default: "",
89 | },
90 | inline: {
91 | type: Boolean,
92 | default: false,
93 | },
94 | grow: {
95 | type: Boolean,
96 | default: false,
97 | },
98 | number: {
99 | type: Boolean,
100 | default: false,
101 | },
102 | },
103 |
104 | template: `
105 |
106 |
109 |
115 |
116 | `,
117 | };
118 |
119 | const BaseSwitch = {
120 | name: "BaseSwitch",
121 |
122 | props: {
123 | label: {
124 | type: String,
125 | default: "",
126 | },
127 | modelValue: {
128 | type: Boolean,
129 | default: true,
130 | },
131 | inline: {
132 | type: Boolean,
133 | default: false,
134 | },
135 | grow: {
136 | type: Boolean,
137 | default: false,
138 | },
139 | },
140 |
141 | template: `
142 |
143 |
146 |
159 |
160 | `,
161 | };
162 |
163 | const ToggleBox = {
164 | name: "ToggleBox",
165 |
166 | props: {
167 | toggle: {
168 | type: Boolean,
169 | default: true,
170 | },
171 | },
172 |
173 | template: `
174 |
175 |
176 |
177 | `,
178 | };
179 |
180 | const app = Vue.createApp({
181 | components: {
182 | BaseSelect: BaseSelect,
183 | BaseInput: BaseInput,
184 | BaseSwitch: BaseSwitch,
185 | ToggleBox: ToggleBox,
186 | },
187 |
188 | template: `
189 |
190 |
191 |
192 | To successfully query, you need at least one database connection available.
193 | To create a database connection, you can add the Database connection smart cell.
194 |
195 |
196 |
To successfully query with {{ connectionType }}, you need to add the following dependency:
197 |
{{ missingDep }}
198 |
199 |
244 |
245 | To dynamically inject values into the query use double curly braces, like {{name}}.
246 |
247 |
248 | `,
249 |
250 | data() {
251 | return {
252 | isHelpBoxHidden: true,
253 | isSettingsBoxHidden: true,
254 | isConnectionExistent: false,
255 | isConnectionDisabled: true,
256 | payload: payload,
257 | missingDep: payload.missing_dep,
258 | availableDatabases: {
259 | postgres: "PostgreSQL",
260 | mysql: "MySQL",
261 | sqlite: "SQLite",
262 | bigquery: "Google BigQuery",
263 | athena: "AWS Athena",
264 | snowflake: "Snowflake",
265 | sqlserver: "SQL Server",
266 | duckdb: "DuckDB",
267 | clickhouse: "Clickhouse"
268 | },
269 | };
270 | },
271 |
272 | computed: {
273 | availableConnections() {
274 | const connection = this.payload.connection;
275 | const connections = this.payload.connections;
276 |
277 | const availableConnection =
278 | connection &&
279 | connections.some((conn) => conn.variable === connection.variable);
280 |
281 | if (connection === null) {
282 | this.isConnectionExistent = false;
283 | this.isConnectionDisabled = true;
284 | return [];
285 | } else if (availableConnection) {
286 | this.isConnectionExistent = true;
287 | this.isConnectionDisabled = false;
288 | return this.buildSelectConnectionOptions(connections);
289 | } else {
290 | this.isConnectionExistent = false;
291 | this.isConnectionDisabled = false;
292 | return this.buildSelectConnectionOptions([
293 | connection,
294 | ...connections,
295 | ]);
296 | }
297 | },
298 | hasCache() {
299 | return this.payload.connection?.type === 'athena';
300 | },
301 | hasTimeout() {
302 | const hasTimeout = ["postgres", "mysql", "sqlite", "sqlserver"];
303 | return hasTimeout.includes(this.payload.connection?.type);
304 | },
305 | connectionType() {
306 | return this.payload.connection?.type;
307 | }
308 | },
309 |
310 | methods: {
311 | buildSelectConnectionOptions(connections) {
312 | return connections.map((conn) => {
313 | return {
314 | label: `${conn.variable} (${this.availableDatabases[conn.type]})`,
315 | value: conn.variable,
316 | };
317 | });
318 | },
319 |
320 | handleResultVariableChange({ target: { value } }) {
321 | ctx.pushEvent("update_result_variable", value);
322 | },
323 |
324 | handleCacheQueryChange({ target: { checked } }) {
325 | ctx.pushEvent("update_cache_query", checked);
326 | },
327 |
328 | handleTimeoutChange({ target: { value } }) {
329 | ctx.pushEvent("update_timeout", value);
330 | },
331 |
332 | handleConnectionChange({ target: { value } }) {
333 | ctx.pushEvent("update_connection", value);
334 | },
335 |
336 | toggleHelpBox(_) {
337 | this.isHelpBoxHidden = !this.isHelpBoxHidden;
338 | },
339 |
340 | toggleSettingsBox(_) {
341 | this.isSettingsBoxHidden = !this.isSettingsBoxHidden;
342 | },
343 | },
344 | }).mount(ctx.root);
345 |
346 | ctx.handleEvent("update_result_variable", (variable) => {
347 | app.payload.result_variable = variable;
348 | });
349 |
350 | ctx.handleEvent("update_connection", (variable) => {
351 | const connection = app.payload.connections.find(
352 | (conn) => conn.variable === variable
353 | );
354 | app.payload.connection = connection;
355 | });
356 |
357 | ctx.handleEvent("update_cache_query", (value) => {
358 | app.payload.cache_query = value;
359 | });
360 |
361 | ctx.handleEvent("update_timeout", (timeout) => {
362 | app.payload.timeout = timeout;
363 | });
364 |
365 | ctx.handleEvent("connections", ({ connections, connection }) => {
366 | app.payload.connections = connections;
367 | app.payload.connection = connection;
368 | });
369 |
370 | ctx.handleEvent("missing_dep", ({ dep }) => {
371 | app.missingDep = dep;
372 | });
373 |
374 | ctx.handleSync(() => {
375 | // Synchronously invokes change listeners
376 | document.activeElement &&
377 | document.activeElement.dispatchEvent(
378 | new Event("change", { bubbles: true })
379 | );
380 | });
381 | }
382 |
--------------------------------------------------------------------------------
/lib/kino_db/sql_cell.ex:
--------------------------------------------------------------------------------
1 | defmodule KinoDB.SQLCell do
2 | @moduledoc false
3 |
4 | use Kino.JS, assets_path: "lib/assets/sql_cell"
5 | use Kino.JS.Live
6 | use Kino.SmartCell, name: "SQL query"
7 |
8 | @default_query "select * from table_name limit 100"
9 |
10 | @impl true
11 | def init(attrs, ctx) do
12 | connection =
13 | if conn_attrs = attrs["connection"] do
14 | %{variable: conn_attrs["variable"], type: conn_attrs["type"]}
15 | end
16 |
17 | query = attrs["query"] || @default_query
18 |
19 | ctx =
20 | assign(ctx,
21 | connections: [],
22 | connection: connection,
23 | result_variable: Kino.SmartCell.prefixed_var_name("result", attrs["result_variable"]),
24 | query: query,
25 | timeout: attrs["timeout"],
26 | cache_query: Map.get(attrs, "cache_query", true),
27 | missing_dep: missing_dep(connection)
28 | )
29 |
30 | {:ok, ctx, editor: [source: query, language: "sql"]}
31 | end
32 |
33 | @impl true
34 | def handle_connect(ctx) do
35 | payload = %{
36 | connections: ctx.assigns.connections,
37 | connection: ctx.assigns.connection,
38 | result_variable: ctx.assigns.result_variable,
39 | timeout: ctx.assigns.timeout,
40 | cache_query: ctx.assigns.cache_query,
41 | missing_dep: ctx.assigns.missing_dep
42 | }
43 |
44 | {:ok, payload, ctx}
45 | end
46 |
  @impl true
  # Keeps the server-side copy of the SQL source in sync with the cell's
  # editor as the user types.
  def handle_editor_change(source, ctx) do
    {:ok, assign(ctx, query: source)}
  end
51 |
52 | @impl true
53 | def handle_event("update_connection", variable, ctx) do
54 | connection = Enum.find(ctx.assigns.connections, &(&1.variable == variable))
55 | ctx = assign(ctx, connection: connection)
56 | missing_dep = missing_dep(connection)
57 |
58 | ctx =
59 | if missing_dep == ctx.assigns.missing_dep do
60 | ctx
61 | else
62 | broadcast_event(ctx, "missing_dep", %{"dep" => missing_dep})
63 | assign(ctx, missing_dep: missing_dep)
64 | end
65 |
66 | broadcast_event(ctx, "update_connection", connection.variable)
67 |
68 | {:noreply, ctx}
69 | end
70 |
71 | def handle_event("update_result_variable", variable, ctx) do
72 | ctx =
73 | if Kino.SmartCell.valid_variable_name?(variable) do
74 | broadcast_event(ctx, "update_result_variable", variable)
75 | assign(ctx, result_variable: variable)
76 | else
77 | broadcast_event(ctx, "update_result_variable", ctx.assigns.result_variable)
78 | ctx
79 | end
80 |
81 | {:noreply, ctx}
82 | end
83 |
84 | def handle_event("update_timeout", timeout, ctx) do
85 | timeout =
86 | case Integer.parse(timeout) do
87 | {n, ""} -> n
88 | _ -> nil
89 | end
90 |
91 | ctx = assign(ctx, timeout: timeout)
92 | broadcast_event(ctx, "update_timeout", timeout)
93 | {:noreply, ctx}
94 | end
95 |
  # Stores the "cache query" flag and mirrors it to all clients (the client
  # UI only surfaces this toggle for Athena connections).
  def handle_event("update_cache_query", cache_query?, ctx) do
    ctx = assign(ctx, cache_query: cache_query?)
    broadcast_event(ctx, "update_cache_query", cache_query?)
    {:noreply, ctx}
  end
101 |
102 | @impl true
103 | def scan_binding(pid, binding, _env) do
104 | connections =
105 | for {key, value} <- binding,
106 | is_atom(key),
107 | type = connection_type(value),
108 | do: %{variable: Atom.to_string(key), type: type}
109 |
110 | send(pid, {:connections, connections})
111 | end
112 |
113 | @impl true
114 | def handle_info({:connections, connections}, ctx) do
115 | connection = search_connection(connections, ctx.assigns.connection)
116 | missing_dep = missing_dep(connection)
117 |
118 | ctx =
119 | if missing_dep == ctx.assigns.missing_dep do
120 | ctx
121 | else
122 | broadcast_event(ctx, "missing_dep", %{"dep" => missing_dep})
123 | assign(ctx, missing_dep: missing_dep)
124 | end
125 |
126 | broadcast_event(ctx, "connections", %{
127 | "connections" => connections,
128 | "connection" => connection
129 | })
130 |
131 | {:noreply,
132 | assign(ctx,
133 | connections: connections,
134 | connection: connection
135 | )}
136 | end
137 |
138 | defp search_connection([connection | _], nil), do: connection
139 |
140 | defp search_connection(connections, connection) do
141 | Enum.find(connections, connection, &(&1.variable == connection.variable))
142 | end
143 |
144 | @compile {:no_warn_undefined, {DBConnection, :connection_module, 1}}
145 |
146 | defp connection_type(connection) when is_pid(connection) do
147 | with true <- Code.ensure_loaded?(DBConnection),
148 | {:ok, module} <- DBConnection.connection_module(connection) do
149 | case Atom.to_string(module) do
150 | "Elixir.Postgrex" <> _ -> "postgres"
151 | "Elixir.MyXQL" <> _ -> "mysql"
152 | "Elixir.Exqlite" <> _ -> "sqlite"
153 | "Elixir.Tds" <> _ -> "sqlserver"
154 | _ -> nil
155 | end
156 | else
157 | _ -> connection_type_from_adbc(connection)
158 | end
159 | end
160 |
161 | defp connection_type(connection) when is_struct(connection, Req.Request) do
162 | cond do
163 | Keyword.has_key?(connection.request_steps, :athena_run) -> "athena"
164 | Keyword.has_key?(connection.request_steps, :clickhouse_run) -> "clickhouse"
165 | true -> nil
166 | end
167 | end
168 |
169 | defp connection_type(_connection), do: nil
170 |
171 | defp connection_type_from_adbc(connection) when is_pid(connection) do
172 | with true <- Code.ensure_loaded?(Adbc),
173 | {:ok, driver} <- Adbc.Connection.get_driver(connection) do
174 | Atom.to_string(driver)
175 | else
176 | _ -> nil
177 | end
178 | end
179 |
180 | @impl true
181 | def to_attrs(ctx) do
182 | %{
183 | "connection" =>
184 | if connection = ctx.assigns.connection do
185 | %{"variable" => connection.variable, "type" => connection.type}
186 | end,
187 | "result_variable" => ctx.assigns.result_variable,
188 | "query" => ctx.assigns.query,
189 | "timeout" => ctx.assigns.timeout,
190 | "cache_query" => ctx.assigns.cache_query
191 | }
192 | end
193 |
  @impl true
  # Renders the saved attrs into the Elixir source string shown in the
  # notebook cell.
  def to_source(attrs) do
    attrs |> to_quoted() |> Kino.SmartCell.quoted_to_string()
  end
198 |
  # DBConnection-based drivers: each clause picks the driver module and the
  # positional placeholder syntax that database expects.
  defp to_quoted(%{"connection" => %{"type" => "postgres"}} = attrs) do
    # PostgreSQL placeholders: $1, $2, ...
    to_quoted(attrs, quote(do: Postgrex), fn n -> "$#{n}" end)
  end

  defp to_quoted(%{"connection" => %{"type" => "mysql"}} = attrs) do
    # MySQL placeholders are positional but unnumbered: ?
    to_quoted(attrs, quote(do: MyXQL), fn _n -> "?" end)
  end

  defp to_quoted(%{"connection" => %{"type" => "sqlite"}} = attrs) do
    # SQLite placeholders: ?1, ?2, ...
    to_quoted(attrs, quote(do: Exqlite), fn n -> "?#{n}" end)
  end

  defp to_quoted(%{"connection" => %{"type" => "sqlserver"}} = attrs) do
    # SQL Server (TDS) placeholders: @1, @2, ...
    to_quoted(attrs, quote(do: Tds), fn n -> "@#{n}" end)
  end
215 |
216 | # Explorer-based
217 | defp to_quoted(%{"connection" => %{"type" => "snowflake"}} = attrs) do
218 | to_quoted_adbc(attrs, fn n -> "?#{n}" end)
219 | end
220 |
221 | defp to_quoted(%{"connection" => %{"type" => "duckdb"}} = attrs) do
222 | to_quoted_adbc(attrs, fn n -> "?#{n}" end)
223 | end
224 |
225 | defp to_quoted(%{"connection" => %{"type" => "bigquery"}} = attrs) do
226 | to_quoted_adbc(attrs, fn _n -> "?" end)
227 | end
228 |
229 | # Req-based
230 | defp to_quoted(%{"connection" => %{"type" => "athena"}} = attrs) do
231 | to_quoted_req_query(attrs, quote(do: ReqAthena), fn _n -> "?" end)
232 | end
233 |
234 | defp to_quoted(%{"connection" => %{"type" => "clickhouse"}} = attrs) do
235 | to_quoted_req_query(attrs, quote(do: ReqCH), fn n, inner ->
236 | name =
237 | if String.match?(inner, ~r/[^a-z0-9_]/) do
238 | "param_#{n}"
239 | else
240 | inner
241 | end
242 |
243 | "{#{name}:String}"
244 | end)
245 | end
246 |
247 | defp to_quoted(_ctx) do
248 | quote do
249 | end
250 | end
251 |
  # Shared code generation for DBConnection-based clients (Postgrex, MyXQL,
  # Exqlite, Tds). Produces:
  #
  #     result = Module.query!(conn, query, params, opts?)
  #
  # where opts (e.g. timeout) are spliced in only when present.
  defp to_quoted(attrs, quoted_module, next) do
    {query, params} = parameterize(attrs["query"], attrs["connection"]["type"], next)
    opts_args = query_opts_args(attrs)

    quote do
      unquote(quoted_var(attrs["result_variable"])) =
        unquote(quoted_module).query!(
          unquote(quoted_var(attrs["connection"]["variable"])),
          unquote(quoted_query(query)),
          unquote(params),
          unquote_splicing(opts_args)
        )
    end
  end
266 |
  # Shared code generation for ADBC-backed connections (Snowflake, DuckDB,
  # BigQuery). Produces:
  #
  #     result = Adbc.Connection.query!(conn, query, params)
  defp to_quoted_adbc(attrs, next) do
    {query, params} = parameterize(attrs["query"], attrs["connection"]["type"], next)

    quote do
      unquote(quoted_var(attrs["result_variable"])) =
        Adbc.Connection.query!(
          unquote(quoted_var(attrs["connection"]["variable"])),
          unquote(quoted_query(query)),
          unquote(params)
        )
    end
  end
279 |
  # Shared code generation for Req-based clients (ReqAthena, ReqCH). The
  # response body is bound to the result variable and then rendered as a
  # Kino.DataTable, so the generated cell shows the query results.
  defp to_quoted_req_query(attrs, quoted_module, next) do
    {query, params} = parameterize(attrs["query"], attrs["connection"]["type"], next)
    opts_args = query_opts_args(attrs)
    var = quoted_var(attrs["result_variable"])

    quote do
      unquote(var) =
        unquote(quoted_module).query!(
          unquote(quoted_var(attrs["connection"]["variable"])),
          unquote(quoted_query(query)),
          unquote(params),
          unquote_splicing(opts_args)
        ).body

      Kino.DataTable.new(unquote(var))
    end
  end
297 |
298 | defp quoted_var(nil), do: nil
299 | defp quoted_var(string), do: {String.to_atom(string), [], nil}
300 |
  # Wraps the SQL string in a ~S sigil AST node so the generated source
  # shows the query verbatim (no interpolation or escape processing).
  # Multi-line queries use a heredoc delimiter, which requires the content
  # to end with a newline.
  defp quoted_query(query) do
    if String.contains?(query, "\n") do
      {:sigil_S, [delimiter: ~s["""]], [{:<<>>, [], [query <> "\n"]}, []]}
    else
      {:sigil_S, [delimiter: ~s["]], [{:<<>>, [], [query]}, []]}
    end
  end
308 |
309 | @connection_types_with_timeout ~w|postgres mysql sqlite sqlserver|
310 |
311 | defp query_opts_args(%{"connection" => %{"type" => type}, "timeout" => timeout})
312 | when timeout != nil and type in @connection_types_with_timeout,
313 | do: [[timeout: timeout * 1000]]
314 |
315 | defp query_opts_args(%{"connection" => %{"type" => "athena"}} = attrs) do
316 | [[format: :explorer] ++ if(attrs["cache_query"], do: [], else: [cache_query: false])]
317 | end
318 |
319 | defp query_opts_args(%{"connection" => %{"type" => "clickhouse"}}),
320 | do: [[format: :explorer]]
321 |
322 | defp query_opts_args(_attrs), do: []
323 |
324 | defp parameterize(query, type, next) do
325 | parameterize(query, "", [], 1, type, next)
326 | end
327 |
  # Input consumed: emit the rewritten SQL and the collected params in order.
  defp parameterize("", raw, params, _n, _type, _next) do
    {raw, Enum.reverse(params)}
  end

  # "--" line comment: copy it through verbatim so that {{...}} inside the
  # comment is never treated as a placeholder.
  defp parameterize("--" <> _ = query, raw, params, n, type, next) do
    {comment, rest} =
      case String.split(query, "\n", parts: 2) do
        [comment, rest] -> {comment <> "\n", rest}
        [comment] -> {comment, ""}
      end

    parameterize(rest, raw <> comment, params, n, type, next)
  end

  # "/* ... */" block comment: copied through verbatim as well.
  defp parameterize("/*" <> _ = query, raw, params, n, type, next) do
    {comment, rest} =
      case String.split(query, "*/", parts: 2) do
        [comment, rest] -> {comment <> "*/", rest}
        [comment] -> {comment, ""}
      end

    parameterize(rest, raw <> comment, params, n, type, next)
  end

  # "{{expr}}" placeholder: substitute the driver-specific SQL placeholder
  # and collect the quoted Elixir expression as the matching parameter.
  # On failure (missing closing "}}" or unparsable expression) the remainder
  # of the query is copied through untouched.
  defp parameterize("{{" <> rest = query, raw, params, n, type, next) do
    with [inner, rest] <- String.split(rest, "}}", parts: 2),
         sql_param <- apply_next(next, n, inner),
         {:ok, param} <- quote_param(type, inner, sql_param) do
      parameterize(rest, raw <> sql_param, [param | params], n + 1, type, next)
    else
      _ -> parameterize("", raw <> query, params, n, type, next)
    end
  end
361 |
362 | defp parameterize(<
>, raw, params, n, type, next) do
363 | parameterize(rest, <>, params, n, type, next)
364 | end
365 |
  # Invokes the placeholder-naming callback with whichever arity it supports:
  # arity 1 receives only the parameter index; arity 2 also receives the raw
  # expression text inside {{...}} (used by ClickHouse for named params).
  defp apply_next(next, n, _inner) when is_function(next, 1), do: next.(n)
  defp apply_next(next, n, inner) when is_function(next, 2), do: next.(n, inner)
368 |
  # Builds the quoted parameter value passed to the client for a single
  # {{...}} placeholder. ClickHouse expects {name, value} tuples keyed by
  # the name embedded in its "{name:String}" placeholder.
  defp quote_param("clickhouse", inner, sql_param) do
    with {:ok, inner_ast} <- Code.string_to_quoted(inner) do
      # Extract "name" back out of the "{name:String}" placeholder.
      name =
        sql_param |> String.trim_leading("{") |> String.split(":", parts: 2) |> List.first()

      {:ok,
       quote do
         {unquote(name), unquote(inner_ast)}
       end}
    end
  end

  # SQL Server (Tds) expects %Tds.Parameter{} structs named after the "@n"
  # placeholder.
  defp quote_param("sqlserver", inner, sql_param) do
    with {:ok, inner_ast} <- Code.string_to_quoted(inner) do
      {:ok,
       quote do
         %Tds.Parameter{name: unquote(sql_param), value: unquote(inner_ast)}
       end}
    end
  end

  # All other databases take the parsed expression as a positional parameter.
  defp quote_param(_type, inner, _sql_param) do
    Code.string_to_quoted(inner)
  end
393 |
394 | defp missing_dep(_), do: nil
395 | end
396 |
--------------------------------------------------------------------------------
/test/kino_db/connection_cell_test.exs:
--------------------------------------------------------------------------------
1 | defmodule KinoDB.ConnectionCellTest do
2 | use ExUnit.Case, async: true
3 |
4 | import Kino.Test
5 |
6 | alias KinoDB.ConnectionCell
7 |
8 | setup :configure_livebook_bridge
9 |
10 | @attrs %{
11 | "variable" => "db",
12 | "type" => "postgres",
13 | "hostname" => "localhost",
14 | "port" => 4444,
15 | "use_ipv6" => false,
16 | "use_ssl" => false,
17 | "cacertfile" => "",
18 | "auth_type" => "auth_jwt",
19 | "username" => "admin",
20 | "password" => "pass",
21 | "use_password_secret" => false,
22 | "password_secret" => "",
23 | "database" => "default",
24 | "database_path" => "/path/to/sqlite3.db",
25 | "project_id" => "foo",
26 | "credentials_json" => "",
27 | "default_dataset_id" => "",
28 | "access_key_id" => "id",
29 | "secret_access_key" => "secret",
30 | "use_secret_access_key_secret" => false,
31 | "secret_access_key_secret" => "",
32 | "private_key" => "-----BEGIN PRIVATE KEY-----...",
33 | "use_private_key_secret" => false,
34 | "private_key_secret" => "",
35 | "use_encrypted_private_key" => false,
36 | "private_key_passphrase" => "passphrase",
37 | "use_private_key_passphrase_secret" => false,
38 | "private_key_passphrase_secret" => "",
39 | "token" => "token",
40 | "region" => "region",
41 | "output_location" => "s3://my-bucket",
42 | "workgroup" => "primary",
43 | "account" => "account",
44 | "schema" => "schema",
45 | "warehouse" => ""
46 | }
47 |
48 | @empty_required_fields %{
49 | "variable" => "db",
50 | "type" => "postgres",
51 | "hostname" => "",
52 | "port" => nil,
53 | "database_path" => "",
54 | "project_id" => "",
55 | "access_key_id" => "",
56 | "secret_access_key" => "",
57 | "region" => "",
58 | "account" => "",
59 | "username" => "",
60 | "auth_type" => ""
61 | }
62 |
  describe "initialization" do
    test "returns default source when started with missing attrs" do
      {_kino, source} = start_smart_cell!(ConnectionCell, %{"variable" => "conn"})

      # With only the variable given, the cell defaults to a Postgres
      # connection with default host/port and empty credentials.
      assert source ==
               """
               opts = [hostname: "localhost", port: 5432, username: "", password: "", database: ""]
               {:ok, conn} = Kino.start_child({Postgrex, opts})\
               """
    end
  end
74 |
  describe "code generation" do
    test "restores source code from attrs" do
      # Baseline: full Postgres attrs render a plain Postgrex child spec.
      assert ConnectionCell.to_source(@attrs) === ~s'''
             opts = [
               hostname: "localhost",
               port: 4444,
               username: "admin",
               password: "pass",
               database: "default"
             ]

             {:ok, db} = Kino.start_child({Postgrex, opts})\
             '''

      # IPv6 toggles :inet6 socket options.
      attrs = Map.put(@attrs, "use_ipv6", true)

      assert ConnectionCell.to_source(attrs) === ~s'''
             opts = [
               hostname: "localhost",
               port: 4444,
               username: "admin",
               password: "pass",
               database: "default",
               socket_options: [:inet6]
             ]

             {:ok, db} = Kino.start_child({Postgrex, opts})\
             '''

      # SSL without a cacertfile falls back to the OS certificate store.
      attrs = Map.put(@attrs, "use_ssl", true)

      assert ConnectionCell.to_source(attrs) === ~s'''
             opts = [
               hostname: "localhost",
               port: 4444,
               username: "admin",
               password: "pass",
               database: "default",
               ssl: [cacerts: :public_key.cacerts_get()]
             ]

             {:ok, db} = Kino.start_child({Postgrex, opts})\
             '''

      # SSL with an explicit cacertfile uses that file instead.
      attrs = Map.merge(@attrs, %{"use_ssl" => true, "cacertfile" => "/path/to/cacertfile"})

      assert ConnectionCell.to_source(attrs) === ~s'''
             opts = [
               hostname: "localhost",
               port: 4444,
               username: "admin",
               password: "pass",
               database: "default",
               ssl: [cacertfile: "/path/to/cacertfile"]
             ]

             {:ok, db} = Kino.start_child({Postgrex, opts})\
             '''

      # A password secret is read from the LB_-prefixed env var.
      attrs = Map.delete(@attrs, "password") |> Map.merge(%{"password_secret" => "PASS"})

      assert ConnectionCell.to_source(attrs) === ~s'''
             opts = [
               hostname: "localhost",
               port: 4444,
               username: "admin",
               password: System.fetch_env!("LB_PASS"),
               database: "default"
             ]

             {:ok, db} = Kino.start_child({Postgrex, opts})\
             '''

      assert ConnectionCell.to_source(put_in(@attrs["type"], "mysql")) == ~s'''
             opts = [
               hostname: "localhost",
               port: 4444,
               username: "admin",
               password: "pass",
               database: "default"
             ]

             {:ok, db} = Kino.start_child({MyXQL, opts})\
             '''

      assert ConnectionCell.to_source(put_in(@attrs["type"], "sqlite")) == ~s'''
             opts = [database: "/path/to/sqlite3.db"]
             {:ok, db} = Kino.start_child({Exqlite, opts})\
             '''

      assert ConnectionCell.to_source(put_in(@attrs["type"], "athena")) == ~s'''
             db =
               ReqAthena.new(
                 access_key_id: "id",
                 database: "default",
                 output_location: "s3://my-bucket",
                 region: "region",
                 secret_access_key: "secret",
                 token: "token",
                 workgroup: "primary",
                 http_errors: :raise
               )

             :ok\
             '''

      # Athena with the secret access key stored as a Livebook secret.
      attrs =
        Map.delete(@attrs, "secret_access_key")
        |> Map.merge(%{"type" => "athena", "secret_access_key_secret" => "ATHENA_KEY"})

      assert ConnectionCell.to_source(attrs) == ~s'''
             db =
               ReqAthena.new(
                 access_key_id: "id",
                 database: "default",
                 output_location: "s3://my-bucket",
                 region: "region",
                 secret_access_key: System.fetch_env!("LB_ATHENA_KEY"),
                 token: "token",
                 workgroup: "primary",
                 http_errors: :raise
               )

             :ok\
             '''

      # Snowflake with username/password authentication.
      attrs =
        Map.delete(@attrs, "password_secret")
        |> Map.merge(%{"variable" => "conn", "auth_type" => "auth_snowflake"})

      assert ConnectionCell.to_source(put_in(attrs["type"], "snowflake")) == ~s'''
             :ok = Adbc.download_driver!(:snowflake)

             {:ok, db} =
               Kino.start_child(
                 {Adbc.Database,
                  driver: :snowflake,
                  username: "admin",
                  "adbc.snowflake.sql.account": "account",
                  "adbc.snowflake.sql.db": "default",
                  "adbc.snowflake.sql.schema": "schema",
                  "adbc.snowflake.sql.warehouse": "",
                  "adbc.snowflake.sql.auth_type": "auth_snowflake",
                  password: "pass"}
               )

             {:ok, conn} = Kino.start_child({Adbc.Connection, database: db})\
             '''

      # Snowflake with JWT (unencrypted private key) authentication.
      attrs =
        Map.delete(@attrs, "private_key_secret")
        |> Map.merge(%{"variable" => "conn", "auth_type" => "auth_jwt"})

      assert ConnectionCell.to_source(put_in(attrs["type"], "snowflake")) == ~s'''
             :ok = Adbc.download_driver!(:snowflake)

             {:ok, db} =
               Kino.start_child(
                 {Adbc.Database,
                  driver: :snowflake,
                  username: "admin",
                  "adbc.snowflake.sql.account": "account",
                  "adbc.snowflake.sql.db": "default",
                  "adbc.snowflake.sql.schema": "schema",
                  "adbc.snowflake.sql.warehouse": "",
                  "adbc.snowflake.sql.auth_type": "auth_jwt",
                  "adbc.snowflake.sql.client_option.jwt_private_key_pkcs8_value":
                    "-----BEGIN PRIVATE KEY-----..."}
               )

             {:ok, conn} = Kino.start_child({Adbc.Connection, database: db})\
             '''

      # Snowflake with an encrypted private key adds the passphrase option.
      attrs =
        Map.drop(@attrs, ["private_key_secret", "private_key_passphrase_secret"])
        |> Map.merge(%{
          "variable" => "conn",
          "auth_type" => "auth_jwt",
          "use_encrypted_private_key" => true
        })

      assert ConnectionCell.to_source(put_in(attrs["type"], "snowflake")) == ~s'''
             :ok = Adbc.download_driver!(:snowflake)

             {:ok, db} =
               Kino.start_child(
                 {Adbc.Database,
                  driver: :snowflake,
                  username: "admin",
                  "adbc.snowflake.sql.account": "account",
                  "adbc.snowflake.sql.db": "default",
                  "adbc.snowflake.sql.schema": "schema",
                  "adbc.snowflake.sql.warehouse": "",
                  "adbc.snowflake.sql.auth_type": "auth_jwt",
                  "adbc.snowflake.sql.client_option.jwt_private_key_pkcs8_value":
                    "-----BEGIN PRIVATE KEY-----...",
                  "adbc.snowflake.sql.client_option.jwt_private_key_pkcs8_password": "passphrase"}
               )

             {:ok, conn} = Kino.start_child({Adbc.Connection, database: db})\
             '''

      assert ConnectionCell.to_source(put_in(attrs["type"], "clickhouse")) == ~s'''
             conn =
               ReqCH.new(
                 database: "default",
                 auth: {:basic, "admin:pass"},
                 base_url: "http://localhost:4444"
               )

             :ok\
             '''

      assert ConnectionCell.to_source(put_in(attrs["type"], "bigquery")) == ~s'''
             :ok = Adbc.download_driver!(:bigquery)

             {:ok, db} =
               Kino.start_child(
                 {Adbc.Database, driver: :bigquery, "adbc.bigquery.sql.project_id": "foo"}
               )

             {:ok, conn} = Kino.start_child({Adbc.Connection, database: db})\
             '''
    end

    test "generates empty source code when required fields are missing" do
      assert ConnectionCell.to_source(put_in(@empty_required_fields["type"], "postgres")) == ""
      assert ConnectionCell.to_source(put_in(@empty_required_fields["type"], "mysql")) == ""
      assert ConnectionCell.to_source(put_in(@empty_required_fields["type"], "sqlite")) == ""
      assert ConnectionCell.to_source(put_in(@empty_required_fields["type"], "bigquery")) == ""
      assert ConnectionCell.to_source(put_in(@empty_required_fields["type"], "athena")) == ""
      assert ConnectionCell.to_source(put_in(@empty_required_fields["type"], "snowflake")) == ""
      assert ConnectionCell.to_source(put_in(@empty_required_fields["type"], "clickhouse")) == ""
    end

    test "generates empty source code when all conditional fields are missing" do
      # Athena needs at least one of workgroup/output_location.
      attrs = Map.merge(@attrs, %{"type" => "athena", "workgroup" => "", "output_location" => ""})

      assert ConnectionCell.to_source(attrs) == ""
    end
  end
316 |
  test "when a field changes, broadcasts the change and sends source update" do
    {kino, _source} = start_smart_cell!(ConnectionCell, %{"variable" => "conn"})

    push_event(kino, "update_field", %{"field" => "hostname", "value" => "myhost"})

    # The client is notified and the generated source reflects the new host.
    assert_broadcast_event(kino, "update", %{"fields" => %{"hostname" => "myhost"}})

    assert_smart_cell_update(kino, %{"hostname" => "myhost"}, """
    opts = [hostname: "myhost", port: 5432, username: "", password: "", database: ""]
    {:ok, conn} = Kino.start_child({Postgrex, opts})\
    """)
  end
329 |
  test "when an invalid variable name is set, restores the previous value" do
    {kino, _source} = start_smart_cell!(ConnectionCell, %{"variable" => "db"})

    # "DB" is not a valid Elixir variable name, so the cell reverts to "db".
    push_event(kino, "update_field", %{"field" => "variable", "value" => "DB"})

    assert_broadcast_event(kino, "update", %{"fields" => %{"variable" => "db"}})
  end
337 |
  test "when the database type changes, restores the default port for that database" do
    {kino, _source} =
      start_smart_cell!(ConnectionCell, %{
        "variable" => "conn",
        "type" => "postgres",
        "port" => 5432
      })

    # Switching to MySQL swaps the port to MySQL's default 3306.
    push_event(kino, "update_field", %{"field" => "type", "value" => "mysql"})

    assert_broadcast_event(kino, "update", %{"fields" => %{"type" => "mysql", "port" => 3306}})

    assert_smart_cell_update(kino, %{"type" => "mysql", "port" => 3306}, """
    opts = [hostname: "localhost", port: 3306, username: "", password: "", database: ""]
    {:ok, conn} = Kino.start_child({MyXQL, opts})\
    """)
  end
355 |
  test "password from secrets" do
    {kino, _source} =
      start_smart_cell!(ConnectionCell, %{
        "variable" => "conn",
        "type" => "postgres",
        "port" => 5432
      })

    # Enable the secret toggle, then pick the secret name.
    push_event(kino, "update_field", %{"field" => "use_password_secret", "value" => true})
    assert_broadcast_event(kino, "update", %{"fields" => %{"use_password_secret" => true}})

    push_event(kino, "update_field", %{"field" => "password_secret", "value" => "PASS"})
    assert_broadcast_event(kino, "update", %{"fields" => %{"password_secret" => "PASS"}})

    # Generated code reads the password from the LB_PASS env var.
    assert_smart_cell_update(
      kino,
      %{"password_secret" => "PASS"},
      """
      opts = [
        hostname: "localhost",
        port: 5432,
        username: "",
        password: System.fetch_env!("LB_PASS"),
        database: ""
      ]

      {:ok, conn} = Kino.start_child({Postgrex, opts})\
      """
    )
  end
386 |
  test "athena secret key from secrets" do
    {kino, _source} =
      start_smart_cell!(ConnectionCell, %{
        "variable" => "conn",
        "type" => "athena",
        "database" => "default",
        "access_key_id" => "id",
        "secret_access_key" => "secret_key",
        "token" => "token",
        "region" => "region",
        "output_location" => "s3://my-bucket",
        "workgroup" => "primary"
      })

    # Switch the secret access key to come from a Livebook secret.
    push_event(kino, "update_field", %{"field" => "use_secret_access_key_secret", "value" => true})

    assert_broadcast_event(kino, "update", %{
      "fields" => %{"use_secret_access_key_secret" => true}
    })

    push_event(kino, "update_field", %{
      "field" => "secret_access_key_secret",
      "value" => "ATHENA_KEY"
    })

    assert_broadcast_event(kino, "update", %{
      "fields" => %{"secret_access_key_secret" => "ATHENA_KEY"}
    })

    # Generated code reads the key from the LB_ATHENA_KEY env var.
    assert_smart_cell_update(
      kino,
      %{"secret_access_key_secret" => "ATHENA_KEY"},
      """
      conn =
        ReqAthena.new(
          access_key_id: "id",
          database: "default",
          output_location: "s3://my-bucket",
          region: "region",
          secret_access_key: System.fetch_env!("LB_ATHENA_KEY"),
          token: "token",
          workgroup: "primary",
          http_errors: :raise
        )

      :ok\
      """
    )
  end
436 | end
437 |
--------------------------------------------------------------------------------
/test/kino_db/sql_cell_test.exs:
--------------------------------------------------------------------------------
1 | defmodule KinoDB.SQLCellTest do
2 | use ExUnit.Case, async: true
3 |
4 | import Kino.Test
5 |
6 | alias KinoDB.SQLCell
7 |
8 | setup :configure_livebook_bridge
9 |
  describe "initialization" do
    test "restores source code from attrs" do
      attrs = %{
        "connection" => %{"variable" => "db", "type" => "postgres"},
        "result_variable" => "ids_result",
        "timeout" => nil,
        "query" => "SELECT id FROM users",
        "data_frame_alias" => Explorer.DataFrame
      }

      {_kino, source} = start_smart_cell!(SQLCell, attrs)

      # Saved attrs round-trip into the same generated query source.
      assert source ==
               """
               ids_result = Postgrex.query!(db, ~S"SELECT id FROM users", [])\
               """
    end
  end
28 |
  test "when an invalid result variable name is set, restores the previous value" do
    {kino, _source} = start_smart_cell!(SQLCell, %{"result_variable" => "result"})

    # "RESULT" is not a valid variable name, so the cell reverts to "result".
    push_event(kino, "update_result_variable", "RESULT")

    assert_broadcast_event(kino, "update_result_variable", "result")
  end
36 |
  test "finds database connections in binding and sends them to the client" do
    {kino, _source} = start_smart_cell!(SQLCell, %{})

    conn_pid = spawn_fake_postgrex_connection()

    # Only the Postgrex-like pid should be recognized as a connection.
    binding = [non_conn: self(), conn: conn_pid]
    env = Code.env_for_eval([])
    SQLCell.scan_binding(kino.pid, binding, env)

    connection = %{variable: "conn", type: "postgres"}

    assert_broadcast_event(kino, "connections", %{
      "connections" => [^connection],
      "connection" => ^connection
    })
  end
53 |
  test "keeps the currently selected connection if not available in binding" do
    attrs = %{"connection" => %{"variable" => "conn1", "type" => "postgres"}}
    {kino, _source} = start_smart_cell!(SQLCell, attrs)

    conn_pid = spawn_fake_postgrex_connection()

    # The binding only contains "conn", but the cell was configured with
    # "conn1" — the selection must not silently change.
    binding = [conn: conn_pid]
    env = Code.env_for_eval([])
    SQLCell.scan_binding(kino.pid, binding, env)

    current_connection = %{variable: "conn1", type: "postgres"}
    connection = %{variable: "conn", type: "postgres"}

    assert_broadcast_event(kino, "connections", %{
      "connections" => [^connection],
      "connection" => ^current_connection
    })
  end
72 |
  test "updates the selected connection type when the variable changes" do
    # Configured as sqlite, but the bound "conn" is actually a Postgrex
    # connection — the reported type should follow the binding.
    attrs = %{"connection" => %{"variable" => "conn", "type" => "sqlite"}}
    {kino, _source} = start_smart_cell!(SQLCell, attrs)

    conn_pid = spawn_fake_postgrex_connection()

    binding = [conn: conn_pid]
    env = Code.env_for_eval([])
    SQLCell.scan_binding(kino.pid, binding, env)

    connection = %{variable: "conn", type: "postgres"}

    assert_broadcast_event(kino, "connections", %{
      "connections" => [^connection],
      "connection" => ^connection
    })
  end
90 |
91 | describe "code generation" do
    test "uses regular string for a single-line query" do
      attrs = %{
        "connection" => %{"variable" => "conn", "type" => "postgres"},
        "result_variable" => "result",
        "cache_query" => true,
        "timeout" => nil,
        "query" => "SELECT id FROM users",
        "data_frame_alias" => Explorer.DataFrame
      }

      # One generated-source expectation per supported database type.
      assert SQLCell.to_source(attrs) == """
             result = Postgrex.query!(conn, ~S"SELECT id FROM users", [])\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "mysql")) == """
             result = MyXQL.query!(conn, ~S"SELECT id FROM users", [])\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "sqlite")) == """
             result = Exqlite.query!(conn, ~S"SELECT id FROM users", [])\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "bigquery")) == """
             result = Adbc.Connection.query!(conn, ~S"SELECT id FROM users", [])\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "athena")) == """
             result = ReqAthena.query!(conn, ~S"SELECT id FROM users", [], format: :explorer).body
             Kino.DataTable.new(result)\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "snowflake")) == """
             result = Adbc.Connection.query!(conn, ~S"SELECT id FROM users", [])\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "clickhouse")) == """
             result = ReqCH.query!(conn, ~S"SELECT id FROM users", [], format: :explorer).body
             Kino.DataTable.new(result)\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "sqlserver")) == """
             result = Tds.query!(conn, ~S"SELECT id FROM users", [])\
             """
    end
136 |
    test "uses heredoc string for a multi-line query" do
      attrs = %{
        "connection" => %{"variable" => "conn", "type" => "postgres"},
        "result_variable" => "result",
        "cache_query" => true,
        "timeout" => nil,
        "query" => "SELECT id FROM users\nWHERE last_name = 'Sherlock'",
        "data_frame_alias" => Explorer.DataFrame
      }

      # Multi-line queries are rendered with a ~S""" heredoc per database.
      assert SQLCell.to_source(attrs) == ~s'''
             result =
               Postgrex.query!(
                 conn,
                 ~S"""
                 SELECT id FROM users
                 WHERE last_name = 'Sherlock'
                 """,
                 []
               )\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "mysql")) == ~s'''
             result =
               MyXQL.query!(
                 conn,
                 ~S"""
                 SELECT id FROM users
                 WHERE last_name = 'Sherlock'
                 """,
                 []
               )\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "sqlite")) == ~s'''
             result =
               Exqlite.query!(
                 conn,
                 ~S"""
                 SELECT id FROM users
                 WHERE last_name = 'Sherlock'
                 """,
                 []
               )\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "bigquery")) == ~s'''
             result =
               Adbc.Connection.query!(
                 conn,
                 ~S"""
                 SELECT id FROM users
                 WHERE last_name = 'Sherlock'
                 """,
                 []
               )\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "athena")) == ~s'''
             result =
               ReqAthena.query!(
                 conn,
                 ~S"""
                 SELECT id FROM users
                 WHERE last_name = 'Sherlock'
                 """,
                 [],
                 format: :explorer
               ).body

             Kino.DataTable.new(result)\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "snowflake")) == ~s'''
             result =
               Adbc.Connection.query!(
                 conn,
                 ~S"""
                 SELECT id FROM users
                 WHERE last_name = 'Sherlock'
                 """,
                 []
               )\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "clickhouse")) == ~s'''
             result =
               ReqCH.query!(
                 conn,
                 ~S"""
                 SELECT id FROM users
                 WHERE last_name = 'Sherlock'
                 """,
                 [],
                 format: :explorer
               ).body

             Kino.DataTable.new(result)\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "sqlserver")) == ~s'''
             result =
               Tds.query!(
                 conn,
                 ~S"""
                 SELECT id FROM users
                 WHERE last_name = 'Sherlock'
                 """,
                 []
               )\
             '''
    end
249 |
    test "parses parameter expressions" do
      attrs = %{
        "connection" => %{"variable" => "conn", "type" => "postgres"},
        "result_variable" => "result",
        "cache_query" => true,
        "timeout" => nil,
        "query" => ~s/SELECT id FROM users WHERE id {{user_id}} AND name LIKE {{search <> "%"}}/,
        "data_frame_alias" => Explorer.DataFrame
      }

      # Each database rewrites {{...}} into its own placeholder style and
      # collects the expressions as query parameters.
      assert SQLCell.to_source(attrs) == ~s'''
             result =
               Postgrex.query!(conn, ~S"SELECT id FROM users WHERE id $1 AND name LIKE $2", [
                 user_id,
                 search <> "%"
               ])\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "mysql")) == ~s'''
             result =
               MyXQL.query!(conn, ~S"SELECT id FROM users WHERE id ? AND name LIKE ?", [
                 user_id,
                 search <> "%"
               ])\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "sqlite")) == ~s'''
             result =
               Exqlite.query!(conn, ~S"SELECT id FROM users WHERE id ?1 AND name LIKE ?2", [
                 user_id,
                 search <> "%"
               ])\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "bigquery")) == ~s'''
             result =
               Adbc.Connection.query!(conn, ~S"SELECT id FROM users WHERE id ? AND name LIKE ?", [
                 user_id,
                 search <> \"%\"
               ])\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "athena")) == ~s'''
             result =
               ReqAthena.query!(
                 conn,
                 ~S"SELECT id FROM users WHERE id ? AND name LIKE ?",
                 [user_id, search <> "%"],
                 format: :explorer
               ).body

             Kino.DataTable.new(result)\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "snowflake")) == ~s'''
             result =
               Adbc.Connection.query!(conn, ~S"SELECT id FROM users WHERE id ?1 AND name LIKE ?2", [
                 user_id,
                 search <> \"%\"
               ])\
             '''

      # ClickHouse: identifier-like expressions keep their name; other
      # expressions get a synthetic "param_n" placeholder name.
      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "clickhouse")) == ~s'''
             result =
               ReqCH.query!(
                 conn,
                 ~S"SELECT id FROM users WHERE id {user_id:String} AND name LIKE {param_2:String}",
                 [{"user_id", user_id}, {"param_2", search <> \"%\"}],
                 format: :explorer
               ).body

             Kino.DataTable.new(result)\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "sqlserver")) == ~s'''
             result =
               Tds.query!(conn, ~S"SELECT id FROM users WHERE id @1 AND name LIKE @2", [
                 %Tds.Parameter{name: "@1", value: user_id},
                 %Tds.Parameter{name: "@2", value: search <> "%"}
               ])\
             '''
    end
332 |
    test "ignores parameters inside comments" do
      attrs = %{
        "connection" => %{"variable" => "conn", "type" => "postgres"},
        "result_variable" => "result",
        "cache_query" => true,
        "timeout" => nil,
        "query" => """
        SELECT id from users
        -- WHERE id = {{user_id1}}
        /* WHERE id = {{user_id2}} */ WHERE id = {{user_id3}}\
        """,
        "data_frame_alias" => Explorer.DataFrame
      }

      # Placeholders inside "--" and "/* */" comments must be left alone;
      # only the live {{user_id3}} is parameterized.
      assert SQLCell.to_source(attrs) == ~s'''
             result =
               Postgrex.query!(
                 conn,
                 ~S"""
                 SELECT id from users
                 -- WHERE id = {{user_id1}}
                 /* WHERE id = {{user_id2}} */ WHERE id = $1
                 """,
                 [user_id3]
               )\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "mysql")) == ~s'''
             result =
               MyXQL.query!(
                 conn,
                 ~S"""
                 SELECT id from users
                 -- WHERE id = {{user_id1}}
                 /* WHERE id = {{user_id2}} */ WHERE id = ?
                 """,
                 [user_id3]
               )\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "sqlite")) == ~s'''
             result =
               Exqlite.query!(
                 conn,
                 ~S"""
                 SELECT id from users
                 -- WHERE id = {{user_id1}}
                 /* WHERE id = {{user_id2}} */ WHERE id = ?1
                 """,
                 [user_id3]
               )\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "bigquery")) == ~s'''
             result =
               Adbc.Connection.query!(
                 conn,
                 ~S"""
                 SELECT id from users
                 -- WHERE id = {{user_id1}}
                 /* WHERE id = {{user_id2}} */ WHERE id = ?
                 """,
                 [user_id3]
               )\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "athena")) == ~s'''
             result =
               ReqAthena.query!(
                 conn,
                 ~S"""
                 SELECT id from users
                 -- WHERE id = {{user_id1}}
                 /* WHERE id = {{user_id2}} */ WHERE id = ?
                 """,
                 [user_id3],
                 format: :explorer
               ).body

             Kino.DataTable.new(result)\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "snowflake")) == ~s'''
             result =
               Adbc.Connection.query!(
                 conn,
                 ~S"""
                 SELECT id from users
                 -- WHERE id = {{user_id1}}
                 /* WHERE id = {{user_id2}} */ WHERE id = ?1
                 """,
                 [user_id3]
               )\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "clickhouse")) == ~s'''
             result =
               ReqCH.query!(
                 conn,
                 ~S"""
                 SELECT id from users
                 -- WHERE id = {{user_id1}}
                 /* WHERE id = {{user_id2}} */ WHERE id = {user_id3:String}
                 """,
                 [{"user_id3", user_id3}],
                 format: :explorer
               ).body

             Kino.DataTable.new(result)\
             '''

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "sqlserver")) == ~s'''
             result =
               Tds.query!(
                 conn,
                 ~S"""
                 SELECT id from users
                 -- WHERE id = {{user_id1}}
                 /* WHERE id = {{user_id2}} */ WHERE id = @1
                 """,
                 [%Tds.Parameter{name: "@1", value: user_id3}]
               )\
             '''
    end
457 |
    # to_source/1 must translate the cell-level "timeout" attribute (seconds)
    # into the driver-level `timeout:` option in milliseconds (30 -> 30000)
    # for the DBConnection-based drivers (Postgrex, MyXQL, Exqlite, Tds), and
    # drop it entirely for backends whose generated code takes no timeout
    # option (ADBC-based BigQuery/Snowflake, ReqAthena, ReqCH).
    test "passes timeout option when a timeout is specified" do
      attrs = %{
        "connection" => %{"variable" => "conn", "type" => "postgres"},
        "result_variable" => "result",
        "cache_query" => true,
        "timeout" => 30,
        "query" => "SELECT id FROM users",
        "data_frame_alias" => Explorer.DataFrame
      }

      assert SQLCell.to_source(attrs) == """
             result = Postgrex.query!(conn, ~S"SELECT id FROM users", [], timeout: 30000)\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "mysql")) == """
             result = MyXQL.query!(conn, ~S"SELECT id FROM users", [], timeout: 30000)\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "sqlite")) == """
             result = Exqlite.query!(conn, ~S"SELECT id FROM users", [], timeout: 30000)\
             """

      # Backends below emit no timeout option at all
      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "bigquery")) == """
             result = Adbc.Connection.query!(conn, ~S"SELECT id FROM users", [])\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "athena")) == """
             result = ReqAthena.query!(conn, ~S"SELECT id FROM users", [], format: :explorer).body
             Kino.DataTable.new(result)\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "snowflake")) == """
             result = Adbc.Connection.query!(conn, ~S"SELECT id FROM users", [])\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "clickhouse")) == """
             result = ReqCH.query!(conn, ~S"SELECT id FROM users", [], format: :explorer).body
             Kino.DataTable.new(result)\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "sqlserver")) == """
             result = Tds.query!(conn, ~S"SELECT id FROM users", [], timeout: 30000)\
             """
    end
502 |
    # Only Athena supports result caching in the generated code:
    # `cache_query: false` is emitted explicitly, while the default (true)
    # adds no option. Every other driver ignores the "cache_query" attribute.
    test "passes cache_query option when supported" do
      attrs = %{
        "connection" => %{"variable" => "conn", "type" => "postgres"},
        "result_variable" => "result",
        "cache_query" => true,
        "query" => "SELECT id FROM users"
      }

      assert SQLCell.to_source(attrs) == """
             result = Postgrex.query!(conn, ~S"SELECT id FROM users", [])\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "mysql")) == """
             result = MyXQL.query!(conn, ~S"SELECT id FROM users", [])\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "sqlite")) == """
             result = Exqlite.query!(conn, ~S"SELECT id FROM users", [])\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "snowflake")) == """
             result = Adbc.Connection.query!(conn, ~S"SELECT id FROM users", [])\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "clickhouse")) == """
             result = ReqCH.query!(conn, ~S"SELECT id FROM users", [], format: :explorer).body
             Kino.DataTable.new(result)\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "bigquery")) == """
             result = Adbc.Connection.query!(conn, ~S"SELECT id FROM users", [])\
             """

      athena = put_in(attrs["connection"]["type"], "athena")

      # cache_query: true is the default, so no option is emitted
      assert SQLCell.to_source(put_in(athena["cache_query"], true)) == """
             result = ReqAthena.query!(conn, ~S"SELECT id FROM users", [], format: :explorer).body
             Kino.DataTable.new(result)\
             """

      # cache_query: false must appear in the generated options
      assert SQLCell.to_source(put_in(athena["cache_query"], false)) == """
             result =
               ReqAthena.query!(conn, ~S"SELECT id FROM users", [],
                 format: :explorer,
                 cache_query: false
               ).body

             Kino.DataTable.new(result)\
             """

      assert SQLCell.to_source(put_in(attrs["connection"]["type"], "sqlserver")) == """
             result = Tds.query!(conn, ~S"SELECT id FROM users", [])\
             """
    end
557 |
558 | test "escapes interpolation" do
559 | attrs = %{
560 | "connection" => %{"variable" => "conn", "type" => "postgres"},
561 | "result_variable" => "result",
562 | "cache_query" => true,
563 | "timeout" => nil,
564 | "query" => "SELECT id FROM users WHERE last_name = '\#{user_id}'",
565 | "data_frame_alias" => Explorer.DataFrame
566 | }
567 |
568 | assert SQLCell.to_source(attrs) == """
569 | result =
570 | Postgrex.query!(conn, ~S"SELECT id FROM users WHERE last_name = '\#{user_id}'", [])\
571 | """
572 |
573 | athena = put_in(attrs["query"], "SELECT id FROM users\nWHERE last_name = '\#{user_id}'")
574 |
575 | assert SQLCell.to_source(put_in(athena["cache_query"], true)) == ~s'''
576 | result =
577 | Postgrex.query!(
578 | conn,
579 | ~S"""
580 | SELECT id FROM users
581 | WHERE last_name = '\#{user_id}'
582 | """,
583 | []
584 | )\
585 | '''
586 | end
587 | end
588 |
  # Spawns a process that registers itself via
  # `DBConnection.register_as_pool(Postgrex.Protocol)` so it is recognized as
  # a Postgrex connection pool by code that scans bound variables. The helper
  # blocks until registration is confirmed (the trailing `receive`), and the
  # fake pool is stopped from `on_exit/1` when the test finishes.
  defp spawn_fake_postgrex_connection() do
    parent = self()

    conn =
      spawn_link(fn ->
        # Pretend we are a connection pool for Postgrex
        DBConnection.register_as_pool(Postgrex.Protocol)
        send(parent, {:ready, self()})
        receive do: (:stop -> :ok)
      end)

    on_exit(fn ->
      send(conn, :stop)
    end)

    receive do: ({:ready, ^conn} -> conn)
  end
606 | end
607 |
--------------------------------------------------------------------------------
/lib/kino_db/connection_cell.ex:
--------------------------------------------------------------------------------
1 | defmodule KinoDB.ConnectionCell do
2 | @moduledoc false
3 |
4 | # A smart cell used to establish connection to a database.
5 |
6 | use Kino.JS, assets_path: "lib/assets/connection_cell"
7 | use Kino.JS.Live
8 | use Kino.SmartCell, name: "Database connection"
9 |
10 | @default_port_by_type %{"postgres" => 5432, "mysql" => 3306, "sqlserver" => 1433}
11 |
  @impl true
  # Normalizes the persisted attrs into the complete field map the UI works
  # with. Every field is given a value so the client never sees a missing
  # key; each `use_*_secret` toggle defaults to the secret variant whenever a
  # secret reference was stored or no plain value exists.
  def init(attrs, ctx) do
    type = attrs["type"] || default_db_type()
    default_port = @default_port_by_type[type]

    password = attrs["password"] || ""
    secret_access_key = attrs["secret_access_key"] || ""
    priv_key = attrs["private_key"] || ""
    priv_key_passphrase = attrs["private_key_passphrase"] || ""

    auth_type = attrs["auth_type"] || ""

    fields = %{
      "variable" => Kino.SmartCell.prefixed_var_name("conn", attrs["variable"]),
      "type" => type,
      "hostname" => attrs["hostname"] || "localhost",
      "database_path" => attrs["database_path"] || "",
      "port" => attrs["port"] || default_port,
      "use_ipv6" => Map.get(attrs, "use_ipv6", false),
      "use_ssl" => Map.get(attrs, "use_ssl", false),
      "cacertfile" => attrs["cacertfile"] || "",
      "auth_type" => auth_type,
      "username" => attrs["username"] || "",
      "password" => password,
      "use_password_secret" => Map.has_key?(attrs, "password_secret") || password == "",
      "password_secret" => attrs["password_secret"] || "",
      "database" => attrs["database"] || "",
      "project_id" => attrs["project_id"] || "",
      "default_dataset_id" => attrs["default_dataset_id"] || "",
      "credentials_json" => attrs["credentials_json"] || "",
      "access_key_id" => attrs["access_key_id"] || "",
      "secret_access_key" => secret_access_key,
      "use_secret_access_key_secret" =>
        Map.has_key?(attrs, "secret_access_key_secret") || secret_access_key == "",
      "secret_access_key_secret" => attrs["secret_access_key_secret"] || "",
      "private_key" => priv_key,
      # NOTE(review): unlike the password/secret_access_key toggles above,
      # the two checks below look up the toggle key itself
      # ("use_private_key_secret" / "use_private_key_passphrase_secret")
      # rather than the stored secret key ("private_key_secret" /
      # "private_key_passphrase_secret") — likely a copy-paste slip, though
      # the `== ""` fallback appears to yield the same result in practice.
      # Confirm before changing.
      "use_private_key_secret" => Map.has_key?(attrs, "use_private_key_secret") || priv_key == "",
      "private_key_secret" => attrs["private_key_secret"] || "",
      "use_encrypted_private_key" => Map.get(attrs, "use_encrypted_private_key", false),
      "private_key_passphrase" => priv_key_passphrase,
      "use_private_key_passphrase_secret" =>
        Map.has_key?(attrs, "use_private_key_passphrase_secret") || priv_key_passphrase == "",
      "private_key_passphrase_secret" => attrs["private_key_passphrase_secret"] || "",
      "token" => attrs["token"] || "",
      "region" => attrs["region"] || "us-east-1",
      "workgroup" => attrs["workgroup"] || "",
      "output_location" => attrs["output_location"] || "",
      "account" => attrs["account"] || "",
      "schema" => attrs["schema"] || "",
      "warehouse" => attrs["warehouse"] || "",
      "instance" => attrs["instance"] || ""
    }

    ctx =
      assign(ctx,
        fields: fields,
        missing_dep: missing_dep(fields),
        help_box: help_box(fields),
        has_aws_credentials: Code.ensure_loaded?(:aws_credentials)
      )

    {:ok, ctx}
  end
75 |
76 | @impl true
77 | def handle_connect(ctx) do
78 | payload = %{
79 | fields: ctx.assigns.fields,
80 | missing_dep: ctx.assigns.missing_dep,
81 | help_box: ctx.assigns.help_box,
82 | has_aws_credentials: ctx.assigns.has_aws_credentials
83 | }
84 |
85 | {:ok, payload, ctx}
86 | end
87 |
  @impl true
  # Applies a single field change from the client, recomputes the
  # missing-dependency hint, and broadcasts the (possibly cascaded) field
  # updates to all connected clients.
  def handle_event("update_field", %{"field" => field, "value" => value}, ctx) do
    updated_fields = to_updates(ctx.assigns.fields, field, value)
    ctx = update(ctx, :fields, &Map.merge(&1, updated_fields))

    # `ctx` already contains the merged fields at this point
    missing_dep = missing_dep(ctx.assigns.fields)

    ctx =
      if missing_dep == ctx.assigns.missing_dep do
        ctx
      else
        # Only notify clients when the hint actually changed
        broadcast_event(ctx, "missing_dep", %{"dep" => missing_dep})
        assign(ctx, missing_dep: missing_dep)
      end

    broadcast_event(ctx, "update", %{"fields" => updated_fields})

    {:noreply, ctx}
  end
107 |
108 | defp to_updates(_fields, "port", value) do
109 | port =
110 | case Integer.parse(value) do
111 | {n, ""} -> n
112 | _ -> nil
113 | end
114 |
115 | %{"port" => port}
116 | end
117 |
118 | defp to_updates(_fields, "type", value) do
119 | %{"type" => value, "port" => @default_port_by_type[value]}
120 | end
121 |
122 | defp to_updates(fields, "variable", value) do
123 | if Kino.SmartCell.valid_variable_name?(value) do
124 | %{"variable" => value}
125 | else
126 | %{"variable" => fields["variable"]}
127 | end
128 | end
129 |
130 | defp to_updates(_fields, field, value), do: %{field => value}
131 |
  @default_keys ["type", "variable"]

  @impl true
  # Serializes the editable fields into the attrs map persisted in the
  # notebook, keeping only the keys relevant for the selected database type.
  # Only one of each plain-value/secret pair is stored, depending on the
  # corresponding `use_*_secret` toggle.
  def to_attrs(%{assigns: %{fields: fields}}) do
    connection_keys =
      case fields["type"] do
        "sqlite" ->
          ~w|database_path|

        "duckdb" ->
          ~w|database_path|

        "bigquery" ->
          ~w|project_id default_dataset_id credentials_json|

        "athena" ->
          if fields["use_secret_access_key_secret"],
            do:
              ~w|access_key_id secret_access_key_secret token region workgroup output_location database|,
            else:
              ~w|access_key_id secret_access_key token region workgroup output_location database|

        "snowflake" ->
          # Snowflake keys depend on the auth strategy as well
          snowflake_fields(fields)

        "sqlserver" ->
          if fields["use_password_secret"],
            do:
              ~w|database hostname port use_ipv6 username password_secret use_ssl cacertfile instance|,
            else:
              ~w|database hostname port use_ipv6 username password use_ssl cacertfile instance|

        "clickhouse" ->
          if fields["use_password_secret"],
            do: ~w|hostname port use_ssl username password_secret database|,
            else: ~w|hostname port use_ssl username password database|

        type when type in ["postgres", "mysql"] ->
          if fields["use_password_secret"],
            do: ~w|database hostname port use_ipv6 use_ssl cacertfile username password_secret|,
            else: ~w|database hostname port use_ipv6 use_ssl cacertfile username password|
      end

    Map.take(fields, @default_keys ++ connection_keys)
  end
177 |
178 | @default_snowflake_keys ~w|account username database schema warehouse auth_type|
179 | defp snowflake_fields(fields),
180 | do: snowflake_fields(fields["auth_type"], fields, @default_snowflake_keys)
181 |
182 | defp snowflake_fields("auth_jwt", fields, keys) do
183 | pk_keys =
184 | if fields["use_private_key_secret"],
185 | do: ~w|private_key_secret use_encrypted_private_key|,
186 | else: ~w|private_key use_encrypted_private_key|
187 |
188 | phrase_keys =
189 | if fields["use_encrypted_private_key"] do
190 | if fields["use_private_key_passphrase_secret"],
191 | do: ~w|private_key_passphrase_secret|,
192 | else: ~w|private_key_passphrase|
193 | else
194 | []
195 | end
196 |
197 | keys ++ pk_keys ++ phrase_keys
198 | end
199 |
200 | defp snowflake_fields(_, fields, keys) do
201 | keys ++
202 | if fields["use_password_secret"], do: ~w|password_secret|, else: ~w|password|
203 | end
204 |
  @impl true
  # Generates the connection source code. Returns "" (no code) unless every
  # required field for the selected type is filled in; for Athena, at least
  # one of workgroup/output_location must additionally be present.
  def to_source(attrs) do
    required_keys =
      case attrs["type"] do
        "sqlite" ->
          ~w|database_path|

        "duckdb" ->
          # Empty path means an in-memory database, so nothing is required
          []

        "bigquery" ->
          ~w|project_id|

        "athena" ->
          # With :aws_credentials available, credentials can come from the
          # environment, so only the database is strictly required.
          if Code.ensure_loaded?(:aws_credentials),
            do: ~w|database|,
            else:
              if(Map.has_key?(attrs, "secret_access_key"),
                do: ~w|access_key_id secret_access_key region database|,
                else: ~w|access_key_id secret_access_key_secret region database|
              )

        "snowflake" ->
          snowflake_source_required_keys(attrs)

        "sqlserver" ->
          ~w|hostname port|

        "clickhouse" ->
          ~w|hostname port|

        type when type in ["postgres", "mysql"] ->
          ~w|hostname port|
      end

    conditional_keys =
      case attrs["type"] do
        "athena" -> ~w|workgroup output_location|
        _ -> []
      end

    if all_fields_filled?(attrs, required_keys) and
         any_fields_filled?(attrs, conditional_keys) do
      attrs |> to_quoted() |> Kino.SmartCell.quoted_to_string()
    else
      ""
    end
  end
253 |
254 | defp snowflake_source_required_keys(attrs),
255 | do: snowflake_source_required_keys(attrs["auth_type"], attrs, ~w|account username auth_type|)
256 |
257 | defp snowflake_source_required_keys("auth_jwt", attrs, keys) do
258 | keys ++
259 | if Map.has_key?(attrs, "private_key_secret"),
260 | do: ~w|private_key_secret|,
261 | else: ~w|private_key|
262 | end
263 |
264 | defp snowflake_source_required_keys(_, attrs, keys) do
265 | keys ++ if Map.has_key?(attrs, "password_secret"), do: ~w|password_secret|, else: ~w|password|
266 | end
267 |
268 | defp all_fields_filled?(attrs, keys) do
269 | not Enum.any?(keys, fn key -> attrs[key] in [nil, ""] end)
270 | end
271 |
272 | defp any_fields_filled?(_, []), do: true
273 |
274 | defp any_fields_filled?(attrs, keys) do
275 | Enum.any?(keys, fn key -> attrs[key] not in [nil, ""] end)
276 | end
277 |
278 | defp trim_opts(opts) do
279 | Enum.map(opts, fn
280 | {key, value} when is_binary(value) -> {key, String.trim(value)}
281 | {key, value} -> {key, value}
282 | end)
283 | end
284 |
  # Builds the quoted connection code for each database type; the rendered
  # AST is what ends up as the user's notebook cell source.

  defp to_quoted(%{"type" => "sqlite"} = attrs) do
    quote do
      opts = [database: unquote(attrs["database_path"])]

      {:ok, unquote(quoted_var(attrs["variable"]))} = Kino.start_child({Exqlite, opts})
    end
  end

  defp to_quoted(%{"type" => "snowflake"} = attrs) do
    var = quoted_var(attrs["variable"])

    # Trim string values before building the ADBC option list
    snowflake_opts =
      attrs
      |> trim_opts()
      |> Map.new()
      |> snowflake_options()

    quote do
      :ok = Adbc.download_driver!(:snowflake)
      {:ok, db} = Kino.start_child({Adbc.Database, unquote(snowflake_opts)})
      {:ok, unquote(var)} = Kino.start_child({Adbc.Connection, database: db})
    end
  end

  defp to_quoted(%{"type" => "duckdb"} = attrs) do
    var = quoted_var(attrs["variable"])

    # An empty path means an in-memory DuckDB database (no :path option)
    opts =
      case String.trim(attrs["database_path"]) do
        "" -> [driver: :duckdb]
        path -> [driver: :duckdb, path: path]
      end

    quote do
      :ok = Adbc.download_driver!(:duckdb)
      {:ok, db} = Kino.start_child({Adbc.Database, unquote(opts)})
      {:ok, unquote(var)} = Kino.start_child({Adbc.Connection, database: db})
    end
  end

  defp to_quoted(%{"type" => "bigquery"} = attrs) do
    var = quoted_var(attrs["variable"])

    opts =
      [
        driver: :bigquery,
        "adbc.bigquery.sql.project_id": attrs["project_id"]
      ] ++
        case attrs["default_dataset_id"] do
          "" -> []
          dataset_id -> ["adbc.bigquery.sql.dataset_id": dataset_id]
        end ++
        case attrs["credentials_json"] do
          "" ->
            []

          credentials_json ->
            [
              "adbc.bigquery.sql.auth_type": "adbc.bigquery.sql.auth_type.json_credential_string",
              # Hand-built ~S""" heredoc AST so the credentials render as a
              # readable multiline sigil in the generated cell
              "adbc.bigquery.sql.auth_credentials":
                {:sigil_S, [delimiter: ~s["""]], [{:<<>>, [], [credentials_json <> "\n"]}, []]}
            ]
        end

    quote do
      :ok = Adbc.download_driver!(:bigquery)
      {:ok, db} = Kino.start_child({Adbc.Database, unquote(opts)})
      {:ok, unquote(var)} = Kino.start_child({Adbc.Connection, database: db})
    end
  end

  defp to_quoted(%{"type" => "postgres"} = attrs) do
    quote do
      opts = unquote(trim_opts(shared_options(attrs) ++ postgres_and_mysql_options(attrs)))

      {:ok, unquote(quoted_var(attrs["variable"]))} = Kino.start_child({Postgrex, opts})
    end
  end

  defp to_quoted(%{"type" => "mysql"} = attrs) do
    quote do
      opts = unquote(trim_opts(shared_options(attrs) ++ postgres_and_mysql_options(attrs)))

      {:ok, unquote(quoted_var(attrs["variable"]))} = Kino.start_child({MyXQL, opts})
    end
  end

  defp to_quoted(%{"type" => "sqlserver"} = attrs) do
    quote do
      opts = unquote(trim_opts(shared_options(attrs) ++ sqlserver_options(attrs)))

      {:ok, unquote(quoted_var(attrs["variable"]))} = Kino.start_child({Tds, opts})
    end
  end

  # Athena is request-based (Req), not a supervised connection process; the
  # generated code binds a ReqAthena request struct to the variable.
  defp to_quoted(%{"type" => "athena"} = attrs) do
    quote do
      unquote(quoted_var(attrs["variable"])) =
        ReqAthena.new(
          access_key_id: unquote(attrs["access_key_id"]),
          database: unquote(attrs["database"]),
          output_location: unquote(attrs["output_location"]),
          region: unquote(attrs["region"]),
          secret_access_key: unquote(quoted_access_key(attrs)),
          token: unquote(attrs["token"]),
          workgroup: unquote(attrs["workgroup"]),
          http_errors: :raise
        )

      :ok
    end
  end

  # ClickHouse is likewise request-based (ReqCH over HTTP)
  defp to_quoted(%{"type" => "clickhouse"} = attrs) do
    trimmed = attrs |> trim_opts() |> Map.new()
    shared_opts = shared_options(trimmed)

    clickhouse_opts = trimmed |> clickhouse_options(shared_opts)

    quote do
      unquote(quoted_var(attrs["variable"])) = ReqCH.new(unquote(clickhouse_opts))

      :ok
    end
  end
410 |
411 | defp quoted_access_key(%{"secret_access_key" => password}), do: password
412 |
413 | defp quoted_access_key(%{"secret_access_key_secret" => ""}), do: ""
414 |
415 | defp quoted_access_key(%{"secret_access_key_secret" => secret}) do
416 | quote do
417 | System.fetch_env!(unquote("LB_#{secret}"))
418 | end
419 | end
420 |
421 | defp quoted_private_key(%{"private_key" => pk}), do: pk
422 |
423 | defp quoted_private_key(%{"private_key_secret" => ""}), do: ""
424 |
425 | defp quoted_private_key(%{"private_key_secret" => secret}) do
426 | quote do
427 | System.fetch_env!(unquote("LB_#{secret}"))
428 | end
429 | end
430 |
431 | defp quoted_private_key_passphrase(%{"private_key_passphrase" => phrase}), do: phrase
432 |
433 | defp quoted_private_key_passphrase(%{"private_key_passphrase_secret" => ""}), do: ""
434 |
435 | defp quoted_private_key_passphrase(%{"private_key_passphrase_secret" => secret}) do
436 | quote do
437 | System.fetch_env!(unquote("LB_#{secret}"))
438 | end
439 | end
440 |
  # Builds the Adbc option list for a Snowflake connection; auth-specific
  # options are appended by `snowflake_auth_opts/3`.
  defp snowflake_options(attrs) do
    shared_opts =
      [
        driver: :snowflake,
        username: attrs["username"],
        "adbc.snowflake.sql.account": attrs["account"],
        "adbc.snowflake.sql.db": attrs["database"],
        "adbc.snowflake.sql.schema": attrs["schema"],
        "adbc.snowflake.sql.warehouse": attrs["warehouse"]
      ]

    snowflake_auth_opts(attrs["auth_type"], attrs, shared_opts)
  end

  # Key-pair (JWT) authentication: inline the private key (or its secret
  # lookup), plus the passphrase when the key is encrypted.
  defp snowflake_auth_opts("auth_jwt", attrs, opts) do
    opts ++
      [
        "adbc.snowflake.sql.auth_type": "auth_jwt",
        "adbc.snowflake.sql.client_option.jwt_private_key_pkcs8_value": quoted_private_key(attrs)
      ] ++
      if attrs["use_encrypted_private_key"] do
        [
          "adbc.snowflake.sql.client_option.jwt_private_key_pkcs8_password":
            quoted_private_key_passphrase(attrs)
        ]
      else
        []
      end
  end

  # Any other auth type falls back to username/password authentication.
  defp snowflake_auth_opts(_, attrs, opts) do
    opts ++
      [
        "adbc.snowflake.sql.auth_type": "auth_snowflake",
        password: quoted_pass(attrs)
      ]
  end
478 |
479 | defp shared_options(attrs) do
480 | opts = [
481 | hostname: attrs["hostname"],
482 | port: attrs["port"],
483 | username: attrs["username"],
484 | password: quoted_pass(attrs),
485 | database: attrs["database"]
486 | ]
487 |
488 | if attrs["use_ipv6"] do
489 | opts ++ [socket_options: [:inet6]]
490 | else
491 | opts
492 | end
493 | end
494 |
495 | defp postgres_and_mysql_options(attrs) do
496 | if attrs["use_ssl"] do
497 | cacertfile = attrs["cacertfile"]
498 |
499 | ssl_opts =
500 | if cacertfile && cacertfile != "" do
501 | [cacertfile: cacertfile]
502 | else
503 | [cacerts: quote(do: :public_key.cacerts_get())]
504 | end
505 |
506 | [ssl: ssl_opts]
507 | else
508 | []
509 | end
510 | end
511 |
512 | defp sqlserver_options(attrs) do
513 | opts =
514 | if attrs["use_ssl"] do
515 | cacertfile = attrs["cacertfile"]
516 |
517 | ssl_opts =
518 | if cacertfile && cacertfile != "" do
519 | [cacertfile: cacertfile]
520 | else
521 | [cacerts: quote(do: :public_key.cacerts_get())]
522 | end
523 |
524 | [ssl: true, ssl_opts: ssl_opts]
525 | else
526 | []
527 | end
528 |
529 | instance = attrs["instance"]
530 |
531 | if instance && instance != "" do
532 | opts ++ [instance: instance]
533 | else
534 | opts
535 | end
536 | end
537 |
538 | defp clickhouse_options(attrs) do
539 | scheme = if attrs["use_ssl"], do: "https", else: "http"
540 |
541 | [scheme: scheme]
542 | end
543 |
544 | defp clickhouse_options(attrs, shared_options) do
545 | attrs
546 | |> clickhouse_options()
547 | |> build_clickhouse_base_url(shared_options)
548 | |> maybe_add_req_basic_auth(shared_options)
549 | |> maybe_add_clickhouse_database(shared_options)
550 | end
551 |
552 | defp build_clickhouse_base_url(opts, shared_opts) do
553 | host = Keyword.fetch!(shared_opts, :hostname)
554 | port = Keyword.fetch!(shared_opts, :port)
555 | scheme = Keyword.fetch!(opts, :scheme)
556 |
557 | uri = %URI{scheme: scheme, port: port, host: host}
558 |
559 | opts
560 | |> Keyword.put_new(:base_url, URI.to_string(uri))
561 | |> Keyword.delete(:scheme)
562 | end
563 |
564 | defp maybe_add_req_basic_auth(opts, shared_opts) do
565 | username = shared_opts[:username]
566 |
567 | if username != "" do
568 | password = shared_opts[:password]
569 |
570 | auth =
571 | if is_binary(password) do
572 | "#{username}:#{password}"
573 | else
574 | quote do
575 | unquote(username) <> ":" <> unquote(password)
576 | end
577 | end
578 |
579 | Keyword.put_new(opts, :auth, {:basic, auth})
580 | else
581 | opts
582 | end
583 | end
584 |
585 | defp maybe_add_clickhouse_database(opts, shared_opts) do
586 | if shared_opts[:database] != "" do
587 | Keyword.put_new(opts, :database, shared_opts[:database])
588 | else
589 | opts
590 | end
591 | end
592 |
  # Turns a variable name into a quoted variable reference.
  # NOTE(review): `String.to_atom/1` creates atoms dynamically; in practice
  # the input is bounded because variable names are validated in
  # `to_updates/3` via `Kino.SmartCell.valid_variable_name?/1`.
  defp quoted_var(string), do: {String.to_atom(string), [], nil}

  # Plain passwords are inlined; secret-backed passwords become a quoted
  # `System.fetch_env!/1` call reading the LB_-prefixed Livebook secret.
  defp quoted_pass(%{"password" => password}), do: password

  defp quoted_pass(%{"password_secret" => ""}), do: ""

  defp quoted_pass(%{"password_secret" => secret}) do
    quote do
      System.fetch_env!(unquote("LB_#{secret}"))
    end
  end
604 |
  # Picks the initial connection type based on which driver libraries are
  # available, checked in this priority order; falls back to "postgres" so
  # the form is always usable even with no driver installed.
  defp default_db_type() do
    cond do
      Code.ensure_loaded?(Postgrex) -> "postgres"
      Code.ensure_loaded?(MyXQL) -> "mysql"
      Code.ensure_loaded?(Exqlite) -> "sqlite"
      Code.ensure_loaded?(ReqAthena) -> "athena"
      Code.ensure_loaded?(ReqCH) -> "clickhouse"
      # Adbc covers duckdb/snowflake/bigquery; pick from configured drivers
      Code.ensure_loaded?(Adbc) -> adbc_default_db_type()
      Code.ensure_loaded?(Tds) -> "sqlserver"
      true -> "postgres"
    end
  end
617 |
618 | defp adbc_default_db_type() do
619 | drivers = Application.get_env(:adbc, :drivers, [])
620 | driver = Enum.find([:duckdb, :snowflake, :bigquery], :duckdb, &(&1 in drivers))
621 | Atom.to_string(driver)
622 | end
623 |
  # Returns the dependency snippet the user must add for the selected type,
  # or nil when the required driver module(s) are already loaded.

  defp missing_dep(%{"type" => "postgres"}) do
    unless Code.ensure_loaded?(Postgrex) do
      ~s/{:postgrex, "~> 0.18"}/
    end
  end

  defp missing_dep(%{"type" => "mysql"}) do
    unless Code.ensure_loaded?(MyXQL) do
      ~s/{:myxql, "~> 0.7"}/
    end
  end

  defp missing_dep(%{"type" => "sqlite"}) do
    unless Code.ensure_loaded?(Exqlite) do
      ~s/{:exqlite, "~> 0.11"}/
    end
  end

  # Athena needs both the driver and Explorer (results render as data frames)
  defp missing_dep(%{"type" => "athena"}) do
    missing_many_deps([
      {ReqAthena, ~s|{:req_athena, "~> 0.3"}|},
      {Explorer, ~s|{:explorer, "~> 0.10"}|}
    ])
  end

  # All ADBC-backed types share the single :adbc dependency
  defp missing_dep(%{"type" => adbc}) when adbc in ~w[duckdb snowflake bigquery] do
    unless Code.ensure_loaded?(Adbc) do
      ~s|{:adbc, "~> 0.3"}|
    end
  end

  defp missing_dep(%{"type" => "sqlserver"}) do
    unless Code.ensure_loaded?(Tds) do
      ~s|{:tds, "~> 2.3"}|
    end
  end

  # ClickHouse likewise needs the driver plus Explorer
  defp missing_dep(%{"type" => "clickhouse"}) do
    missing_many_deps([
      {ReqCH, ~s|{:req_ch, "~> 0.1"}|},
      {Explorer, ~s|{:explorer, "~> 0.10"}|}
    ])
  end

  defp missing_dep(_ctx), do: nil
669 |
670 | defp missing_many_deps(deps) do
671 | deps = for {module, dep} <- deps, not Code.ensure_loaded?(module), do: dep
672 |
673 | if deps != [] do
674 | Enum.join(deps, ", ")
675 | end
676 | end
677 |
  # Contextual help text shown in the cell for the selected type, or nil.

  defp help_box(%{"type" => "bigquery"}) do
    # Mint is required for the metadata-server probe below
    if Code.ensure_loaded?(Mint.HTTP) do
      if running_on_google_metadata?() do
        "You are running inside Google Cloud. Uploading the credentials above is optional."
      else
        # NOTE(review): "(find them here)" reads like it originally carried
        # link markup that may have been lost in extraction — confirm against
        # the rendered cell before editing this string.
        ~s|You must upload your Google BigQuery Credentials (find them here) or authenticate your machine with gcloud CLI authentication.|
      end
    end
  end

  defp help_box(%{"type" => "athena"}) do
    if Code.ensure_loaded?(:aws_credentials) do
      "You must fill in the fields above accordingly or authenticate your machine with AWS CLI authentication."
    end
  end

  defp help_box(_ctx), do: nil
695 |
  # Best-effort detection of running on Google Cloud: attempt a TCP connect
  # to the GCE metadata host; any failure means "not on Google".
  # NOTE(review): the connection is not explicitly closed here — presumably
  # acceptable as a one-off probe, but worth confirming.
  defp running_on_google_metadata? do
    with {:ok, conn} <- Mint.HTTP.connect(:http, "metadata.google.internal", 80),
         {:ok, _} <- Mint.HTTP.set_mode(conn, :passive),
         do: true,
         else: (_ -> false)
  end
702 | end
703 |
--------------------------------------------------------------------------------
/lib/assets/connection_cell/main.js:
--------------------------------------------------------------------------------
1 | import * as Vue from "https://cdn.jsdelivr.net/npm/vue@3.2.26/dist/vue.esm-browser.prod.js";
2 |
3 | export function init(ctx, info) {
4 | ctx.importCSS("main.css");
5 | ctx.importCSS(
6 | "https://fonts.googleapis.com/css2?family=Inter:wght@400;500&display=swap"
7 | );
8 |
9 | const BaseSelect = {
10 | name: "BaseSelect",
11 |
12 | props: {
13 | label: {
14 | type: String,
15 | default: "",
16 | },
17 | selectClass: {
18 | type: String,
19 | default: "input",
20 | },
21 | modelValue: {
22 | type: String,
23 | default: "",
24 | },
25 | options: {
26 | type: Array,
27 | default: [],
28 | required: true,
29 | },
30 | required: {
31 | type: Boolean,
32 | default: false,
33 | },
34 | inline: {
35 | type: Boolean,
36 | default: false,
37 | },
38 | grow: {
39 | type: Boolean,
40 | default: false,
41 | },
42 | },
43 |
44 | methods: {
45 | available(value, options) {
46 | return value
47 | ? options.map((option) => option.value).includes(value)
48 | : true;
49 | },
50 | },
51 |
52 | template: `
53 |
54 |
57 |
76 |
77 | `,
78 | };
79 |
80 | const BaseInput = {
81 | name: "BaseInput",
82 |
83 | props: {
84 | label: {
85 | type: String,
86 | default: "",
87 | },
88 | inputClass: {
89 | type: String,
90 | default: "input",
91 | },
92 | modelValue: {
93 | type: [String, Number],
94 | default: "",
95 | },
96 | inline: {
97 | type: Boolean,
98 | default: false,
99 | },
100 | grow: {
101 | type: Boolean,
102 | default: false,
103 | },
104 | number: {
105 | type: Boolean,
106 | default: false,
107 | },
108 | },
109 |
110 | computed: {
111 | emptyClass() {
112 | if (this.modelValue === "") {
113 | return "empty";
114 | }
115 | },
116 | },
117 |
118 | template: `
119 |
120 |
123 |
129 |
130 | `,
131 | };
132 |
133 | const BaseSwitch = {
134 | name: "BaseSwitch",
135 |
136 | props: {
137 | label: {
138 | type: String,
139 | default: "",
140 | },
141 | modelValue: {
142 | type: Boolean,
143 | default: true,
144 | },
145 | inline: {
146 | type: Boolean,
147 | default: false,
148 | },
149 | grow: {
150 | type: Boolean,
151 | default: false,
152 | },
153 | },
154 |
155 | template: `
156 |
157 |
160 |
173 |
174 | `,
175 | };
176 |
177 | const BaseSecret = {
178 | name: "BaseSecret",
179 |
180 | components: {
181 | BaseInput: BaseInput,
182 | BaseSelect: BaseSelect,
183 | },
184 |
185 | props: {
186 | textInputName: {
187 | type: String,
188 | default: "",
189 | },
190 | secretInputName: {
191 | type: String,
192 | default: "",
193 | },
194 | toggleInputName: {
195 | type: String,
196 | default: "",
197 | },
198 | label: {
199 | type: String,
200 | default: "",
201 | },
202 | toggleInputValue: {
203 | type: [String, Number],
204 | default: "",
205 | },
206 | secretInputValue: {
207 | type: [String, Number],
208 | default: "",
209 | },
210 | textInputValue: {
211 | type: [String, Number],
212 | default: "",
213 | },
214 | modalTitle: {
215 | type: String,
216 | default: "Select secret",
217 | },
218 | required: {
219 | type: Boolean,
220 | default: false,
221 | },
222 | },
223 |
224 | methods: {
225 | selectSecret() {
226 | const preselectName = this.secretInputValue;
227 | ctx.selectSecret(
228 | (secretName) => {
229 | ctx.pushEvent("update_field", {
230 | field: this.secretInputName,
231 | value: secretName,
232 | });
233 | },
234 | preselectName,
235 | { title: this.modalTitle }
236 | );
237 | },
238 | },
239 |
240 | template: `
241 |
288 | `,
289 | };
290 |
291 | const SQLiteForm = {
292 | name: "SQLiteForm",
293 |
294 | components: {
295 | BaseInput: BaseInput,
296 | },
297 |
298 | props: {
299 | fields: {
300 | type: Object,
301 | default: {},
302 | },
303 | },
304 |
305 | template: `
306 |
307 |
315 |
316 | `,
317 | };
318 |
319 | const DuckDBForm = {
320 | name: "DuckDBForm",
321 |
322 | components: {
323 | BaseInput: BaseInput,
324 | },
325 |
326 | props: {
327 | fields: {
328 | type: Object,
329 | default: {},
330 | },
331 | },
332 |
333 | template: `
334 |
335 |
343 |
344 | `,
345 | };
346 |
347 | const DefaultSQLForm = {
348 | name: "DefaultSQLForm",
349 |
350 | components: {
351 | BaseInput: BaseInput,
352 | BaseSwitch: BaseSwitch,
353 | BaseSecret: BaseSecret,
354 | },
355 |
356 | props: {
357 | fields: {
358 | type: Object,
359 | default: {},
360 | },
361 | },
362 |
363 | template: `
364 |
365 |
374 |
383 |
388 |
393 |
394 |
395 |
404 |
405 |
406 |
414 |
422 |
432 |
433 | `,
434 | };
435 |
// Form for the SQL Server connection type.
const SQLServerForm = {
  name: "SQLServerForm",

  components: {
    BaseInput: BaseInput,
    BaseSwitch: BaseSwitch,
    BaseSecret: BaseSecret,
  },

  props: {
    // Current field values pushed from the server.
    fields: {
      type: Object,
      // Fixed (here and below): Vue 3 requires a factory function for
      // Object prop defaults; a bare `{}` logs a runtime warning and
      // would share a single object across all component instances.
      default: () => ({}),
    },
    // Available execution-mode options for the connection.
    execution_modes: {
      type: Object,
      default: () => ({}),
    },
    // DATEFIRST options (first day of the week) for the connection.
    datefirst: {
      type: Object,
      default: () => ({}),
    },
  },

  // NOTE(review): the template's HTML markup is elided in this listing;
  // only the enclosing literal is visible here.
  template: `














`,
};
540 |
// Form for the AWS Athena connection type.
const AthenaForm = {
  name: "AthenaForm",

  components: {
    BaseInput: BaseInput,
    BaseSecret: BaseSecret,
  },

  props: {
    // Current field values pushed from the server.
    fields: {
      type: Object,
      // Fixed: Vue 3 requires a factory function for Object prop defaults;
      // a bare `{}` logs a runtime warning and would share a single object
      // across all component instances.
      default: () => ({}),
    },
    // Informational help text rendered alongside the form.
    helpBox: {
      type: String,
      default: "",
    },
    // Whether AWS credentials were detected in the environment.
    hasAwsCredentials: {
      type: Boolean,
      default: false,
    },
  },

  methods: {
    // True when both related fields are blank — presumably used by the
    // (elided) template to toggle which inputs are required; confirm there.
    areFieldsEmpty(currentField, otherField) {
      // Simplified: the original `if (...) return true; return false;`
      // returned the same boolean the condition already computes.
      return currentField === "" && otherField === "";
    },
  },

  // NOTE(review): the template's HTML markup is elided in this listing;
  // only the enclosing literal is visible here.
  template: `













`,
};
647 |
// Form for the Google BigQuery connection type. Accepts a service-account
// credentials JSON file via a file picker or drag-and-drop.
const BigQueryForm = {
  name: "BigQueryForm",

  components: {
    BaseInput: BaseInput,
  },

  props: {
    // Current field values pushed from the server.
    fields: {
      type: Object,
      // Fixed: Vue 3 requires a factory function for Object prop defaults;
      // a bare `{}` logs a runtime warning and would share a single object
      // across all component instances.
      default: () => ({}),
    },
    // Informational help text rendered alongside the form.
    helpBox: {
      type: String,
      default: "",
    },
  },

  methods: {
    // Hidden file input changed: forward the chosen file list.
    credentialsChange(_) {
      this.updateCredentials(this.$refs.credentials.files);
    },

    // Clicking the visible drop area opens the hidden file input.
    credentialsClick(_) {
      this.$refs.credentials.click();
    },

    // Cancel the browser's default drag-over handling so drop is allowed.
    dragOver(event) {
      event.preventDefault();
    },

    // Intentional no-op; presumably bound in the (elided) template.
    dragLeave(_) {},

    drop(event) {
      event.preventDefault();
      this.updateCredentials(event.dataTransfer.files);
    },

    // Reads the first file, compacts its JSON and pushes it to the server
    // as the `credentials_json` field. Files whose MIME type is not
    // "application/json" are silently ignored.
    // NOTE(review): some platforms report an empty `type` for dropped
    // .json files — such files would be ignored here; confirm intended.
    updateCredentials(fileList) {
      const file = fileList[0];

      if (file && file.type === "application/json") {
        const reader = new FileReader();

        reader.onload = (res) => {
          // Reformat the JSON into a compact form
          const value = JSON.stringify(JSON.parse(res.target.result));
          ctx.pushEvent("update_field", { field: "credentials_json", value });
        };

        reader.readAsText(file);
      }
    },
  },

  // NOTE(review): the template's HTML markup is elided in this listing;
  // only the enclosing literal is visible here.
  template: `










`,
};
735 |
// Form for the Snowflake connection type, with two auth modes
// (username/password vs. key-pair JWT).
const SnowflakeForm = {
  name: "SnowflakeForm",

  components: {
    BaseInput: BaseInput,
    BaseSwitch: BaseSwitch,
    BaseSecret: BaseSecret,
    BaseSelect: BaseSelect,
  },

  props: {
    // Current field values pushed from the server.
    fields: {
      type: Object,
      // Fixed: Vue 3 requires a factory function for Object prop defaults;
      // a bare `{}` logs a runtime warning and would share a single object
      // across all component instances.
      default: () => ({}),
    },
  },

  computed: {
    // Whether the username/password auth mode is selected.
    isAuthSnowflake() {
      // Fixed: the original `=== "auth_snowflake" || ""` returned "" (not
      // false) when the comparison failed — falsy, but type-inconsistent
      // with its sibling predicate; the `|| ""` was dead weight.
      return this.fields.auth_type === "auth_snowflake";
    },

    // Whether the key-pair (JWT) auth mode is selected.
    isAuthJwt() {
      return this.fields.auth_type === "auth_jwt";
    },
  },

  // NOTE(review): the template's HTML markup is elided in this listing;
  // only the enclosing literal is visible here.
  template: `

















`,
};
868 |
// Form for the ClickHouse connection type.
const ClickhouseForm = {
  name: "ClickhouseForm",

  components: {
    BaseInput: BaseInput,
    BaseSwitch: BaseSwitch,
    BaseSecret: BaseSecret,
    BaseSelect: BaseSelect,
  },

  props: {
    // Current field values pushed from the server.
    fields: {
      type: Object,
      // Fixed: Vue 3 requires a factory function for Object prop defaults;
      // a bare `{}` logs a runtime warning and would share a single object
      // across all component instances.
      default: () => ({}),
    },
  },

  // NOTE(review): the template's HTML markup is elided in this listing;
  // only the enclosing literal is visible here.
  template: `









`,
};
942 |
// Root component: lets the user pick a database type and renders the
// matching form component; mounts into the smart cell's root element.
// NOTE(review): `ctx` (smart-cell JS context) and `info` (initial payload)
// are presumably defined earlier in this file by the host — confirm.
const app = Vue.createApp({
  components: {
    BaseInput: BaseInput,
    BaseSelect: BaseSelect,
    SQLiteForm: SQLiteForm,
    DuckDBForm: DuckDBForm,
    SnowflakeForm: SnowflakeForm,
    DefaultSQLForm: DefaultSQLForm,
    SQLServerForm: SQLServerForm,
    BigQueryForm: BigQueryForm,
    AthenaForm: AthenaForm,
    ClickhouseForm: ClickhouseForm,
  },

  // NOTE(review): most of the template markup is elided in this listing;
  // only the missing-dependency banner fragment is visible.
  template: `




To successfully connect, you need to add the following dependency:

{{ missingDep }}



`,

  // Reactive state seeded from the server-provided `info` payload.
  data() {
    return {
      fields: info.fields,
      missingDep: info.missing_dep,
      helpBox: info.help_box,
      hasAwsCredentials: info.has_aws_credentials,
      // Options for the database-type selector.
      availableDatabases: [
        { label: "PostgreSQL", value: "postgres" },
        { label: "MySQL", value: "mysql" },
        { label: "SQLite", value: "sqlite" },
        { label: "DuckDB", value: "duckdb" },
        { label: "Google BigQuery", value: "bigquery" },
        { label: "AWS Athena", value: "athena" },
        { label: "Snowflake", value: "snowflake" },
        { label: "Clickhouse", value: "clickhouse" },
        { label: "SQL Server", value: "sqlserver" },
      ],
    };
  },

  // One predicate per database type; the (elided) template presumably uses
  // these to switch between the form components registered above.
  computed: {
    isSQLite() {
      return this.fields.type === "sqlite";
    },

    isDuckDB() {
      return this.fields.type === "duckdb";
    },

    isSnowflake() {
      return this.fields.type === "snowflake";
    },

    isClickhouse() {
      return this.fields.type === "clickhouse";
    },

    isBigQuery() {
      return this.fields.type === "bigquery";
    },

    isAthena() {
      return this.fields.type === "athena";
    },

    isSQLServer() {
      return this.fields.type === "sqlserver";
    },

    // PostgreSQL and MySQL share the DefaultSQLForm.
    isDefaultDatabase() {
      return ["postgres", "mysql"].includes(this.fields.type);
    },
  },

  methods: {
    // Generic change handler: pushes the edited field's current model value
    // to the server so the cell attributes/source can be regenerated.
    handleFieldChange(event) {
      const field = event.target.name;
      if (field) {
        const value = this.fields[field];
        ctx.pushEvent("update_field", { field, value });
      }
    },
  },
}).mount(ctx.root);
1063 |
// Server pushed a fresh set of field values — mirror them into the UI state.
ctx.handleEvent("update", (payload) => setValues(payload.fields));
1067 |
// Server reported which dependency must be added for the chosen database;
// store it so the banner in the root template can display it.
ctx.handleEvent("missing_dep", (payload) => {
  app.missingDep = payload.dep;
});
1071 |
// Before a sync, flush any pending edit in the focused input by firing its
// "change" handlers synchronously.
ctx.handleSync(() => {
  const focused = document.activeElement;
  if (focused) {
    focused.dispatchEvent(new Event("change", { bubbles: true }));
  }
});
1079 |
// Copies every received field value into the reactive `app.fields` object,
// preserving keys that the payload does not mention.
function setValues(fields) {
  Object.keys(fields).forEach((name) => {
    app.fields[name] = fields[name];
  });
}
1085 | }
1086 |
--------------------------------------------------------------------------------