├── access_token_preview.png
├── LICENSE
├── exampleV100.ts
├── action.yml
├── README.md
└── exampleV010.ts
/access_token_preview.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/lyqht/generate-supabase-db-types-github-action/HEAD/access_token_preview.png
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Estee Tey Siew Wen
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/exampleV100.ts:
--------------------------------------------------------------------------------
1 | export type Json =
2 | | string
3 | | number
4 | | boolean
5 | | null
6 | | { [key: string]: Json }
7 | | Json[];
8 |
9 | export interface Database {
10 | public: {
11 | Tables: {
12 | UpdatedRecords: {
13 | Row: {
14 | id: number;
15 | created_at: string | null;
16 | repo: string;
17 | userId: string | null;
18 | initialRepoDetails: Json;
19 | updatedFields: Json;
20 | };
21 | Insert: {
22 | id?: number;
23 | created_at?: string | null;
24 | repo: string;
25 | userId?: string | null;
26 | initialRepoDetails: Json;
27 | updatedFields: Json;
28 | };
29 | Update: {
30 | id?: number;
31 | created_at?: string | null;
32 | repo?: string;
33 | userId?: string | null;
34 | initialRepoDetails?: Json;
35 | updatedFields?: Json;
36 | };
37 | };
38 | DeletedRecords: {
39 | Row: {
40 | id: number;
41 | created_at: string | null;
42 | repo: string;
43 | sourceRepo: string | null;
44 | isFork: boolean;
45 | userId: string;
46 | repoDetails: Json;
47 | };
48 | Insert: {
49 | id?: number;
50 | created_at?: string | null;
51 | repo: string;
52 | sourceRepo?: string | null;
53 | isFork?: boolean;
54 | userId: string;
55 | repoDetails: Json;
56 | };
57 | Update: {
58 | id?: number;
59 | created_at?: string | null;
60 | repo?: string;
61 | sourceRepo?: string | null;
62 | isFork?: boolean;
63 | userId?: string;
64 | repoDetails?: Json;
65 | };
66 | };
67 | };
68 | Views: {
69 | [_ in never]: never;
70 | };
71 | Functions: {
72 | [_ in never]: never;
73 | };
74 | Enums: {
75 | [_ in never]: never;
76 | };
77 | };
78 | }
79 |
--------------------------------------------------------------------------------
/action.yml:
--------------------------------------------------------------------------------
1 | name: 'Generate Supabase Database types'
2 | author: 'Estee Tey'
3 | description: 'Automatically updates your type definitions to match your table schemas'
4 |
5 | inputs:
6 | SUPABASE_REF_ID:
7 | description: "Reference id of your Supabase project"
8 | required: true
9 | SUPABASE_ACCESS_TOKEN:
10 | description: "Access token https://app.supabase.com/account/tokens"
11 | required: true
12 | DB_PASSWORD:
13 | description: "Database password"
14 | required: true
15 | OUTPUT_PATH:
16 | description: "Path where you want the definitions file to be saved. Any changes will be committed and override existing definition files. Default value is 'src/types/supabase.ts'"
17 | required: false
18 | default: "src/types/supabase.ts"
19 |
20 | runs:
21 | using: composite
22 | steps:
23 | - name: Checkout project
24 | uses: actions/checkout@v3
25 | with:
26 | persist-credentials: true
27 | fetch-depth: 0
28 | - name: Set up supabase cli
29 | uses: supabase/setup-cli@v1
30 | with:
31 | version: latest
32 | - name: "Generate database typescript types"
33 | shell: bash
34 | env:
35 | SUPABASE_REF_ID: ${{ inputs.SUPABASE_REF_ID }}
36 | SUPABASE_ACCESS_TOKEN: ${{ inputs.SUPABASE_ACCESS_TOKEN }}
37 | DB_PASSWORD: ${{ inputs.DB_PASSWORD }}
38 | OUTPUT_PATH: ${{ inputs.OUTPUT_PATH }}
39 | run: |
40 | if [ ! -f supabase/config.toml ]; then
41 | supabase init && supabase start
42 | fi
43 | supabase link --project-ref ${SUPABASE_REF_ID} --password ${DB_PASSWORD}
44 | supabase gen types typescript --db-url "postgresql://postgres:${DB_PASSWORD}@db.${SUPABASE_REF_ID}.supabase.co:6543/postgres" > ${OUTPUT_PATH}
45 | - name: Check for file changes & commit files
46 | uses: stefanzweifel/git-auto-commit-action@v4
47 | with:
48 | file_pattern: ${{ inputs.OUTPUT_PATH }}
49 | commit_message: Update database types
50 | commit_user_name: Supabot
51 | commit_user_email: 41898282+github-actions[bot]@users.noreply.github.com
52 | commit_author: Supabot <41898282+github-actions[bot]@users.noreply.github.com>
53 | branch: supabot/update-database-types
54 | create_branch: true
55 | - name: Create Pull Request
56 | uses: repo-sync/pull-request@v2
57 | with:
58 | github_token: ${{ github.token }}
59 | source_branch: supabot/update-database-types
60 | destination_branch: "main"
61 | pr_allow_empty: false
62 | pr_title: '[Supabot] Update database type definitions'
63 | pr_body: |
64 | - Auto-generated by [Supabot](https://github.com/lyqht/generate-supabase-db-types-github-action)
65 |
66 | branding:
67 | icon: 'activity'
68 | color: 'blue'
69 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # GitHub action to generate Supabase Database types
2 |
3 | This GitHub action workflow aims to help you to create a Supabase database definition types file in your project.
4 |
5 | ## What it does
6 |
7 | This workflow is a composite action:
8 |
9 | - Generating the database types is done with a built-in command from [supabase/setup-cli](https://github.com/supabase/setup-cli).
10 | - Committing and pushing the generated types to your repo is performed by the [git-auto-commit action](https://github.com/stefanzweifel/git-auto-commit-action).
11 | - Creating the pull request is performed by the [pull-request action](https://github.com/repo-sync/pull-request).
12 |
13 | ## How to use
14 |
15 | If you don't have an existing GitHub Action workflow for your repository
16 |
17 | 1. Create a folder `.github/workflows` if you don't have it already
18 | 2. Inside that folder, create a YAML file say `update-types.yml`
19 | 3. In the `update-types.yml` file, you can copy the example below and modify it to your usage.
20 | 4. You can choose to declare the `schedule` with a cron expression to run the job at a specified frequency, e.g. once a day.
21 |
22 |
23 | How to get Supabase Access Token
24 |
25 | Go to https://app.supabase.com/account/tokens and get a token there.
26 |
27 | 
28 |
29 |
30 |
31 |
32 | Otherwise, you can get started by referring to the example given and the input options available.
33 |
34 | ### Simple Example
35 |
36 | ```yml
37 | name: Update database types
38 | on:
39 | schedule:
40 | - cron: '0 * * * *'
41 | workflow_dispatch:
42 |
43 | jobs:
44 | build:
45 | runs-on: ubuntu-latest
46 | steps:
47 | - uses: lyqht/generate-supabase-db-types-github-action@main
48 | with:
49 | SUPABASE_REF_ID: ${{ secrets.SUPABASE_REF_ID }}
50 | SUPABASE_ACCESS_TOKEN: ${{ secrets.SUPABASE_ACCESS_TOKEN }}
51 | DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
52 | OUTPUT_PATH: db.types.ts
53 | ```
54 | ### Example based on migration scripts
55 |
56 | If your DB schema is kept up to date based on the migration SQL scripts within your project itself, you can configure the workflow to run based on any new SQL files pushed to your branch.
57 |
58 | ```yml
59 | name: Update database types
60 | on:
61 | push:
62 | branches: [ main ]
63 | paths:
64 | - '*.sql'
65 |
66 | jobs:
67 | build:
68 | runs-on: ubuntu-latest
69 | if: ${{ !startsWith(github.ref_name, 'supabot/') }}
70 | steps:
71 | - uses: lyqht/generate-supabase-db-types-github-action@main
72 | with:
73 | SUPABASE_REF_ID: ${{ secrets.SUPABASE_REF_ID }}
74 | SUPABASE_ACCESS_TOKEN: ${{ secrets.SUPABASE_ACCESS_TOKEN }}
75 | DB_PASSWORD: ${{ secrets.DB_PASSWORD }}
76 | OUTPUT_PATH: db.types.ts
77 | ```
78 |
79 | ## Migrating from `v0.1.0` to `v1.0.0`
80 |
81 | Please note that this is a breaking change. The types generated by v0.1.0 can differ from those of v1.0.0, so if you choose to migrate you may need to modify your code quite a bit for fields of data types such as `JSON` and `Date`. However, the `v1.0.0` types follow what Supabase recommends.
82 |
83 | ### Why and how the types are different?
84 |
85 | > 🔖 Here's an [article](https://blog.esteetey.dev/how-to-create-and-test-a-github-action-that-generates-types-from-supabase-database#heading-how-to-create-the-github-workflow) that explains more in-depth the rationale & implementation of v0.1.0 of this GitHub action.
86 |
87 | v0.1.0 relies on the `openapi-typescript` library to generate types based on the OpenAPI specs that the Supabase endpoint offers.
88 |
89 | v1.0.0 relies on the `supabase-cli` to generate the types, and these types are much more compatible with the `supabase-js` v2 library.
90 |
91 | - Types generated by v0.1.0: [Example](./exampleV010.ts)
92 | - Types generated by v1.0.0: [Example](./exampleV100.ts)
93 |
94 | ### Changes to make
95 |
96 | You need to make the following changes in variables:
97 | - No longer used: `SUPABASE_URL: ${{secrets.SUPABASE_URL }}` and `SUPABASE_ANON_KEY: ${{ secrets.SUPABASE_ANON_KEY }}`
98 | - You have to add `SUPABASE_REF_ID: ${{ secrets.SUPABASE_REF_ID }}`, and `SUPABASE_ACCESS_TOKEN: ${{ secrets.SUPABASE_ACCESS_TOKEN }}`
99 |
100 |
101 | ## Caveats
102 |
103 | > Note that if your Supabase project is paused or deleted, this action's workflow runs will fail until the project is restored.
104 |
--------------------------------------------------------------------------------
/exampleV010.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * This file was auto-generated by openapi-typescript.
3 | * Do not make direct changes to the file.
4 | */
5 |
6 | export interface paths {
7 | "/": {
8 | get: {
9 | responses: {
10 | /** OK */
11 | 200: unknown;
12 | };
13 | };
14 | };
15 | "/DeletedRecords": {
16 | get: {
17 | parameters: {
18 | query: {
19 | id?: parameters["rowFilter.DeletedRecords.id"];
20 | created_at?: parameters["rowFilter.DeletedRecords.created_at"];
21 | repo?: parameters["rowFilter.DeletedRecords.repo"];
22 | sourceRepo?: parameters["rowFilter.DeletedRecords.sourceRepo"];
23 | isFork?: parameters["rowFilter.DeletedRecords.isFork"];
24 | userId?: parameters["rowFilter.DeletedRecords.userId"];
25 | repoDetails?: parameters["rowFilter.DeletedRecords.repoDetails"];
26 | /** Filtering Columns */
27 | select?: parameters["select"];
28 | /** Ordering */
29 | order?: parameters["order"];
30 | /** Limiting and Pagination */
31 | offset?: parameters["offset"];
32 | /** Limiting and Pagination */
33 | limit?: parameters["limit"];
34 | };
35 | header: {
36 | /** Limiting and Pagination */
37 | Range?: parameters["range"];
38 | /** Limiting and Pagination */
39 | "Range-Unit"?: parameters["rangeUnit"];
40 | /** Preference */
41 | Prefer?: parameters["preferCount"];
42 | };
43 | };
44 | responses: {
45 | /** OK */
46 | 200: {
47 | schema: definitions["DeletedRecords"][];
48 | };
49 | /** Partial Content */
50 | 206: unknown;
51 | };
52 | };
53 | post: {
54 | parameters: {
55 | body: {
56 | /** DeletedRecords */
57 | DeletedRecords?: definitions["DeletedRecords"];
58 | };
59 | query: {
60 | /** Filtering Columns */
61 | select?: parameters["select"];
62 | };
63 | header: {
64 | /** Preference */
65 | Prefer?: parameters["preferReturn"];
66 | };
67 | };
68 | responses: {
69 | /** Created */
70 | 201: unknown;
71 | };
72 | };
73 | delete: {
74 | parameters: {
75 | query: {
76 | id?: parameters["rowFilter.DeletedRecords.id"];
77 | created_at?: parameters["rowFilter.DeletedRecords.created_at"];
78 | repo?: parameters["rowFilter.DeletedRecords.repo"];
79 | sourceRepo?: parameters["rowFilter.DeletedRecords.sourceRepo"];
80 | isFork?: parameters["rowFilter.DeletedRecords.isFork"];
81 | userId?: parameters["rowFilter.DeletedRecords.userId"];
82 | repoDetails?: parameters["rowFilter.DeletedRecords.repoDetails"];
83 | };
84 | header: {
85 | /** Preference */
86 | Prefer?: parameters["preferReturn"];
87 | };
88 | };
89 | responses: {
90 | /** No Content */
91 | 204: never;
92 | };
93 | };
94 | patch: {
95 | parameters: {
96 | query: {
97 | id?: parameters["rowFilter.DeletedRecords.id"];
98 | created_at?: parameters["rowFilter.DeletedRecords.created_at"];
99 | repo?: parameters["rowFilter.DeletedRecords.repo"];
100 | sourceRepo?: parameters["rowFilter.DeletedRecords.sourceRepo"];
101 | isFork?: parameters["rowFilter.DeletedRecords.isFork"];
102 | userId?: parameters["rowFilter.DeletedRecords.userId"];
103 | repoDetails?: parameters["rowFilter.DeletedRecords.repoDetails"];
104 | };
105 | body: {
106 | /** DeletedRecords */
107 | DeletedRecords?: definitions["DeletedRecords"];
108 | };
109 | header: {
110 | /** Preference */
111 | Prefer?: parameters["preferReturn"];
112 | };
113 | };
114 | responses: {
115 | /** No Content */
116 | 204: never;
117 | };
118 | };
119 | };
120 | "/UpdatedRecords": {
121 | get: {
122 | parameters: {
123 | query: {
124 | id?: parameters["rowFilter.UpdatedRecords.id"];
125 | created_at?: parameters["rowFilter.UpdatedRecords.created_at"];
126 | repo?: parameters["rowFilter.UpdatedRecords.repo"];
127 | userId?: parameters["rowFilter.UpdatedRecords.userId"];
128 | initialRepoDetails?: parameters["rowFilter.UpdatedRecords.initialRepoDetails"];
129 | updatedFields?: parameters["rowFilter.UpdatedRecords.updatedFields"];
130 | /** Filtering Columns */
131 | select?: parameters["select"];
132 | /** Ordering */
133 | order?: parameters["order"];
134 | /** Limiting and Pagination */
135 | offset?: parameters["offset"];
136 | /** Limiting and Pagination */
137 | limit?: parameters["limit"];
138 | };
139 | header: {
140 | /** Limiting and Pagination */
141 | Range?: parameters["range"];
142 | /** Limiting and Pagination */
143 | "Range-Unit"?: parameters["rangeUnit"];
144 | /** Preference */
145 | Prefer?: parameters["preferCount"];
146 | };
147 | };
148 | responses: {
149 | /** OK */
150 | 200: {
151 | schema: definitions["UpdatedRecords"][];
152 | };
153 | /** Partial Content */
154 | 206: unknown;
155 | };
156 | };
157 | post: {
158 | parameters: {
159 | body: {
160 | /** UpdatedRecords */
161 | UpdatedRecords?: definitions["UpdatedRecords"];
162 | };
163 | query: {
164 | /** Filtering Columns */
165 | select?: parameters["select"];
166 | };
167 | header: {
168 | /** Preference */
169 | Prefer?: parameters["preferReturn"];
170 | };
171 | };
172 | responses: {
173 | /** Created */
174 | 201: unknown;
175 | };
176 | };
177 | delete: {
178 | parameters: {
179 | query: {
180 | id?: parameters["rowFilter.UpdatedRecords.id"];
181 | created_at?: parameters["rowFilter.UpdatedRecords.created_at"];
182 | repo?: parameters["rowFilter.UpdatedRecords.repo"];
183 | userId?: parameters["rowFilter.UpdatedRecords.userId"];
184 | initialRepoDetails?: parameters["rowFilter.UpdatedRecords.initialRepoDetails"];
185 | updatedFields?: parameters["rowFilter.UpdatedRecords.updatedFields"];
186 | };
187 | header: {
188 | /** Preference */
189 | Prefer?: parameters["preferReturn"];
190 | };
191 | };
192 | responses: {
193 | /** No Content */
194 | 204: never;
195 | };
196 | };
197 | patch: {
198 | parameters: {
199 | query: {
200 | id?: parameters["rowFilter.UpdatedRecords.id"];
201 | created_at?: parameters["rowFilter.UpdatedRecords.created_at"];
202 | repo?: parameters["rowFilter.UpdatedRecords.repo"];
203 | userId?: parameters["rowFilter.UpdatedRecords.userId"];
204 | initialRepoDetails?: parameters["rowFilter.UpdatedRecords.initialRepoDetails"];
205 | updatedFields?: parameters["rowFilter.UpdatedRecords.updatedFields"];
206 | };
207 | body: {
208 | /** UpdatedRecords */
209 | UpdatedRecords?: definitions["UpdatedRecords"];
210 | };
211 | header: {
212 | /** Preference */
213 | Prefer?: parameters["preferReturn"];
214 | };
215 | };
216 | responses: {
217 | /** No Content */
218 | 204: never;
219 | };
220 | };
221 | };
222 | }
223 |
224 | export interface definitions {
225 | DeletedRecords: {
226 | /**
227 | * Format: bigint
228 | * @description Note:
229 | * This is a Primary Key.
230 | */
231 | id: number;
232 | /**
233 | * Format: timestamp with time zone
234 | * @default now()
235 | */
236 | created_at: string;
237 | /** Format: text */
238 | repo: string;
239 | /** Format: text */
240 | sourceRepo?: string;
241 | /**
242 | * Format: boolean
243 | * @default false
244 | */
245 | isFork: boolean;
246 | /** Format: uuid */
247 | userId: string;
248 | /** Format: json */
249 | repoDetails: unknown;
250 | };
251 | UpdatedRecords: {
252 | /**
253 | * Format: bigint
254 | * @description Note:
255 | * This is a Primary Key.
256 | */
257 | id: number;
258 | /**
259 | * Format: timestamp without time zone
260 | * @default now()
261 | */
262 | created_at: string;
263 | /** Format: text */
264 | repo: string;
265 | /** Format: uuid */
266 | userId?: string;
267 | /** Format: json */
268 | initialRepoDetails: {
269 | prevTopics: string[];
270 | };
271 | /** Format: json */
272 | updatedFields: {
273 | topics: string[];
274 | };
275 | };
276 | }
277 |
278 | export interface parameters {
279 | /**
280 | * @description Preference
281 | * @enum {string}
282 | */
283 | preferParams: "params=single-object";
284 | /**
285 | * @description Preference
286 | * @enum {string}
287 | */
288 | preferReturn: "return=representation" | "return=minimal" | "return=none";
289 | /**
290 | * @description Preference
291 | * @enum {string}
292 | */
293 | preferCount: "count=none";
294 | /** @description Filtering Columns */
295 | select: string;
296 | /** @description On Conflict */
297 | on_conflict: string;
298 | /** @description Ordering */
299 | order: string;
300 | /** @description Limiting and Pagination */
301 | range: string;
302 | /**
303 | * @description Limiting and Pagination
304 | * @default items
305 | */
306 | rangeUnit: string;
307 | /** @description Limiting and Pagination */
308 | offset: string;
309 | /** @description Limiting and Pagination */
310 | limit: string;
311 | /** @description DeletedRecords */
312 | "body.DeletedRecords": definitions["DeletedRecords"];
313 | /** Format: bigint */
314 | "rowFilter.DeletedRecords.id": string;
315 | /** Format: timestamp with time zone */
316 | "rowFilter.DeletedRecords.created_at": string;
317 | /** Format: text */
318 | "rowFilter.DeletedRecords.repo": string;
319 | /** Format: text */
320 | "rowFilter.DeletedRecords.sourceRepo": string;
321 | /** Format: boolean */
322 | "rowFilter.DeletedRecords.isFork": string;
323 | /** Format: uuid */
324 | "rowFilter.DeletedRecords.userId": string;
325 | /** Format: json */
326 | "rowFilter.DeletedRecords.repoDetails": string;
327 | /** @description UpdatedRecords */
328 | "body.UpdatedRecords": definitions["UpdatedRecords"];
329 | /** Format: bigint */
330 | "rowFilter.UpdatedRecords.id": string;
331 | /** Format: timestamp without time zone */
332 | "rowFilter.UpdatedRecords.created_at": string;
333 | /** Format: text */
334 | "rowFilter.UpdatedRecords.repo": string;
335 | /** Format: uuid */
336 | "rowFilter.UpdatedRecords.userId": string;
337 | /** Format: json */
338 | "rowFilter.UpdatedRecords.initialRepoDetails": string;
339 | /** Format: json */
340 | "rowFilter.UpdatedRecords.updatedFields": string;
341 | }
342 |
--------------------------------------------------------------------------------