├── .gitignore ├── LICENSE ├── Makefile ├── README.md ├── scripts ├── functions │ ├── create_collection_table.sql │ ├── create_lookup_column.sql │ ├── drop_lookup_columns.sql │ ├── ends_with.sql │ ├── exists.sql │ ├── find.sql │ ├── find_one.sql │ ├── fuzzy.sql │ ├── get.sql │ ├── modify.sql │ ├── save.sql │ ├── search.sql │ ├── starts_with.sql │ └── update_lookup.sql └── init.sql ├── test.sh └── tests ├── finding.sql ├── modify.sql ├── save.sql └── starts_with.sql /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .DS_Store 3 | notes.md 4 | build.sql 5 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2023, Rob Conery 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
-------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | DB=CHANGE_ME 2 | BUILD=${CURDIR}/build.sql 3 | FUNCTIONS=$(shell ls scripts/functions/*.sql) 4 | INIT=${CURDIR}/scripts/init.sql 5 | TESTS=$(shell ls test/*.sql) 6 | TEST=${CURDIR}/test.sql 7 | 8 | all: init functions 9 | 10 | install: all 11 | @psql $(DB) < $(BUILD) --quiet 12 | 13 | init: 14 | @cat $(INIT) >> $(BUILD) 15 | 16 | functions: 17 | @cat $(FUNCTIONS) >> $(BUILD) 18 | 19 | test: clean install 20 | . ./test.sh 21 | 22 | clean: 23 | @rm -rf $(BUILD) 24 | 25 | .PHONY: test -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # A Postgres Document API 2 | 3 | Postgres has an amazing JSON document storage capability, the only problem is that working with it is a bit clunky. Thus, I'm creating a set of extensions that, hopefully, will offer a basic API. 4 | 5 | 6 | ## Quick Example 7 | 8 | Let's say you have a JSON document called `customer`: 9 | 10 | ```js 11 | { 12 | name: "Jill", 13 | email: "jill@test.com", 14 | company: "Red:4" 15 | } 16 | ``` 17 | 18 | You want to save this to Postgres using document storage as you know things will change. With this API you can do that by calling a simple function: 19 | 20 | ```sql 21 | select * from dox.save(collection => 'customers', doc => '[wad of json]'); 22 | ``` 23 | 24 | This will do a few things: 25 | 26 | - A table named `customers` will be created with a single JSONB field, dates, an ID and a `tsvector` search field. 27 | - The `id` that's created will be appended to the new document, and returned from this call 28 | - A search index is automatically created using conventional key names, which you can configure. In this case it will recognize `email` and `name` as something that needs indexing. 
29 | - The entire document will be indexed using `GIN` indexing, which again, is configurable. 30 | - The search index will be indexed using `GIN` as well, for speed. 31 | 32 | Now, you can query your document thus: 33 | 34 | ```sql 35 | select * from dox.search(collection => 'customers', term => 'jill'); -- full text search on a single term 36 | select * from dox.find_one(collection => 'customers', term => '{"name": "Jill"}'); -- simple query 37 | select * from dox.find(collection => 'customers', term => '{"company": "Red:4"}'); -- find all Red:4 people 38 | ``` 39 | 40 | These queries will be performant as they will be able to flex indexing, but there's a lot more you can do. 41 | 42 | ## Fuzzy Queries, Starts and Ends With 43 | 44 | One of the downsides of using JSONB with Postgres is *finding things*. If you do any kind of loose querying on text, you end up doing a query like this: 45 | 46 | ```sql 47 | select json from json_table 48 | where json ->> 'email' ilike '.com%'; 49 | ``` 50 | 51 | This query blows because it can't use an index. What's worse is that Postgres has to materialize the JSON to check the condition. The good news? *It's still faster than MongoDB* :). 52 | 53 | There are ways to get around this, such as creating a new column simply for lookups on common keys. That way you could: 54 | 55 | ```sql 56 | select json from json_table 57 | where lookup_email ilike '.com%'; 58 | ``` 59 | 60 | This is OK as there's an index on `lookup_email` that you added. Nice and fast! Doing this for every table is a pain, and how do you manage changes to the underlying data? A trigger! OH HEAVENS! 61 | 62 | If you use `dox.starts_with` or `dox.ends_with` all of that is done for you. I should note that **this is not something you run in production**. This is something that you run locally as you're developing, and then have your change management script move the updates live. 
The problem is that if you use this on a very large table the update will take a while and the index creation will lock everything as you can't run `concurrently` from a function. 63 | 64 | Anyway, it's there if you want it. 65 | 66 | You can also do things the sequential table scan way (aka "bad") if you have a small table. For that you can use `dox.fuzzy`: 67 | 68 | ```sql 69 | select * from dox.fuzzy(collection => 'customers', key => 'company', term => 'Red'); 70 | select * from dox.starts_with(collection => 'customers', key => 'company', term => 'Red'); 71 | select * from dox.ends_with(collection => 'customers', key => 'company', term => '4'); 72 | ``` 73 | 74 | ## Modification 75 | 76 | Partial updates are also a pain with Postgres and JSONB although, yes, there is a way to do it better in 9.6+. All of that is wrapped up in `dox.modify`: 77 | 78 | ```sql 79 | select * into res from dox.modify( 80 | id => 1, 81 | collection => 'customers', 82 | set => '{"name": "harold"}' 83 | ); 84 | ``` 85 | 86 | You can also just save things directly using `dox.save`. 87 | 88 | ## Installation 89 | 90 | The simplest thing to do is to run `make` and you'll see a `build.sql` file in your home directory. You can run that against your database and off you go. It's just a set of functions placed within a schema to keep things clean. 91 | 92 | You can also run `make install` if you change the name of the `DB` at the top of the file. 93 | 94 | ## Running The Tests 95 | 96 | I wrote some tests using plain old SQL which you can run if you want. Just clone the repo and run `make test`, which will create a database for the tests on your local Postgres (assuming you have ownership of it). 
97 | 98 | 99 | -------------------------------------------------------------------------------- /scripts/functions/create_collection_table.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | drop function if exists create_collection(varchar,varchar,varchar,bool); 3 | create function create_collection( 4 | collection varchar, 5 | indexed varchar default null, 6 | schema varchar default 'dox', 7 | force bool default false, 8 | out res jsonb 9 | ) 10 | as $$ 11 | declare 12 | table_sql varchar; 13 | begin 14 | res := '{"created": false, "message": null}'; 15 | -- force means rebuild: drop any existing table FIRST so the existence check below recreates it 16 | if force then 17 | execute format('drop table if exists %s.%s cascade',schema,collection); 18 | end if; 19 | 20 | -- see if table exists 21 | if not exists (select 1 from information_schema.tables where table_schema = schema AND table_name = collection) then 22 | execute format('create table %s.%s( 23 | id serial primary key not null, 24 | body jsonb not null, 25 | search tsvector, 26 | created_at timestamptz not null default now(), 27 | updated_at timestamptz not null default now() 28 | );',schema,collection); 29 | 30 | execute format('create index idx_search_%s on %s.%s using GIN(search)',collection,schema,collection); 31 | -- index? 
32 | if(indexed is null) then 33 | execute format('create index idx_json_%s on %s.%s using GIN(body jsonb_path_ops)',collection,schema,collection); 34 | else 35 | execute format('create index idx_json_%s on %s.%s using GIN((body -> %L))',collection,schema,collection, indexed); 36 | end if; 37 | res := '{"created": true, "message": "Table created"}'; 38 | else 39 | res := '{"created": false, "message": "Table exists"}'; 40 | raise debug 'This table already exists'; 41 | 42 | end if; 43 | 44 | end; 45 | $$ 46 | language plpgsql; -------------------------------------------------------------------------------- /scripts/functions/create_lookup_column.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | 3 | drop function if exists create_lookup_column(varchar,varchar, varchar); 4 | create function create_lookup_column(collection varchar, schema varchar, key varchar, out res bool) 5 | as $$ 6 | declare 7 | column_exists int; 8 | lookup_key varchar := 'lookup_' || key; 9 | begin 10 | execute format('SELECT count(1) 11 | FROM information_schema.columns 12 | WHERE table_name=%L and table_schema=%L and column_name=%L', 13 | collection,schema,lookup_key) into column_exists; 14 | 15 | if column_exists < 1 then 16 | -- add the column 17 | execute format('alter table %s.%s add column %s text', schema, collection, lookup_key); 18 | 19 | -- fill it 20 | execute format('update %s.%s set %s = body ->> %L', schema, collection, lookup_key, key); 21 | 22 | -- index it 23 | execute format('create index on %s.%s(%s)', schema, collection, lookup_key); 24 | 25 | -- TODO: drop a trigger on this! 
26 | execute format('CREATE TRIGGER trigger_update_%s_%s 27 | after update on %s.%s 28 | for each row 29 | when (old.body <> new.body) 30 | execute procedure dox.update_lookup();' 31 | ,collection, lookup_key, schema, collection); 32 | end if; 33 | res := true; 34 | end; 35 | $$ language plpgsql; 36 | -------------------------------------------------------------------------------- /scripts/functions/drop_lookup_columns.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | drop function if exists drop_lookup_columns(varchar, varchar); 3 | create function drop_lookup_columns( 4 | collection varchar, 5 | schema varchar default 'public', 6 | out res bool 7 | ) 8 | as $$ 9 | declare lookup text; 10 | begin 11 | for lookup in execute format('SELECT column_name 12 | FROM information_schema.columns 13 | WHERE table_name=%L AND table_schema=%L AND column_name LIKE %L', 14 | collection,schema,'lookup%') loop 15 | execute format('alter table %s.%s drop column %I', schema, collection, lookup); 16 | end loop; 17 | 18 | res := true; 19 | end; 20 | $$ language plpgsql; 21 | -------------------------------------------------------------------------------- /scripts/functions/ends_with.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | drop function if exists ends_with(varchar, varchar, varchar, varchar); 3 | create function ends_with( 4 | collection varchar, 5 | key varchar, 6 | term varchar, 7 | schema varchar default 'public' 8 | ) 9 | returns setof jsonb 10 | as $$ 11 | declare 12 | search_param text := '%' || term; 13 | query_text text := format('select body from %s.%s where %s ilike %L',schema,collection,'lookup_' || key,search_param); 14 | begin 15 | 16 | -- ensure we have the lookup column created if it doesn't already exist 17 | perform dox.create_lookup_column(collection => collection, schema => schema, key => key); 18 | 19 | return query 20 | execute 
query_text; 21 | end; 22 | $$ language plpgsql; 23 | -------------------------------------------------------------------------------- /scripts/functions/exists.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | drop function if exists "exists"(varchar, text,varchar); 3 | create function "exists"( 4 | collection varchar, 5 | term text, 6 | schema varchar default 'public' 7 | ) 8 | returns setof jsonb 9 | as $$ 10 | declare 11 | existence bool := false; 12 | begin 13 | return query 14 | execute format(' 15 | select body from %s.%s 16 | where body ? %L; 17 | ',schema,collection, term); 18 | 19 | end; 20 | $$ language plpgsql; -------------------------------------------------------------------------------- /scripts/functions/find.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | drop function if exists find(varchar, jsonb,varchar); 3 | create function find( 4 | collection varchar, 5 | term jsonb, 6 | schema varchar default 'public' 7 | ) 8 | returns setof jsonb 9 | as $$ 10 | begin 11 | return query 12 | execute format(' 13 | select body from %s.%s 14 | where body @> %L; 15 | ',schema,collection, term); 16 | 17 | end; 18 | $$ language plpgsql; 19 | -------------------------------------------------------------------------------- /scripts/functions/find_one.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | drop function if exists find_one(varchar, jsonb,varchar); 3 | create function find_one( 4 | collection varchar, 5 | term jsonb, 6 | schema varchar default 'public', 7 | out res jsonb 8 | ) 9 | as $$ 10 | begin 11 | 12 | execute format(' 13 | select body from %s.%s 14 | where body @> %L limit 1; 15 | ',schema,collection, term) into res; 16 | 17 | end; 18 | $$ language plpgsql; -------------------------------------------------------------------------------- 
/scripts/functions/fuzzy.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | drop function if exists fuzzy(varchar, varchar, varchar,varchar); 3 | create function fuzzy( 4 | collection varchar, 5 | key varchar, 6 | term varchar, 7 | schema varchar default 'public' 8 | ) 9 | returns setof jsonb 10 | as $$ 11 | begin 12 | return query 13 | execute format(' 14 | select body from %s.%s 15 | where body ->> %L ~* %L; 16 | ',schema,collection, key, term); 17 | 18 | end; 19 | $$ language plpgsql; -------------------------------------------------------------------------------- /scripts/functions/get.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | drop function if exists get(varchar,int,varchar); 3 | create function get(collection varchar, id int, schema varchar default 'public', out res jsonb) 4 | as $$ 5 | 6 | begin 7 | execute format('select body from %s.%s where id=%s',schema,collection, id) into res; 8 | end; 9 | 10 | $$ language plpgsql; -------------------------------------------------------------------------------- /scripts/functions/modify.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | drop function if exists modify(varchar, int, jsonb, varchar); 3 | create function modify( 4 | collection varchar, 5 | id int, 6 | set jsonb, 7 | schema varchar default 'public', 8 | out res jsonb 9 | ) 10 | as $$ 11 | 12 | begin 13 | -- join it 14 | execute format('select body || %L from %s.%s where id=%s', set,schema,collection, id) into res; 15 | 16 | -- save it - this will also update the search 17 | perform dox.save(collection => collection, schema => schema, doc => res); 18 | end; 19 | 20 | $$ language plpgsql; -------------------------------------------------------------------------------- /scripts/functions/save.sql: 
-------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | drop function if exists save(varchar, jsonb,text[],varchar); 3 | create function save( 4 | collection varchar, 5 | doc jsonb, 6 | search text[] = array['name','email','first','first_name','last','last_name','description','title','city','state','address','street', 'company'], 7 | schema varchar default 'public', 8 | out res jsonb 9 | ) 10 | as $$ 11 | 12 | declare 13 | doc_id int := doc -> 'id'; 14 | saved record; 15 | saved_doc jsonb; 16 | search_key varchar; 17 | search_params varchar; 18 | begin 19 | 20 | 21 | -- make sure the table exists 22 | perform dox.create_collection(collection => collection, schema => schema); 23 | 24 | 25 | if (select doc ? 'id') then 26 | 27 | execute format('insert into %s.%s (id, body) 28 | values (%L, %L) 29 | on conflict (id) 30 | do update set body = excluded.body, updated_at = now() 31 | returning *',schema,collection, doc -> 'id', doc) into saved; 32 | res := saved.body; 33 | else 34 | -- there's no document id 35 | execute format('insert into %s.%s (body) values (%L) returning *',schema,collection, doc) into saved; 36 | 37 | -- this will have an id on it 38 | 39 | select(doc || format('{"id": %s}', saved.id::text)::jsonb) into res; 40 | execute format('update %s.%s set body=%L, updated_at = now() where id=%s',schema,collection,res,saved.id); 41 | 42 | end if; 43 | 44 | 45 | -- do it automatically MMMMMKKK? 46 | foreach search_key in array search 47 | loop 48 | if(res ? 
search_key) then 49 | search_params := concat(search_params,' ',res ->> search_key); 50 | end if; 51 | end loop; 52 | if search_params is not null then 53 | execute format('update %s.%s set search=to_tsvector(%L) where id=%s',schema,collection,search_params,saved.id); 54 | end if; 55 | 56 | end; 57 | 58 | $$ language plpgsql; -------------------------------------------------------------------------------- /scripts/functions/search.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | drop function if exists search(varchar, varchar, varchar); 3 | -- full-text search over a collection's tsvector column, ranked by ts_rank_cd 4 | create function search(collection varchar, term varchar, schema varchar default 'public') 5 | returns setof jsonb 6 | as $$ 7 | declare 8 | begin 9 | return query 10 | -- quote the term with %L so quotes in user input cannot break out of the dynamic query 11 | execute format('select body 12 | from %s.%s 13 | where search @@ plainto_tsquery(%L) 14 | order by ts_rank_cd(search,plainto_tsquery(%L)) desc' 15 | ,schema,collection,term, term); 16 | end; 17 | 18 | $$ language plpgsql; -------------------------------------------------------------------------------- /scripts/functions/starts_with.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | drop function if exists starts_with(varchar, varchar, varchar, varchar); 3 | create function starts_with( 4 | collection varchar, 5 | key varchar, 6 | term varchar, 7 | schema varchar default 'public' 8 | ) 9 | returns setof jsonb 10 | as $$ 11 | declare 12 | search_param text := term || '%'; 13 | begin 14 | 15 | -- ensure we have the lookup column created if it doesn't already exist 16 | perform dox.create_lookup_column(collection => collection, schema => schema, key => key); 17 | 18 | return query 19 | execute format('select body from %s.%s where %s ilike %L',schema,collection,'lookup_' || key,search_param); 20 | end; 21 | $$ language plpgsql; 22 | -------------------------------------------------------------------------------- 
/scripts/functions/update_lookup.sql: -------------------------------------------------------------------------------- 1 | set search_path=dox; 2 | 3 | -- trigger function: refreshes every lookup_* column on a row when its body changes 4 | drop function if exists update_lookup(); 5 | create function update_lookup() 6 | returns trigger 7 | as $$ 8 | declare 9 | lookup_key text; 10 | json_key text; 11 | begin 12 | 13 | for lookup_key in (select column_name from information_schema.columns 14 | where table_name=TG_TABLE_NAME and table_schema=TG_TABLE_SCHEMA 15 | and column_name like 'lookup_%') 16 | loop 17 | -- strip exactly the 'lookup_' prefix; substring (unlike split_part) keeps json keys that themselves contain underscores intact 18 | json_key := substring(lookup_key from 8); 19 | 20 | execute format('update %s.%s set %s = %L where id=%s', 21 | TG_TABLE_SCHEMA, 22 | TG_TABLE_NAME, 23 | lookup_key, new.body ->> json_key, 24 | new.id 25 | ); 26 | end loop; 27 | return new; 28 | end; 29 | $$ language plpgsql; 30 | -------------------------------------------------------------------------------- /scripts/init.sql: -------------------------------------------------------------------------------- 1 | set client_min_messages TO WARNING; 2 | drop schema if exists dox cascade; 3 | create schema if not exists dox; 4 | -------------------------------------------------------------------------------- /test.sh: -------------------------------------------------------------------------------- 1 | #include the runner 2 | TESTDB=dox_tests 3 | psql -c "drop database if exists $TESTDB;" --quiet 4 | psql -c "create database $TESTDB;" --quiet 5 | psql $TESTDB < build.sql --quiet 6 | echo "Here we go!" 
7 | 8 | psql $TESTDB < tests/save.sql 9 | psql $TESTDB < tests/finding.sql 10 | psql $TESTDB < tests/starts_with.sql 11 | psql $TESTDB < tests/modify.sql 12 | -------------------------------------------------------------------------------- /tests/finding.sql: -------------------------------------------------------------------------------- 1 | DO $$ 2 | declare 3 | res jsonb; 4 | found int; 5 | begin 6 | 7 | set client_min_messages=NOTICE; 8 | raise notice '******************** FINDING THINGS ********************'; 9 | 10 | raise notice 'Looking up customers by id returns'; 11 | select * into res from dox.get(collection => 'customers', id => 1); 12 | 13 | assert res ->> 'id' = '1'; 14 | assert res ->> 'name' = 'chuck'; 15 | 16 | raise notice 'Looking up one customer by term returns'; 17 | select * into res from dox.find_one(collection => 'customers', term =>'{"name":"chuck"}'); 18 | 19 | assert res ->> 'id' = '1'; 20 | assert res ->> 'name' = 'chuck'; 21 | 22 | raise notice 'Looking up customers by company returns 2'; 23 | select count(1) into found from dox.find( 24 | collection => 'customers', 25 | term => '{"company":"red4"}' 26 | ); 27 | 28 | assert found = 2, 'Bad find'; 29 | 30 | raise notice 'Looking up using fuzzy returns 2'; 31 | select count(1) into found from dox.fuzzy( 32 | collection => 'customers', 33 | key => 'company', 34 | term => 'red' 35 | ); 36 | assert found = 2, 'Bad fuzz'; 37 | 38 | raise notice 'Looking up using search returns 2'; 39 | select count(1) into found from dox.search( 40 | collection => 'customers', 41 | term => 'red4' 42 | ); 43 | assert found = 2, 'Bad search'; 44 | 45 | end; 46 | $$ language plpgsql; 47 | 48 | 49 | -------------------------------------------------------------------------------- /tests/modify.sql: -------------------------------------------------------------------------------- 1 | DO $$ 2 | declare 3 | res jsonb; 4 | found int; 5 | begin 6 | 7 | set client_min_messages=NOTICE; 8 | raise notice 
'******************** Modify ********************'; 9 | 10 | raise notice 'Altering an email address updates record'; 11 | select * into res from dox.modify( 12 | id => 1, 13 | collection => 'customers', 14 | set => '{"name": "harold"}' 15 | ); 16 | 17 | assert res ->> 'name' = 'harold', 'Not modifed'; 18 | 19 | raise notice 'It also updates the lookup'; 20 | select count(1) into found 21 | from customers where lookup_name = 'harold'; 22 | 23 | end; 24 | $$ language plpgsql; -------------------------------------------------------------------------------- /tests/save.sql: -------------------------------------------------------------------------------- 1 | 2 | DO $$ 3 | declare 4 | res jsonb; 5 | begin 6 | 7 | --drop customers 8 | drop table if exists customers; 9 | set client_min_messages=NOTICE; 10 | 11 | raise notice '********************** SAVING ***********************'; 12 | 13 | raise notice 'Save works for basic operation and creates a table'; 14 | select * into res from dox.save( 15 | collection => 'customers', 16 | doc => '{"name": "chuck", "email":"chuck@test.com", "company":"red4"}' 17 | ); 18 | 19 | assert (res ->> 'name') = 'chuck', 'Nope, bad save'; 20 | assert (res ->> 'id') = '1', 'Nope, bad save'; 21 | 22 | raise notice 'Save will create a second customer'; 23 | select * into res from dox.save( 24 | collection => 'customers', 25 | doc => '{"name": "julie", "email":"jux@test.com", "company":"red4"}' 26 | ); 27 | 28 | assert (res ->> 'name') = 'julie', 'Nope, bad save'; 29 | assert (res ->> 'id') = '2', 'Nope, bad save'; 30 | 31 | end; 32 | $$ language plpgsql; 33 | 34 | 35 | -------------------------------------------------------------------------------- /tests/starts_with.sql: -------------------------------------------------------------------------------- 1 | DO $$ 2 | declare 3 | res jsonb; 4 | found int; 5 | begin 6 | 7 | set client_min_messages=NOTICE; 8 | raise notice '******************** Using starts_with ********************'; 9 | 10 | raise 
notice 'Looking up customers by id returns'; 11 | select count(1) into found from dox.starts_with( 12 | collection => 'customers', 13 | key => 'name', 14 | term => 'c' 15 | ); 16 | 17 | raise notice '... and creates a column'; 18 | select count(1) into found from information_schema.columns 19 | where table_name='customers' and table_schema='public' and column_name='lookup_name'; 20 | 21 | assert found = 1, 'No column added'; 22 | 23 | raise notice 'updates will reset the lookup'; 24 | update customers set body = '{"id": 1, "name": "rob", "email":"chuck@test.com", "company":"red4"}' 25 | where id = 1 returning body into res; 26 | 27 | assert res ->> 'name' = 'rob', 'Nope, update didnt work'; 28 | 29 | raise notice '******************** Using ends_with ********************'; 30 | 31 | raise notice 'Looking up customers by id returns'; 32 | select count(1) into found from dox.ends_with( 33 | collection => 'customers', 34 | key => 'email', 35 | term => '.com' 36 | ); 37 | 38 | raise notice 'found is %', found; 39 | assert found = 2, 'Bad ends_with'; 40 | 41 | raise notice '... and creates a column'; 42 | select count(1) into found from information_schema.columns 43 | where table_name='customers' and table_schema='public' and column_name='lookup_email'; 44 | 45 | 46 | 47 | 48 | end; 49 | $$ language plpgsql; --------------------------------------------------------------------------------