├── .github ├── _ └── workflows │ ├── artifacts.yml │ └── ci.yml ├── __init__.py ├── coq ├── py.typed ├── databases │ ├── __init__.py │ ├── insertions │ │ ├── sql │ │ │ ├── select │ │ │ │ ├── summaries.sql │ │ │ │ ├── inserted_count.sql │ │ │ │ ├── stat_inserted.sql │ │ │ │ └── inserted.sql │ │ │ ├── insert │ │ │ │ ├── batch.sql │ │ │ │ ├── source.sql │ │ │ │ ├── inserted.sql │ │ │ │ ├── instance.sql │ │ │ │ └── instance_stat.sql │ │ │ ├── create │ │ │ │ └── pragma.sql │ │ │ └── __init__.py │ │ ├── __init__.py │ │ └── database.py │ └── types.py ├── clients │ ├── inline │ │ ├── __init__.py │ │ └── worker.py │ ├── registers │ │ ├── __init__.py │ │ └── db │ │ │ ├── __init__.py │ │ │ └── sql │ │ │ ├── delete │ │ │ └── register.sql │ │ │ ├── insert │ │ │ ├── register.sql │ │ │ ├── word.sql │ │ │ └── line.sql │ │ │ ├── create │ │ │ ├── pragma.sql │ │ │ └── tables.sql │ │ │ ├── __init__.py │ │ │ └── select │ │ │ ├── lines.sql │ │ │ └── words.sql │ ├── tmux │ │ ├── db │ │ │ ├── __init__.py │ │ │ └── sql │ │ │ │ ├── delete │ │ │ │ └── pane.sql │ │ │ │ ├── select │ │ │ │ ├── panes.sql │ │ │ │ └── words.sql │ │ │ │ ├── create │ │ │ │ ├── pragma.sql │ │ │ │ └── tables.sql │ │ │ │ ├── insert │ │ │ │ ├── word.sql │ │ │ │ └── pane.sql │ │ │ │ └── __init__.py │ │ ├── __init__.py │ │ └── worker.py │ ├── third_party │ │ ├── __init__.py │ │ └── worker.py │ ├── tree_sitter │ │ ├── db │ │ │ ├── __init__.py │ │ │ └── sql │ │ │ │ ├── select │ │ │ │ ├── buffers.sql │ │ │ │ ├── buffer_by_id.sql │ │ │ │ └── words.sql │ │ │ │ ├── delete │ │ │ │ ├── buffer.sql │ │ │ │ └── words.sql │ │ │ │ ├── create │ │ │ │ ├── pragma.sql │ │ │ │ └── tables.sql │ │ │ │ ├── update │ │ │ │ └── buffer.sql │ │ │ │ ├── __init__.py │ │ │ │ └── insert │ │ │ │ ├── buffer.sql │ │ │ │ └── word.sql │ │ └── __init__.py │ ├── inline_third_party │ │ ├── __init__.py │ │ └── worker.py │ ├── cache │ │ ├── db │ │ │ ├── sql │ │ │ │ ├── delete │ │ │ │ │ └── words.sql │ │ │ │ ├── create │ │ │ │ │ ├── pragma.sql │ │ │ │ │ └── tables.sql │ │ │ │ ├── insert │ │ │ │ │ └── word.sql │ │ │ │ ├── __init__.py │ │ │ │ └── select │ │ │ │ │ └── words.sql │ │ │ ├── __init__.py │ │ │ └── database.py │ │ └── __init__.py │ ├── __init__.py │ ├── t9 │ │ ├── __init__.py │ │ └── types.py │ ├── lsp │ │ ├── __init__.py │ │ └── mul_bandit.py │ ├── paths │ │ └── __init__.py │ ├── tags │ │ ├── __init__.py │ │ └── db │ │ │ ├── sql │ │ │ ├── select │ │ │ │ ├── files.sql │ │ │ │ ├── files_filetype.sql │ │ │ │ └── tags.sql │ │ │ ├── delete │ │ │ │ └── file.sql │ │ │ ├── create │ │ │ │ ├── pragma.sql │ │ │ │ └── tables.sql │ │ │ ├── insert │ │ │ │ ├── file.sql │ │ │ │ └── tag.sql │ │ │ └── __init__.py │ │ │ ├── __init__.py │ │ │ └── database.py │ ├── buffers │ │ ├── db │ │ │ ├── sql │ │ │ │ ├── delete │ │ │ │ │ ├── buffer.sql │ │ │ │ │ └── lines.sql │ │ │ │ ├── select │ │ │ │ │ ├── buffer_by_id.sql │ │ │ │ │ ├── line_count.sql │ │ │ │ │ ├── buffers.sql │ │ │ │ │ ├── lines.sql │ │ │ │ │ └── words.sql │ │ │ │ ├── create │ │ │ │ │ ├── pragma.sql │ │ │ │ │ └── tables.sql │ │ │ │ ├── insert │ │ │ │ │ ├── buffer.sql │ │ │ │ │ ├── word.sql │ │ │ │ │ └── line.sql │ │ │ │ ├── update │ │ │ │ │ ├── buffer.sql │ │ │ │ │ ├── lines_shift_2.sql │ │ │ │ │ └── lines_shift_1.sql │ │ │ │ └── __init__.py │ │ │ └── __init__.py │ │ └── __init__.py │ └── snippet │ │ ├── __init__.py │ │ ├── db │ │ ├── sql │ │ │ ├── select │ │ │ │ ├── sources.sql │ │ │ │ └── snippets.sql │ │ │ ├── delete │ │ │ │ └── source.sql │ │ │ ├── insert │ │ │ │ ├── filetype.sql │ │ │ │ ├── extension.sql │ │ │ │ ├── match.sql │ │ │ │ 
├── source.sql │ │ │ │ └── snippet.sql │ │ │ ├── create │ │ │ │ ├── pragma.sql │ │ │ │ └── tables.sql │ │ │ └── __init__.py │ │ └── __init__.py │ │ └── worker.py ├── snippets │ ├── consts.py │ ├── __init__.py │ ├── parsers │ │ ├── __init__.py │ │ └── types.py │ ├── loaders │ │ ├── __init__.py │ │ ├── parse.py │ │ ├── lsp.py │ │ ├── load.py │ │ ├── ultisnip.py │ │ └── neosnippet.py │ └── types.py ├── __init__.py ├── ci │ ├── __init__.py │ ├── __main__.py │ ├── types.py │ ├── snip_trans.py │ ├── main.py │ └── load.py ├── tags │ ├── __init__.py │ ├── types.py │ └── parse.py ├── tmux │ ├── __init__.py │ └── parse.py ├── lsp │ ├── __init__.py │ ├── requests │ │ ├── __init__.py │ │ ├── command.py │ │ ├── resolve.py │ │ └── completion.py │ ├── protocol.lua │ ├── protocol.py │ └── types.py ├── paths │ ├── __init__.py │ └── show.py ├── server │ ├── __init__.py │ ├── registrants │ │ ├── __init__.py │ │ ├── repeat.py │ │ ├── noop.py │ │ ├── help.py │ │ └── options.py │ ├── rt_types.py │ ├── completions.py │ ├── icons.py │ ├── mark.py │ └── state.py ├── shared │ ├── __init__.py │ ├── aio.py │ ├── lru.py │ ├── sql.py │ ├── parse.py │ ├── context.py │ ├── timeit.py │ ├── repeat.py │ ├── executor.py │ └── fuzzy.py ├── treesitter │ ├── __init__.py │ └── types.py ├── registry.py ├── _registry.py ├── lang.py ├── consts.py └── client.py ├── docker ├── __init__.py ├── packer │ ├── Dockerfile │ └── root │ │ └── .config │ │ └── nvim │ │ └── init.lua ├── _base │ └── Dockerfile ├── vimplug │ ├── Dockerfile │ └── root │ │ └── .config │ │ └── nvim │ │ └── init.vim └── __main__.py ├── tests ├── __init__.py ├── tags │ ├── __init__.py │ └── parser.py ├── clients │ ├── __init__.py │ └── paths │ │ └── __init__.py ├── server │ ├── __init__.py │ └── reviewer.py ├── shared │ └── __init__.py ├── snippets │ ├── __init__.py │ └── parse.py ├── lsp │ ├── main.ts │ ├── main.css │ └── main.c └── __main__.py ├── .dockerignore ├── lua └── coq │ ├── buf-comp.lua │ ├── completion.lua │ └── ts-request.lua ├── ci ├── __main__.py ├── __init__.py └── main.py ├── ftplugin └── coq-snip.lua ├── plugin └── coq.vim ├── ftdetect └── coq-snip.vim ├── .gitignore ├── _config.yml ├── artifacts ├── README.md └── helo.yml ├── pyproject.toml ├── requirements.txt ├── mypy.ini ├── locale ├── zh.yml └── en.yml ├── docs ├── MISC.md ├── README.md ├── COMPLETION.md ├── STATS.md ├── CONF.md ├── FUZZY.md └── CUSTOM_SOURCES.md ├── autoload └── coq.vim ├── syntax └── coq-snip.vim ├── Makefile └── config └── compilation.yml /.github/_: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /coq/py.typed: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docker/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .gitignore 
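A note on the layout shown in the tree above, before the individual files: each completion source under coq/clients keeps its SQL in a db/sql/ directory, split by statement type into create/, insert/, select/, update/ and delete/, and every db/sql/__init__.py further down in this dump loads that directory via coq.shared.sql.loader. shared/sql.py itself is not part of this snapshot, so the following is only a rough sketch of the idea, with an assumed signature and return type:

    from pathlib import Path
    from typing import Mapping


    def loader(base: Path) -> Mapping[str, str]:
        # Map e.g. "select/words.sql" -> query text, for every *.sql under `base`.
        return {
            path.relative_to(base).as_posix(): path.read_text("UTF-8")
            for path in sorted(base.rglob("*.sql"))
        }

Keeping one statement per file keeps the Python workers free of inline SQL and makes the per-verb directories in the tree easy to audit.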
-------------------------------------------------------------------------------- /tests/tags/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /coq/databases/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/clients/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/server/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/shared/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/snippets/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /coq/clients/inline/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /coq/clients/registers/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /coq/clients/tmux/db/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/clients/paths/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /coq/clients/registers/db/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /coq/clients/third_party/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/db/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/lsp/main.ts: -------------------------------------------------------------------------------- 1 | /* 👩‍👩‍👧 */ 2 | -------------------------------------------------------------------------------- /coq/clients/inline_third_party/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /tests/lsp/main.css: -------------------------------------------------------------------------------- 1 | /* 👩‍👩‍👧 */ 2 | 3 | -------------------------------------------------------------------------------- /lua/coq/buf-comp.lua: -------------------------------------------------------------------------------- 1 | (function(...) 2 | end)(...) 
 3 | -------------------------------------------------------------------------------- /ci/__main__.py: -------------------------------------------------------------------------------- 1 | from .main import main 2 | 3 | main() 4 | -------------------------------------------------------------------------------- /ftplugin/coq-snip.lua: -------------------------------------------------------------------------------- 1 | vim.bo.commentstring = "# %s" 2 | -------------------------------------------------------------------------------- /plugin/coq.vim: -------------------------------------------------------------------------------- 1 | call luaeval('require("coq") and 0') 2 | -------------------------------------------------------------------------------- /coq/clients/cache/db/sql/delete/words.sql: -------------------------------------------------------------------------------- 1 | DELETE FROM words 2 | -------------------------------------------------------------------------------- /ci/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines ci as a module. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/snippets/consts.py: -------------------------------------------------------------------------------- 1 | MOD_PAD = 1000 2 | SNIP_LINE_SEP = "\n" 3 | -------------------------------------------------------------------------------- /tests/lsp/main.c: -------------------------------------------------------------------------------- 1 | int main() { 2 | /*𐐀*/ 3 | return 0; 4 | } 5 | -------------------------------------------------------------------------------- /coq/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines coq as a module. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/ci/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines ci as a submodule of coq. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/tags/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This defines Tags as a submodule of coq 3 | """ 4 | -------------------------------------------------------------------------------- /coq/tmux/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This defines Tmux as a submodule of coq 3 | """ 4 | -------------------------------------------------------------------------------- /ftdetect/coq-snip.vim: -------------------------------------------------------------------------------- 1 | autocmd BufNewFile,BufRead *.snip set filetype=coq-snip 2 | -------------------------------------------------------------------------------- /coq/databases/insertions/sql/select/summaries.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | * 3 | FROM stats_view 4 | -------------------------------------------------------------------------------- /coq/lsp/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines lsp as a submodule of coq.
 3 | """ 4 | -------------------------------------------------------------------------------- /coq/paths/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines paths as a submodule of coq. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines clients as a submodule of coq. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/t9/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines t9 as a submodule of clients. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/db/sql/select/buffers.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | rowid 3 | FROM buffers 4 | -------------------------------------------------------------------------------- /coq/server/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines server as a submodule of coq. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/shared/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines shared as a submodule of coq. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/snippets/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines snippets as a submodule of coq. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/treesitter/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This defines Treesitter as a submodule of coq 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/lsp/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines lsp as a submodule of clients. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/paths/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines paths as a submodule of clients. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/tags/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines tags as a submodule of clients. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/tmux/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines tmux as a submodule of clients.
3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/tmux/db/sql/delete/pane.sql: -------------------------------------------------------------------------------- 1 | DELETE FROM panes 2 | WHERE 3 | pane_id = :pane_id 4 | -------------------------------------------------------------------------------- /coq/clients/tmux/db/sql/select/panes.sql: -------------------------------------------------------------------------------- 1 | SELECT DISTINCT 2 | pane_id 3 | FROM panes 4 | 5 | -------------------------------------------------------------------------------- /coq/databases/insertions/sql/select/inserted_count.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | COUNT(*) 3 | FROM inserted 4 | -------------------------------------------------------------------------------- /coq/ci/__main__.py: -------------------------------------------------------------------------------- 1 | from asyncio import run 2 | 3 | from .main import main 4 | 5 | run(main()) 6 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/delete/buffer.sql: -------------------------------------------------------------------------------- 1 | DELETE FROM buffers 2 | WHERE 3 | rowid = :buffer_id 4 | -------------------------------------------------------------------------------- /coq/clients/cache/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines cache as a submodule of clients. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/cache/db/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines cache as a submodule of clients. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/tags/db/sql/select/files.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | filename, 3 | mtime 4 | FROM files 5 | 6 | -------------------------------------------------------------------------------- /coq/databases/insertions/sql/select/stat_inserted.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | * 3 | FROM stat_inserted_view 4 | -------------------------------------------------------------------------------- /coq/lsp/requests/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines requests as a submodule of lsp/coq. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/snippets/parsers/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This defines parser as a submodule of snippets. 3 | """ 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .ccls-cache/ 2 | /.vars/ 3 | /.venv/ 4 | /.vscode/ 5 | /temp/ 6 | /test/ 7 | __pycache__/ 8 | -------------------------------------------------------------------------------- /coq/clients/buffers/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines buffers as a submodule of clients. 
3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/snippet/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines snippet as a submodule of clients. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/snippet/db/sql/select/sources.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | filename, 3 | mtime 4 | FROM sources 5 | 6 | -------------------------------------------------------------------------------- /coq/clients/tags/db/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines tags as a submodule of databases/coq. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/db/sql/delete/buffer.sql: -------------------------------------------------------------------------------- 1 | DELETE FROM buffers 2 | WHERE 3 | rowid = :buffer_id 4 | -------------------------------------------------------------------------------- /coq/snippets/loaders/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Here are defined the loaders for the snippets formats. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines buffers as a submodule of databases/coq. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/registers/db/sql/delete/register.sql: -------------------------------------------------------------------------------- 1 | DELETE FROM registers 2 | WHERE 3 | register = :register 4 | -------------------------------------------------------------------------------- /coq/clients/tags/db/sql/delete/file.sql: -------------------------------------------------------------------------------- 1 | DELETE FROM files 2 | WHERE 3 | filename = X_NORM_CASE(:filename) 4 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines tree_sitter as a submodule of clients. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/databases/insertions/sql/insert/batch.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO batches ( rowid) 2 | VALUES (:rowid) 3 | -------------------------------------------------------------------------------- /coq/clients/snippet/db/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines snippets as a submodule of databases/coq. 
3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/snippet/db/sql/delete/source.sql: -------------------------------------------------------------------------------- 1 | DELETE FROM sources 2 | WHERE 3 | filename = X_NORM_CASE(:filename) 4 | -------------------------------------------------------------------------------- /coq/databases/insertions/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines insertions as a submodule of databases/coq. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/server/registrants/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines registrants as a submodule of server/coq. 3 | """ 4 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/select/buffer_by_id.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | rowid 3 | FROM buffers 4 | WHERE 5 | rowid = :rowid 6 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/db/sql/select/buffer_by_id.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | rowid 3 | FROM buffers 4 | WHERE 5 | rowid = :rowid 6 | -------------------------------------------------------------------------------- /coq/databases/insertions/sql/insert/source.sql: -------------------------------------------------------------------------------- 1 | INSERT OR IGNORE INTO sources ( name) 2 | VALUES (:name) 3 | -------------------------------------------------------------------------------- /coq/clients/cache/db/sql/create/pragma.sql: -------------------------------------------------------------------------------- 1 | PRAGMA auto_vacuum = INCREMENTAL; 2 | PRAGMA foreign_keys = ON; 3 | PRAGMA temp_store = MEMORY; 4 | -------------------------------------------------------------------------------- /coq/clients/registers/db/sql/insert/register.sql: -------------------------------------------------------------------------------- 1 | INSERT OR IGNORE INTO registers (register) 2 | VALUES (:register) 3 | -------------------------------------------------------------------------------- /coq/clients/snippet/db/sql/insert/filetype.sql: -------------------------------------------------------------------------------- 1 | INSERT OR IGNORE INTO filetypes (filetype) 2 | VALUES (:filetype) 3 | -------------------------------------------------------------------------------- /coq/clients/tags/db/sql/select/files_filetype.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | filetype 3 | FROM files 4 | WHERE 5 | filename = X_NORM_CASE(:filename) 6 | -------------------------------------------------------------------------------- /coq/clients/tmux/db/sql/create/pragma.sql: -------------------------------------------------------------------------------- 1 | PRAGMA auto_vacuum = INCREMENTAL; 2 | PRAGMA foreign_keys = ON; 3 | PRAGMA temp_store = MEMORY; 4 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/create/pragma.sql: -------------------------------------------------------------------------------- 1 | PRAGMA auto_vacuum = INCREMENTAL; 2 | PRAGMA foreign_keys = ON; 3 | PRAGMA temp_store = MEMORY; 4 | 
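The statements above use SQLite named :parameters throughout, and several of them call X_NORM_CASE (and, later in the dump, X_SIMILARITY), which are not SQLite built-ins, so they are presumably registered on the connection as application-defined functions. A minimal, self-contained sketch of that wiring with the standard sqlite3 module; the schema and the case-folding implementation are illustrative assumptions, and the INSERT mirrors the tags insert/file.sql shown further below:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    # Assumed implementation: normalise filenames to one canonical case.
    conn.create_function("X_NORM_CASE", 1, lambda text: text.casefold())

    # Multi-statement files such as create/pragma.sql and create/tables.sql
    # would run through executescript(); single statements go through
    # execute() with a mapping that supplies the named parameters.
    conn.executescript(
        """
        PRAGMA temp_store = MEMORY;
        CREATE TABLE files (filename TEXT PRIMARY KEY, filetype TEXT, mtime REAL);
        """
    )
    conn.execute(
        "INSERT INTO files (filename, filetype, mtime) "
        "VALUES (X_NORM_CASE(:filename), :filetype, :mtime)",
        {"filename": "/TMP/Demo.PY", "filetype": "python", "mtime": 0.0},
    )
    print(conn.execute("SELECT filename FROM files").fetchone())  # ('/tmp/demo.py',)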
-------------------------------------------------------------------------------- /coq/clients/registers/db/sql/create/pragma.sql: -------------------------------------------------------------------------------- 1 | PRAGMA auto_vacuum = INCREMENTAL; 2 | PRAGMA foreign_keys = ON; 3 | PRAGMA temp_store = MEMORY; 4 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/db/sql/create/pragma.sql: -------------------------------------------------------------------------------- 1 | PRAGMA auto_vacuum = INCREMENTAL; 2 | PRAGMA foreign_keys = ON; 3 | PRAGMA temp_store = MEMORY; 4 | -------------------------------------------------------------------------------- /coq/databases/insertions/sql/create/pragma.sql: -------------------------------------------------------------------------------- 1 | PRAGMA auto_vacuum = INCREMENTAL; 2 | PRAGMA foreign_keys = ON; 3 | PRAGMA temp_store = MEMORY; 4 | -------------------------------------------------------------------------------- /coq/databases/insertions/sql/insert/inserted.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO inserted ( instance_id, sort_by) 2 | VALUES (:instance_id, :sort_by) 3 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/insert/buffer.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO buffers ( rowid, filetype, filename) 2 | VALUES (:rowid, :filetype, :filename) 3 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/update/buffer.sql: -------------------------------------------------------------------------------- 1 | UPDATE buffers 2 | SET 3 | filetype = :filetype, 4 | filename = :filename 5 | WHERE 6 | rowid = :rowid 7 | -------------------------------------------------------------------------------- /coq/clients/cache/db/sql/insert/word.sql: -------------------------------------------------------------------------------- 1 | INSERT OR REPLACE INTO words (key, word, lword) 2 | VALUES (:key, :word, LOWER(:word)) 3 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/insert/word.sql: -------------------------------------------------------------------------------- 1 | INSERT OR IGNORE INTO words (line_id, word, lword) 2 | VALUES (:line_id, :word, LOWER(:word)) 3 | -------------------------------------------------------------------------------- /coq/clients/tmux/db/sql/insert/word.sql: -------------------------------------------------------------------------------- 1 | INSERT OR IGNORE INTO words (pane_id, word, lword) 2 | VALUES (:pane_id, :word, LOWER(:word)) 3 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/db/sql/update/buffer.sql: -------------------------------------------------------------------------------- 1 | UPDATE buffers 2 | SET 3 | filetype = :filetype, 4 | filename = :filename 5 | WHERE 6 | rowid = :rowid 7 | -------------------------------------------------------------------------------- /coq/clients/registers/db/sql/insert/word.sql: -------------------------------------------------------------------------------- 1 | INSERT OR IGNORE INTO words (register, word, lword) 2 | VALUES (:register, :word, LOWER(:word)) 3 | -------------------------------------------------------------------------------- 
/coq/databases/insertions/sql/insert/instance.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO instances ( rowid, source_id, batch_id) 2 | VALUES (:rowid, :source_id, :batch_id) 3 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/insert/line.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO lines ( rowid, buffer_id, line_num, line) 2 | VALUES (:rowid, :buffer_id, :line_num, :line) 3 | 4 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/select/line_count.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | COALESCE(MAX(line_num), -1) + 1 AS line_count 3 | FROM lines 4 | WHERE 5 | buffer_id = :buffer_id 6 | 7 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/update/lines_shift_2.sql: -------------------------------------------------------------------------------- 1 | UPDATE lines 2 | SET 3 | line_num = -line_num 4 | WHERE 5 | buffer_id = :buffer_id 6 | AND 7 | line_num < 0 8 | -------------------------------------------------------------------------------- /coq/clients/snippet/db/sql/insert/extension.sql: -------------------------------------------------------------------------------- 1 | INSERT OR IGNORE INTO extensions (source_id, src, dest) 2 | VALUES (:source_id, :src, :dest) 3 | -------------------------------------------------------------------------------- /coq/clients/tags/db/sql/create/pragma.sql: -------------------------------------------------------------------------------- 1 | PRAGMA auto_vacuum = INCREMENTAL; 2 | PRAGMA foreign_keys = ON; 3 | PRAGMA journal_mode = WAL; 4 | PRAGMA temp_store = MEMORY; 5 | -------------------------------------------------------------------------------- /coq/clients/tags/db/sql/insert/file.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO files (filename, filetype, mtime) 2 | VALUES (X_NORM_CASE(:filename), :filetype, :mtime) 3 | -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | title: "coq.nvim 🐔" 3 | 4 | showcase: True 5 | 6 | images: 7 | - https://raw.githubusercontent.com/ms-jpq/coq.artifacts/artifacts/preview/pretty.gif 8 | -------------------------------------------------------------------------------- /coq/clients/snippet/db/sql/create/pragma.sql: -------------------------------------------------------------------------------- 1 | PRAGMA auto_vacuum = INCREMENTAL; 2 | PRAGMA foreign_keys = ON; 3 | PRAGMA journal_mode = WAL; 4 | PRAGMA temp_store = MEMORY; 5 | -------------------------------------------------------------------------------- /coq/clients/snippet/db/sql/insert/match.sql: -------------------------------------------------------------------------------- 1 | INSERT OR IGNORE INTO matches (snippet_id, word, lword) 2 | VALUES (:snippet_id, :word, LOWER(:word)) 3 | -------------------------------------------------------------------------------- /coq/clients/snippet/db/sql/insert/source.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO sources (rowid, filename, mtime) 2 | VALUES (:rowid, X_NORM_CASE(:filename), :mtime) 3 | 
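The buffer-line updates lines_shift_2.sql just above and lines_shift_1.sql a little further down form a two-pass renumbering: the first pass writes every shifted position as a negative number, the second flips the negatives back to positive. Presumably this parks the moved rows in a value range that cannot collide with rows not yet updated (for instance under a UNIQUE (buffer_id, line_num) constraint, an assumption here since the buffers tables.sql is not shown). A small self-contained demonstration of the effect:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.executescript(
        """
        CREATE TABLE lines (buffer_id INTEGER, line_num INTEGER, line TEXT,
                            UNIQUE (buffer_id, line_num));
        INSERT INTO lines VALUES (1, 0, 'a'), (1, 1, 'b'), (1, 2, 'c');
        """
    )
    # Pass 1 (cf. lines_shift_1.sql): park shifted positions in negative space.
    conn.execute(
        "UPDATE lines SET line_num = -(line_num + :shift) "
        "WHERE buffer_id = :buffer_id AND line_num >= :lo",
        {"shift": 2, "buffer_id": 1, "lo": 1},
    )
    # Pass 2 (cf. lines_shift_2.sql): flip the parked rows back to positive.
    conn.execute(
        "UPDATE lines SET line_num = -line_num "
        "WHERE buffer_id = :buffer_id AND line_num < 0",
        {"buffer_id": 1},
    )
    print(conn.execute(
        "SELECT line_num, line FROM lines ORDER BY line_num"
    ).fetchall())  # [(0, 'a'), (3, 'b'), (4, 'c')]

After the two passes, the rows at and above :lo have moved down by :shift while earlier rows are untouched.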
-------------------------------------------------------------------------------- /coq/clients/tmux/db/sql/__init__.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from .....shared.sql import loader 4 | 5 | sql = loader(Path(__file__).resolve(strict=True).parent) 6 | -------------------------------------------------------------------------------- /coq/clients/registers/db/sql/__init__.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from .....shared.sql import loader 4 | 5 | sql = loader(Path(__file__).resolve(strict=True).parent) 6 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/db/sql/__init__.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from .....shared.sql import loader 4 | 5 | sql = loader(Path(__file__).resolve(strict=True).parent) 6 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/db/sql/insert/buffer.sql: -------------------------------------------------------------------------------- 1 | INSERT OR REPLACE INTO buffers ( rowid, filetype, filename) 2 | VALUES (:rowid, :filetype, :filename) 3 | -------------------------------------------------------------------------------- /artifacts/README.md: -------------------------------------------------------------------------------- 1 | # Artifacts 2 | 3 | Machine generated code 4 | 5 | DO NOT EDIT BY HAND 6 | 7 | ## Snippets 8 | 9 | [Here](https://github.com/ms-jpq/coq.artifacts) 10 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/update/lines_shift_1.sql: -------------------------------------------------------------------------------- 1 | UPDATE lines 2 | SET 3 | line_num = -(line_num + :shift) 4 | WHERE 5 | buffer_id = :buffer_id 6 | AND 7 | line_num >= :lo 8 | -------------------------------------------------------------------------------- /coq/clients/registers/db/sql/insert/line.sql: -------------------------------------------------------------------------------- 1 | INSERT OR IGNORE INTO lines (register, line, word, lword) 2 | VALUES (:register, :line, :word, LOWER(:word)) 3 | 4 | -------------------------------------------------------------------------------- /coq/databases/insertions/sql/insert/instance_stat.sql: -------------------------------------------------------------------------------- 1 | INSERT INTO instance_stats ( instance_id, interrupted, duration, items) 2 | VALUES (:instance_id, :interrupted, :duration, :items) 3 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [project] 2 | name = "coq_nvim" 3 | requires-python = ">=3.8.0" 4 | version = "0" 5 | 6 | [project.optional-dependencies] 7 | dev = ["mypy", "types-PyYAML", "black", "isort"] 8 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/db/sql/delete/words.sql: -------------------------------------------------------------------------------- 1 | DELETE FROM words 2 | WHERE 3 | buffer_id = :buffer_id 4 | AND 5 | hi >= :lo 6 | AND 7 | CASE 8 | WHEN :hi >= 0 THEN lo < :hi 9 | ELSE 1 10 | END 11 | -------------------------------------------------------------------------------- 
/docker/packer/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM coq_base 2 | 3 | 4 | RUN git clone --depth=1 -- https://github.com/wbthomason/packer.nvim \ 5 | ~/.local/share/nvim/site/pack/packer/start/packer.nvim 6 | 7 | 8 | COPY ./docker/packer / 9 | -------------------------------------------------------------------------------- /coq/databases/insertions/sql/select/inserted.sql: -------------------------------------------------------------------------------- 1 | SELECT DISTINCT 2 | rowid AS insert_order, 3 | sort_by AS sort_by 4 | FROM inserted 5 | GROUP BY 6 | sort_by 7 | ORDER BY 8 | rowid DESC 9 | LIMIT :limit 10 | 11 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | std2@https://github.com/ms-jpq/std2/archive/4c89fccd09a96579ad3994dc2069e528759385dc.tar.gz 2 | pynvim_pp@https://github.com/ms-jpq/pynvim_pp/archive/6beffc4f479360489481705dc23a9ebd54f0c17d.tar.gz 3 | PyYAML 4 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/select/buffers.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | buffers.rowid, 3 | COUNT(lines.rowid) AS line_count 4 | FROM buffers 5 | JOIN lines 6 | ON 7 | lines.buffer_id = buffers.rowid 8 | GROUP BY 9 | buffers.rowid 10 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/delete/lines.sql: -------------------------------------------------------------------------------- 1 | DELETE FROM lines 2 | WHERE 3 | buffer_id = :buffer_id 4 | AND 5 | line_num >= :lo 6 | AND 7 | CASE 8 | WHEN :hi >= 0 THEN line_num < :hi 9 | ELSE 1 10 | END 11 | 12 | -------------------------------------------------------------------------------- /coq/clients/snippet/db/sql/insert/snippet.sql: -------------------------------------------------------------------------------- 1 | INSERT OR IGNORE INTO snippets (rowid, source_id, filetype, grammar, content, label, doc) 2 | VALUES (:rowid, :source_id, :filetype, :grammar, :content, :label, :doc) 3 | -------------------------------------------------------------------------------- /coq/clients/cache/db/sql/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines sql as a submodule of clients/cache. 3 | """ 4 | 5 | from pathlib import Path 6 | 7 | from .....shared.sql import loader 8 | 9 | sql = loader(Path(__file__).resolve(strict=True).parent) 10 | -------------------------------------------------------------------------------- /coq/clients/tmux/db/sql/insert/pane.sql: -------------------------------------------------------------------------------- 1 | INSERT OR REPLACE INTO panes ( pane_id, session_name, window_index, window_name, pane_index, pane_title) 2 | VALUES (:pane_id, :session_name, :window_index, :window_name, :pane_index, :pane_title) 3 | -------------------------------------------------------------------------------- /coq/clients/tags/db/sql/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines sql as a submodule of tags/databases/coq.
3 | """ 4 | 5 | from pathlib import Path 6 | 7 | from .....shared.sql import loader 8 | 9 | sql = loader(Path(__file__).resolve(strict=True).parent) 10 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines sql as a submodule of buffers/databases/coq. 3 | """ 4 | 5 | from pathlib import Path 6 | 7 | from .....shared.sql import loader 8 | 9 | sql = loader(Path(__file__).resolve(strict=True).parent) 10 | -------------------------------------------------------------------------------- /coq/clients/snippet/db/sql/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines sql as a submodule of snippet/databases/coq. 3 | """ 4 | 5 | from pathlib import Path 6 | 7 | from .....shared.sql import loader 8 | 9 | sql = loader(Path(__file__).resolve(strict=True).parent) 10 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/select/lines.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | line 3 | FROM lines 4 | WHERE 5 | buffer_id = :buffer_id 6 | AND 7 | line_num >= :lo 8 | AND 9 | CASE 10 | WHEN :hi > 0 THEN line_num < :hi 11 | ELSE 1 12 | END 13 | ORDER BY 14 | line_num 15 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/db/sql/insert/word.sql: -------------------------------------------------------------------------------- 1 | INSERT OR IGNORE INTO words (buffer_id, word, lword, lo, hi, kind, pword, pkind, gpword, gpkind) 2 | VALUES (:buffer_id, :word, LOWER(:word), :lo, :hi, :kind, :pword, :pkind, :gpword, :gpkind) 3 | -------------------------------------------------------------------------------- /coq/databases/insertions/sql/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | This file defines sql as a submodule of insertions/databases/coq. 
3 | """ 4 | 5 | from pathlib import Path 6 | 7 | from ....shared.sql import loader 8 | 9 | sql = loader(Path(__file__).resolve(strict=True).parent) 10 | -------------------------------------------------------------------------------- /coq/clients/tags/db/sql/insert/tag.sql: -------------------------------------------------------------------------------- 1 | REPLACE INTO tags (`path`, line, name, lname, pattern, kind, typeref, scope, scopeKind, `access`) 2 | VALUES (X_NORM_CASE(:path), :line, :name, LOWER(:name), :pattern, :kind, :typeref, :scope, :scopeKind, :access) 3 | 4 | -------------------------------------------------------------------------------- /coq/clients/cache/db/sql/create/tables.sql: -------------------------------------------------------------------------------- 1 | BEGIN; 2 | 3 | 4 | CREATE TABLE IF NOT EXISTS words ( 5 | key BLOB NOT NULL, 6 | word TEXT NOT NULL, 7 | lword TEXT NOT NULL, 8 | UNIQUE (key, word) 9 | ); 10 | CREATE INDEX IF NOT EXISTS words_lword ON words (lword); 11 | 12 | 13 | END; 14 | -------------------------------------------------------------------------------- /coq/databases/types.py: -------------------------------------------------------------------------------- 1 | from sqlite3 import Connection 2 | from typing import cast 3 | 4 | from ..shared.types import Interruptible 5 | 6 | 7 | class DB(Interruptible): 8 | _conn: Connection = cast(Connection, None) 9 | 10 | def interrupt(self) -> None: 11 | self._conn.interrupt() 12 | -------------------------------------------------------------------------------- /docker/packer/root/.config/nvim/init.lua: -------------------------------------------------------------------------------- 1 | require("packer").startup( 2 | function(use) 3 | use {"neovim/nvim-lspconfig"} 4 | use {"ms-jpq/coq_nvim"} 5 | use {"ms-jpq/coq.artifacts"} 6 | end 7 | ) 8 | 9 | local lsp = require "lspconfig" 10 | local coq = require("coq") 11 | lsp.cssls.setup(coq.lsp_ensure_capabilities()) 12 | lsp.cssls.setup(coq().lsp_ensure_capabilities()) 13 | assert(coq == coq()()) 14 | -------------------------------------------------------------------------------- /coq/shared/aio.py: -------------------------------------------------------------------------------- 1 | from asyncio import create_task, wait 2 | from typing import Any, Coroutine, Optional, TypeVar 3 | 4 | from std2.asyncio import cancel 5 | 6 | _T = TypeVar("_T") 7 | 8 | 9 | async def with_timeout(timeout: float, co: Coroutine[Any, Any, _T]) -> Optional[_T]: 10 | done, not_done = await wait((create_task(co),), timeout=timeout) 11 | await cancel(*not_done) 12 | return (await done.pop()) if done else None 13 | -------------------------------------------------------------------------------- /coq/registry.py: -------------------------------------------------------------------------------- 1 | from typing import Any, Awaitable, Callable 2 | 3 | from pynvim_pp.atomic import Atomic 4 | from pynvim_pp.autocmd import AutoCMD 5 | from pynvim_pp.handler import RPC 6 | 7 | NAMESPACE = "COQ" 8 | 9 | 10 | def _name_gen(fn: Callable[..., Awaitable[Any]]) -> str: 11 | return fn.__qualname__.lstrip("_").capitalize() 12 | 13 | 14 | autocmd = AutoCMD() 15 | atomic = Atomic() 16 | rpc = RPC(NAMESPACE, name_gen=_name_gen) 17 | -------------------------------------------------------------------------------- /tests/server/reviewer.py: -------------------------------------------------------------------------------- 1 | from random import uniform 2 | from unittest import TestCase 3 | 4 | from 
...coq.server.reviewer import sigmoid 5 | 6 | 7 | class Sigmoid(TestCase): 8 | def test_1(self) -> None: 9 | y = sigmoid(0) 10 | self.assertEqual(y, 1) 11 | 12 | def test_2(self) -> None: 13 | for _ in range(0, 10000): 14 | y = sigmoid(uniform(-10, 10)) 15 | self.assertTrue(y >= 0.5 and y <= 1.5) 16 | -------------------------------------------------------------------------------- /coq/ci/types.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from pathlib import Path 3 | from typing import AbstractSet, Mapping 4 | 5 | 6 | @dataclass 7 | class _CompilationPaths: 8 | lsp: AbstractSet[Path] 9 | neosnippet: AbstractSet[Path] 10 | ultisnip: AbstractSet[Path] 11 | 12 | 13 | @dataclass(frozen=True) 14 | class Compilation: 15 | git: AbstractSet[str] 16 | paths: _CompilationPaths 17 | remaps: Mapping[str, AbstractSet[str]] 18 | -------------------------------------------------------------------------------- /coq/snippets/loaders/parse.py: -------------------------------------------------------------------------------- 1 | from pathlib import PurePath 2 | from textwrap import dedent 3 | from typing import NoReturn 4 | 5 | from ..types import LoadError 6 | 7 | 8 | def raise_err(path: PurePath, lineno: int, line: str, reason: str) -> NoReturn: 9 | msg = f"""\ 10 | Cannot load: 11 | path: {path} 12 | lineno: {lineno} 13 | line: {line} 14 | reason: |- 15 | {reason} 16 | """ 17 | raise LoadError(dedent(msg)) 18 | -------------------------------------------------------------------------------- /coq/tags/types.py: -------------------------------------------------------------------------------- 1 | from typing import Mapping, Optional, Sequence, Tuple, TypedDict 2 | 3 | 4 | class Tag(TypedDict): 5 | language: str 6 | 7 | path: str 8 | 9 | line: int 10 | name: str 11 | pattern: Optional[str] 12 | kind: str 13 | 14 | typeref: Optional[str] 15 | 16 | scope: Optional[str] 17 | scopeKind: Optional[str] 18 | 19 | access: Optional[str] 20 | 21 | 22 | Tags = Mapping[str, Tuple[str, float, Sequence[Tag]]] 23 | -------------------------------------------------------------------------------- /docker/_base/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:focal 2 | 3 | 4 | ENV TERM=xterm-256color 5 | WORKDIR /root/.config/nvim 6 | RUN apt-get update && \ 7 | DEBIAN_FRONTEND=noninteractive apt-get install --yes -- software-properties-common && \ 8 | add-apt-repository ppa:neovim-ppa/unstable && \ 9 | apt-get update && \ 10 | DEBIAN_FRONTEND=noninteractive apt-get install --yes -- \ 11 | python3-venv git neovim universal-ctags tmux && \ 12 | rm -rf /var/lib/apt/lists/* 13 | -------------------------------------------------------------------------------- /coq/ci/snip_trans.py: -------------------------------------------------------------------------------- 1 | from dataclasses import replace 2 | from re import RegexFlag, compile 3 | 4 | from ..snippets.types import ParsedSnippet 5 | 6 | _JS = compile(r";$|;(\n)", flags=RegexFlag.MULTILINE) 7 | 8 | 9 | def trans(snip: ParsedSnippet) -> ParsedSnippet: 10 | if snip.filetype in {"javascript", "typescript", "typescriptreact"}: 11 | content = _JS.sub(r"\1", snip.content) 12 | return replace(snip, content=content) 13 | else: 14 | return snip 15 | -------------------------------------------------------------------------------- /.github/workflows/artifacts.yml: -------------------------------------------------------------------------------- 1 | 
--- 2 | name: Artifacts 3 | 4 | on: 5 | push: 6 | branches: 7 | - coq 8 | schedule: 9 | - cron: "0 0 * * *" # daily 10 | 11 | jobs: 12 | build: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - name: Checkout 17 | uses: actions/checkout@v3 18 | 19 | - name: Setup Python 20 | uses: actions/setup-python@v3 21 | 22 | - name: Build 23 | env: 24 | CI_TOKEN: ${{ secrets.CI_TOKEN }} 25 | run: |- 26 | make build 27 | -------------------------------------------------------------------------------- /coq/lsp/protocol.lua: -------------------------------------------------------------------------------- 1 | return (function() 2 | local sanitize = function(spec) 3 | local tb = {[vim.type_idx] = vim.types.dictionary} 4 | for k, v in pairs(spec) do 5 | if type(k) == "string" and type(v) == "number" then 6 | tb[k] = v 7 | end 8 | end 9 | return tb 10 | end 11 | 12 | local lookup = vim.empty_dict() 13 | 14 | for key, val in pairs(vim.lsp.protocol or {}) do 15 | if type(val) == "table" then 16 | lookup[key] = sanitize(val) 17 | end 18 | end 19 | 20 | return lookup 21 | end)() 22 | -------------------------------------------------------------------------------- /coq/shared/lru.py: -------------------------------------------------------------------------------- 1 | from collections import OrderedDict, UserDict 2 | from typing import Generic, TypeVar, cast 3 | 4 | K = TypeVar("K") 5 | V = TypeVar("V") 6 | 7 | 8 | class LRU(UserDict, Generic[K, V]): 9 | def __init__(self, size: int) -> None: 10 | assert size > 0 11 | self._size = size 12 | self.data = OrderedDict() 13 | 14 | def __setitem__(self, key: K, item: V) -> None: 15 | if len(self) >= self._size: 16 | cast(OrderedDict, self.data).popitem(last=False) 17 | return super().__setitem__(key, item) 18 | -------------------------------------------------------------------------------- /coq/_registry.py: -------------------------------------------------------------------------------- 1 | from .lsp.requests import completion, request, resolve 2 | from .server.registrants import attachment, autocmds, help, marks, noop, omnifunc 3 | from .server.registrants import preview as rp 4 | from .server.registrants import repeat, snippets, stats, user_snippets 5 | 6 | assert attachment 7 | assert autocmds 8 | assert completion 9 | assert help 10 | assert marks 11 | assert noop 12 | assert omnifunc 13 | assert repeat 14 | assert request 15 | assert resolve 16 | assert rp 17 | assert snippets 18 | assert stats 19 | assert user_snippets 20 | 21 | ____ = None 22 | -------------------------------------------------------------------------------- /coq/snippets/types.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import AbstractSet, Mapping 3 | from uuid import UUID 4 | 5 | from ..shared.types import SnippetGrammar 6 | 7 | SCHEMA = "v2" 8 | 9 | 10 | class LoadError(Exception): ... 
11 | 12 | 13 | @dataclass(frozen=True) 14 | class ParsedSnippet: 15 | grammar: SnippetGrammar 16 | filetype: str 17 | content: str 18 | label: str 19 | doc: str 20 | matches: AbstractSet[str] 21 | 22 | 23 | @dataclass(frozen=True) 24 | class LoadedSnips: 25 | exts: Mapping[str, AbstractSet[str]] 26 | snippets: Mapping[UUID, ParsedSnippet] 27 | -------------------------------------------------------------------------------- /coq/clients/registers/db/sql/select/lines.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | register, 3 | word, 4 | line AS text 5 | FROM lines 6 | WHERE 7 | ( 8 | :word <> '' 9 | AND 10 | lword LIKE :like_word ESCAPE '!' 11 | AND 12 | LENGTH(word) + :look_ahead >= LENGTH(:word) 13 | AND 14 | X_SIMILARITY(LOWER(:word), lword, :look_ahead) > :cut_off 15 | ) 16 | OR 17 | ( 18 | :sym <> '' 19 | AND 20 | lword LIKE :like_sym ESCAPE '!' 21 | AND 22 | LENGTH(word) + :look_ahead >= LENGTH(:sym) 23 | AND 24 | X_SIMILARITY(LOWER(:sym), lword, :look_ahead) > :cut_off 25 | ) 26 | LIMIT :limit 27 | -------------------------------------------------------------------------------- /docker/vimplug/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM coq_base 2 | 3 | 4 | ADD https://raw.githubusercontent.com/junegunn/vim-plug/master/plug.vim /root/.config/nvim/autoload/plug.vim 5 | COPY ./docker/vimplug / 6 | WORKDIR /root/.config/nvim/plugged 7 | RUN git clone --depth=1 -- https://github.com/ms-jpq/chadtree.git && \ 8 | git clone --depth=1 -- https://github.com/ms-jpq/coq.artifacts.git 9 | RUN cd /root/.config/nvim/plugged/chadtree || exit 1 && \ 10 | python3 -m chadtree deps 11 | 12 | 13 | COPY ./ /root/.config/nvim/plugged/coq_nvim 14 | RUN cd /root/.config/nvim/plugged/coq_nvim || exit 1 && \ 15 | python3 -m coq deps 16 | -------------------------------------------------------------------------------- /coq/clients/cache/db/sql/select/words.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | key, 3 | word 4 | FROM words 5 | WHERE 6 | word <> '' 7 | AND 8 | ( 9 | ( 10 | lword LIKE :like_word ESCAPE '!' 11 | AND 12 | LENGTH(word) + :look_ahead >= LENGTH(:word) 13 | AND 14 | X_SIMILARITY(LOWER(:word), lword, :look_ahead) > :cut_off 15 | ) 16 | OR 17 | ( 18 | lword LIKE :like_sym ESCAPE '!' 
19 | AND 20 | LENGTH(word) + :look_ahead >= LENGTH(:sym) 21 | AND 22 | X_SIMILARITY(LOWER(:sym), lword, :look_ahead) > :cut_off 23 | ) 24 | ) 25 | GROUP BY 26 | key 27 | LIMIT :limit 28 | -------------------------------------------------------------------------------- /coq/lsp/requests/command.py: -------------------------------------------------------------------------------- 1 | from std2.pickle.encoder import new_encoder 2 | 3 | from ...shared.types import ExternLSP, ExternLUA 4 | from ..types import Command 5 | from .request import async_request 6 | 7 | _ENCODER = new_encoder[Command](Command) 8 | 9 | 10 | async def cmd(extern: ExternLSP) -> None: 11 | if extern.command: 12 | name = "lsp_third_party_cmd" if isinstance(extern, ExternLUA) else "lsp_command" 13 | command = _ENCODER(extern.command) 14 | 15 | clients = {extern.client} if extern.client else set() 16 | async for _ in async_request(name, None, clients, command): 17 | pass 18 | -------------------------------------------------------------------------------- /coq/server/rt_types.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import AbstractSet, MutableMapping 3 | from uuid import UUID 4 | 5 | from ..databases.insertions.database import IDB 6 | from ..shared.runtime import Metric, Supervisor, Worker 7 | from ..shared.settings import Settings 8 | from ..shared.types import Completion 9 | 10 | 11 | class ValidationError(Exception): ... 12 | 13 | 14 | @dataclass(frozen=True) 15 | class Stack: 16 | settings: Settings 17 | lru: MutableMapping[UUID, Completion] 18 | metrics: MutableMapping[UUID, Metric] 19 | idb: IDB 20 | supervisor: Supervisor 21 | workers: AbstractSet[Worker] 22 | -------------------------------------------------------------------------------- /docker/vimplug/root/.config/nvim/init.vim: -------------------------------------------------------------------------------- 1 | nnoremap Q 2 | nnoremap QQ quitall! 3 | vnoremap Q 4 | vnoremap QQ quitall! 
5 | 6 | filetype on 7 | set nomodeline 8 | set secure 9 | set termguicolors 10 | set shortmess+=I 11 | 12 | 13 | call plug#begin('~/.config/nvim/plugged') 14 | Plug 'ms-jpq/coq_nvim', {'branch': 'dev'} 15 | Plug 'ms-jpq/coq.artifacts', {'branch': 'artifacts'} 16 | Plug 'ms-jpq/chadtree', {'branch': 'chad'} 17 | call plug#end() 18 | 19 | 20 | let g:python3_host_prog = '/usr/bin/python3' 21 | let mapleader=' ' 22 | nnoremap v CHADopen 23 | nnoremap z COQnow 24 | -------------------------------------------------------------------------------- /coq/clients/third_party/worker.py: -------------------------------------------------------------------------------- 1 | from typing import AsyncIterator 2 | 3 | from ...lsp.requests.completion import comp_thirdparty 4 | from ...lsp.types import LSPcomp 5 | from ...shared.types import Context 6 | from ..lsp.worker import Worker as LSPWorker 7 | 8 | 9 | class Worker(LSPWorker): 10 | def _request(self, context: Context) -> AsyncIterator[LSPcomp]: 11 | return comp_thirdparty( 12 | short_name=self._options.short_name, 13 | always_on_top=self._options.always_on_top, 14 | weight_adjust=self._options.weight_adjust, 15 | context=context, 16 | chunk=self._max_results * 2, 17 | clients=set(), 18 | ) 19 | -------------------------------------------------------------------------------- /coq/clients/registers/db/sql/select/words.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | register, 3 | word, 4 | word AS text 5 | FROM words 6 | WHERE 7 | ( 8 | :word <> '' 9 | AND 10 | lword LIKE :like_word ESCAPE '!' 11 | AND 12 | LENGTH(word) + :look_ahead >= LENGTH(:word) 13 | AND 14 | word <> SUBSTR(:word, 1, LENGTH(word)) 15 | AND 16 | X_SIMILARITY(LOWER(:word), lword, :look_ahead) > :cut_off 17 | ) 18 | OR 19 | ( 20 | :sym <> '' 21 | AND 22 | lword LIKE :like_sym ESCAPE '!' 
23 | AND 24 | LENGTH(word) + :look_ahead >= LENGTH(:sym) 25 | AND 26 | word <> SUBSTR(:sym, 1, LENGTH(word)) 27 | AND 28 | X_SIMILARITY(LOWER(:sym), lword, :look_ahead) > :cut_off 29 | ) 30 | LIMIT :limit 31 | -------------------------------------------------------------------------------- /coq/clients/inline_third_party/worker.py: -------------------------------------------------------------------------------- 1 | from typing import AsyncIterator 2 | 3 | from ...lsp.requests.completion import comp_thirdparty_inline 4 | from ...lsp.types import LSPcomp 5 | from ...shared.types import Context 6 | from ..inline.worker import Worker as InlineWorker 7 | 8 | 9 | class Worker(InlineWorker): 10 | def _request(self, context: Context) -> AsyncIterator[LSPcomp]: 11 | return comp_thirdparty_inline( 12 | short_name=self._options.short_name, 13 | always_on_top=self._options.always_on_top, 14 | weight_adjust=self._options.weight_adjust, 15 | context=context, 16 | chunk=self._supervisor.match.max_results * 2, 17 | clients=set(), 18 | ) 19 | -------------------------------------------------------------------------------- /coq/treesitter/types.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from dataclasses import dataclass 4 | from typing import Optional, Tuple, TypedDict 5 | 6 | 7 | class SimpleRawPayload(TypedDict, total=False): 8 | kind: str 9 | text: str 10 | 11 | 12 | class RawPayload(SimpleRawPayload, TypedDict, total=False): 13 | range: Tuple[int, int] 14 | parent: SimpleRawPayload 15 | grandparent: SimpleRawPayload 16 | 17 | 18 | @dataclass(frozen=True) 19 | class SimplePayload: 20 | kind: str 21 | text: str 22 | 23 | 24 | @dataclass(frozen=True) 25 | class Payload(SimplePayload): 26 | filename: str 27 | range: Tuple[int, int] 28 | parent: Optional[SimplePayload] 29 | grandparent: Optional[SimplePayload] 30 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | cache_dir = .vars/mypy 3 | check_untyped_defs = true 4 | disallow_any_generics = false 5 | disallow_any_unimported = true 6 | disallow_incomplete_defs = true 7 | disallow_subclassing_any = true 8 | disallow_untyped_calls = true 9 | disallow_untyped_decorators = true 10 | disallow_untyped_defs = true 11 | error_summary = true 12 | extra_checks = true 13 | implicit_reexport = false 14 | no_implicit_optional = true 15 | pretty = true 16 | show_column_numbers = true 17 | show_error_codes = true 18 | show_error_context = true 19 | strict = true 20 | strict_equality = true 21 | warn_incomplete_stub = true 22 | warn_redundant_casts = true 23 | warn_return_any = true 24 | warn_unreachable = true 25 | warn_unused_configs = true 26 | warn_unused_ignores = true 27 | -------------------------------------------------------------------------------- /coq/clients/snippet/db/sql/select/snippets.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | grammar, 3 | word, 4 | snippet, 5 | label, 6 | doc 7 | FROM snippets_view 8 | WHERE 9 | ft_src IN (:filetype, '*', '_') 10 | AND 11 | ( 12 | ( 13 | :word <> '' 14 | AND 15 | lword LIKE :like_word ESCAPE '!' 16 | AND 17 | LENGTH(word) + :look_ahead >= LENGTH(:word) 18 | AND 19 | X_SIMILARITY(LOWER(:word), lword, :look_ahead) > :cut_off 20 | ) 21 | OR 22 | ( 23 | :sym <> '' 24 | AND 25 | lword LIKE :like_sym ESCAPE '!' 
26 | AND 27 | LENGTH(word) + :look_ahead >= LENGTH(:sym) 28 | AND 29 | X_SIMILARITY(LOWER(:sym), lword, :look_ahead) > :cut_off 30 | ) 31 | ) 32 | GROUP BY 33 | snippet_id 34 | LIMIT :limit 35 | -------------------------------------------------------------------------------- /docker/__main__.py: -------------------------------------------------------------------------------- 1 | from asyncio import gather, run 2 | from pathlib import Path 3 | 4 | from std2.asyncio.subprocess import call 5 | 6 | _PARENT = Path(__file__).resolve(strict=True).parent 7 | _TOP_LEVEL = _PARENT.parent 8 | 9 | 10 | async def _build(path: str) -> None: 11 | tag = f"coq_{path.lstrip('_')}" 12 | await call( 13 | "docker", 14 | "buildx", 15 | "build", 16 | "--progress", 17 | "plain", 18 | "--file", 19 | _PARENT / path / "Dockerfile", 20 | "--tag", 21 | tag, 22 | "--", 23 | _TOP_LEVEL, 24 | capture_stdout=False, 25 | capture_stderr=False, 26 | ) 27 | 28 | 29 | async def main() -> None: 30 | await _build("_base") 31 | await gather(_build("packer"), _build("vimplug")) 32 | 33 | 34 | run(main()) 35 | -------------------------------------------------------------------------------- /coq/ci/main.py: -------------------------------------------------------------------------------- 1 | from difflib import unified_diff 2 | 3 | from pynvim_pp.logging import log 4 | 5 | from ..consts import DEBUG, VARS 6 | from ..server.registrants.snippets import BUNDLED_PATH_TPL, jsonify 7 | from ..shared.types import UTF8 8 | from ..snippets.types import SCHEMA 9 | from .load import load_parsable 10 | 11 | 12 | async def main() -> None: 13 | snippets = await load_parsable() 14 | j_snippets = jsonify(snippets) 15 | 16 | snip_art = VARS / "snippets" / BUNDLED_PATH_TPL.substitute(schema=SCHEMA) 17 | snip_art.parent.mkdir(parents=True, exist_ok=True) 18 | 19 | if DEBUG and snip_art.exists(): 20 | for line in unified_diff( 21 | snip_art.read_text().splitlines(), j_snippets.splitlines() 22 | ): 23 | log.debug("%s", line) 24 | 25 | snip_art.write_text(j_snippets, encoding=UTF8) 26 | -------------------------------------------------------------------------------- /coq/clients/tmux/db/sql/select/words.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | word, 3 | session_name, 4 | window_index, 5 | window_name, 6 | pane_index, 7 | pane_title 8 | FROM words_view 9 | WHERE 10 | pane_id <> :pane_id 11 | AND 12 | ( 13 | ( 14 | :word <> '' 15 | AND 16 | lword LIKE :like_word ESCAPE '!' 17 | AND 18 | LENGTH(word) + :look_ahead >= LENGTH(:word) 19 | AND 20 | word <> SUBSTR(:word, 1, LENGTH(word)) 21 | AND 22 | X_SIMILARITY(LOWER(:word), lword, :look_ahead) > :cut_off 23 | ) 24 | OR 25 | ( 26 | :sym <> '' 27 | AND 28 | lword LIKE :like_sym ESCAPE '!' 29 | AND 30 | LENGTH(word) + :look_ahead >= LENGTH(:sym) 31 | AND 32 | word <> SUBSTR(:sym, 1, LENGTH(word)) 33 | AND 34 | X_SIMILARITY(LOWER(:sym), lword, :look_ahead) > :cut_off 35 | ) 36 | ) 37 | LIMIT :limit 38 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/select/words.sql: -------------------------------------------------------------------------------- 1 | SELECT 2 | word, 3 | filetype, 4 | filename, 5 | line_num 6 | FROM words_view 7 | WHERE 8 | CASE 9 | WHEN :filetype <> NULL THEN filetype = :filetype 10 | ELSE 1 11 | END 12 | AND 13 | ( 14 | ( 15 | :word <> '' 16 | AND 17 | lword LIKE :like_word ESCAPE '!' 
18 | AND 19 | LENGTH(word) + :look_ahead >= LENGTH(:word) 20 | AND 21 | word <> SUBSTR(:word, 1, LENGTH(word)) 22 | AND 23 | X_SIMILARITY(LOWER(:word), lword, :look_ahead) > :cut_off 24 | ) 25 | OR 26 | ( 27 | :sym <> '' 28 | AND 29 | lword LIKE :like_sym ESCAPE '!' 30 | AND 31 | LENGTH(word) + :look_ahead >= LENGTH(:sym) 32 | AND 33 | word <> SUBSTR(:sym, 1, LENGTH(word)) 34 | AND 35 | X_SIMILARITY(LOWER(:sym), lword, :look_ahead) > :cut_off 36 | ) 37 | ) 38 | LIMIT :limit 39 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/db/sql/select/words.sql: -------------------------------------------------------------------------------- 1 | SELECT DISTINCT 2 | word, 3 | lo, 4 | hi, 5 | kind, 6 | pword, 7 | pkind, 8 | gpword, 9 | gpkind, 10 | filename 11 | FROM words_view 12 | WHERE 13 | filetype = :filetype 14 | AND 15 | ( 16 | ( 17 | :word <> '' 18 | AND 19 | lword LIKE :like_word ESCAPE '!' 20 | AND 21 | LENGTH(word) + :look_ahead >= LENGTH(:word) 22 | AND 23 | word <> SUBSTR(:word, 1, LENGTH(word)) 24 | AND 25 | X_SIMILARITY(LOWER(:word), lword, :look_ahead) > :cut_off 26 | ) 27 | OR 28 | ( 29 | :sym <> '' 30 | AND 31 | lword LIKE :like_sym ESCAPE '!' 32 | AND 33 | LENGTH(word) + :look_ahead >= LENGTH(:sym) 34 | AND 35 | word <> SUBSTR(:sym, 1, LENGTH(word)) 36 | AND 37 | X_SIMILARITY(LOWER(:sym), lword, :look_ahead) > :cut_off 38 | ) 39 | ) 40 | LIMIT :limit 41 | -------------------------------------------------------------------------------- /coq/clients/t9/types.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Any, Optional, Sequence, TypedDict 3 | 4 | 5 | @dataclass(frozen=True) 6 | class ReqL2: 7 | correlation_id: int 8 | before: str 9 | after: str 10 | filename: str 11 | region_includes_beginning: bool 12 | region_includes_end: bool 13 | max_num_results: Optional[int] = None 14 | 15 | 16 | @dataclass(frozen=True) 17 | class ReqL1: 18 | Autocomplete: ReqL2 19 | 20 | 21 | @dataclass(frozen=True) 22 | class Request: 23 | request: ReqL1 24 | version: str 25 | 26 | 27 | @dataclass(frozen=True) 28 | class RespL1: 29 | new_prefix: str 30 | old_suffix: str 31 | new_suffix: str 32 | kind: Optional[int] = None 33 | 34 | 35 | class Response(TypedDict): 36 | correlation_id: int 37 | old_prefix: str 38 | is_locked: bool 39 | user_message: Sequence[str] 40 | results: Sequence[Any] 41 | -------------------------------------------------------------------------------- /lua/coq/completion.lua: -------------------------------------------------------------------------------- 1 | (function(...) 2 | COQ.send_comp = function(col, items) 3 | vim.schedule( 4 | function() 5 | local legal_modes = { 6 | ["i"] = true, 7 | ["ic"] = true, 8 | ["ix"] = true 9 | } 10 | local legal_cmodes = { 11 | [""] = true, 12 | ["eval"] = true, 13 | ["function"] = true, 14 | ["ctrl_x"] = true 15 | } 16 | local mode = vim.api.nvim_get_mode().mode 17 | local comp_mode = vim.fn.complete_info({"mode"}).mode 18 | if legal_modes[mode] and legal_cmodes[comp_mode] then 19 | -- when `#items ~= 0` there is something to show 20 | -- when `#items == 0` but `comp_mode == "eval"` there is something to close 21 | if #items ~= 0 or comp_mode == "eval" then 22 | vim.fn.complete(col, items) 23 | end 24 | end 25 | end 26 | ) 27 | end 28 | end)(...) 
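For orientation, a hedged sketch (editor's illustration, not part of the plugin) of the call the Python host is expected to make into the Lua shim above: a 1-based column plus a list of `:help complete-items` dictionaries, mirroring the `VimCompletion` dataclass in `coq/server/completions.py` later in this listing.

```lua
-- Illustrative values only; the menu label and the user_data encoding are
-- assumptions, the remaining field names follow |complete-items|.
COQ.send_comp(8, {
  {
    word = "example",
    abbr = "example",
    menu = "[BUF]",                                      -- assumed source label
    kind = "Text",
    equal = 1,
    dup = 1,
    empty = 1,
    user_data = "00000000-0000-0000-0000-000000000000",  -- UUID from the host
  },
})
```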
29 | -------------------------------------------------------------------------------- /artifacts/helo.yml: -------------------------------------------------------------------------------- 1 | --- 2 | chars: [2, 6] 3 | 4 | cocks: 5 | - "🥚" 6 | - "🐣" 7 | - "🐥" 8 | - "🐤" 9 | - "🐓" 10 | - "🐔" 11 | 12 | stars: 13 | - "✨" 14 | - "💫" 15 | - "⭐️" 16 | - "🌟" 17 | 18 | helo: 19 | - "Aanii" # Ojibwe 20 | - "Alo" # Michif 21 | - "Aloha" # Spongebob 22 | - "Bonjour" # French 23 | - "Dia dhuit" # Irish 24 | - "Hallo" # Germoney 25 | - "Halò" # Scottish? 26 | - "Hello" # English 27 | - "Hola" # Spanish 28 | - "Kwīingu-néewul" # Lunaapeew 29 | - "Merhaba" # Turkish 30 | - "Olá" # Portuguese 31 | - "Sekoh" # Mohawk 32 | - "Szia" # Hungarian 33 | - "Ullaqut" # Inuktitut 34 | - "Waajiiye" # Oji-Cree 35 | - "Wâciyê" # Cree 36 | - "tena koutou" # Maori 37 | - "γεια" # Greek 38 | - "Здраво" # Serbian 39 | - "Привет" # Russian 40 | - "העלא" # Yiddish 41 | - "שלום" # Hebrew 42 | - "سلام" # Persian 43 | - "مرحبا" # Arabic 44 | - "สวัสดี" # Thai 45 | - "你好" # Chinese 46 | - "வணக்கம்" # Tamil 47 | -------------------------------------------------------------------------------- /coq/clients/tags/db/sql/create/tables.sql: -------------------------------------------------------------------------------- 1 | BEGIN; 2 | 3 | 4 | CREATE TABLE IF NOT EXISTS files ( 5 | filename TEXT NOT NULL PRIMARY KEY, 6 | filetype TEXT NOT NULL, 7 | mtime REAL NOT NULL 8 | ) WITHOUT ROWID; 9 | CREATE INDEX IF NOT EXISTS files_filetype ON files (filetype); 10 | 11 | 12 | -- !! files 1:N tags 13 | CREATE TABLE IF NOT EXISTS tags ( 14 | `path` TEXT NOT NULL REFERENCES files (filename) ON UPDATE CASCADE ON DELETE CASCADE, 15 | line INTEGER NOT NULL, 16 | kind TEXT NOT NULL, 17 | name TEXT NOT NULL, 18 | lname TEXT NOT NULL, 19 | pattern TEXT, 20 | typeref TEXT, 21 | scope TEXT, 22 | scopeKind TEXT, 23 | `access` TEXT, 24 | UNIQUE (`path`, name) 25 | ); 26 | CREATE INDEX IF NOT EXISTS tags_path ON tags (`path`); 27 | CREATE INDEX IF NOT EXISTS tags_line ON tags (line); 28 | CREATE INDEX IF NOT EXISTS tags_name ON tags (name); 29 | CREATE INDEX IF NOT EXISTS tags_lnam ON tags (lname); 30 | 31 | 32 | END; 33 | -------------------------------------------------------------------------------- /tests/tags/parser.py: -------------------------------------------------------------------------------- 1 | from itertools import islice 2 | from os import linesep 3 | from pathlib import Path 4 | from shutil import get_terminal_size, which 5 | from sys import stderr 6 | from unittest import IsolatedAsyncioTestCase 7 | 8 | from ...coq.consts import TMP_DIR 9 | from ...coq.tags.parse import parse, run 10 | 11 | 12 | class Parser(IsolatedAsyncioTestCase): 13 | async def test_1(self) -> None: 14 | tag = TMP_DIR / "TAG" 15 | TMP_DIR.mkdir(parents=True, exist_ok=True) 16 | if not tag.exists() and (ctags := which("ctags")): 17 | text = await run(Path(ctags), "--recurse") 18 | tag.write_text(text) 19 | 20 | spec = tag.read_text() 21 | parsed = parse({}, raw=spec) 22 | 23 | cols, _ = get_terminal_size() 24 | sep = linesep + "-" * cols + linesep 25 | print( 26 | *islice((tag for _, _, tags in parsed.values() for tag in tags), 10), 27 | sep=sep, 28 | file=stderr, 29 | ) 30 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | --- 2 | name: CI 3 | 4 | on: 5 | push: 6 | schedule: 7 | - cron: "0 0 * * *" # daily 8 | 9 | jobs: 10 | 
mypy: 11 | strategy: 12 | matrix: 13 | python_ver: 14 | - "3.8" 15 | - "3.9" 16 | - "3.10" 17 | - "3.11" 18 | - "3.12" 19 | - "3.13" 20 | - "3" 21 | runs-on: ubuntu-latest 22 | 23 | steps: 24 | - name: Checkout 25 | uses: actions/checkout@v4 26 | 27 | - name: Setup Python 28 | uses: actions/setup-python@v5 29 | with: 30 | python-version: ${{ matrix.python_ver }} 31 | 32 | - run: |- 33 | sudo -- apt install --yes -- universal-ctags 34 | 35 | - run: |- 36 | pip3 install -- codespell 37 | codespell --skip ./artifacts/helo.yml --skip ./tests/shared/fuzzy.py --ignore-words-list assertIn thirdparty 38 | 39 | - run: |- 40 | make lint 41 | 42 | - run: |- 43 | python3 -m coq deps 44 | 45 | - run: |- 46 | make test 47 | -------------------------------------------------------------------------------- /locale/zh.yml: -------------------------------------------------------------------------------- 1 | --- 2 | "waiting...": |- 3 | ⏳⌛️ 请稍等…… 4 | 5 | "buf 2 fat": |- 6 | ❌ 缓冲区过大,已禁用补全。 7 | 缓冲区大小 ${size} > 所定限制 ${limit} 8 | 9 | "failed to parse snippet": |- 10 | 解析代码片段失败,直接插入原文本。 11 | 12 | "added marks": |- 13 | 🎯 待替换标记: ${regions} 14 | 15 | "expand marks": |- 16 | ✏️ 展开替换标记 ${texts}: 17 | 18 | "no more marks": |- 19 | ⛔️ 无待替换标记 20 | 21 | "applied mark": |- 22 | ✅ 成功替换标记,剩余 ${marks_left} 个标记 23 | 24 | "statistics": |- 25 | 统计数据 26 | 27 | "file empty": |- 28 | 「空文件」 29 | 30 | "file binary": |- 31 | 「二进制文件」 32 | 33 | "source slow": |- 34 | ❌ 已消耗${elapsed}秒解析 ${source} 文档, 将在选中文件中禁用。 35 | 36 | "snip source not enabled": |- 37 | ❌ 没有开启代码资源 38 | 39 | "no snippets found": |- 40 | ⚠️ 未找到代码片段 41 | 42 | "snip parse fail": |- 43 | ❌ 解析代码片段失败 44 | 45 | "snip parse succ": |- 46 | ✅ 成功解析代码片段 47 | 48 | "fs snip load empty": |- 49 | ⚠️ 无可使用代码片段,请尝试更新「coq.artifacts」 50 | 51 | "fs snip load succ": |- 52 | ✅ 代码片段已更新 —— ${path} 53 | 54 | "begin T9 download": |- 55 | ⏳ 开始下载 T9…… 56 | 57 | "end T9 download": |- 58 | ✅ 成功下载 T9! 59 | 60 | "failed T9 download": |- 61 | ❌ 下载 T9 失败! 
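The `${...}` placeholders in these messages are rendered with Python's `string.Template`; a minimal sketch of the substitution that `coq/lang.py` (later in this listing) performs, using the English counterpart of the `applied mark` entry above:

```python
# Editor's sketch, not part of the plugin: the locale file is loaded into a
# Mapping[str, str] and each entry is rendered by keyword substitution.
from string import Template

spec = "✅ Mark applied, ${marks_left} left"
message = Template(spec).substitute(marks_left=3)
assert message == "✅ Mark applied, 3 left"
```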
62 | -------------------------------------------------------------------------------- /coq/clients/registers/db/sql/create/tables.sql: -------------------------------------------------------------------------------- 1 | BEGIN; 2 | 3 | 4 | CREATE TABLE IF NOT EXISTS registers ( 5 | register TEXT NOT NULL PRIMARY KEY 6 | ) WITHOUT ROWID; 7 | 8 | 9 | CREATE TABLE IF NOT EXISTS words ( 10 | register TEXT NOT NULL REFERENCES registers (register) ON UPDATE CASCADE ON DELETE CASCADE, 11 | word TEXT NOT NULL, 12 | lword TEXT NOT NULL, 13 | UNIQUE (register, word) 14 | ); 15 | CREATE INDEX IF NOT EXISTS words_register ON words (register); 16 | CREATE INDEX IF NOT EXISTS words_word ON words (word); 17 | CREATE INDEX IF NOT EXISTS words_lword ON words (lword); 18 | 19 | 20 | CREATE TABLE IF NOT EXISTS lines ( 21 | register TEXT NOT NULL REFERENCES registers (register) ON UPDATE CASCADE ON DELETE CASCADE, 22 | word TEXT NOT NULL, 23 | lword TEXT NOT NULL, 24 | line TEXT NOT NULL, 25 | UNIQUE (register, line) 26 | ); 27 | CREATE INDEX IF NOT EXISTS lines_register ON lines (register); 28 | CREATE INDEX IF NOT EXISTS lines_word ON lines (word); 29 | CREATE INDEX IF NOT EXISTS lines_lword ON lines (lword); 30 | 31 | 32 | END; 33 | -------------------------------------------------------------------------------- /coq/server/completions.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import Any, Iterable, MutableSequence, Tuple 3 | from uuid import UUID 4 | 5 | from pynvim_pp.nvim import Nvim 6 | from pynvim_pp.types import NoneType 7 | from std2.pickle.encoder import new_encoder 8 | 9 | from ..registry import NAMESPACE 10 | from ..shared.runtime import Metric 11 | from .rt_types import Stack 12 | 13 | 14 | @dataclass(frozen=True) 15 | class VimCompletion: 16 | user_data: UUID 17 | abbr: str 18 | menu: str 19 | kind: str = "" 20 | word: str = "" 21 | equal: int = 1 22 | dup: int = 1 23 | empty: int = 1 24 | 25 | 26 | _ENCODER = new_encoder[VimCompletion](VimCompletion) 27 | 28 | 29 | async def complete( 30 | stack: Stack, col: int, comps: Iterable[Tuple[Metric, VimCompletion]] 31 | ) -> None: 32 | stack.metrics.clear() 33 | 34 | acc: MutableSequence[Any] = [] 35 | for metric, comp in comps: 36 | stack.metrics[metric.comp.uid] = metric 37 | encoded = _ENCODER(comp) 38 | acc.append(encoded) 39 | 40 | await Nvim.api.exec_lua(NoneType, f"{NAMESPACE}.send_comp(...)", (col + 1, acc)) 41 | -------------------------------------------------------------------------------- /coq/shared/sql.py: -------------------------------------------------------------------------------- 1 | from functools import lru_cache 2 | from os.path import normcase 3 | from pathlib import Path 4 | from sqlite3.dbapi2 import Connection 5 | from typing import Protocol, cast 6 | 7 | from pynvim_pp.lib import decode 8 | from std2.pathlib import AnyPath 9 | from std2.sqlite3 import add_functions, escape 10 | 11 | from .fuzzy import quick_ratio 12 | 13 | BIGGEST_INT = 2**63 - 1 14 | 15 | 16 | class _Loader(Protocol): 17 | def __call__(self, *paths: AnyPath) -> str: ... 
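A hedged usage sketch for this module (the query and values are illustrative, not taken from the repo): `init_db` registers the custom SQL functions that the bundled `select/*.sql` queries call, and `like_esc` builds the `:like_word`-style parameters consumed by their `LIKE ... ESCAPE '!'` clauses.

```python
from sqlite3 import connect


def _demo() -> None:
    conn = connect(":memory:")
    init_db(conn)  # adds X_SIMILARITY / X_NORM_CASE, defined just below
    got = conn.execute(
        "SELECT :cand LIKE :like_word ESCAPE '!', "
        "X_SIMILARITY(LOWER(:word), LOWER(:cand), :look_ahead)",
        {
            "cand": "foobar",
            "word": "Foo",
            "like_word": like_esc("foo"),  # -> 'foo%', with %, _ and [ escaped via '!'
            "look_ahead": 2,
        },
    ).fetchone()
    # got is roughly (1, <similarity ratio>), the same per-candidate filter
    # that the bundled select queries apply.
```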
18 | 19 | 20 | def loader(base: Path) -> _Loader: 21 | @lru_cache(maxsize=None) 22 | def cont(*paths: AnyPath) -> str: 23 | path = (base / Path(*paths)).with_suffix(".sql") 24 | return decode(path.read_bytes()) 25 | 26 | return cast(_Loader, cont) 27 | 28 | 29 | @lru_cache 30 | def like_esc(like: str) -> str: 31 | escaped = escape(nono={"%", "_", "["}, escape="!", param=like) 32 | return f"{escaped}%" 33 | 34 | 35 | def init_db(conn: Connection) -> None: 36 | add_functions(conn) 37 | conn.create_function("X_SIMILARITY", narg=3, func=quick_ratio, deterministic=True) 38 | conn.create_function("X_NORM_CASE", narg=1, func=normcase, deterministic=True) 39 | -------------------------------------------------------------------------------- /coq/clients/tmux/db/sql/create/tables.sql: -------------------------------------------------------------------------------- 1 | BEGIN; 2 | 3 | 4 | CREATE TABLE IF NOT EXISTS panes ( 5 | pane_id TEXT NOT NULL PRIMARY KEY, 6 | session_name TEXT NOT NULL, 7 | window_index INTEGER NOT NULL, 8 | window_name TEXT NOT NULL, 9 | pane_index INTEGER NOT NULL, 10 | pane_title TEXT NOT NULL 11 | ) WITHOUT ROWID; 12 | 13 | 14 | CREATE TABLE IF NOT EXISTS words ( 15 | pane_id TEXT NOT NULL REFERENCES panes (pane_id) ON UPDATE CASCADE ON DELETE CASCADE, 16 | word TEXT NOT NULL, 17 | lword TEXT NOT NULL, 18 | UNIQUE (pane_id, word) 19 | ); 20 | CREATE INDEX IF NOT EXISTS words_pane_id ON words (pane_id); 21 | CREATE INDEX IF NOT EXISTS words_word ON words (word); 22 | CREATE INDEX IF NOT EXISTS words_lword ON words (lword); 23 | 24 | 25 | CREATE VIEW IF NOT EXISTS words_view AS 26 | SELECT 27 | words.word, 28 | words.lword, 29 | panes.pane_id, 30 | panes.session_name, 31 | panes.window_index, 32 | panes.window_name, 33 | panes.pane_index, 34 | panes.pane_title 35 | FROM panes 36 | JOIN words 37 | ON words.pane_id = panes.pane_id 38 | GROUP BY 39 | words.word 40 | HAVING 41 | words.word <> ''; 42 | 43 | END; 44 | -------------------------------------------------------------------------------- /coq/server/registrants/repeat.py: -------------------------------------------------------------------------------- 1 | from dataclasses import replace 2 | from typing import Optional 3 | 4 | from ...registry import rpc 5 | from ...shared.repeat import sanitize 6 | from ...shared.types import ContextualEdit, Edit 7 | from ..context import context 8 | from ..edit import edit 9 | from ..rt_types import Stack 10 | from ..state import state 11 | 12 | 13 | def _edit(prev: Edit) -> Optional[Edit]: 14 | sanitized = sanitize(True, cursor=(-1, -1, -1, -1), edit=prev) 15 | new_edit = ( 16 | ContextualEdit( 17 | new_text=sanitized.new_text, old_prefix="", new_prefix=sanitized.new_text 18 | ) 19 | if type(sanitized) is Edit 20 | else sanitized 21 | ) 22 | return new_edit 23 | 24 | 25 | @rpc() 26 | async def repeat(stack: Stack) -> None: 27 | ctx = await context(state=state(), change=None, manual=True) 28 | s = state(context=ctx) 29 | metric = s.last_edit 30 | if sanitized := _edit(metric.comp.primary_edit): 31 | new_metric = replace( 32 | metric, 33 | comp=replace(metric.comp, primary_edit=sanitized, secondary_edits=()), 34 | ) 35 | await edit(stack=stack, state=s, metric=new_metric, synthetic=True) 36 | -------------------------------------------------------------------------------- /coq/server/icons.py: -------------------------------------------------------------------------------- 1 | from dataclasses import replace 2 | 3 | from std2.types import never 4 | 5 | from ..shared.settings import 
IconMode, Icons 6 | from ..shared.types import Completion 7 | 8 | 9 | def iconify(icons: Icons, completion: Completion) -> Completion: 10 | if not completion.icon_match: 11 | return completion 12 | else: 13 | alias = icons.aliases.get(completion.icon_match) or completion.icon_match 14 | kind = icons.mappings.get(alias) 15 | if not kind: 16 | return completion 17 | else: 18 | if icons.mode is IconMode.none: 19 | return completion 20 | 21 | elif icons.mode is IconMode.short: 22 | return replace(completion, kind=kind + (icons.spacing - 1) * " ") 23 | 24 | elif icons.mode is IconMode.long: 25 | spc = max(1, icons.spacing) * " " 26 | new_kind = ( 27 | f"{kind}{spc}{completion.kind}" 28 | if completion.kind 29 | else kind + (icons.spacing - 1) * " " 30 | ) 31 | return replace(completion, kind=new_kind) 32 | 33 | else: 34 | never(icons.mode) 35 | -------------------------------------------------------------------------------- /coq/clients/tags/db/sql/select/tags.sql: -------------------------------------------------------------------------------- 1 | WITH fts AS ( 2 | SELECT 3 | filetype 4 | FROM files 5 | WHERE 6 | filename = :filename 7 | ) 8 | SELECT 9 | tags.`path`, 10 | tags.line, 11 | tags.kind, 12 | tags.name, 13 | tags.lname, 14 | tags.pattern, 15 | tags.typeref, 16 | tags.scope, 17 | tags.scopeKind, 18 | tags.`access` 19 | FROM tags 20 | JOIN files 21 | ON 22 | files.filename = tags.`path` 23 | JOIN fts 24 | ON 25 | fts.filetype = files.filetype 26 | WHERE 27 | tags.name <> '' 28 | AND 29 | ( 30 | ( 31 | :word <> '' 32 | AND 33 | tags.lname LIKE :like_word ESCAPE '!' 34 | AND 35 | LENGTH(tags.name) + :look_ahead >= LENGTH(:word) 36 | AND 37 | tags.name <> SUBSTR(:word, 1, LENGTH(tags.name)) 38 | AND 39 | X_SIMILARITY(LOWER(:word), tags.lname, :look_ahead) > :cut_off 40 | ) 41 | OR 42 | ( 43 | :sym <> '' 44 | AND 45 | tags.lname LIKE :like_sym ESCAPE '!' 46 | AND 47 | LENGTH(tags.name) + :look_ahead >= LENGTH(:sym) 48 | AND 49 | tags.name <> SUBSTR(:sym, 1, LENGTH(tags.name)) 50 | AND 51 | X_SIMILARITY(LOWER(:sym), tags.lname, :look_ahead) > :cut_off 52 | ) 53 | ) 54 | LIMIT :limit 55 | -------------------------------------------------------------------------------- /docs/MISC.md: -------------------------------------------------------------------------------- 1 | # Misc 2 | 3 | ### coq_settings.limits 4 | 5 | Various timeouts and retry limits 6 | 7 | #### `coq_settings.limits.tokenization_limit` 8 | 9 | `coq.nvim` when performing tokenization, will parse at most this many tokens before yielding. 10 | 11 | **default:** 12 | 13 | ```json 14 | 999 15 | ``` 16 | 17 | #### `coq_settings.limits.idle_timeout` 18 | 19 | Background tasks are executed after cursor idling for `updatetime` + `idle_timeout`. 20 | 21 | **default:** 22 | 23 | ```json 24 | 1.88 25 | ``` 26 | 27 | #### `coq_settings.limits.completion_auto_timeout` 28 | 29 | Soft timeout for on-keystroke completions. 30 | 31 | **default:** 32 | 33 | ```json 34 | 0.088 35 | ``` 36 | 37 | #### `coq_settings.limits.completion_manual_timeout` 38 | 39 | Timeout for manual completions. ie. user pressing ``, or whatever custom hotkey. 40 | 41 | **default:** 42 | 43 | ```json 44 | 0.66 45 | ``` 46 | 47 | #### `coq_settings.limits.download_retries` 48 | 49 | How many attempts to download Tabnine, should previous attempts fail. 50 | 51 | **default:** 52 | 53 | ```json 54 | 6 55 | ``` 56 | 57 | #### `coq_settings.limits.download_timeout` 58 | 59 | Tabnine download timeout. 
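Taken together, a hedged example of overriding several of the `coq_settings.limits` values documented above from Lua (the numbers are illustrative, not recommendations; the shipped default for the download timeout itself is quoted just below):

```lua
vim.g.coq_settings = {
  limits = {
    completion_auto_timeout = 0.2,
    completion_manual_timeout = 1.0,
    download_retries = 3,
  },
}
```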
60 | 61 | **default:** 62 | 63 | ```json 64 | 66 65 | ``` 66 | -------------------------------------------------------------------------------- /tests/__main__.py: -------------------------------------------------------------------------------- 1 | from argparse import ArgumentParser, Namespace 2 | from pathlib import Path 3 | from sys import exit 4 | from unittest import defaultTestLoader 5 | from unittest.runner import TextTestRunner 6 | from unittest.signals import installHandler 7 | 8 | _TESTS = Path(__file__).resolve(strict=True).parent 9 | _TOP_LV = _TESTS.parent 10 | 11 | 12 | def _parse_args() -> Namespace: 13 | parser = ArgumentParser() 14 | parser.add_argument("-v", "--verbosity", action="count", default=1) 15 | parser.add_argument("-f", "--fail", action="store_true", default=False) 16 | parser.add_argument("-b", "--buffer", action="store_true", default=False) 17 | parser.add_argument("-p", "--pattern", default="*.py") 18 | return parser.parse_args() 19 | 20 | 21 | def main() -> int: 22 | args = _parse_args() 23 | suite = defaultTestLoader.discover( 24 | str(_TESTS), top_level_dir=str(_TOP_LV.parent), pattern=args.pattern 25 | ) 26 | runner = TextTestRunner( 27 | verbosity=args.verbosity, 28 | failfast=args.fail, 29 | buffer=args.buffer, 30 | ) 31 | 32 | installHandler() 33 | r = runner.run(suite) 34 | return not r.wasSuccessful() 35 | 36 | 37 | if __name__ == "__main__": 38 | exit(main()) 39 | -------------------------------------------------------------------------------- /coq/lsp/requests/resolve.py: -------------------------------------------------------------------------------- 1 | from typing import MutableSequence, Optional 2 | 3 | from ...shared.types import Completion, ExternLSP, ExternLUA 4 | from ..parse import parse_item 5 | from ..protocol import protocol 6 | from .request import async_request 7 | 8 | 9 | async def resolve(extern: ExternLSP) -> Optional[Completion]: 10 | name = "lsp_third_party_resolve" if isinstance(extern, ExternLUA) else "lsp_resolve" 11 | comps: MutableSequence[Completion] = [] 12 | 13 | clients = {extern.client} if extern.client else set() 14 | pc = await protocol() 15 | 16 | async for client in async_request(name, None, clients, extern.item): 17 | comp = parse_item( 18 | pc, 19 | extern_type=type(extern), 20 | client=client.name, 21 | encoding=client.offset_encoding, 22 | short_name="", 23 | cursors=(-1, -1, -1, -1), 24 | always_on_top=None, 25 | weight_adjust=0, 26 | item=client.message, 27 | ) 28 | if extern.client and client.name == extern.client: 29 | return comp 30 | elif comp: 31 | comps.append(comp) 32 | else: 33 | for comp in comps: 34 | if comp.doc: 35 | return comp 36 | else: 37 | return None 38 | -------------------------------------------------------------------------------- /locale/en.yml: -------------------------------------------------------------------------------- 1 | --- 2 | "waiting...": |- 3 | ⏳⌛️ ... 
4 | 5 | "buf 2 fat": |- 6 | ❌ Buffer will not be indexed: size ${size} > ${limit} 7 | 8 | "failed to parse snippet": |- 9 | Failed to parse snippet, inserting as it is 10 | 11 | "added marks": |- 12 | 🎯 Marks added: ${regions} 13 | 14 | "expand marks": |- 15 | ✏️ Expand marks ${texts}: 16 | 17 | "no more marks": |- 18 | ⛔️ No more marks available 19 | 20 | "applied mark": |- 21 | ✅ Mark applied, ${marks_left} left 22 | 23 | "statistics": |- 24 | Statistics 25 | 26 | "file empty": |- 27 | [] 28 | 29 | "file binary": |- 30 | [] 31 | 32 | "source slow": |- 33 | ❌ ${source} took ${elapsed}s to parse document, will be disabled for current file. 34 | 35 | "snip source not enabled": |- 36 | ❌ Snippet source not enabled 37 | 38 | "no snippets found": |- 39 | ⚠️ No snippets found 40 | 41 | "snip parse fail": |- 42 | ❌ Failed to parse snippets 43 | 44 | "snip parse succ": |- 45 | ✅ Snippets parsed 46 | 47 | "fs snip load empty": |- 48 | ⚠️ No compatible snippets found, try updating `coq.artifacts` 49 | 50 | "fs snip load succ": |- 51 | ✅ Snippets updated -- ${path} 52 | 53 | "begin T9 download": |- 54 | ⏳ Downloading T9 ... 55 | 56 | "end T9 download": |- 57 | ✅ T9 download success! 58 | 59 | "failed T9 download": |- 60 | ❌ T9 download failed! 61 | -------------------------------------------------------------------------------- /autoload/coq.vim: -------------------------------------------------------------------------------- 1 | function! s:filter_completions(arg_lead, completions) abort 2 | let l:lead = escape(a:arg_lead, '\\') 3 | return filter(a:completions, {_, val -> val =~# "^" . l:lead}) 4 | endfunction 5 | 6 | function! coq#complete_now(arg_lead, cmd_line, cursor_pos) abort 7 | let l:args = [ 8 | \ '-s', 9 | \ '--shut-up', 10 | \ ] 11 | 12 | return s:filter_completions(a:arg_lead, l:args) 13 | endfunction 14 | 15 | function! coq#complete_snips(arg_lead, cmd_line, cursor_pos) abort 16 | let l:args = [ 17 | \ 'ls', 18 | \ 'cd', 19 | \ 'compile', 20 | \ 'edit', 21 | \ ] 22 | 23 | return s:filter_completions(a:arg_lead, l:args) 24 | endfunction 25 | 26 | function! 
coq#complete_help(arg_lead, cmd_line, cursor_pos) abort 27 | let l:topics = [ 28 | \ 'index', 29 | \ 'config', 30 | \ 'keybind', 31 | \ 'snips', 32 | \ 'fuzzy', 33 | \ 'comp', 34 | \ 'display', 35 | \ 'sources', 36 | \ 'misc', 37 | \ 'stats', 38 | \ 'perf', 39 | \ 'custom_sources', 40 | \ ] 41 | 42 | if a:cmd_line[a:cursor_pos - 7 : a:cursor_pos] ==# ' --web ' 43 | return s:filter_completions(a:arg_lead, l:topics) 44 | endif 45 | 46 | return s:filter_completions(a:arg_lead, insert(l:topics, '--web')) 47 | endfunction 48 | 49 | -------------------------------------------------------------------------------- /syntax/coq-snip.vim: -------------------------------------------------------------------------------- 1 | if exists('b:current_syntax') 2 | finish 3 | endif 4 | 5 | 6 | syntax match Error '\v^.+$' 7 | syntax match Comment '\v^\#.*$' 8 | 9 | 10 | syntax match Include '\v^extends\s' 11 | 12 | 13 | syntax match Keyword '\v^snippet\s' 14 | syntax match Error '\v%(^snippet\s\s*\S+)@<=\s+.+$' 15 | syntax match Keyword '\v^alias\s' 16 | syntax match Label '\v^abbr\s' 17 | 18 | 19 | syntax match String '\v^\s+\_.{-1,}$' contains=Special,csTrailingWS 20 | syntax match csTrailingWS '\v\s+$' 21 | 22 | syntax region Special start='\V${' end='\V}' contained contains=Number,Macro,Operator,csContainedString 23 | syntax match Special '\v\$\d+' contained contains=Number 24 | 25 | syntax match Macro '\v%(\$\{)@<=\w+%(\:)@=' contained nextgroup=Operator 26 | syntax match Number '\v%(\$\{?)@<=\d+' contained 27 | syntax match Operator '\V:' contained nextgroup=csContainedString 28 | syntax match csContainedString '\v%(\:)@<=\_.{-1,}%(\})@=' contained contains=Special,csTrailingWS 29 | 30 | 31 | highlight default link csTrailingWS Error 32 | highlight default link csContainedString String 33 | 34 | 35 | let b:current_syntax = expand(':t:r') 36 | -------------------------------------------------------------------------------- /coq/lang.py: -------------------------------------------------------------------------------- 1 | from locale import getlocale 2 | from string import Template 3 | from typing import Mapping, MutableMapping, Optional, Union 4 | 5 | from pynvim_pp.lib import decode 6 | from std2.pickle.decoder import new_decoder 7 | from yaml import safe_load 8 | 9 | from .consts import DEFAULT_LANG, LANG_ROOT 10 | 11 | 12 | def _get_lang(code: Optional[str], fallback: str) -> str: 13 | if code: 14 | return code.casefold() 15 | else: 16 | tag, _ = getlocale() 17 | tag = (tag or fallback).casefold() 18 | primary, _, _ = tag.partition("-") 19 | lang, _, _ = primary.partition("_") 20 | return lang 21 | 22 | 23 | class _Lang: 24 | def __init__(self, specs: MutableMapping[str, str]) -> None: 25 | self._specs = specs 26 | 27 | def __call__(self, key: str, **kwds: Union[int, float, str]) -> str: 28 | spec = self._specs[key] 29 | return Template(spec).substitute(kwds) 30 | 31 | 32 | LANG = _Lang({}) 33 | 34 | 35 | def init(code: Optional[str]) -> None: 36 | lang = _get_lang(code, fallback=DEFAULT_LANG) 37 | lang_path = (LANG_ROOT / lang).with_suffix(".yml") 38 | yml_path = ( 39 | lang_path 40 | if lang_path.exists() 41 | else (LANG_ROOT / DEFAULT_LANG).with_suffix(".yml") 42 | ) 43 | 44 | specs = new_decoder[Mapping[str, str]](Mapping[str, str])( 45 | safe_load(decode(yml_path.read_bytes())) 46 | ) 47 | LANG._specs.update(specs) 48 | 49 | 50 | init(None) 51 | -------------------------------------------------------------------------------- /coq/clients/tree_sitter/db/sql/create/tables.sql: 
-------------------------------------------------------------------------------- 1 | BEGIN; 2 | 3 | 4 | CREATE TABLE IF NOT EXISTS buffers ( 5 | rowid INTEGER NOT NULL PRIMARY KEY, 6 | filetype TEXT NOT NULL, 7 | filename TEXT NOT NULL 8 | ) WITHOUT ROWID; 9 | CREATE INDEX IF NOT EXISTS buffers_filetype ON buffers (filetype); 10 | 11 | 12 | CREATE TABLE IF NOT EXISTS words ( 13 | buffer_id INTEGER NOT NULL REFERENCES buffers (rowid) ON UPDATE CASCADE ON DELETE CASCADE, 14 | word TEXT NOT NULL, 15 | lword TEXT NOT NULL, 16 | lo INTEGER NOT NULL, 17 | hi INTEGER NOT NULL, 18 | kind TEXT NOT NULL, 19 | pword TEXT, 20 | pkind TEXT, 21 | gpword TEXT, 22 | gpkind TEXT, 23 | UNIQUE (buffer_id, word) 24 | ); 25 | CREATE INDEX IF NOT EXISTS words_buffer_id ON words (buffer_id); 26 | CREATE INDEX IF NOT EXISTS words_word ON words (word); 27 | CREATE INDEX IF NOT EXISTS words_lword ON words (lword); 28 | CREATE INDEX IF NOT EXISTS words_buffer_lo ON words (buffer_id, lo); 29 | CREATE INDEX IF NOT EXISTS words_buffer_hi ON words (buffer_id, hi); 30 | 31 | 32 | CREATE VIEW IF NOT EXISTS words_view AS 33 | SELECT 34 | buffers.filetype, 35 | buffers.filename, 36 | words.word, 37 | words.lword, 38 | words.lo + 1 AS lo, 39 | words.hi + 1 AS hi, 40 | words.kind, 41 | words.pword, 42 | words.pkind, 43 | words.gpword, 44 | words.gpkind 45 | FROM buffers 46 | JOIN words 47 | ON 48 | words.buffer_id = buffers.rowid 49 | GROUP BY 50 | words.word 51 | HAVING 52 | words.word <> ''; 53 | 54 | 55 | END; 56 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | MAKEFLAGS += --check-symlink-times 2 | MAKEFLAGS += --jobs 3 | MAKEFLAGS += --no-builtin-rules 4 | MAKEFLAGS += --no-builtin-variables 5 | MAKEFLAGS += --shuffle 6 | MAKEFLAGS += --warn-undefined-variables 7 | SHELL := bash 8 | .DELETE_ON_ERROR: 9 | .ONESHELL: 10 | .SHELLFLAGS := --norc --noprofile -Eeuo pipefail -O dotglob -O nullglob -O extglob -O failglob -O globstar -c 11 | 12 | .DEFAULT_GOAL := help 13 | 14 | .PHONY: clean clobber lint test build fmt ci 15 | 16 | clean: 17 | rm -v -rf -- .mypy_cache/ .venv/ 18 | 19 | clobber: clean 20 | rm -v -rf -- .vars/ 21 | 22 | .venv/bin/python3: 23 | python3 -m venv -- .venv 24 | 25 | define PYDEPS 26 | from itertools import chain 27 | from os import execl 28 | from sys import executable 29 | 30 | from tomli import load 31 | 32 | toml = load(open("pyproject.toml", "rb")) 33 | 34 | project = toml["project"] 35 | execl( 36 | executable, 37 | executable, 38 | "-m", 39 | "pip", 40 | "install", 41 | "--upgrade", 42 | "--", 43 | *project.get("dependencies", ()), 44 | *chain.from_iterable(project["optional-dependencies"].values()), 45 | ) 46 | endef 47 | 48 | .venv/bin/mypy: .venv/bin/python3 49 | '$<' -m pip install --requirement requirements.txt -- tomli 50 | '$<' <<< '$(PYDEPS)' 51 | 52 | lint: .venv/bin/mypy 53 | '$<' -- . 54 | 55 | test: .venv/bin/mypy 56 | .venv/bin/python3 -m tests 57 | 58 | build: .venv/bin/mypy 59 | .venv/bin/python3 -m ci 60 | 61 | fmt: .venv/bin/mypy 62 | .venv/bin/isort --profile=black --gitignore -- . 63 | .venv/bin/black -- . 
64 | 65 | ci: .venv/bin/mypy 66 | .venv/bin/python3 -m coq.ci 67 | -------------------------------------------------------------------------------- /coq/clients/buffers/db/sql/create/tables.sql: -------------------------------------------------------------------------------- 1 | BEGIN; 2 | 3 | 4 | CREATE TABLE IF NOT EXISTS buffers ( 5 | rowid INTEGER NOT NULL PRIMARY KEY, 6 | filetype TEXT NOT NULL, 7 | filename TEXT NOT NULL 8 | ) WITHOUT ROWID; 9 | CREATE INDEX IF NOT EXISTS buffers_filetype ON buffers (filetype); 10 | 11 | 12 | CREATE TABLE IF NOT EXISTS lines ( 13 | rowid BLOB NOT NULL PRIMARY KEY, 14 | buffer_id INTEGER NOT NULL REFERENCES buffers (rowid) ON UPDATE CASCADE ON DELETE CASCADE, 15 | line_num INTEGER NOT NULL, 16 | line TEXT NOT NULL, 17 | UNIQUE(buffer_id, line_num) 18 | ) WITHOUT ROWID; 19 | CREATE INDEX IF NOT EXISTS lines_buffer_id ON lines (buffer_id); 20 | CREATE INDEX IF NOT EXISTS lines_line_num ON lines (line_num); 21 | 22 | 23 | CREATE TABLE IF NOT EXISTS words ( 24 | line_id BLOB NOT NULL REFERENCES lines (rowid) ON UPDATE CASCADE ON DELETE CASCADE, 25 | word TEXT NOT NULL, 26 | lword TEXT NOT NULL, 27 | UNIQUE(line_id, word) 28 | ); 29 | CREATE INDEX IF NOT EXISTS words_line_id ON words (line_id); 30 | CREATE INDEX IF NOT EXISTS words_word ON words (word); 31 | CREATE INDEX IF NOT EXISTS words_lword ON words (lword); 32 | 33 | 34 | CREATE VIEW IF NOT EXISTS words_view AS 35 | SELECT 36 | words.word, 37 | words.lword, 38 | buffers.filetype, 39 | buffers.filename, 40 | lines.line_num 41 | FROM buffers 42 | JOIN lines 43 | ON lines.buffer_id = buffers.rowid 44 | JOIN words 45 | ON words.line_id = lines.rowid 46 | GROUP BY 47 | words.word 48 | HAVING 49 | words.word <> ''; 50 | 51 | 52 | END; 53 | -------------------------------------------------------------------------------- /coq/server/mark.py: -------------------------------------------------------------------------------- 1 | from string import whitespace 2 | from typing import Sequence 3 | from uuid import uuid4 4 | 5 | from pynvim_pp.buffer import Buffer, ExtMark, ExtMarker 6 | from pynvim_pp.lib import decode 7 | from pynvim_pp.logging import log 8 | from pynvim_pp.nvim import Nvim 9 | from pynvim_pp.rpc_types import NvimError 10 | 11 | from ..lang import LANG 12 | from ..shared.settings import Settings 13 | from ..shared.types import Mark 14 | 15 | NS = uuid4() 16 | 17 | _WS = {*whitespace} 18 | 19 | 20 | def _encode_for_display(text: str) -> str: 21 | encoded = "".join( 22 | decode(char.encode("unicode_escape")) if char in _WS else char for char in text 23 | ) 24 | return encoded 25 | 26 | 27 | async def mark(settings: Settings, buf: Buffer, marks: Sequence[Mark]) -> None: 28 | emarks = tuple( 29 | ExtMark( 30 | buf=buf, 31 | marker=ExtMarker(mark.idx + 1), 32 | begin=mark.begin, 33 | end=mark.end, 34 | meta={"hl_group": settings.display.mark_highlight_group}, 35 | ) 36 | for mark in marks 37 | ) 38 | ns = await Nvim.create_namespace(NS) 39 | await buf.clear_namespace(ns) 40 | 41 | try: 42 | await buf.set_extmarks(ns, extmarks=emarks) 43 | except NvimError: 44 | log.warning("%s", f"bad mark locations {marks}") 45 | else: 46 | if settings.display.mark_applied_notify: 47 | regions = _encode_for_display(" ".join(f"[{mark.text}]" for mark in marks)) 48 | msg = LANG("added marks", regions=regions) 49 | await Nvim.write(msg) 50 | -------------------------------------------------------------------------------- /config/compilation.yml: 
-------------------------------------------------------------------------------- 1 | --- 2 | git: 3 | # SNU 4 | - https://github.com/Shougo/neosnippet-snippets 5 | - https://github.com/fatih/vim-go 6 | - https://github.com/honza/vim-snippets 7 | 8 | # LSP 9 | - https://github.com/Ikuyadeu/vscode-R 10 | - https://github.com/Rocketseat/rocketseat-vscode-react-native-snippets 11 | - https://github.com/dsznajder/vscode-es7-javascript-react-snippets 12 | - https://github.com/johnpapa/vscode-angular-snippets 13 | - https://github.com/rafamadriz/friendly-snippets 14 | - https://github.com/sdras/vue-vscode-snippets 15 | - https://github.com/snipsnapdev/snipsnap 16 | - https://github.com/xabikos/vscode-javascript 17 | - https://github.com/xabikos/vscode-react 18 | 19 | paths: 20 | lsp: 21 | - friendly-snippets/snippets 22 | - rocketseat-vscode-react-native-snippets/snippets 23 | - snipsnap/snippets 24 | - vscode-R/snippets 25 | - vscode-angular-snippets/snippets 26 | - vscode-es7-javascript-react-snippets/src/snippets 27 | - vscode-javascript/snippets 28 | - vscode-react/snippets 29 | - vue-vscode-snippets/snippets 30 | neosnippet: 31 | - neosnippet-snippets/neosnippets 32 | - vim-go/gosnippets/snippets 33 | - vim-snippets/snippets 34 | ultisnip: 35 | - vim-go/gosnippets/UltiSnips 36 | - vim-snippets/UltiSnips 37 | 38 | remaps: 39 | dart: 40 | - dart-flutter 41 | - flutter 42 | django: 43 | - djangohtml 44 | dotenv: 45 | - dotenv-safe 46 | liquid: 47 | - html 48 | ps1: 49 | - PowerShell 50 | vue: 51 | - javascript 52 | - vue-pug 53 | - vue-script 54 | - vue-script-router 55 | - vue-script-vuex 56 | - vue-template 57 | - vuex 58 | -------------------------------------------------------------------------------- /docs/README.md: -------------------------------------------------------------------------------- 1 | # Docs 2 | 3 | Use `:COQhelp` to open up a list of help pages! 4 | 5 | Help docs are written in `markdown`. 6 | 7 | Use `:COQhelp -w` or `:COQhelp --web` to open help pages in a browser window if possible. 8 | 9 | Use `:COQhelp {topic}` or `:COQhelp {topic} --web` to visit a particular topic for more information 10 | 11 | - [:COQhelp config](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/CONF.md) 12 | 13 | - [:COQhelp keybind](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/KEYBIND.md) 14 | 15 | - [:COQhelp snips](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/SNIPS.md) 16 | 17 | - [:COQhelp fuzzy](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/FUZZY.md) 18 | 19 | - [:COQhelp display](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/DISPLAY.md) 20 | 21 | - [:COQhelp sources](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/SOURCES.md) 22 | 23 | - [:COQhelp misc](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/MISC.md) 24 | 25 | - [:COQhelp perf](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/PERF.md) 26 | 27 | - [:COQhelp stats](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/STATS.md) 28 | 29 | - [:COQhelp custom_sources](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/CUSTOM_SOURCES.md) 30 | 31 | --- 32 | 33 | ## Commands 34 | 35 | ### `COQnow` 36 | 37 | Launch `coq.nvim` with a greeting. 38 | 39 | ### `COQdeps` 40 | 41 | `:COQdeps` will install all of `coq.nvim`'s dependencies locally. 42 | 43 | Dependencies will be privately installed inside `coq.nvim`'s git root under `.vars/runtime`. 44 | 45 | Running `rm -rf` on `coq_nvim/` will cleanly remove everything `coq.nvim` installs to your local system. 46 | 47 | ### `COQstats` 48 | 49 | Launch a window and show performance data. 
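A hedged example of wiring the commands documented above into mappings (the keys are the editor's own choices, not shipped defaults; the `--shut-up` flag is the one accepted by `:COQnow`, as seen in `autoload/coq.vim` and the `now` registrant elsewhere in this listing):

```lua
vim.keymap.set("n", "<leader>cn", "<cmd>COQnow --shut-up<cr>", { desc = "start coq without the greeting" })
vim.keymap.set("n", "<leader>cs", "<cmd>COQstats<cr>", { desc = "show completion source statistics" })
vim.keymap.set("n", "<leader>ch", "<cmd>COQhelp --web<cr>", { desc = "open the help index in a browser" })
```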
50 | -------------------------------------------------------------------------------- /tests/snippets/parse.py: -------------------------------------------------------------------------------- 1 | from asyncio import run 2 | from os import linesep 3 | from shutil import get_terminal_size 4 | from sys import stderr 5 | from typing import Iterator 6 | from unittest import TestCase 7 | 8 | from ...coq.ci.load import load 9 | from ...coq.shared.context import EMPTY_CONTEXT 10 | from ...coq.shared.settings import EMPTY_COMP, EMPTY_MATCH 11 | from ...coq.shared.types import SnippetEdit 12 | from ...coq.snippets.parse import parse_basic 13 | from ...coq.snippets.parsers.types import ParseError, ParseInfo 14 | 15 | _THRESHOLD = 0.95 16 | 17 | 18 | def _edits() -> Iterator[SnippetEdit]: 19 | loaded = run(load()) 20 | for snip in loaded.snippets.values(): 21 | edit = SnippetEdit(new_text=snip.content, grammar=snip.grammar) 22 | yield edit 23 | 24 | 25 | class Parser(TestCase): 26 | def test_1(self) -> None: 27 | edits = tuple(_edits()) 28 | 29 | def errs() -> Iterator[Exception]: 30 | for edit in edits: 31 | try: 32 | parse_basic( 33 | EMPTY_MATCH, 34 | comp=EMPTY_COMP, 35 | adjust_indent=False, 36 | context=EMPTY_CONTEXT, 37 | snippet=edit, 38 | info=ParseInfo(visual="", clipboard="", comment_str=("", "")), 39 | ) 40 | except ParseError as e: 41 | yield e 42 | 43 | errors = tuple(errs()) 44 | succ = 1 - (len(errors) / len(edits) if edits else 0) 45 | self.assertGreater(succ, _THRESHOLD) 46 | 47 | cols, _ = get_terminal_size() 48 | sep = "=" * cols + linesep 49 | print(*errors, sep=sep, file=stderr) 50 | -------------------------------------------------------------------------------- /coq/lsp/protocol.py: -------------------------------------------------------------------------------- 1 | from asyncio import wrap_future 2 | from asyncio.tasks import create_task 3 | from concurrent.futures import Future, InvalidStateError 4 | from contextlib import suppress 5 | from dataclasses import dataclass 6 | from functools import lru_cache 7 | from pathlib import Path 8 | from typing import Any, Mapping, Optional, cast 9 | 10 | from pynvim_pp.lib import decode 11 | from pynvim_pp.nvim import Nvim 12 | from pynvim_pp.types import NoneType 13 | from std2.pickle.decoder import new_decoder 14 | 15 | _LUA = decode( 16 | Path(__file__).resolve(strict=True).with_name("protocol.lua").read_bytes() 17 | ) 18 | 19 | 20 | @dataclass(frozen=True) 21 | class LSProtocol: 22 | CompletionItemKind: Mapping[Optional[int], str] 23 | InsertTextFormat: Mapping[Optional[int], str] 24 | 25 | 26 | @lru_cache(maxsize=None) 27 | def _protocol() -> Future: 28 | async def c1() -> LSProtocol: 29 | raw: Mapping[str, Mapping[str, int]] = await cast( 30 | Any, Nvim.api.exec_lua(NoneType, _LUA, ()) 31 | ) 32 | trans = {key: {v: k for k, v in val.items()} for key, val in raw.items()} 33 | protocol = new_decoder[LSProtocol](LSProtocol, strict=False)(trans) 34 | return protocol 35 | 36 | f: Future = Future() 37 | 38 | async def c2() -> None: 39 | try: 40 | ret = await c1() 41 | except BaseException as e: 42 | with suppress(InvalidStateError): 43 | f.set_exception(e) 44 | else: 45 | with suppress(InvalidStateError): 46 | f.set_result(ret) 47 | 48 | create_task(c2()) 49 | return f 50 | 51 | 52 | async def protocol() -> LSProtocol: 53 | f: Future = _protocol() 54 | return await wrap_future(f) 55 | -------------------------------------------------------------------------------- /docs/COMPLETION.md: 
-------------------------------------------------------------------------------- 1 | # Completion 2 | 3 | ### coq_settings.completion 4 | 5 | #### coq_settings.completion.always 6 | 7 | Always trigger completion on keystroke 8 | 9 | **default:** 10 | 11 | ```json 12 | true 13 | ``` 14 | 15 | --- 16 | 17 | #### coq_settings.completion.sticky_manual 18 | 19 | Trigger completion on every keystroke after manual completion until you leave insert mode. 20 | 21 | **default:** 22 | 23 | ```json 24 | true 25 | ``` 26 | 27 | --- 28 | 29 | #### coq_settings.completion.replace_prefix_threshold 30 | 31 | Controls when inexact match occurs between the text under cursor, and the text to be inserted. 32 | 33 | Depending on if the ending of the text under cursor matches the beginning of the text to be inserted, `coq.nvim` will either replace the text under cursor, or chop off the front of some portion of the text to be inserted. 34 | 35 | This is the minimum number of characters matched before `coq.nvim` will consider performing any chopping. 36 | 37 | **default:** 38 | 39 | ```json 40 | 3 41 | ``` 42 | 43 | #### coq_settings.completion.replace_suffix_threshold 44 | 45 | See above. 46 | 47 | **default:** 48 | 49 | ```json 50 | 2 51 | ``` 52 | 53 | #### coq_settings.completion.smart 54 | 55 | Tries (even harder) to reconcile differences between document and modifications. 56 | 57 | Currently used only for slower but better cache algorithm for certain LSPs. 58 | 59 | **default:** 60 | 61 | ```json 62 | true 63 | ``` 64 | 65 | #### coq_settings.completion.skip_after 66 | 67 | Set of tokens that should prevent auto completion, when found directly before the cursor. 68 | 69 | ie `["{", "}", "[", "]"]`, etc 70 | 71 | Setting this to `[""]` will disable auto complete. 72 | 73 | **default:** 74 | 75 | ```json 76 | [] 77 | ``` 78 | -------------------------------------------------------------------------------- /coq/shared/parse.py: -------------------------------------------------------------------------------- 1 | from itertools import islice 2 | from random import choice 3 | from typing import AbstractSet, Iterator, MutableSequence, Optional, Sequence 4 | 5 | from pynvim_pp.text_object import is_word 6 | 7 | 8 | def lower(text: str) -> str: 9 | return text.casefold() 10 | 11 | 12 | def coalesce( 13 | keywords: AbstractSet[str], 14 | include_syms: bool, 15 | backwards: Optional[bool], 16 | chars: Sequence[str], 17 | ) -> Iterator[str]: 18 | backwards = choice((True, False)) if backwards is None else backwards 19 | 20 | words: MutableSequence[str] = [] 21 | syms: MutableSequence[str] = [] 22 | 23 | def w_it() -> Iterator[str]: 24 | if words: 25 | word = "".join(reversed(words) if backwards else words) 26 | words.clear() 27 | yield word 28 | 29 | def s_it() -> Iterator[str]: 30 | if syms: 31 | sym = "".join(reversed(syms) if backwards else syms) 32 | syms.clear() 33 | yield sym 34 | 35 | for chr in reversed(chars) if backwards else iter(chars): 36 | if is_word(keywords, chr=chr): 37 | words.append(chr) 38 | yield from s_it() 39 | elif not chr.isspace(): 40 | if include_syms: 41 | syms.append(chr) 42 | yield from w_it() 43 | else: 44 | yield from w_it() 45 | yield from s_it() 46 | 47 | yield from w_it() 48 | yield from s_it() 49 | 50 | 51 | def tokenize( 52 | tokenization_limit: int, 53 | keywords: AbstractSet[str], 54 | include_syms: bool, 55 | text: str, 56 | ) -> Iterator[str]: 57 | words = coalesce(keywords, include_syms=include_syms, backwards=None, chars=text) 58 | return islice(words, tokenization_limit) 59 | 
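As a quick illustration of the tokenizer above, a hedged sketch of what `coalesce` yields for a small input (the exact split depends on `pynvim_pp.text_object.is_word`, assumed here to treat alphanumerics plus the supplied keyword characters as word characters):

```python
from coq.shared.parse import coalesce  # assumes the repository root is importable

# Word characters accumulate into words, other non-space characters into symbol
# runs (include_syms=True), and whitespace flushes whichever run is pending.
tokens = list(
    coalesce(frozenset("_"), include_syms=True, backwards=False, chars='foo_bar = "baz"')
)
# Expected, under the assumption above: ['foo_bar', '=', '"', 'baz', '"']
```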
-------------------------------------------------------------------------------- /coq/server/registrants/noop.py: -------------------------------------------------------------------------------- 1 | from argparse import Namespace 2 | from dataclasses import dataclass 3 | from itertools import chain 4 | from os import linesep 5 | from random import choice, sample 6 | from sys import stdout 7 | from typing import Sequence, Tuple 8 | 9 | from pynvim_pp.lib import decode, encode 10 | from pynvim_pp.nvim import Nvim 11 | from std2.argparse import ArgparseError, ArgParser 12 | from std2.pickle.decoder import new_decoder 13 | from yaml import safe_load 14 | 15 | from ...consts import HELO_ARTIFACTS 16 | from ...registry import rpc 17 | from ..rt_types import Stack 18 | 19 | 20 | @dataclass(frozen=True) 21 | class _Helo: 22 | chars: Tuple[int, int] 23 | cocks: Sequence[str] 24 | stars: Sequence[str] 25 | helo: Sequence[str] 26 | 27 | 28 | _HELO = new_decoder[_Helo](_Helo)(safe_load(decode(HELO_ARTIFACTS.read_bytes()))) 29 | 30 | 31 | def _parse_args(args: Sequence[str]) -> Namespace: 32 | parser = ArgParser() 33 | parser.add_argument("-s", "--shut-up", action="store_true") 34 | return parser.parse_args(args) 35 | 36 | 37 | @rpc() 38 | async def now(stack: Stack, args: Sequence[str]) -> None: 39 | try: 40 | ns = _parse_args(args) 41 | except ArgparseError as e: 42 | await Nvim.write(e, error=True) 43 | else: 44 | if stack.settings.display.statusline.helo: 45 | if not ns.shut_up: 46 | lo, hi = _HELO.chars 47 | chars = choice(range(lo, hi)) 48 | star = (choice(_HELO.stars),) 49 | birds = " ".join(chain(star, sample(_HELO.cocks, k=chars), star)) 50 | helo = choice(_HELO.helo) 51 | msg = f"{birds} {helo}{linesep}" 52 | encoded = encode(msg) 53 | stdout.buffer.write(encoded) 54 | stdout.buffer.flush() 55 | -------------------------------------------------------------------------------- /coq/snippets/parsers/types.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | from dataclasses import dataclass 4 | from typing import Iterator, MutableSequence, Optional, Sequence, Tuple, Union 5 | 6 | from std2.itertools import deiter 7 | 8 | from ...shared.types import Context, TextTransform, TextTransforms 9 | 10 | 11 | class ParseError(Exception): ... 12 | 13 | 14 | @dataclass(frozen=True) 15 | class Index: 16 | i: int 17 | row: int 18 | col: int 19 | 20 | 21 | EChar = Tuple[Index, str] 22 | 23 | 24 | @dataclass(frozen=True) 25 | class ParseInfo: 26 | visual: str 27 | clipboard: str 28 | comment_str: Tuple[str, str] 29 | 30 | 31 | @dataclass(frozen=True) 32 | class ParserCtx(Iterator): 33 | ctx: Context 34 | text: str 35 | info: ParseInfo 36 | dit: deiter[EChar] 37 | stack: MutableSequence[Union[int, str]] 38 | 39 | def __iter__(self) -> ParserCtx: 40 | return self 41 | 42 | def __next__(self) -> EChar: 43 | return next(self.dit) 44 | 45 | 46 | @dataclass(frozen=True) 47 | class Unparsed: 48 | text: str 49 | 50 | 51 | @dataclass(frozen=True) 52 | class IntBegin: 53 | idx: int 54 | 55 | 56 | @dataclass(frozen=True) 57 | class VarBegin: 58 | name: str 59 | 60 | 61 | @dataclass(frozen=True) 62 | class Transform: 63 | var_subst: Optional[str] 64 | maybe_idx: int 65 | xform: TextTransform 66 | 67 | 68 | @dataclass(frozen=True) 69 | class End: ... 
70 | 71 | 72 | Token = Union[Unparsed, IntBegin, Transform, VarBegin, End, str] 73 | TokenStream = Iterator[Token] 74 | 75 | 76 | @dataclass(frozen=True) 77 | class Region: 78 | begin: int 79 | end: int 80 | text: str 81 | 82 | 83 | @dataclass(frozen=True) 84 | class Parsed: 85 | text: str 86 | cursor: int 87 | regions: Sequence[Tuple[int, Region]] 88 | xforms: TextTransforms 89 | -------------------------------------------------------------------------------- /coq/clients/lsp/mul_bandit.py: -------------------------------------------------------------------------------- 1 | from bisect import bisect 2 | from collections import defaultdict 3 | from datetime import timedelta 4 | from math import exp, floor, gamma, inf, log 5 | from random import random, uniform 6 | from typing import AbstractSet, MutableSet, Optional, Sequence 7 | 8 | from std2.itertools import pairwise 9 | 10 | 11 | def _logit(x: float) -> float: 12 | return log(x / (1 - x)) 13 | 14 | 15 | def _bins(k: float, n: int) -> Sequence[float]: 16 | return tuple((exp(k * i) - 1) * 1000 for i in range(n)) + (inf,) 17 | 18 | 19 | class _Dist: 20 | def __init__(self) -> None: 21 | self._bins = _bins(k=0.03, n=6) 22 | self._decay = 0.99 23 | self._cdf = [0.0 for _ in pairwise(self._bins)] 24 | self._sum = 0.0 25 | 26 | def update(self, x: float) -> None: 27 | binned, inc = False, 0.0 28 | for i, (lo, hi) in enumerate(pairwise(self._bins)): 29 | binned |= lo <= x < hi 30 | inc += binned 31 | self._cdf[i] = self._cdf[i] * self._decay + binned 32 | self._cdf[-1] = self._cdf[-1] * self._decay + inc 33 | 34 | def inv_cdf(self, p: float) -> float: 35 | assert 0 <= p <= 1 36 | 37 | i = round((len(self._cdf) - 1) * p) 38 | print(self._cdf[i + 1]) 39 | frac = (p - self._cdf[i]) / (self._cdf[i + 1] - self._cdf[i]) 40 | return self._bins[i] + frac * (self._bins[i + 1] - self._bins[i]) 41 | 42 | 43 | class MultiArmedBandit: 44 | def __init__(self) -> None: 45 | self._probability_floor = 0.05 46 | self._clients: MutableSet[str] = set() 47 | 48 | def _elapsed(self, client: str) -> float: 49 | return inf 50 | 51 | def update( 52 | self, clients: AbstractSet[str], client: Optional[str], elapsed: timedelta 53 | ) -> None: 54 | self._clients |= clients 55 | -------------------------------------------------------------------------------- /coq/shared/context.py: -------------------------------------------------------------------------------- 1 | from os.path import normcase 2 | from pathlib import Path, PurePath 3 | from typing import AbstractSet 4 | from uuid import uuid4 5 | 6 | from pynvim_pp.text_object import is_word 7 | 8 | from .types import Context 9 | 10 | _FILE = Path(__file__).resolve(strict=True) 11 | 12 | EMPTY_CONTEXT = Context( 13 | manual=True, 14 | change_id=uuid4(), 15 | commit_id=uuid4(), 16 | cwd=PurePath(), 17 | buf_id=0, 18 | filename=normcase(_FILE), 19 | filetype="", 20 | line_count=0, 21 | keywordset=frozenset(), 22 | linefeed="\n", 23 | tabstop=2, 24 | expandtab=True, 25 | comment=("", ""), 26 | position=(0, 0), 27 | cursor=(0, 0, 0, 0), 28 | scr_col=0, 29 | win_size=0, 30 | line="", 31 | line_before="", 32 | line_after="", 33 | lines=(), 34 | lines_before=(), 35 | lines_after=(), 36 | words="", 37 | words_before="", 38 | words_after="", 39 | syms="", 40 | syms_before="", 41 | syms_after="", 42 | ws_before="", 43 | ws_after="", 44 | l_words_before="", 45 | l_words_after="", 46 | l_syms_before="", 47 | l_syms_after="", 48 | is_lower=True, 49 | change=None, 50 | ) 51 | 52 | 53 | def cword_before( 54 | keywords: AbstractSet[str], 
lower: bool, context: Context, sort_by: str 55 | ) -> str: 56 | char = sort_by[:1] 57 | 58 | if char.isspace(): 59 | return context.ws_before 60 | elif is_word(keywords, chr=char): 61 | return context.l_words_before if lower else context.words_before 62 | else: 63 | return context.l_syms_before if lower else context.syms_before 64 | 65 | 66 | def cword_after( 67 | keywords: AbstractSet[str], lower: bool, context: Context, sort_by: str 68 | ) -> str: 69 | char = sort_by[-1:] 70 | 71 | if char.isspace(): 72 | return context.ws_after 73 | elif is_word(keywords, chr=char): 74 | return context.l_words_after if lower else context.words_after 75 | else: 76 | return context.l_syms_after if lower else context.syms_after 77 | -------------------------------------------------------------------------------- /docs/STATS.md: -------------------------------------------------------------------------------- 1 | ## How to interpret statistics 2 | 3 | ```viml 4 | :COQstats 5 | ``` 6 | ![statistics.img](https://raw.githubusercontent.com/ms-jpq/coq.artifacts/artifacts/preview/stats.gif) 7 | 8 | ### Is this the actual response speed for each keystroke 9 | 10 | No, these measure the response speed of the sources. 11 | 12 | - Like good GUI programs, `coq.nvim` frees up the "UI Thread" as much as possible, and does work asynchronously. 13 | 14 | - When the sources are calculating, you are free to type around already. 15 | 16 | - There are also optimizations in-place so that many keystrokes do not trigger unnecessary requests to sources. 17 | 18 | - There is a near constant (and minor) overhead for each keystroke, the overhead is only profiled by running `coq.nvim` in debug mode. 19 | 20 | ### Q0, 50, 95, 100? 21 | 22 | Mean `min`, `median`, `1 in 20`, `max`, respectively. 23 | 24 | Without assuming any statistical distribution: 25 | 26 | **`Q50` is a more robust measure than `avg`**, and `Q95` is a decent measure of a common `bad` value. 27 | 28 | ### What does each column mean? 29 | 30 | #### Interrupted 31 | 32 | `coq.nvim` uses collaborative multitasking, and will cancel incomplete completion requests, if they become unnecessary. 33 | 34 | Ideally, all sources should have similar interrupted statistics, which would imply all sources are similarly fast. 35 | 36 | If some sources have many interrupted vis a vis the rest, it implies that those sources are slower than others. 37 | 38 | #### Inserted 39 | 40 | Simple count of how many insertions are from this source. 41 | 42 | #### Duration 43 | 44 | This is a misleading statistic for several reasons. 45 | 46 | The price `coq.nvim` pays for being collaboratively scheduled is that sources are executed concurrently. 47 | 48 | This means that one slow source can slow down all sources, with the exception being `LSP`, and `T9`, whose results are mostly calculated by other processes. 49 | 50 | This also means that the time spans are **not additive**. Say five sources each take 40ms to complete, the total execution time is 40ms, not 200ms. 51 | 52 | The overall duration is `min(timeout, max()) + `. 53 | -------------------------------------------------------------------------------- /docs/CONF.md: -------------------------------------------------------------------------------- 1 | # Conf 2 | 3 | All configurations are under the global variable **`coq_settings`**. 4 | 5 | VimL: 6 | 7 | ```vim 8 | let g:coq_settings = { ... } 9 | ``` 10 | 11 | Lua: 12 | 13 | ```lua 14 | vim.g.coq_settings = { ... 
} 15 | ``` 16 | 17 | --- 18 | 19 | ## Shorthand 20 | 21 | Dictionary keys will be automatically expanded with the `.` notation. This works recursively. 22 | 23 | ie. The following are equivalent 24 | 25 | ```json 26 | { "dog.puppy": 2 } 27 | ``` 28 | 29 | ```json 30 | { "dog": { "puppy": 2 } } 31 | ``` 32 | 33 | Note in lua, you will need to quote your keys like so: 34 | 35 | ```lua 36 | { ["dog.puppy"] = 2 } 37 | ``` 38 | 39 | Note in VimL, to specify `True` and `False`, you need to use the following: 40 | 41 | ```vim 42 | v:true 43 | v:false 44 | ``` 45 | 46 | --- 47 | 48 | ## Validation 49 | 50 | Variables will be validated against a schema. 51 | 52 | ie. 53 | 54 | Vim: 55 | 56 | ```vim 57 | let g:coq_settings = { 'match.look_ahead': 'dog' } 58 | ``` 59 | 60 | Lua: 61 | 62 | ```lua 63 | vim.g.coq_settings = { 64 | match = { 65 | look_ahead = "dog", 66 | }, 67 | } 68 | ``` 69 | 70 | Will give you the following error message: 71 | 72 | ![conf_demo.img](https://raw.githubusercontent.com/ms-jpq/coq.artifacts/artifacts/preview/conf.png) 73 | 74 | **Notice it says `Extra keys: {dog}`** 75 | 76 | --- 77 | 78 | ## Specifics 79 | 80 | Set `coq_settings.auto_start` to `true | 'shut-up'` to auto start. 81 | 82 | Set `coq_settings.xdg` to `true` to use `XDG`. 83 | 84 | - [:COQhelp keybind](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/KEYBIND.md) 85 | 86 | Key bindings 87 | 88 | - [:COQhelp fuzzy](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/FUZZY.md) 89 | 90 | Fuzzy ranking 91 | 92 | - [:COQhelp comp](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/COMPLETION.md) 93 | 94 | Completion options 95 | 96 | - [:COQhelp display](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/DISPLAY.md) 97 | 98 | Appearances 99 | 100 | - [:COQhelp sources](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/SOURCES.md) 101 | 102 | Source specific 103 | 104 | - [:COQhelp misc](https://github.com/ms-jpq/coq_nvim/tree/coq/docs/MISC.md) 105 | 106 | Misc (including timeouts) 107 | -------------------------------------------------------------------------------- /ci/main.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | from os import environ, sep 3 | from pathlib import Path 4 | from shutil import rmtree 5 | from subprocess import check_call, check_output, run 6 | from sys import executable 7 | from typing import Iterator 8 | 9 | _TOP_LV = Path(__file__).resolve(strict=True).parent.parent 10 | 11 | 12 | def _git_identity() -> None: 13 | email = "ci@ci.ci" 14 | username = "ci-bot" 15 | check_call(("git", "config", "--global", "user.email", email)) 16 | check_call(("git", "config", "--global", "user.name", username)) 17 | 18 | 19 | def _git_clone(path: Path, repo_name: str) -> None: 20 | if path.is_dir(): 21 | rmtree(path) 22 | 23 | token = environ["CI_TOKEN"] 24 | uri = f"https://ms-jpq:{token}@github.com/ms-jpq/{repo_name}.git" 25 | check_call(("git", "clone", uri, str(path))) 26 | 27 | 28 | def _build(cwd: Path) -> None: 29 | check_call((executable, "-m", "coq.ci"), cwd=cwd) 30 | 31 | 32 | def _git_alert(cwd: Path) -> None: 33 | prefix = "ci" 34 | check_call(("git", "fetch"), cwd=cwd) 35 | remote_brs = check_output(("git", "branch", "--remotes"), text=True, cwd=cwd) 36 | 37 | def cont() -> Iterator[str]: 38 | for br in remote_brs.splitlines(): 39 | b = br.strip() 40 | if b and "->" not in b: 41 | _, _, name = b.partition(sep) 42 | if name.startswith(prefix): 43 | yield name 44 | 45 | refs = tuple(cont()) 46 | if refs: 47 | 
check_call(("git", "push", "--delete", "origin", *refs), cwd=cwd) 48 | 49 | proc = run(("git", "diff", "--exit-code"), cwd=cwd) 50 | if proc.returncode: 51 | time = datetime.now(tz=timezone.utc).strftime("%Y-%m-%d_%H-%M-%S") 52 | brname = f"{prefix}--{time}" 53 | check_call(("git", "checkout", "-b", brname), cwd=cwd) 54 | check_call(("git", "add", "."), cwd=cwd) 55 | check_call(("git", "commit", "-m", f"update_artifacts: {time}"), cwd=cwd) 56 | check_call(("git", "push", "--set-upstream", "origin", brname), cwd=cwd) 57 | 58 | 59 | def main() -> None: 60 | snips = _TOP_LV / ".vars" / "snippets" 61 | if "CI" in environ: 62 | _git_identity() 63 | _git_clone(snips, repo_name="coq.artifacts") 64 | _build(_TOP_LV) 65 | _git_alert(snips) 66 | -------------------------------------------------------------------------------- /coq/clients/cache/db/database.py: -------------------------------------------------------------------------------- 1 | from contextlib import closing, suppress 2 | from sqlite3 import Connection, OperationalError 3 | from typing import Iterable, Iterator, Mapping, Tuple 4 | 5 | from ....databases.types import DB 6 | from ....shared.settings import MatchOptions 7 | from ....shared.sql import BIGGEST_INT, init_db, like_esc 8 | from .sql import sql 9 | 10 | 11 | def _init() -> Connection: 12 | conn = Connection(":memory:", isolation_level=None) 13 | init_db(conn) 14 | conn.executescript(sql("create", "pragma")) 15 | conn.executescript(sql("create", "tables")) 16 | return conn 17 | 18 | 19 | class Database(DB): 20 | def __init__(self) -> None: 21 | self._conn = _init() 22 | 23 | def insert(self, keys: Iterable[Tuple[bytes, str]]) -> None: 24 | def m1() -> Iterator[Mapping]: 25 | for key, word in keys: 26 | yield {"key": key, "word": word} 27 | 28 | with suppress(OperationalError): 29 | with self._conn, closing(self._conn.cursor()) as cursor: 30 | with suppress(UnicodeEncodeError): 31 | cursor.executemany(sql("insert", "word"), m1()) 32 | 33 | def select( 34 | self, clear: bool, opts: MatchOptions, word: str, sym: str, limitless: int 35 | ) -> Iterator[Tuple[bytes, str]]: 36 | with suppress(OperationalError): 37 | if clear: 38 | with self._conn, closing(self._conn.cursor()) as cursor: 39 | cursor.execute(sql("delete", "words")) 40 | else: 41 | with self._conn, closing(self._conn.cursor()) as cursor: 42 | limit = BIGGEST_INT if limitless else opts.max_results 43 | cursor.execute( 44 | sql("select", "words"), 45 | { 46 | "exact": opts.exact_matches, 47 | "cut_off": opts.fuzzy_cutoff, 48 | "look_ahead": opts.look_ahead, 49 | "limit": limit, 50 | "word": word, 51 | "sym": sym, 52 | "like_word": like_esc(word[: opts.exact_matches]), 53 | "like_sym": like_esc(sym[: opts.exact_matches]), 54 | }, 55 | ) 56 | for row in cursor: 57 | yield row["key"], row["word"] 58 | -------------------------------------------------------------------------------- /coq/snippets/loaders/lsp.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from json import loads 3 | from json.decoder import JSONDecodeError 4 | from os.path import normcase 5 | from pathlib import PurePath 6 | from typing import AbstractSet, Iterable, Iterator, Mapping, Sequence, Tuple, Union 7 | 8 | from std2.pickle.decoder import new_decoder 9 | 10 | from ...shared.types import SnippetGrammar 11 | from ..consts import SNIP_LINE_SEP 12 | from ..types import ParsedSnippet 13 | from .parse import raise_err 14 | 15 | 16 | @dataclass 17 | class _Unit: 18 | body: Union[str, 
Sequence[str]] 19 | prefix: Union[str, Sequence[str], None] = None 20 | description: Union[str, Sequence[str]] = "" 21 | 22 | 23 | _DECODER = new_decoder[Mapping[str, _Unit]](Mapping[str, _Unit], strict=False) 24 | 25 | 26 | def _prefix(prefix: Union[str, Sequence[str], None], content: str) -> AbstractSet[str]: 27 | if prefix is None: 28 | return {content} 29 | elif isinstance(prefix, str): 30 | return {prefix.strip()} 31 | elif isinstance(prefix, Sequence): 32 | return {p.strip() for p in prefix} 33 | else: 34 | assert False 35 | 36 | 37 | def _body(body: Union[str, Sequence[str]]) -> str: 38 | if isinstance(body, str): 39 | return body 40 | elif isinstance(body, Sequence): 41 | return SNIP_LINE_SEP.join(body) 42 | else: 43 | assert False 44 | 45 | 46 | def load_lsp( 47 | grammar: SnippetGrammar, path: PurePath, lines: Iterable[Tuple[int, str]] 48 | ) -> Tuple[str, AbstractSet[str], Sequence[ParsedSnippet]]: 49 | filetype = normcase(path.stem.strip()) 50 | 51 | text = SNIP_LINE_SEP.join(line.rstrip() for _, line in lines) 52 | try: 53 | json = loads(text) 54 | except JSONDecodeError as e: 55 | raise_err(path, lineno=e.lineno, line=text, reason=e.msg) 56 | else: 57 | fmt = _DECODER(json) 58 | 59 | def cont() -> Iterator[ParsedSnippet]: 60 | for label, values in fmt.items(): 61 | content = _body(values.body).strip() 62 | matches = _prefix(values.prefix, content=content) 63 | doc = _body(values.description).strip() 64 | snippet = ParsedSnippet( 65 | grammar=grammar, 66 | filetype=filetype, 67 | content=content, 68 | doc=doc, 69 | label=label, 70 | matches=matches, 71 | ) 72 | yield snippet 73 | 74 | return filetype, set(), tuple(cont()) 75 | -------------------------------------------------------------------------------- /coq/shared/timeit.py: -------------------------------------------------------------------------------- 1 | from asyncio import Lock 2 | from contextlib import contextmanager, nullcontext 3 | from time import process_time 4 | from types import TracebackType 5 | from typing import ( 6 | Any, 7 | AsyncContextManager, 8 | Iterator, 9 | MutableMapping, 10 | Optional, 11 | Tuple, 12 | Type, 13 | ) 14 | 15 | from pynvim_pp.logging import log 16 | from std2.locale import si_prefixed_smol 17 | from std2.timeit import timeit as _timeit 18 | 19 | from ..consts import DEBUG 20 | 21 | _RECORDS: MutableMapping[str, Tuple[int, float]] = {} 22 | 23 | 24 | @contextmanager 25 | def timeit( 26 | name: str, *args: Any, force: bool = False, warn: Optional[float] = None 27 | ) -> Iterator[None]: 28 | if DEBUG or force or warn is not None: 29 | with _timeit() as t: 30 | yield None 31 | delta = t().total_seconds() 32 | if DEBUG or force or delta >= (warn or 0): 33 | times, cum = _RECORDS.get(name, (0, 0)) 34 | tt, c = times + 1, cum + delta 35 | _RECORDS[name] = tt, c 36 | 37 | label = name.ljust(50) 38 | time = f"{si_prefixed_smol(delta, precision=0)}s".ljust(8) 39 | ttime = f"{si_prefixed_smol(c / tt, precision=0)}s".ljust(8) 40 | msg = f"TIME -- {label} :: {time} @ {ttime} {' '.join(map(str, args))}" 41 | if force: 42 | log.info("%s", msg) 43 | else: 44 | log.debug("%s", msg) 45 | else: 46 | yield None 47 | 48 | 49 | class TracingLocker(AsyncContextManager): 50 | def __init__(self, name: str, force: bool = False) -> None: 51 | self._lock = Lock() 52 | self._name, self._force = name, force 53 | 54 | def locked(self) -> bool: 55 | return self._lock.locked() 56 | 57 | async def __aenter__(self) -> None: 58 | mgr = ( 59 | timeit(f"LOCKED -- {self._name}", force=self._force) 60 | if 
self._lock.locked() 61 | else nullcontext() 62 | ) 63 | with mgr: 64 | await self._lock.__aenter__() 65 | 66 | async def __aexit__( 67 | self, 68 | exc_type: Optional[Type[BaseException]], 69 | exc: Optional[BaseException], 70 | tb: Optional[TracebackType], 71 | ) -> None: 72 | await self._lock.__aexit__(exc_type, exc, tb) 73 | 74 | 75 | @contextmanager 76 | def cpu_timeit() -> Iterator[None]: 77 | t1 = process_time() 78 | with _timeit() as t: 79 | yield None 80 | t2 = process_time() 81 | delta = t().total_seconds() 82 | cpu = (t2 - t1) / delta 83 | msg = f"CPU :: {cpu}" 84 | log.info("%s", msg) 85 | -------------------------------------------------------------------------------- /coq/consts.py: -------------------------------------------------------------------------------- 1 | from itertools import chain 2 | from os import environ, name 3 | from os.path import normpath 4 | from pathlib import Path 5 | from string import ascii_letters, digits 6 | 7 | GIL_SWITCH = 1 / (10**3) 8 | CACHE_CHUNK = 9 9 | 10 | IS_WIN = name == "nt" 11 | 12 | TOP_LEVEL = Path(__file__).resolve(strict=True).parent.parent 13 | REQUIREMENTS = TOP_LEVEL / "requirements.txt" 14 | 15 | BASIC_KEYWORDS = frozenset(chain(digits, ascii_letters, ("_", "-"))) 16 | 17 | VARS = TOP_LEVEL / ".vars" 18 | 19 | RT_DIR = VARS / "runtime" 20 | RT_PY = RT_DIR / "Scripts" / "python.exe" if IS_WIN else RT_DIR / "bin" / "python3" 21 | 22 | _CONF_DIR = TOP_LEVEL / "config" 23 | LANG_ROOT = TOP_LEVEL / "locale" 24 | DEFAULT_LANG = "en" 25 | _DOC_DIR = TOP_LEVEL / "docs" 26 | 27 | 28 | CONFIG_YML = _CONF_DIR / "defaults.yml" 29 | COMPILATION_YML = _CONF_DIR / "compilation.yml" 30 | 31 | 32 | _ART_DIR = TOP_LEVEL / "artifacts" 33 | HELO_ARTIFACTS = _ART_DIR / "helo.yml" 34 | 35 | 36 | TMP_DIR = VARS / "tmp" 37 | 38 | 39 | SETTINGS_VAR = "coq_settings" 40 | 41 | REPL_GRAMMAR = environ.get("COQ_GRAMMAR", "lsp") 42 | 43 | DEBUG = "COQ_DEBUG" in environ 44 | DEBUG_METRICS = "COQ_DEBUG_METRICS" in environ 45 | DEBUG_DB = "COQ_DEBUG_DB" in environ 46 | 47 | BUFFER_DB = normpath(TMP_DIR / "buffers.sqlite3") if DEBUG_DB else ":memory:" 48 | TREESITTER_DB = normpath(TMP_DIR / "treesitter.sqlite3") if DEBUG_DB else ":memory:" 49 | INSERT_DB = normpath(TMP_DIR / "inserts.sqlite3") if DEBUG_DB else ":memory:" 50 | TMUX_DB = normpath(TMP_DIR / "tmux.sqlite3") if DEBUG_DB else ":memory:" 51 | REGISTER_DB = normpath(TMP_DIR / "register.sqlite3") if DEBUG_DB else ":memory:" 52 | 53 | 54 | _URI_BASE = "https://github.com/ms-jpq/coq_nvim/tree/coq/docs/" 55 | 56 | MD_README = _DOC_DIR / "README.md" 57 | URI_README = _URI_BASE + MD_README.name 58 | 59 | MD_CONF = _DOC_DIR / "CONF.md" 60 | URI_CONF = _URI_BASE + MD_CONF.name 61 | 62 | MD_KEYBIND = _DOC_DIR / "KEYBIND.md" 63 | URI_KEYBIND = _URI_BASE + MD_KEYBIND.name 64 | 65 | MD_SNIPS = _DOC_DIR / "SNIPS.md" 66 | URI_SNIPS = _URI_BASE + MD_SNIPS.name 67 | 68 | MD_FUZZY = _DOC_DIR / "FUZZY.md" 69 | URI_FUZZY = _URI_BASE + MD_FUZZY.name 70 | 71 | MD_DISPLAY = _DOC_DIR / "DISPLAY.md" 72 | URI_DISPLAY = _URI_BASE + MD_DISPLAY.name 73 | 74 | MD_SOURCES = _DOC_DIR / "SOURCES.md" 75 | URI_SOURCES = _URI_BASE + MD_SOURCES.name 76 | 77 | MD_MISC = _DOC_DIR / "MISC.md" 78 | URI_MISC = _URI_BASE + MD_MISC.name 79 | 80 | MD_STATS = _DOC_DIR / "STATS.md" 81 | URI_STATISTICS = _URI_BASE + MD_STATS.name 82 | 83 | MD_PREF = _DOC_DIR / "PERF.md" 84 | URI_PREF = _URI_BASE + MD_PREF.name 85 | 86 | MD_COMPLETION = _DOC_DIR / "COMPLETION.md" 87 | URI_COMPLETION = _URI_BASE + MD_COMPLETION.name 88 | 89 | MD_C_SOURCES = _DOC_DIR / 
"CUSTOM_SOURCES.md" 90 | URI_C_SOURCES = _URI_BASE + MD_C_SOURCES.name 91 | -------------------------------------------------------------------------------- /coq/tags/parse.py: -------------------------------------------------------------------------------- 1 | from json import loads 2 | from json.decoder import JSONDecodeError 3 | from pathlib import Path 4 | from typing import Iterator, Mapping, MutableMapping, MutableSequence, Tuple 5 | 6 | from pynvim_pp.lib import decode 7 | from pynvim_pp.logging import log 8 | from std2.asyncio.subprocess import call 9 | from std2.string import removeprefix, removesuffix 10 | 11 | from ..shared.executor import very_nice 12 | from .types import Tag, Tags 13 | 14 | _FIELDS = "".join( 15 | f"{{{f}}}" 16 | for f in ( 17 | "language", 18 | "input", 19 | "line", 20 | "kind", 21 | "name", 22 | "pattern", 23 | "typeref", 24 | "scope", 25 | "scopeKind", 26 | "access", 27 | "signature", 28 | ) 29 | ) 30 | 31 | 32 | async def run(ctags: Path, *args: str) -> str: 33 | if not args: 34 | return "" 35 | else: 36 | prefix = await very_nice() 37 | try: 38 | proc = await call( 39 | *prefix, 40 | ctags, 41 | "--sort=no", 42 | "--output-format=json", 43 | f"--fields={_FIELDS}", 44 | *args, 45 | check_returncode=set(), 46 | ) 47 | except (FileNotFoundError, PermissionError): 48 | return "" 49 | else: 50 | return decode(proc.stdout) 51 | 52 | 53 | def _unescape(pattern: str) -> str: 54 | def cont() -> Iterator[str]: 55 | stripped = removesuffix(removeprefix(pattern[1:-1], "^"), "$").strip() 56 | it = iter(stripped) 57 | for c in it: 58 | if c == "\\": 59 | nc = next(it, "") 60 | if nc in {"/", "\\"}: 61 | yield nc 62 | else: 63 | yield c 64 | 65 | return "".join(cont()) 66 | 67 | 68 | def parse(mtimes: Mapping[str, float], raw: str) -> Tags: 69 | tags: MutableMapping[str, Tuple[str, float, MutableSequence[Tag]]] = {} 70 | 71 | for line in raw.splitlines(): 72 | if line: 73 | try: 74 | json = loads(line) 75 | except JSONDecodeError: 76 | log.warning("%s", line) 77 | else: 78 | if json["_type"] == "tag": 79 | path = json["path"] 80 | if pattern := json.get("pattern"): 81 | new_pattern = _unescape(pattern) 82 | else: 83 | new_pattern = None 84 | json["pattern"] = new_pattern 85 | _, _, acc = tags.setdefault( 86 | path, (json["language"], mtimes.get(path, 0), []) 87 | ) 88 | acc.append(json) 89 | 90 | return tags 91 | -------------------------------------------------------------------------------- /coq/shared/repeat.py: -------------------------------------------------------------------------------- 1 | from dataclasses import replace 2 | from typing import Optional, Tuple 3 | 4 | from std2.types import never 5 | 6 | from ..snippets.parse import requires_snip 7 | from .types import ( 8 | UTF8, 9 | UTF16, 10 | UTF32, 11 | BaseRangeEdit, 12 | Cursors, 13 | Edit, 14 | RangeEdit, 15 | SnippetEdit, 16 | SnippetRangeEdit, 17 | WTF8Pos, 18 | ) 19 | 20 | 21 | def _shift(cursor: Cursors, edit: BaseRangeEdit) -> Tuple[WTF8Pos, WTF8Pos]: 22 | row, u8, u16, u32 = cursor 23 | if edit.encoding == UTF16: 24 | col = u16 25 | elif edit.encoding == UTF8: 26 | col = u8 27 | elif edit.encoding == UTF32: 28 | col = u32 29 | else: 30 | never(edit.encoding) 31 | 32 | (b_row, b_col), (e_row, e_col) = edit.begin, edit.end 33 | edit_col = edit.cursor_pos 34 | diff = col - edit_col 35 | 36 | if b_row == row: 37 | if b_col > edit_col and diff < 0: 38 | new_b_col = b_col + diff 39 | else: 40 | new_b_col = b_col 41 | else: 42 | new_b_col = b_col 43 | 44 | if e_row == row: 45 | if diff > 0 and e_col >= 
edit_col: 46 | new_e_col = e_col + diff 47 | else: 48 | new_e_col = e_col 49 | else: 50 | new_e_col = e_col 51 | 52 | new_begin, new_end = (b_row, max(0, new_b_col)), (e_row, max(0, new_e_col)) 53 | return new_begin, new_end 54 | 55 | 56 | def sanitize(inline_shift: bool, cursor: Cursors, edit: Edit) -> Optional[Edit]: 57 | row, *_ = cursor 58 | if isinstance(edit, SnippetRangeEdit): 59 | if row == -1: 60 | if edit.fallback == edit.new_text: 61 | return SnippetEdit(grammar=edit.grammar, new_text=edit.new_text) 62 | elif not requires_snip(edit.new_text): 63 | return Edit(new_text=edit.new_text) 64 | else: 65 | return None 66 | elif fallback := edit.fallback: 67 | return SnippetEdit(grammar=edit.grammar, new_text=fallback) 68 | elif not requires_snip(edit.new_text): 69 | return Edit(new_text=edit.new_text) 70 | else: 71 | begin, end = _shift(cursor, edit=edit) 72 | return replace(edit, begin=begin, end=end) 73 | elif isinstance(edit, RangeEdit): 74 | if inline_shift: 75 | begin, end = _shift(cursor, edit=edit) 76 | return replace(edit, begin=begin, end=end) 77 | elif fallback := edit.fallback: 78 | return Edit(new_text=fallback) 79 | elif not requires_snip(edit.new_text): 80 | return Edit(new_text=edit.new_text) 81 | else: 82 | return None 83 | elif isinstance(edit, SnippetEdit): 84 | return edit 85 | else: 86 | return Edit(new_text=edit.new_text) 87 | -------------------------------------------------------------------------------- /coq/paths/show.py: -------------------------------------------------------------------------------- 1 | from contextlib import suppress 2 | from itertools import islice 3 | from os import linesep, sep 4 | from os.path import curdir, normcase 5 | from pathlib import Path, PurePath 6 | from typing import Iterator, Optional 7 | 8 | from std2.asyncio import to_thread 9 | from std2.locale import pathsort_key 10 | 11 | from ..lang import LANG 12 | from ..shared.types import Doc 13 | 14 | _KB = 1000 15 | _HOME = Path.home() 16 | 17 | 18 | def fmt_path( 19 | cwd: PurePath, path: PurePath, is_dir: bool, current: Optional[PurePath] = None 20 | ) -> str: 21 | if path == current: 22 | return curdir 23 | 24 | posfix = sep if is_dir else "" 25 | with suppress(ValueError): 26 | rel = path.relative_to(cwd) 27 | return f"{curdir}{sep}{normcase(rel)}{posfix}" 28 | 29 | with suppress(ValueError): 30 | rel = path.relative_to(_HOME) 31 | return f"~{sep}{normcase(rel)}{posfix}" 32 | 33 | return f"{normcase(path)}{posfix}" 34 | 35 | 36 | async def _show_dir(cwd: PurePath, path: Path, ellipsis: str, height: int) -> Doc: 37 | def lines() -> Iterator[str]: 38 | ordered = sorted(path.iterdir(), key=pathsort_key) 39 | for idx, child in enumerate(islice(ordered, height), start=1): 40 | if idx >= height and len(ordered) > height: 41 | yield ellipsis 42 | else: 43 | yield fmt_path(cwd, path=child, is_dir=child.is_dir()) 44 | 45 | def cont() -> Doc: 46 | text = linesep.join(lines()) 47 | doc = Doc(text=text, syntax="") 48 | return doc 49 | 50 | return await to_thread(cont) 51 | 52 | 53 | async def _show_file(path: Path, ellipsis: str, height: int) -> Doc: 54 | def lines() -> Iterator[str]: 55 | with path.open("r") as fd: 56 | lines = fd.readlines(_KB) 57 | lit = islice((line.rstrip() for line in lines), height) 58 | for idx, line in enumerate(lit, start=1): 59 | if idx >= height and len(lines) > height: 60 | yield ellipsis 61 | else: 62 | yield line 63 | 64 | def cont() -> Doc: 65 | try: 66 | text = linesep.join(lines()) 67 | except UnicodeDecodeError: 68 | text = LANG("file binary") 69 | 70 | 
t = text or LANG("file empty") 71 | doc = Doc(text=t, syntax="") 72 | return doc 73 | 74 | return await to_thread(cont) 75 | 76 | 77 | async def show(cwd: PurePath, path: Path, ellipsis: str, height: int) -> Optional[Doc]: 78 | try: 79 | if path.is_dir(): 80 | return await _show_dir(cwd, path=path, ellipsis=ellipsis, height=height) 81 | elif path.is_file(): 82 | return await _show_file(path, ellipsis=ellipsis, height=height) 83 | else: 84 | return None 85 | except (OSError, ValueError): 86 | return None 87 | -------------------------------------------------------------------------------- /coq/snippets/loaders/load.py: -------------------------------------------------------------------------------- 1 | from dataclasses import asdict 2 | from os.path import normcase 3 | from pathlib import Path 4 | from typing import AbstractSet, Callable, Iterable, Iterator, MutableMapping, MutableSet 5 | from uuid import UUID, uuid3 6 | 7 | from pynvim_pp.logging import log 8 | from std2.graphlib import recur_sort 9 | from std2.pathlib import walk 10 | 11 | from ...shared.types import UTF8, SnippetGrammar 12 | from ..types import LoadedSnips, LoadError, ParsedSnippet 13 | from .lsp import load_lsp 14 | from .neosnippet import load_neosnippet 15 | from .ultisnip import load_ultisnip 16 | 17 | 18 | def _load_paths(search: Iterable[Path], exts: AbstractSet[str]) -> Iterator[Path]: 19 | for search_path in search: 20 | for path in walk(search_path): 21 | if path.suffix in exts: 22 | yield Path(normcase(path)) 23 | 24 | 25 | def _key(snip: ParsedSnippet) -> UUID: 26 | name = str(recur_sort(asdict(snip))) 27 | return uuid3(UUID(int=0), name=name) 28 | 29 | 30 | def load_direct( 31 | trans: Callable[[ParsedSnippet], ParsedSnippet], 32 | ignore_error: bool, 33 | lsp: Iterable[Path], 34 | neosnippet: Iterable[Path], 35 | ultisnip: Iterable[Path], 36 | lsp_grammar: SnippetGrammar = SnippetGrammar.lsp, 37 | neosnippet_grammar: SnippetGrammar = SnippetGrammar.snu, 38 | ultisnip_grammar: SnippetGrammar = SnippetGrammar.snu, 39 | ) -> LoadedSnips: 40 | specs = { 41 | load_lsp: (lsp_grammar, lsp), 42 | load_neosnippet: (neosnippet_grammar, neosnippet), 43 | load_ultisnip: (ultisnip_grammar, ultisnip), 44 | } 45 | 46 | extensions: MutableMapping[str, MutableSet[str]] = {} 47 | snippets: MutableMapping[UUID, ParsedSnippet] = {} 48 | 49 | for parser, (grammar, paths) in specs.items(): 50 | for path in paths: 51 | with path.open(encoding=UTF8) as fd: 52 | try: 53 | filetype, exts, snips = parser( 54 | grammar, path=path, lines=enumerate(fd, start=1) 55 | ) 56 | except LoadError as e: 57 | if ignore_error: 58 | log.warning("%s", e) 59 | else: 60 | raise 61 | else: 62 | ext_acc = extensions.setdefault(filetype, set()) 63 | for ext in exts: 64 | ext_acc.add(ext) 65 | for snip in map(trans, snips): 66 | uid = _key(snip) 67 | snippets[uid] = snip 68 | 69 | loaded = LoadedSnips(exts=extensions, snippets=snippets) 70 | return loaded 71 | 72 | 73 | def load_ci( 74 | trans: Callable[[ParsedSnippet], ParsedSnippet], 75 | lsp: Iterable[Path], 76 | neosnippet: Iterable[Path], 77 | ultisnip: Iterable[Path], 78 | ) -> LoadedSnips: 79 | loaded = load_direct( 80 | trans, 81 | True, 82 | lsp=_load_paths(lsp, exts={".json"}), 83 | neosnippet=_load_paths(neosnippet, exts={".snippets", ".snip"}), 84 | ultisnip=_load_paths(ultisnip, exts={".snippets", ".snip"}), 85 | ) 86 | 87 | return loaded 88 | -------------------------------------------------------------------------------- /docs/FUZZY.md: 
-------------------------------------------------------------------------------- 1 | # Fuzzy 2 | 3 | ## Algorithms 4 | 5 | `coq.nvim` uses ensemble ranking. It uses a two stage Filter -> Rank system. 6 | 7 | Both stages uses a `look_ahead` parameter to adjust for typos. 8 | 9 | ### Stage 1 - Filtering 10 | 11 | All `sqlite` based sources will require some `exact_matches` number of prefix matches. 12 | 13 | This is done to reduce the non-indexed search space. 14 | 15 | A quick multiset based filter is computed on the candidates, resulting in a normalized `[0..1]` score. 16 | 17 | Results that do not score above the `fuzzy_cutoff` are dropped at this stage. 18 | 19 | ### Stage 2 - Ranking 20 | 21 | On a reduced search set, a more comprehensive ensemble score is computed for each candidate. 22 | 23 | The primary metrics are `prefix_matches`, `edit_distance`, `recency` and `proximity`. 24 | 25 | For each metric, the relative rank of each candidate among their peers is weight adjusted. 26 | 27 | All the primary metrics are summed together in a weighted average, and rounded to an integer `[0..1000]`. 28 | 29 | Lexicographical sorting is then applied with secondary metrics such as `presence of imports`, `presence of documentation`, etc serving as tie breakers. 30 | 31 | ## Conf 32 | 33 | `coq_settings.match` 34 | 35 | `coq_settings.weights` 36 | 37 | --- 38 | 39 | ### coq_settings.match 40 | 41 | These control the matching & scoring algorithms 42 | 43 | #### `coq_settings.match.max_results` 44 | 45 | Maximum number of results to return. 46 | 47 | **default:** 48 | 49 | ```json 50 | 33 51 | ``` 52 | 53 | #### `coq_settings.match.proximate_lines` 54 | 55 | How many lines to use, for the purpose of proximity bonus. 56 | 57 | Neighbouring words in proximity are counted. 58 | 59 | **default:** 60 | 61 | ```json 62 | 16 63 | ``` 64 | 65 | #### `coq_settings.match.exact_matches` 66 | 67 | For word searching, how many exact prefix characters is required. 68 | 69 | **default:** 70 | 71 | ```json 72 | 2 73 | ``` 74 | 75 | #### `coq_settings.match.look_ahead` 76 | 77 | For word searching, how many characters to look ahead, in case of typos. 78 | 79 | **default:** 80 | 81 | ```json 82 | 2 83 | ``` 84 | 85 | #### `coq_settings.match.fuzzy_cutoff` 86 | 87 | What is the minimum similarity score, for a word to be proposed by the algorithm. 88 | 89 | **default:** 90 | 91 | ```json 92 | 0.6 93 | ``` 94 | 95 | --- 96 | 97 | ### coq_settings.weights 98 | 99 | #### `coq_settings.weights.prefix_matches` 100 | 101 | Relative weight adjustment of exact prefix matches. 102 | 103 | **default:** 104 | 105 | ```json 106 | 2.0 107 | ``` 108 | 109 | #### `coq_settings.weights.edit_distance` 110 | 111 | Relative weight adjustment of [Damerau–Levenshtein distance](https://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance), normalized and adjusted for look-aheads. 112 | 113 | **default:** 114 | 115 | ```json 116 | 1.5 117 | ``` 118 | 119 | #### `coq_settings.weights.recency` 120 | 121 | Relative weight adjustment of recently inserted items. 
122 | 123 | **default:** 124 | 125 | ```json 126 | 1.0 127 | ``` 128 | 129 | #### `coq_settings.weights.proximity` 130 | 131 | Relative weight adjustment of prevalence within the `proximate_lines` 132 | 133 | **default:** 134 | 135 | ```json 136 | 0.5 137 | ``` 138 | -------------------------------------------------------------------------------- /coq/client.py: -------------------------------------------------------------------------------- 1 | from asyncio import AbstractEventLoop, gather, get_running_loop, wrap_future 2 | from asyncio.exceptions import CancelledError 3 | from concurrent.futures import Future, ThreadPoolExecutor 4 | from contextlib import AbstractAsyncContextManager, suppress 5 | from functools import wraps 6 | from logging import DEBUG as DEBUG_LV 7 | from logging import INFO 8 | from string import Template 9 | from sys import exit 10 | from textwrap import dedent 11 | from typing import Any, Sequence, cast 12 | 13 | from pynvim_pp.logging import log, suppress_and_log 14 | from pynvim_pp.nvim import Nvim, conn 15 | from pynvim_pp.rpc_types import Method, MsgType, RPCallable, ServerAddr 16 | from pynvim_pp.types import NoneType 17 | from std2.contextlib import nullacontext 18 | from std2.pickle.types import DecodeError 19 | from std2.platform import OS, os 20 | from std2.sys import autodie 21 | 22 | from ._registry import ____ 23 | from .consts import DEBUG, DEBUG_DB, DEBUG_METRICS, TMP_DIR 24 | from .registry import atomic, autocmd, rpc 25 | from .server.registrants.options import set_options 26 | from .server.rt_types import Stack, ValidationError 27 | from .server.runtime import stack 28 | 29 | assert ____ or True 30 | 31 | _CB = RPCallable[None] 32 | 33 | 34 | def _autodie(ppid: int) -> AbstractAsyncContextManager: 35 | if os is OS.windows: 36 | return nullacontext(None) 37 | else: 38 | return autodie(ppid) 39 | 40 | 41 | def _set_debug(loop: AbstractEventLoop) -> None: 42 | loop.set_debug(DEBUG) 43 | if DEBUG or DEBUG_METRICS or DEBUG_DB: 44 | TMP_DIR.mkdir(parents=True, exist_ok=True) 45 | log.setLevel(DEBUG_LV) 46 | else: 47 | log.setLevel(INFO) 48 | 49 | 50 | async def _default(msg: MsgType, method: Method, params: Sequence[Any]) -> None: ... 51 | 52 | 53 | def _trans(stack: Stack, handler: _CB) -> _CB: 54 | @wraps(handler) 55 | async def f(*params: Any) -> None: 56 | with suppress(CancelledError): 57 | return await handler(stack, *params) 58 | 59 | return cast(_CB, f) 60 | 61 | 62 | async def init(socket: ServerAddr, ppid: int, th: ThreadPoolExecutor) -> None: 63 | loop = get_running_loop() 64 | loop.set_default_executor(th) 65 | 66 | async with _autodie(ppid): 67 | _set_debug(loop) 68 | 69 | die: Future = Future() 70 | 71 | async def cont() -> None: 72 | async with conn(die, socket=socket, default=_default) as client: 73 | try: 74 | stk = await stack(th=th) 75 | except (DecodeError, ValidationError) as e: 76 | tpl = """ 77 | Some options may have changed. 
78 | See help doc on Github under [docs/CONFIGURATION.md] 79 | 80 | 81 | ⚠️ ${e} 82 | """ 83 | msg = Template(dedent(tpl)).substitute(e=e) 84 | await Nvim.write(msg, error=True) 85 | exit(1) 86 | else: 87 | rpc_atomic, handlers = rpc.drain() 88 | for handler in handlers.values(): 89 | hldr = _trans(stk, handler=handler) 90 | client.register(hldr) 91 | 92 | await (rpc_atomic + autocmd.drain() + atomic).commit(NoneType) 93 | await set_options( 94 | mapping=stk.settings.keymap, 95 | fast_close=stk.settings.display.pum.fast_close, 96 | ) 97 | 98 | await gather(wrap_future(die), cont()) 99 | -------------------------------------------------------------------------------- /coq/clients/snippet/worker.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path, PurePath 2 | from typing import AbstractSet, AsyncIterator, Mapping 3 | 4 | from ...shared.executor import AsyncExecutor 5 | from ...shared.runtime import Supervisor 6 | from ...shared.runtime import Worker as BaseWorker 7 | from ...shared.settings import SnippetClient 8 | from ...shared.sql import BIGGEST_INT 9 | from ...shared.types import Completion, Context, Doc, SnippetEdit, SnippetGrammar 10 | from ...snippets.types import LoadedSnips 11 | from .db.database import SDB 12 | 13 | 14 | class Worker(BaseWorker[SnippetClient, Path]): 15 | def __init__( 16 | self, 17 | ex: AsyncExecutor, 18 | supervisor: Supervisor, 19 | always_wait: bool, 20 | options: SnippetClient, 21 | misc: Path, 22 | ) -> None: 23 | self._db = SDB(misc) 24 | super().__init__( 25 | ex, 26 | supervisor=supervisor, 27 | always_wait=always_wait, 28 | options=options, 29 | misc=misc, 30 | ) 31 | 32 | def interrupt(self) -> None: 33 | with self._interrupt(): 34 | self._db.interrupt() 35 | 36 | async def db_mtimes(self) -> Mapping[PurePath, float]: 37 | async def cont() -> Mapping[PurePath, float]: 38 | with self._interrupt_lock: 39 | return self._db.mtimes() 40 | 41 | return await self._ex.submit(cont()) 42 | 43 | async def clean(self, stale: AbstractSet[PurePath]) -> None: 44 | async def cont() -> None: 45 | with self._interrupt_lock: 46 | self._db.clean(stale) 47 | 48 | await self._ex.submit(cont()) 49 | 50 | async def populate(self, path: PurePath, mtime: float, loaded: LoadedSnips) -> None: 51 | async def cont() -> None: 52 | with self._interrupt_lock: 53 | self._db.populate(path, mtime=mtime, loaded=loaded) 54 | 55 | await self._ex.submit(cont()) 56 | 57 | async def _work( 58 | self, context: Context, timeout: float 59 | ) -> AsyncIterator[Completion]: 60 | limit = ( 61 | BIGGEST_INT 62 | if context.manual 63 | else self._options.max_pulls or self._supervisor.match.max_results 64 | ) 65 | 66 | async with self._work_lock: 67 | snippets = self._db.select( 68 | self._supervisor.match, 69 | filetype=context.filetype, 70 | word=context.words, 71 | sym=context.syms, 72 | limit=limit, 73 | ) 74 | 75 | for snip in snippets: 76 | edit = SnippetEdit( 77 | new_text=snip["snippet"], 78 | grammar=SnippetGrammar[snip["grammar"]], 79 | ) 80 | label_line, *_ = (snip["label"] or edit.new_text or " ").splitlines() 81 | label = label_line.strip().expandtabs(context.tabstop) 82 | doc = Doc(text=snip["doc"] or edit.new_text, syntax=context.filetype) 83 | completion = Completion( 84 | source=self._options.short_name, 85 | always_on_top=self._options.always_on_top, 86 | weight_adjust=self._options.weight_adjust, 87 | primary_edit=edit, 88 | adjust_indent=True, 89 | sort_by=snip["word"], 90 | label=label, 91 | doc=doc, 92 | 
kind=snip["word"], 93 | icon_match="Snippet", 94 | ) 95 | yield completion 96 | -------------------------------------------------------------------------------- /docs/CUSTOM_SOURCES.md: -------------------------------------------------------------------------------- 1 | # Custom Sources 2 | 3 | --- 4 | 5 | ⬇️ **Known sources at bottom of page** ⬇️ 6 | 7 | --- 8 | 9 | The idea is simple: custom `coq` sources are implemented via simple adapters. 10 | 11 | The adapters turn various vim plugin's output into [LSP](https://microsoft.github.io/language-server-protocol/specification) `CompletionItem[] | CompletionList`. 12 | 13 | ## How to write a source: 14 | 15 | All the sources are, are just simple functions that feed LSP completion items via a callback, they can optionally support cancellation. 16 | 17 | ```lua 18 | -- `COQsources` is a global registry of sources 19 | COQsources = COQsources or {} 20 | 21 | COQsources[""] = { 22 | name = "", -- this is displayed to the client 23 | fn = function (args, callback) 24 | -- 0 based 25 | local row, col = unpack(args.pos) 26 | 27 | -- ... 28 | -- callback() at some point 29 | 30 | 31 | local cancel = function () 32 | -- ... 33 | end 34 | return cancel -- optionally support cancellation 35 | end 36 | } 37 | ``` 38 | 39 | Simple case: 40 | 41 | Offers suggestions of `vim.lsp.protocol.CompletionItemKind` 42 | 43 | ```lua 44 | return function(args, callback) 45 | local items = {} 46 | 47 | -- label :: display label 48 | -- insertText :: string | null, default to `label` if null 49 | -- kind :: int ∈ `vim.lsp.protocol.CompletionItemKind` 50 | -- detail :: doc popup 51 | 52 | for key, val in pairs(vim.lsp.protocol.CompletionItemKind) do 53 | if type(key) == "string" and type(val) == "number" then 54 | local item = { 55 | label = "label .. " .. key, 56 | insertText = key, 57 | kind = val, 58 | detail = tostring(math.random()) 59 | } 60 | table.insert(items, item) 61 | end 62 | end 63 | 64 | callback { 65 | isIncomplete = true, -- :: isIncomplete = True :: -->> **NO CACHING** <<-- 66 | items = items 67 | } 68 | end 69 | ``` 70 | 71 | ### Gotchas 72 | 73 | Pitfalls that can **DESTROY performance**!! 74 | 75 | #### Caching 76 | 77 | The caching semantics is identical to LSP specification. ie. `items[]...` is cached, `{ isIncomplete = false, items = ... }` is also cached, only the example above is NOT cached. 78 | 79 | #### Dangling callbacks 80 | 81 | All code paths must invoke `callback`, or else `coq.nvim` will end up waiting for `callback` and timing out on every keystroke. 82 | 83 | **`:COQstats`** is your best friend. It's super obvious if one source is slowing everybody down. 
84 | 85 | ## Known sources 86 | 87 | #### [coq.thirdparty](https://github.com/ms-jpq/coq.thirdparty) 88 | 89 | **First party lua** and _external third party integrations_ 90 | 91 | ##### First party 92 | 93 | - shell repl 94 | 95 | ![repl.img](https://raw.githubusercontent.com/ms-jpq/coq.artifacts/artifacts/preview/repl.gif) 96 | 97 | - nvim lua 98 | 99 | ![lua.img](https://raw.githubusercontent.com/ms-jpq/coq.artifacts/artifacts/preview/nvim_lua.gif) 100 | 101 | - scientific calculator 102 | 103 | ![bc.img](https://raw.githubusercontent.com/ms-jpq/coq.artifacts/artifacts/preview/bc.gif) 104 | 105 | - banner 106 | 107 | ![figlet.img](https://raw.githubusercontent.com/ms-jpq/coq.artifacts/artifacts/preview/figlet.gif) 108 | 109 | - moo 110 | 111 | ![cowsay.img](https://raw.githubusercontent.com/ms-jpq/coq.artifacts/artifacts/preview/cowsay.gif) 112 | 113 | ##### Third party 114 | 115 | - vimtex 116 | 117 | - orgmode 118 | 119 | - [Wordpress hooks](https://github.com/Mte90/coq_wordpress) 120 | 121 | - [Laravel Blade files](https://github.com/RicardoRamirezR/blade-nav.nvim) 122 | -------------------------------------------------------------------------------- /coq/tmux/parse.py: -------------------------------------------------------------------------------- 1 | from asyncio import gather 2 | from dataclasses import dataclass 3 | from functools import lru_cache 4 | from os import environ 5 | from pathlib import Path 6 | from typing import Iterator, Mapping, Optional, Sequence, Tuple 7 | 8 | from pynvim_pp.lib import decode 9 | from std2.asyncio.subprocess import call 10 | 11 | from ..shared.executor import very_nice 12 | 13 | _SEP = "∪" 14 | 15 | 16 | @dataclass(frozen=True) 17 | class Pane: 18 | session: str 19 | uid: str 20 | 21 | session_name: str 22 | window_index: int 23 | window_name: str 24 | pane_index: int 25 | pane_title: str 26 | 27 | 28 | async def _panes(tmux: Path, all_sessions: bool) -> Sequence[Pane]: 29 | prefix = await very_nice() 30 | try: 31 | proc = await call( 32 | *prefix, 33 | tmux, 34 | "list-panes", 35 | ("-a" if all_sessions else "-s"), 36 | "-F", 37 | _SEP.join( 38 | ( 39 | "#{session_id}", 40 | "#{pane_id}", 41 | "#{session_name}", 42 | "#{window_index}", 43 | "#{window_name}", 44 | "#{pane_index}", 45 | "#{pane_title}", 46 | ) 47 | ), 48 | check_returncode=set(), 49 | ) 50 | except OSError: 51 | return () 52 | else: 53 | if proc.returncode: 54 | return () 55 | else: 56 | 57 | def cont() -> Iterator[Pane]: 58 | for line in decode(proc.stdout).strip().splitlines(): 59 | ( 60 | session, 61 | pane_id, 62 | session_name, 63 | window_index, 64 | window_name, 65 | pane_index, 66 | pane_title, 67 | ) = line.split(_SEP) 68 | pane = Pane( 69 | session=session, 70 | uid=pane_id, 71 | session_name=session_name, 72 | window_index=int(window_index), 73 | window_name=window_name, 74 | pane_index=int(pane_index), 75 | pane_title=pane_title, 76 | ) 77 | yield pane 78 | 79 | return tuple(cont()) 80 | 81 | 82 | async def _screenshot(tmux: Path, pane: Pane) -> Tuple[Pane, str]: 83 | prefix = await very_nice() 84 | try: 85 | proc = await call( 86 | *prefix, 87 | tmux, 88 | "capture-pane", 89 | "-p", 90 | "-J", 91 | "-t", 92 | pane.uid, 93 | check_returncode=set(), 94 | ) 95 | except OSError: 96 | return pane, "" 97 | else: 98 | if proc.returncode: 99 | return pane, "" 100 | else: 101 | text = decode(proc.stdout) 102 | return pane, text 103 | 104 | 105 | @lru_cache(maxsize=None) 106 | def pane_id() -> Optional[str]: 107 | return environ.get("TMUX_PANE") 108 | 109 | 110 | async def 
snapshot( 111 | tmux: Path, all_sessions: bool 112 | ) -> Tuple[Optional[Pane], Mapping[Pane, str]]: 113 | panes = await _panes(tmux, all_sessions=all_sessions) 114 | shots = await gather(*(_screenshot(tmux, pane=pane) for pane in panes)) 115 | current = next( 116 | (pane for pane in panes if pane.uid == pane_id()), 117 | None, 118 | ) 119 | snapshot = {pane: text for pane, text in shots} 120 | return current, snapshot 121 | -------------------------------------------------------------------------------- /coq/clients/snippet/db/sql/create/tables.sql: -------------------------------------------------------------------------------- 1 | BEGIN; 2 | 3 | 4 | CREATE TABLE IF NOT EXISTS sources ( 5 | rowid BLOB NOT NULL PRIMARY KEY, 6 | filename TEXT NOT NULL UNIQUE, 7 | mtime REAL NOT NULL 8 | ) WITHOUT rowid; 9 | CREATE INDEX IF NOT EXISTS sources_filename ON sources (filename); 10 | 11 | 12 | CREATE TABLE IF NOT EXISTS filetypes ( 13 | filetype TEXT NOT NULL PRIMARY KEY 14 | ) WITHOUT ROWID; 15 | 16 | 17 | CREATE TABLE IF NOT EXISTS extensions ( 18 | source_id BLOB NOT NULL REFERENCES sources (rowid) ON UPDATE CASCADE ON DELETE CASCADE, 19 | src TEXT NOT NULL REFERENCES filetypes (filetype) ON UPDATE CASCADE ON DELETE CASCADE, 20 | dest TEXT NOT NULL REFERENCES filetypes (filetype) ON UPDATE CASCADE ON DELETE CASCADE, 21 | UNIQUE (source_id, src, dest) 22 | ); 23 | CREATE INDEX IF NOT EXISTS extensions_source_id ON extensions (source_id); 24 | CREATE INDEX IF NOT EXISTS extensions_src ON extensions (src); 25 | CREATE INDEX IF NOT EXISTS extensions_dest ON extensions (dest); 26 | CREATE INDEX IF NOT EXISTS extensions_src_dest ON extensions (src, dest); 27 | CREATE INDEX IF NOT EXISTS extensions_dest_src ON extensions (dest, src); 28 | 29 | 30 | CREATE TABLE IF NOT EXISTS snippets ( 31 | rowid BLOB NOT NULL PRIMARY KEY, 32 | source_id BLOB NOT NULL REFERENCES sources (rowid) ON UPDATE CASCADE ON DELETE CASCADE, 33 | filetype TEXT NOT NULL REFERENCES filetypes (filetype) ON UPDATE CASCADE ON DELETE CASCADE, 34 | grammar TEXT NOT NULL, 35 | content TEXT NOT NULL, 36 | label TEXT NOT NULL, 37 | doc TEXT NOT NULL 38 | ) WITHOUT ROWID; 39 | CREATE INDEX IF NOT EXISTS snippets_source_id ON snippets (source_id); 40 | CREATE INDEX IF NOT EXISTS snippets_filetype ON snippets (filetype); 41 | 42 | 43 | CREATE TABLE IF NOT EXISTS matches ( 44 | snippet_id BLOB NOT NULL REFERENCES snippets (rowid) ON UPDATE CASCADE ON DELETE CASCADE, 45 | word TEXT NOT NULL, 46 | lword TEXT NOT NULL, 47 | UNIQUE(snippet_id, word) 48 | ); 49 | CREATE INDEX IF NOT EXISTS matches_snippet_id ON matches (snippet_id); 50 | CREATE INDEX IF NOT EXISTS matches_word ON matches (word); 51 | CREATE INDEX IF NOT EXISTS matches_lword ON matches (lword); 52 | 53 | 54 | CREATE VIEW IF NOT EXISTS uniq_extensions_view AS 55 | SELECT DISTINCT 56 | src, 57 | dest 58 | FROM extensions 59 | WHERE 60 | src <> dest; 61 | 62 | 63 | CREATE VIEW IF NOT EXISTS extensions_view AS 64 | WITH RECURSIVE all_exts AS ( 65 | SELECT 66 | 1 AS lvl, 67 | e1.src, 68 | e1.dest 69 | FROM uniq_extensions_view AS e1 70 | UNION ALL 71 | SELECT 72 | all_exts.lvl + 1 AS lvl, 73 | all_exts.src, 74 | e2.dest 75 | FROM uniq_extensions_view AS e2 76 | JOIN all_exts 77 | ON 78 | all_exts.dest = e2.src 79 | ) 80 | SELECT 81 | filetypes.filetype AS src, 82 | filetypes.filetype AS dest 83 | FROM filetypes 84 | UNION ALL 85 | SELECT 86 | all_exts.src, 87 | all_exts.dest 88 | FROM all_exts 89 | WHERE 90 | lvl < 9; 91 | 92 | 93 | CREATE VIEW IF NOT EXISTS snippets_view AS 94 | 
SELECT 95 | snippets.rowid AS snippet_id, 96 | snippets.source_id AS source_id, 97 | snippets.grammar AS grammar, 98 | matches.word AS word, 99 | matches.lword AS lword, 100 | snippets.content AS snippet, 101 | snippets.label AS label, 102 | snippets.doc AS doc, 103 | extensions_view.src AS ft_src, 104 | extensions_view.dest AS ft_dest 105 | FROM snippets 106 | JOIN matches 107 | ON matches.snippet_id = snippets.rowid 108 | JOIN extensions_view 109 | ON 110 | snippets.filetype = extensions_view.dest 111 | WHERE 112 | matches.word <> '' 113 | AND 114 | snippets.content <> ''; 115 | 116 | 117 | END; 118 | -------------------------------------------------------------------------------- /coq/server/state.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from pathlib import PurePath 3 | from typing import AbstractSet, Optional, Tuple, Union 4 | from uuid import UUID, uuid4 5 | 6 | from std2.cell import RefCell 7 | from std2.pathlib import POSIX_ROOT 8 | from std2.types import Void, VoidType 9 | 10 | from ..shared.context import EMPTY_CONTEXT 11 | from ..shared.runtime import Metric 12 | from ..shared.settings import Weights 13 | from ..shared.types import Completion, Context, Edit, NvimPos, TextTransforms 14 | 15 | 16 | @dataclass(frozen=True) 17 | class State: 18 | cwd: PurePath 19 | pum_width: int 20 | screen: Tuple[int, int] 21 | change_id: UUID 22 | commit_id: UUID 23 | preview_id: UUID 24 | nono_bufs: AbstractSet[int] 25 | context: Context 26 | last_edit: Metric 27 | text_trans: TextTransforms 28 | inserted_pos: NvimPos 29 | pum_location: Optional[int] 30 | manual_override: bool 31 | 32 | 33 | _CELL = RefCell( 34 | State( 35 | cwd=POSIX_ROOT, 36 | pum_width=0, 37 | screen=(0, 0), 38 | change_id=uuid4(), 39 | commit_id=uuid4(), 40 | preview_id=uuid4(), 41 | nono_bufs=set(), 42 | context=EMPTY_CONTEXT, 43 | last_edit=Metric( 44 | instance=uuid4(), 45 | label_width=0, 46 | kind_width=0, 47 | weight=Weights( 48 | prefix_matches=0, 49 | edit_distance=0, 50 | recency=0, 51 | proximity=0, 52 | ), 53 | weight_adjust=0, 54 | comp=Completion( 55 | source="", 56 | primary_edit=Edit(new_text=""), 57 | adjust_indent=False, 58 | always_on_top=False, 59 | weight_adjust=0, 60 | label="", 61 | sort_by="", 62 | icon_match="", 63 | ), 64 | ), 65 | text_trans={}, 66 | inserted_pos=(-1, -1), 67 | pum_location=None, 68 | manual_override=False, 69 | ) 70 | ) 71 | 72 | 73 | def state( 74 | cwd: Optional[PurePath] = None, 75 | pum_width: Optional[int] = None, 76 | screen: Optional[Tuple[int, int]] = None, 77 | change_id: Optional[UUID] = None, 78 | commit_id: Optional[UUID] = None, 79 | preview_id: Optional[UUID] = None, 80 | nono_bufs: AbstractSet[int] = frozenset(), 81 | context: Optional[Context] = None, 82 | last_edit: Optional[Metric] = None, 83 | text_trans: Optional[TextTransforms] = None, 84 | inserted_pos: Optional[NvimPos] = None, 85 | pum_location: Union[VoidType, Optional[int]] = Void, 86 | manual_override: Optional[bool] = None, 87 | ) -> State: 88 | old_state = _CELL.val 89 | 90 | new_state = State( 91 | cwd=cwd or old_state.cwd, 92 | pum_width=pum_width or old_state.pum_width, 93 | screen=screen or old_state.screen, 94 | change_id=change_id or old_state.change_id, 95 | commit_id=commit_id or old_state.commit_id, 96 | preview_id=preview_id or old_state.preview_id, 97 | nono_bufs=old_state.nono_bufs | nono_bufs, 98 | context=context or old_state.context, 99 | last_edit=last_edit or old_state.last_edit, 100 | 
text_trans=text_trans if text_trans is not None else old_state.text_trans, 101 | inserted_pos=inserted_pos or old_state.inserted_pos, 102 | pum_location=( 103 | pum_location 104 | if not isinstance(pum_location, VoidType) 105 | else old_state.pum_location 106 | ), 107 | manual_override=( 108 | old_state.manual_override if manual_override is None else manual_override 109 | ), 110 | ) 111 | _CELL.val = new_state 112 | 113 | return new_state 114 | -------------------------------------------------------------------------------- /coq/shared/executor.py: -------------------------------------------------------------------------------- 1 | from asyncio import ( 2 | AbstractEventLoop, 3 | create_task, 4 | gather, 5 | get_running_loop, 6 | run, 7 | run_coroutine_threadsafe, 8 | wrap_future, 9 | ) 10 | from concurrent.futures import Future, InvalidStateError, ThreadPoolExecutor 11 | from contextlib import suppress 12 | from functools import lru_cache 13 | from shutil import which 14 | from subprocess import CalledProcessError 15 | from threading import Thread 16 | from typing import Any, Awaitable, Callable, Coroutine, Sequence, TypeVar, cast 17 | 18 | from std2.asyncio.subprocess import call 19 | 20 | _T = TypeVar("_T") 21 | 22 | 23 | class AsyncExecutor: 24 | def __init__(self, threadpool: ThreadPoolExecutor) -> None: 25 | f: Future = Future() 26 | self._fut: Future = Future() 27 | 28 | async def cont() -> None: 29 | loop = get_running_loop() 30 | if threadpool: 31 | loop.set_default_executor(threadpool) 32 | f.set_result(loop) 33 | main: Coroutine = await wrap_future(self._fut) 34 | await main 35 | 36 | self._th = Thread(daemon=True, target=lambda: run(cont())) 37 | self._th.start() 38 | self.loop: AbstractEventLoop = f.result() 39 | 40 | def run(self, main: Awaitable[Any]) -> None: 41 | self._fut.set_result(main) 42 | 43 | def fsubmit(self, f: Callable[..., Any], *args: Any, **kwargs: Any) -> Future: 44 | fut: Future = Future() 45 | 46 | def cont() -> None: 47 | if fut.set_running_or_notify_cancel(): 48 | try: 49 | ret = f(*args, **kwargs) 50 | except BaseException as e: 51 | with suppress(InvalidStateError): 52 | fut.set_exception(e) 53 | else: 54 | with suppress(InvalidStateError): 55 | fut.set_result(ret) 56 | 57 | self.loop.call_soon_threadsafe(cont) 58 | return fut 59 | 60 | def submit(self, co: Awaitable[_T]) -> Awaitable[_T]: 61 | f: Future = run_coroutine_threadsafe(cast(Coroutine, co), loop=self.loop) 62 | return wrap_future(f) 63 | 64 | 65 | @lru_cache(maxsize=None) 66 | def _very_nice() -> Future: 67 | 68 | async def c1() -> Sequence[str]: 69 | if tp := which("taskpolicy"): 70 | run: Sequence[str] = (tp, "-c", "utility", "--") 71 | try: 72 | await call(*run, "true") 73 | except (OSError, CalledProcessError): 74 | return () 75 | else: 76 | return run 77 | elif (sd := which("systemd-notify")) and (sr := which("systemd-run")): 78 | run = ( 79 | sr, 80 | "--user", 81 | "--scope", 82 | "--nice", 83 | "19", 84 | "--property", 85 | "CPUWeight=69", 86 | "--", 87 | ) 88 | try: 89 | await gather(call(sd, "--booted"), call(*run, "true")) 90 | except (OSError, CalledProcessError): 91 | return () 92 | else: 93 | return run 94 | else: 95 | return () 96 | 97 | f: Future = Future() 98 | 99 | async def c2() -> None: 100 | try: 101 | ret = await c1() 102 | except BaseException as e: 103 | with suppress(InvalidStateError): 104 | f.set_exception(e) 105 | else: 106 | with suppress(InvalidStateError): 107 | f.set_result(ret) 108 | 109 | create_task(c2()) 110 | return f 111 | 112 | 113 | async def 
very_nice() -> Sequence[str]: 114 | f: Future = _very_nice() 115 | return await wrap_future(f) 116 | -------------------------------------------------------------------------------- /lua/coq/ts-request.lua: -------------------------------------------------------------------------------- 1 | (function(...) 2 | COQ.treesitter_start = function(buf, syntax) 3 | coq.validate {buf = {buf, "number"}, syntax = {syntax, "string"}} 4 | if vim.treesitter and vim.treesitter.start and vim.treesitter.language then 5 | local lang = vim.treesitter.language.get_lang(syntax) 6 | if lang then 7 | vim.treesitter.start(buf, lang) 8 | end 9 | end 10 | end 11 | 12 | local kind = function(node) 13 | if node:named() then 14 | return node:type() 15 | else 16 | return "" 17 | end 18 | end 19 | 20 | local payload = function(buf, node, type) 21 | if not node:missing() and not node:has_error() then 22 | local parent = node:parent() 23 | local grandparent = parent and parent:parent() or nil 24 | local lo, _, hi, _ = node:range() 25 | return { 26 | text = vim.treesitter.get_node_text(node, buf), 27 | range = {lo, hi}, 28 | kind = type, 29 | parent = parent and 30 | { 31 | text = vim.treesitter.get_node_text(parent, buf), 32 | kind = kind(parent) 33 | } or 34 | nil, 35 | grandparent = grandparent and 36 | { 37 | text = vim.treesitter.get_node_text(grandparent, buf), 38 | kind = kind(grandparent) 39 | } or 40 | nil 41 | } 42 | end 43 | end 44 | 45 | local ts_query = 46 | vim.fn.has("nvim-0.8") and 47 | (vim.treesitter.query.get or vim.treesitter.query.get_query) or 48 | vim.treesitter.get_query 49 | 50 | local iter_nodes = function(buf, lo, hi) 51 | return coroutine.wrap( 52 | function() 53 | local go, parser = pcall(vim.treesitter.get_parser) 54 | if go and parser then 55 | local query = ts_query(parser:lang(), "highlights") 56 | if query then 57 | for _, tree in pairs(parser:parse()) do 58 | for capture, node in query:iter_captures(tree:root(), buf, lo, hi) do 59 | local pl = payload(buf, node, query.captures[capture]) 60 | if pl and pl.kind ~= "comment" then 61 | coroutine.yield(pl) 62 | end 63 | end 64 | end 65 | end 66 | end 67 | end 68 | ) 69 | end 70 | 71 | COQ.ts_req = function(session) 72 | vim.schedule( 73 | function() 74 | local loop = vim.uv or vim.loop 75 | local t1 = loop.now() 76 | local win = vim.api.nvim_get_current_win() 77 | local buf = vim.api.nvim_win_get_buf(win) 78 | local height = vim.api.nvim_win_get_height(win) 79 | local filetype = vim.api.nvim_buf_get_option(buf, "filetype") 80 | local filename = vim.api.nvim_buf_get_name(buf) 81 | 82 | local lines = vim.api.nvim_buf_line_count(buf) 83 | local row, col = unpack(vim.api.nvim_win_get_cursor(win)) 84 | row = row - 1 85 | local lo, hi = 86 | math.max(0, row - height), 87 | math.min(lines, row + height + 1) 88 | 89 | local acc = {} 90 | for payload in iter_nodes(buf, lo, hi) do 91 | if type(payload) == "table" and type(payload[1]) ~= "userdata" then 92 | table.insert(acc, payload) 93 | end 94 | end 95 | 96 | local t2 = loop.now() 97 | local go, _ = 98 | pcall( 99 | function() 100 | COQ.Ts_notify( 101 | session, 102 | buf, 103 | lo, 104 | hi, 105 | filetype, 106 | filename, 107 | acc, 108 | (t2 - t1) / 1000 109 | ) 110 | end 111 | ) 112 | 113 | if not go then 114 | -- vim.print(acc) 115 | end 116 | end 117 | ) 118 | end 119 | end)(...) 
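-- For reference, a sketch of the table shape yielded by `iter_nodes` above and
-- forwarded to the Python side via COQ.Ts_notify; the field values here are
-- invented for illustration, only the keys mirror the `payload` constructor:
--
--   {
--     text = "local acc = {}",             -- vim.treesitter.get_node_text(node, buf)
--     range = {88, 88},                    -- 0-based start / end rows from node:range()
--     kind = "keyword",                    -- capture name from the "highlights" query
--     parent = {text = "...", kind = "variable_declaration"},  -- or nil
--     grandparent = {text = "...", kind = "chunk"}             -- or nil
--   }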
120 | -------------------------------------------------------------------------------- /coq/server/registrants/help.py: -------------------------------------------------------------------------------- 1 | from enum import Enum, auto 2 | from pathlib import Path 3 | from typing import Sequence, Tuple 4 | from uuid import uuid4 5 | from webbrowser import open as open_w 6 | 7 | from pynvim_pp.buffer import Buffer 8 | from pynvim_pp.float_win import list_floatwins, open_float_win 9 | from pynvim_pp.lib import decode 10 | from pynvim_pp.nvim import Nvim 11 | from std2.argparse import ArgparseError, ArgParser 12 | from std2.types import never 13 | 14 | from ...consts import ( 15 | MD_C_SOURCES, 16 | MD_COMPLETION, 17 | MD_CONF, 18 | MD_DISPLAY, 19 | MD_FUZZY, 20 | MD_KEYBIND, 21 | MD_MISC, 22 | MD_PREF, 23 | MD_README, 24 | MD_SNIPS, 25 | MD_SOURCES, 26 | MD_STATS, 27 | URI_C_SOURCES, 28 | URI_COMPLETION, 29 | URI_CONF, 30 | URI_DISPLAY, 31 | URI_FUZZY, 32 | URI_KEYBIND, 33 | URI_MISC, 34 | URI_PREF, 35 | URI_README, 36 | URI_SNIPS, 37 | URI_SOURCES, 38 | URI_STATISTICS, 39 | ) 40 | from ...registry import rpc 41 | from ..rt_types import Stack 42 | 43 | _NS = uuid4() 44 | 45 | 46 | class _Topics(Enum): 47 | index = auto() 48 | config = auto() 49 | keybind = auto() 50 | snips = auto() 51 | fuzzy = auto() 52 | comp = auto() 53 | display = auto() 54 | sources = auto() 55 | misc = auto() 56 | stats = auto() 57 | perf = auto() 58 | custom_sources = auto() 59 | 60 | 61 | def _directory(topic: _Topics) -> Tuple[Path, str]: 62 | if topic is _Topics.index: 63 | return MD_README, URI_README 64 | elif topic is _Topics.config: 65 | return MD_CONF, URI_CONF 66 | elif topic is _Topics.keybind: 67 | return MD_KEYBIND, URI_KEYBIND 68 | elif topic is _Topics.snips: 69 | return MD_SNIPS, URI_SNIPS 70 | elif topic is _Topics.fuzzy: 71 | return MD_FUZZY, URI_FUZZY 72 | elif topic is _Topics.comp: 73 | return MD_COMPLETION, URI_COMPLETION 74 | elif topic is _Topics.display: 75 | return MD_DISPLAY, URI_DISPLAY 76 | elif topic is _Topics.sources: 77 | return MD_SOURCES, URI_SOURCES 78 | elif topic is _Topics.misc: 79 | return MD_MISC, URI_MISC 80 | elif topic is _Topics.stats: 81 | return MD_STATS, URI_STATISTICS 82 | elif topic is _Topics.perf: 83 | return MD_PREF, URI_PREF 84 | elif topic is _Topics.custom_sources: 85 | return MD_C_SOURCES, URI_C_SOURCES 86 | else: 87 | never(topic) 88 | 89 | 90 | def _parse_args(args: Sequence[str]) -> Tuple[_Topics, bool]: 91 | parser = ArgParser() 92 | parser.add_argument( 93 | "topic", 94 | nargs="?", 95 | choices=tuple(topic.name for topic in _Topics), 96 | default=_Topics.index.name, 97 | ) 98 | parser.add_argument("-w", "--web", action="store_true", default=False) 99 | ns = parser.parse_args(args) 100 | return _Topics[ns.topic], ns.web 101 | 102 | 103 | @rpc() 104 | async def _help(stack: Stack, args: Sequence[str]) -> None: 105 | try: 106 | topic, use_web = _parse_args(args) 107 | except ArgparseError as e: 108 | await Nvim.write(e, error=True) 109 | else: 110 | md, uri = _directory(topic) 111 | web_d = open_w(uri) if use_web else False 112 | if not web_d: 113 | async for win in list_floatwins(_NS): 114 | await win.close() 115 | lines = decode(md.read_bytes()).splitlines() 116 | buf = await Buffer.create( 117 | listed=False, scratch=True, wipe=True, nofile=True, noswap=True 118 | ) 119 | await buf.set_lines(lines=lines) 120 | await buf.opts.set("modifiable", val=False) 121 | await buf.opts.set("syntax", val="markdown") 122 | await open_float_win(_NS, margin=0, relsize=0.95, 
buf=buf, border="rounded") 123 | -------------------------------------------------------------------------------- /coq/shared/fuzzy.py: -------------------------------------------------------------------------------- 1 | from collections import Counter 2 | from dataclasses import dataclass 3 | from itertools import repeat 4 | from typing import Iterable, MutableMapping, MutableSequence, Tuple 5 | 6 | 7 | @dataclass(frozen=True) 8 | class MatchMetrics: 9 | prefix_matches: int 10 | edit_distance: float 11 | 12 | 13 | def _p_matches(lhs: Iterable[str], rhs: Iterable[str]) -> int: 14 | p_matches = 0 15 | for l, r in zip(lhs, rhs): 16 | if l == r: 17 | p_matches += 1 18 | else: 19 | break 20 | return p_matches 21 | 22 | 23 | def multi_set_ratio(lhs: str, rhs: str, look_ahead: int) -> float: 24 | """ 25 | Test intersection size, adjust for length 26 | """ 27 | 28 | shorter = min(len(lhs), len(rhs)) 29 | if not shorter: 30 | return 1 31 | else: 32 | cutoff = shorter + look_ahead 33 | l, r = lhs[:cutoff], rhs[:cutoff] 34 | longer = max(len(l), len(r)) 35 | 36 | l_c, r_c = Counter(l), Counter(r) 37 | dif = l_c - r_c if len(l) > len(r) else r_c - l_c 38 | 39 | ratio = 1 - sum(dif.values()) / longer 40 | adjust = shorter / longer 41 | return ratio / adjust 42 | 43 | 44 | def quick_ratio(lhs: str, rhs: str, look_ahead: int) -> float: 45 | """ 46 | Front end bias 47 | """ 48 | 49 | shorter = min(len(lhs), len(rhs)) 50 | if not shorter: 51 | return 1 52 | else: 53 | p_matches = _p_matches(lhs, rhs) 54 | l, r = lhs[p_matches:], rhs[p_matches:] 55 | 56 | l_ratio = p_matches / shorter 57 | r_ratio = multi_set_ratio(l, r, look_ahead=look_ahead) * (1 - l_ratio) 58 | return l_ratio + r_ratio * 0.5 59 | 60 | 61 | _ARRAY_CACHE: MutableMapping[Tuple[int, int], MutableSequence[int]] = {} 62 | _DA: MutableMapping[str, int] = {} 63 | 64 | 65 | def dl_distance(lhs: str, rhs: str) -> int: 66 | """ 67 | Modified from 68 | https://github.com/jamesturk/jellyfish/blob/main/LICENSE 69 | Dont sue me 70 | """ 71 | 72 | len_l, len_r = len(lhs), len(rhs) 73 | row_size = len_r + 2 74 | max_d = len_l + len_r 75 | _DA.clear() 76 | 77 | if not (d := _ARRAY_CACHE.get((len_l, len_r))): 78 | d = [*repeat(0, row_size * (len_l + 2))] 79 | 80 | d[0] = max_d 81 | for i in range(0, len_l + 1): 82 | i1 = i + 1 83 | d[row_size * i1] = max_d 84 | d[row_size * i1 + 1] = i 85 | 86 | for j in range(0, len_r + 1): 87 | d[j + 1] = max_d 88 | d[row_size + j + 1] = j 89 | 90 | for i in range(1, len_l + 1): 91 | db = 0 92 | for j in range(1, len_r + 1): 93 | i1 = _DA.get(rhs[j - 1], 0) 94 | j1 = db 95 | 96 | if lhs[i - 1] == rhs[j - 1]: 97 | cost = 0 98 | db = j 99 | else: 100 | cost = 1 101 | 102 | d[row_size * (i + 1) + j + 1] = min( 103 | d[row_size * i + j] + cost, 104 | d[row_size * (i + 1) + j] + 1, 105 | d[row_size * i + j + 1] + 1, 106 | d[row_size * i1 + j1] + (i - i1 - 1) + 1 + (j - j1 - 1), 107 | ) 108 | _DA[lhs[i - 1]] = i 109 | 110 | return d[row_size * (len_l + 1) + len_r + 1] 111 | 112 | 113 | def metrics(lhs: str, rhs: str, look_ahead: int) -> MatchMetrics: 114 | """ 115 | Front end bias 116 | """ 117 | 118 | shorter = min(len(lhs), len(rhs)) 119 | if not shorter: 120 | return MatchMetrics(prefix_matches=0, edit_distance=0) 121 | else: 122 | p_matches = _p_matches(lhs, rhs) 123 | cutoff = min(max(len(lhs), len(rhs)), shorter + look_ahead) 124 | more = cutoff - shorter 125 | l, r = lhs[p_matches:cutoff], rhs[p_matches:cutoff] 126 | 127 | dist = dl_distance(l, r) 128 | edit_dist = 1 - (dist - more) / shorter 129 | return 
MatchMetrics(prefix_matches=p_matches, edit_distance=edit_dist) 130 | -------------------------------------------------------------------------------- /coq/clients/tmux/worker.py: -------------------------------------------------------------------------------- 1 | from asyncio import Lock 2 | from os import linesep 3 | from pathlib import Path 4 | from typing import AsyncIterator, Iterator 5 | 6 | from pynvim_pp.logging import suppress_and_log 7 | 8 | from ...shared.executor import AsyncExecutor 9 | from ...shared.runtime import Supervisor 10 | from ...shared.runtime import Worker as BaseWorker 11 | from ...shared.settings import TmuxClient 12 | from ...shared.sql import BIGGEST_INT 13 | from ...shared.timeit import timeit 14 | from ...shared.types import Completion, Context, Doc, Edit 15 | from ...tmux.parse import snapshot 16 | from .db.database import TMDB, TmuxWord 17 | 18 | 19 | def _doc(client: TmuxClient, word: TmuxWord) -> Doc: 20 | def cont() -> Iterator[str]: 21 | if client.all_sessions: 22 | yield f"S: {word.session_name}{client.parent_scope}" 23 | yield f"W: #{word.window_index}{client.path_sep}{word.window_name}{client.parent_scope}" 24 | yield f"P: #{word.pane_index}{client.path_sep}{word.pane_title}" 25 | 26 | return Doc(text=linesep.join(cont()), syntax="") 27 | 28 | 29 | class Worker(BaseWorker[TmuxClient, Path]): 30 | def __init__( 31 | self, 32 | ex: AsyncExecutor, 33 | supervisor: Supervisor, 34 | always_wait: bool, 35 | options: TmuxClient, 36 | misc: Path, 37 | ) -> None: 38 | self._exec = misc 39 | self._lock = Lock() 40 | self._db = TMDB( 41 | supervisor.limits.tokenization_limit, include_syms=options.match_syms 42 | ) 43 | super().__init__( 44 | ex, 45 | supervisor=supervisor, 46 | always_wait=always_wait, 47 | options=options, 48 | misc=misc, 49 | ) 50 | self._ex.run(self._poll()) 51 | 52 | def interrupt(self) -> None: 53 | with self._interrupt(): 54 | self._db.interrupt() 55 | 56 | async def _poll(self) -> None: 57 | while True: 58 | 59 | async def cont() -> None: 60 | with suppress_and_log(), timeit("IDLE :: TMUX"): 61 | await self._periodical() 62 | 63 | await self._with_interrupt(cont()) 64 | async with self._idle: 65 | await self._idle.wait() 66 | 67 | async def _periodical(self) -> None: 68 | if not self._lock.locked(): 69 | async with self._lock: 70 | current, panes = await snapshot( 71 | self._exec, all_sessions=self._options.all_sessions 72 | ) 73 | self._db.periodical(current, panes=panes) 74 | 75 | async def periodical(self) -> None: 76 | await self._ex.submit(self._periodical()) 77 | 78 | async def _work( 79 | self, context: Context, timeout: float 80 | ) -> AsyncIterator[Completion]: 81 | limit = ( 82 | BIGGEST_INT 83 | if context.manual 84 | else self._options.max_pulls or self._supervisor.match.max_results 85 | ) 86 | async with self._work_lock: 87 | words = self._db.select( 88 | self._supervisor.match, 89 | word=context.words, 90 | sym=(context.syms if self._options.match_syms else ""), 91 | limit=limit, 92 | ) 93 | 94 | for word in words: 95 | edit = Edit(new_text=word.text) 96 | cmp = Completion( 97 | source=self._options.short_name, 98 | always_on_top=self._options.always_on_top, 99 | weight_adjust=self._options.weight_adjust, 100 | label=edit.new_text, 101 | sort_by=word.text, 102 | primary_edit=edit, 103 | adjust_indent=False, 104 | doc=_doc(self._options, word=word), 105 | icon_match="Text", 106 | ) 107 | yield cmp 108 | -------------------------------------------------------------------------------- /coq/ci/load.py: 
-------------------------------------------------------------------------------- 1 | from asyncio import Semaphore, gather 2 | from contextlib import suppress 3 | from multiprocessing import cpu_count 4 | from pathlib import Path 5 | from typing import Any, Iterator, MutableMapping, MutableSet, Tuple 6 | from urllib.parse import urlparse 7 | from uuid import UUID 8 | 9 | from std2.asyncio.subprocess import call 10 | from std2.graphlib import recur_sort 11 | from std2.pickle.decoder import new_decoder 12 | from std2.pickle.encoder import new_encoder 13 | from yaml import safe_load 14 | 15 | from ..consts import COMPILATION_YML, TMP_DIR 16 | from ..shared.context import EMPTY_CONTEXT 17 | from ..shared.settings import EMPTY_COMP, EMPTY_MATCH 18 | from ..shared.types import SnippetEdit 19 | from ..snippets.loaders.load import load_ci as load_from_paths 20 | from ..snippets.parse import parse_basic 21 | from ..snippets.parsers.types import ParseError, ParseInfo 22 | from ..snippets.types import LoadedSnips, ParsedSnippet 23 | from .snip_trans import trans 24 | from .types import Compilation 25 | 26 | 27 | def _p_name(uri: str) -> Path: 28 | return TMP_DIR / Path(urlparse(uri).path).name 29 | 30 | 31 | async def _git_pull(sem: Semaphore, uri: str) -> None: 32 | async with sem: 33 | location = _p_name(uri) 34 | if location.is_dir(): 35 | await call( 36 | "git", 37 | "pull", 38 | "--recurse-submodules", 39 | cwd=location, 40 | capture_stdout=False, 41 | capture_stderr=False, 42 | ) 43 | else: 44 | await call( 45 | "git", 46 | "clone", 47 | "--depth=1", 48 | "--recurse-submodules", 49 | "--shallow-submodules", 50 | uri, 51 | str(location), 52 | cwd=TMP_DIR, 53 | capture_stdout=False, 54 | capture_stderr=False, 55 | ) 56 | 57 | 58 | async def load() -> LoadedSnips: 59 | TMP_DIR.mkdir(parents=True, exist_ok=True) 60 | yaml = safe_load(COMPILATION_YML.read_bytes()) 61 | specs = new_decoder[Compilation](Compilation)(yaml) 62 | 63 | sem = Semaphore(value=cpu_count()) 64 | await gather(*(_git_pull(sem, uri=uri) for uri in specs.git)) 65 | 66 | parsed = load_from_paths( 67 | trans, 68 | lsp=(TMP_DIR / path for path in specs.paths.lsp), 69 | neosnippet=(TMP_DIR / path for path in specs.paths.neosnippet), 70 | ultisnip=(TMP_DIR / path for path in specs.paths.ultisnip), 71 | ) 72 | 73 | exts: MutableMapping[str, MutableSet[str]] = {} 74 | 75 | for key, values in parsed.exts.items(): 76 | exts.setdefault(key, {*values}) 77 | 78 | for key, vals in specs.remaps.items(): 79 | acc = exts.setdefault(key, set()) 80 | for value in vals: 81 | acc.add(value) 82 | 83 | merged = LoadedSnips(snippets=parsed.snippets, exts=exts) 84 | return merged 85 | 86 | 87 | async def load_parsable() -> Any: 88 | loaded = await load() 89 | 90 | def cont() -> Iterator[Tuple[UUID, ParsedSnippet]]: 91 | for uid, snip in loaded.snippets.items(): 92 | edit = SnippetEdit( 93 | new_text=snip.content, 94 | grammar=snip.grammar, 95 | ) 96 | with suppress(ParseError): 97 | parse_basic( 98 | EMPTY_MATCH, 99 | comp=EMPTY_COMP, 100 | adjust_indent=False, 101 | context=EMPTY_CONTEXT, 102 | snippet=edit, 103 | info=ParseInfo(visual="", clipboard="", comment_str=("", "")), 104 | ) 105 | yield uid, snip 106 | 107 | snippets = {hashed: snip for hashed, snip in cont()} 108 | safe = LoadedSnips(exts=loaded.exts, snippets=snippets) 109 | 110 | coder = new_encoder[LoadedSnips](LoadedSnips) 111 | return recur_sort(coder(safe)) 112 | -------------------------------------------------------------------------------- /coq/server/registrants/options.py: 
-------------------------------------------------------------------------------- 1 | from pynvim_pp.keymap import Keymap 2 | from pynvim_pp.nvim import Nvim 3 | from pynvim_pp.settings import Settings 4 | from pynvim_pp.types import NoneType 5 | 6 | from ...registry import NAMESPACE, atomic, autocmd, rpc 7 | from ...shared.settings import KeyMapping 8 | from ..rt_types import Stack 9 | from ..state import state 10 | from .marks import nav_mark 11 | from .omnifunc import omnifunc 12 | from .preview import preview_preview 13 | from .repeat import repeat 14 | from .user_snippets import eval_snips 15 | 16 | 17 | @rpc() 18 | async def _update_pumheight(stack: Stack) -> None: 19 | height, width = await Nvim.size() 20 | state(screen=(width, height)) 21 | 22 | pumheight = min( 23 | round(height * stack.settings.display.pum.y_ratio), 24 | stack.settings.display.pum.y_max_len, 25 | ) 26 | await Nvim.opts.set("pumheight", val=pumheight) 27 | 28 | 29 | atomic.exec_lua(f"{NAMESPACE}.{_update_pumheight.method}()", ()) 30 | _ = autocmd("VimResized") << f"lua {NAMESPACE}.{_update_pumheight.method}()" 31 | 32 | 33 | async def set_options(mapping: KeyMapping, fast_close: bool) -> None: 34 | settings = Settings() 35 | keymap = Keymap() 36 | 37 | settings["completefunc"] = f"v:lua.{NAMESPACE}.{omnifunc.method}" 38 | 39 | if mapping.eval_snips: 40 | _ = ( 41 | keymap.n(mapping.eval_snips) 42 | << f"lua {NAMESPACE}.{eval_snips.method}(false)" 43 | ) 44 | _ = ( 45 | keymap.v(mapping.eval_snips) 46 | << rf"lua {NAMESPACE}.{eval_snips.method}(true)" 47 | ) 48 | 49 | if mapping.bigger_preview: 50 | _ = ( 51 | keymap.i(mapping.bigger_preview, expr=True) 52 | << f"(pumvisible() && complete_info(['mode']).mode ==# 'eval') ? {preview_preview.method}() : '{mapping.bigger_preview}'" 53 | ) 54 | 55 | if mapping.jump_to_mark: 56 | _ = ( 57 | keymap.n(mapping.jump_to_mark) 58 | << f"lua {NAMESPACE}.{nav_mark.method}()" 59 | ) 60 | _ = ( 61 | keymap.iv(mapping.jump_to_mark) 62 | << rf"lua {NAMESPACE}.{nav_mark.method}()" 63 | ) 64 | 65 | if mapping.repeat: 66 | _ = keymap.n(mapping.repeat) << f"lua {NAMESPACE}.{repeat.method}()" 67 | 68 | if mapping.manual_complete: 69 | _ = ( 70 | keymap.i(mapping.manual_complete, expr=True) 71 | << "pumvisible() ? '' : ''" 72 | ) 73 | if not mapping.manual_complete_insertion_only: 74 | _ = keymap.nv(mapping.manual_complete) << r"i" 75 | 76 | settings["completeopt"] += ( 77 | "noinsert", 78 | "menuone", 79 | *(() if mapping.pre_select else ("noselect",)), 80 | ) 81 | 82 | if mapping.recommended: 83 | _ = keymap.i("", expr=True) << "pumvisible() ? '' : ''" 84 | _ = keymap.i("", expr=True) << "pumvisible() ? '' : ''" 85 | _ = keymap.i("", expr=True) << "pumvisible() ? '' : ''" 86 | _ = keymap.i("", expr=True) << "pumvisible() ? '' : ''" 87 | _ = keymap.i("", expr=True) << "pumvisible() ? '' : ''" 88 | _ = ( 89 | keymap.i("", expr=True) 90 | << "pumvisible() ? (complete_info(['selected']).selected == -1 ? '' : '') : ''" 91 | ) 92 | _ = ( 93 | keymap.i("", expr=True) 94 | << "pumvisible() && !empty(trim(strpart(getline('.'), 0, col('.') - 1))) ? '' : ''" 95 | ) 96 | _ = ( 97 | keymap.i("", expr=True) 98 | << "pumvisible() && !empty(trim(strpart(getline('.'), 0, col('.') - 1))) ? 
'' : ''" 99 | ) 100 | 101 | if fast_close: 102 | settings["shortmess"] += "c" 103 | await (settings.drain() + keymap.drain(buf=None)).commit(NoneType) 104 | -------------------------------------------------------------------------------- /coq/databases/insertions/database.py: -------------------------------------------------------------------------------- 1 | from contextlib import closing, suppress 2 | from dataclasses import dataclass 3 | from sqlite3 import Connection, OperationalError 4 | from typing import Iterator, Mapping 5 | 6 | from ...consts import INSERT_DB 7 | from ...shared.sql import init_db 8 | from ..types import DB 9 | from .sql import sql 10 | 11 | 12 | @dataclass(frozen=True) 13 | class Statistics: 14 | source: str 15 | interrupted: int 16 | inserted: int 17 | 18 | avg_duration: float 19 | q10_duration: float 20 | q50_duration: float 21 | q95_duration: float 22 | q99_duration: float 23 | 24 | avg_items: float 25 | q50_items: int 26 | q99_items: int 27 | 28 | 29 | def _init() -> Connection: 30 | conn = Connection(INSERT_DB, isolation_level=None) 31 | init_db(conn) 32 | conn.executescript(sql("create", "pragma")) 33 | conn.executescript(sql("create", "tables")) 34 | return conn 35 | 36 | 37 | class IDB(DB): 38 | def __init__(self) -> None: 39 | self._conn = _init() 40 | 41 | def new_source(self, source: str) -> None: 42 | # MUST OK 43 | with self._conn, closing(self._conn.cursor()) as cursor: 44 | cursor.execute(sql("insert", "source"), {"name": source}) 45 | 46 | def new_batch(self, batch_id: bytes) -> None: 47 | # MUST OK 48 | with self._conn, closing(self._conn.cursor()) as cursor: 49 | cursor.execute(sql("insert", "batch"), {"rowid": batch_id}) 50 | 51 | def new_instance(self, instance: bytes, source: str, batch_id: bytes) -> None: 52 | # MUST OK 53 | with self._conn, closing(self._conn.cursor()) as cursor: 54 | cursor.execute( 55 | sql("insert", "instance"), 56 | {"rowid": instance, "source_id": source, "batch_id": batch_id}, 57 | ) 58 | 59 | def new_stat( 60 | self, instance: bytes, interrupted: bool, duration: float, items: int 61 | ) -> None: 62 | # MUST OK 63 | with self._conn, closing(self._conn.cursor()) as cursor: 64 | cursor.execute( 65 | sql("insert", "instance_stat"), 66 | { 67 | "instance_id": instance, 68 | "interrupted": interrupted, 69 | "duration": duration, 70 | "items": items, 71 | }, 72 | ) 73 | 74 | def insertion_order(self, n_rows: int) -> Mapping[str, int]: 75 | # can interrupt 76 | with suppress(OperationalError): 77 | with self._conn, closing(self._conn.cursor()) as cursor: 78 | cursor.execute(sql("select", "inserted"), {"limit": n_rows}) 79 | order = { 80 | row["sort_by"]: row["insert_order"] for row in cursor.fetchall() 81 | } 82 | return order 83 | return {} 84 | 85 | def inserted(self, instance_id: bytes, sort_by: str) -> None: 86 | # MUST OK 87 | with self._conn, closing(self._conn.cursor()) as cursor: 88 | cursor.execute( 89 | sql("insert", "inserted"), 90 | {"instance_id": instance_id, "sort_by": sort_by}, 91 | ) 92 | 93 | def stats(self) -> Iterator[Statistics]: 94 | # MUST OK 95 | with self._conn, closing(self._conn.cursor()) as cursor: 96 | cursor.execute(sql("select", "summaries"), ()) 97 | 98 | for row in cursor: 99 | stat = Statistics( 100 | source=row["source"], 101 | interrupted=row["interrupted"], 102 | inserted=row["inserted"], 103 | avg_duration=row["avg_duration"], 104 | avg_items=row["avg_items"], 105 | q10_duration=row["q10_duration"], 106 | q50_duration=row["q50_duration"], 107 | q95_duration=row["q95_duration"], 108 | 
q99_duration=row["q99_duration"], 109 | q50_items=row["q50_items"], 110 | q99_items=row["q99_items"], 111 | ) 112 | yield stat 113 | -------------------------------------------------------------------------------- /coq/lsp/requests/completion.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | from typing import AbstractSet, AsyncIterator, Optional, Tuple, cast 3 | 4 | from ...shared.types import Context, ExternLSP, ExternLUA 5 | from ..parse import parse, parse_inline 6 | from ..protocol import protocol 7 | from ..types import CompletionResponse, InLineCompletionResponse, LSPcomp 8 | from .request import async_request 9 | 10 | _Rsp = Tuple[LSPcomp, AbstractSet[str], timedelta] 11 | 12 | 13 | async def comp_lsp( 14 | short_name: str, 15 | always_on_top: Optional[AbstractSet[Optional[str]]], 16 | weight_adjust: float, 17 | context: Context, 18 | chunk: int, 19 | clients: AbstractSet[str], 20 | ) -> AsyncIterator[_Rsp]: 21 | pc = await protocol() 22 | 23 | async for client in async_request("lsp_comp", chunk, clients, context.cursor): 24 | resp = cast(CompletionResponse, client.message) 25 | parsed = parse( 26 | pc, 27 | extern_type=ExternLSP, 28 | client=client.name, 29 | encoding=client.offset_encoding, 30 | short_name=short_name, 31 | cursors=context.cursor, 32 | always_on_top=always_on_top, 33 | weight_adjust=weight_adjust, 34 | resp=resp, 35 | ) 36 | yield parsed, client.peers, client.elapsed 37 | 38 | 39 | async def comp_lsp_inline( 40 | short_name: str, 41 | always_on_top: Optional[AbstractSet[Optional[str]]], 42 | weight_adjust: float, 43 | context: Context, 44 | chunk: int, 45 | clients: AbstractSet[str], 46 | ) -> AsyncIterator[_Rsp]: 47 | async for client in async_request( 48 | "lsp_inline_comp", chunk, clients, context.cursor 49 | ): 50 | resp = cast(InLineCompletionResponse, client.message) 51 | parsed = parse_inline( 52 | filetype=context.filetype, 53 | extern_type=ExternLSP, 54 | client=client.name, 55 | encoding=client.offset_encoding, 56 | short_name=short_name, 57 | cursors=context.cursor, 58 | always_on_top=always_on_top, 59 | weight_adjust=weight_adjust, 60 | resp=resp, 61 | ) 62 | yield parsed, client.peers, client.elapsed 63 | 64 | 65 | async def comp_thirdparty( 66 | short_name: str, 67 | always_on_top: Optional[AbstractSet[Optional[str]]], 68 | weight_adjust: float, 69 | context: Context, 70 | chunk: int, 71 | clients: AbstractSet[str], 72 | ) -> AsyncIterator[LSPcomp]: 73 | pc = await protocol() 74 | 75 | async for client in async_request( 76 | "lsp_third_party", chunk, clients, context.cursor, context.line 77 | ): 78 | name = client.name or short_name 79 | resp = cast(CompletionResponse, client.message) 80 | parsed = parse( 81 | pc, 82 | extern_type=ExternLUA, 83 | client=client.name, 84 | encoding=client.offset_encoding, 85 | short_name=name, 86 | cursors=context.cursor, 87 | always_on_top=always_on_top, 88 | weight_adjust=weight_adjust, 89 | resp=resp, 90 | ) 91 | yield parsed 92 | 93 | 94 | async def comp_thirdparty_inline( 95 | short_name: str, 96 | always_on_top: Optional[AbstractSet[Optional[str]]], 97 | weight_adjust: float, 98 | context: Context, 99 | chunk: int, 100 | clients: AbstractSet[str], 101 | ) -> AsyncIterator[LSPcomp]: 102 | async for client in async_request( 103 | "lsp_inline_third_party", chunk, clients, context.cursor, context.line 104 | ): 105 | name = client.name or short_name 106 | resp = cast(InLineCompletionResponse, client.message) 107 | parsed = parse_inline( 108 | 
filetype=context.filetype, 109 | extern_type=ExternLSP, 110 | client=name, 111 | encoding=client.offset_encoding, 112 | short_name=short_name, 113 | cursors=context.cursor, 114 | always_on_top=always_on_top, 115 | weight_adjust=weight_adjust, 116 | resp=resp, 117 | ) 118 | yield parsed 119 | -------------------------------------------------------------------------------- /coq/clients/inline/worker.py: -------------------------------------------------------------------------------- 1 | from asyncio import Condition, as_completed 2 | from typing import AsyncIterator, Optional 3 | 4 | from pynvim_pp.logging import suppress_and_log 5 | from std2 import anext 6 | from std2.aitertools import to_async 7 | from std2.itertools import batched 8 | 9 | from ...consts import BASIC_KEYWORDS, CACHE_CHUNK 10 | from ...lsp.requests.completion import comp_lsp_inline 11 | from ...lsp.types import LSPcomp 12 | from ...shared.executor import AsyncExecutor 13 | from ...shared.runtime import Supervisor 14 | from ...shared.runtime import Worker as BaseWorker 15 | from ...shared.settings import LSPInlineClient 16 | from ...shared.timeit import timeit 17 | from ...shared.types import Completion, Context 18 | from ..cache.worker import CacheWorker 19 | 20 | 21 | class Worker(BaseWorker[LSPInlineClient, None]): 22 | def __init__( 23 | self, 24 | ex: AsyncExecutor, 25 | supervisor: Supervisor, 26 | always_wait: bool, 27 | options: LSPInlineClient, 28 | misc: None, 29 | ) -> None: 30 | super().__init__( 31 | ex, 32 | supervisor=supervisor, 33 | always_wait=always_wait, 34 | options=options, 35 | misc=misc, 36 | ) 37 | self._cache = CacheWorker(supervisor) 38 | self._working = Condition() 39 | self._ex.run(self._poll()) 40 | 41 | def interrupt(self) -> None: 42 | with self._interrupt(): 43 | self._cache.interrupt() 44 | 45 | async def _request(self, context: Context) -> AsyncIterator[LSPcomp]: 46 | rows = comp_lsp_inline( 47 | short_name=self._options.short_name, 48 | always_on_top=self._options.always_on_top, 49 | weight_adjust=self._options.weight_adjust, 50 | context=context, 51 | chunk=self._supervisor.match.max_results * 2, 52 | clients=set(), 53 | ) 54 | async for row, _, _ in rows: 55 | yield row 56 | 57 | async def _poll(self) -> None: 58 | while True: 59 | async with self._working: 60 | await self._working.wait() 61 | 62 | async def cont() -> None: 63 | if context := self._supervisor.current_context: 64 | with suppress_and_log(), timeit("LSP INLINE PULL"): 65 | async for comps in self._request(context): 66 | for chunked in batched(comps.items, n=CACHE_CHUNK): 67 | self._cache.set_cache( 68 | BASIC_KEYWORDS, 69 | items={comps.client: chunked}, 70 | skip_db=True, 71 | ) 72 | 73 | await self._with_interrupt(cont()) 74 | 75 | async def _work( 76 | self, context: Context, timeout: float 77 | ) -> AsyncIterator[Completion]: 78 | async with self._work_lock, self._working: 79 | try: 80 | _, _, cached = self._cache.apply_cache( 81 | context, always=True, inline_shift=True 82 | ) 83 | lsp_stream = ( 84 | self._request(context) 85 | if self._options.live_pulling 86 | else to_async(()) 87 | ) 88 | 89 | async def db() -> LSPcomp: 90 | return LSPcomp(client=None, local_cache=False, items=cached) 91 | 92 | async def lsp() -> Optional[LSPcomp]: 93 | return await anext(lsp_stream, None) 94 | 95 | async def stream() -> AsyncIterator[LSPcomp]: 96 | for co in as_completed((db(), lsp())): 97 | if comps := await co: 98 | yield comps 99 | 100 | async for lsp_comps in lsp_stream: 101 | yield lsp_comps 102 | 103 | async for comp in 
stream(): 104 | for row in comp.items: 105 | yield row 106 | finally: 107 | self._working.notify_all() 108 | -------------------------------------------------------------------------------- /coq/clients/tags/db/database.py: -------------------------------------------------------------------------------- 1 | from contextlib import closing, suppress 2 | from hashlib import md5 3 | from os.path import normcase 4 | from pathlib import Path, PurePath 5 | from sqlite3 import Connection, OperationalError 6 | from typing import AbstractSet, Iterator, Mapping, cast 7 | 8 | from pynvim_pp.lib import encode 9 | 10 | from ....databases.types import DB 11 | from ....shared.settings import MatchOptions 12 | from ....shared.sql import init_db, like_esc 13 | from ....tags.types import Tag, Tags 14 | from .sql import sql 15 | 16 | _SCHEMA = "v5" 17 | 18 | _NIL_TAG = Tag( 19 | language="", 20 | path="", 21 | line=0, 22 | kind="", 23 | name="", 24 | pattern=None, 25 | typeref=None, 26 | scope=None, 27 | scopeKind=None, 28 | access=None, 29 | ) 30 | 31 | 32 | def _init(db_dir: Path, cwd: PurePath) -> Connection: 33 | ncwd = normcase(cwd) 34 | name = f"{md5(encode(ncwd)).hexdigest()}-{_SCHEMA}" 35 | db = (db_dir / name).with_suffix(".sqlite3") 36 | db.parent.mkdir(parents=True, exist_ok=True) 37 | conn = Connection(str(db), isolation_level=None) 38 | init_db(conn) 39 | conn.executescript(sql("create", "pragma")) 40 | conn.executescript(sql("create", "tables")) 41 | return conn 42 | 43 | 44 | class CTDB(DB): 45 | def __init__(self, vars_dir: Path, cwd: PurePath) -> None: 46 | self._vars_dir = vars_dir / "clients" / "tags" 47 | self._conn = _init(self._vars_dir, cwd=cwd) 48 | 49 | def swap(self, cwd: PurePath) -> None: 50 | self._conn.close() 51 | self._conn = _init(self._vars_dir, cwd=cwd) 52 | 53 | def paths(self) -> Mapping[str, float]: 54 | with suppress(OperationalError): 55 | with self._conn, closing(self._conn.cursor()) as cursor: 56 | cursor.execute(sql("select", "files"), ()) 57 | files = {row["filename"]: row["mtime"] for row in cursor.fetchall()} 58 | return files 59 | return {} 60 | 61 | def reconciliate(self, dead: AbstractSet[str], new: Tags) -> None: 62 | with suppress(OperationalError): 63 | with self._conn, closing(self._conn.cursor()) as cursor: 64 | 65 | def m1() -> Iterator[Mapping]: 66 | for filename, (lang, mtime, _) in new.items(): 67 | yield { 68 | "filename": filename, 69 | "filetype": lang, 70 | "mtime": mtime, 71 | } 72 | 73 | def m2() -> Iterator[Mapping]: 74 | for _, _, tags in new.values(): 75 | for tag in tags: 76 | yield {**_NIL_TAG, **tag} 77 | 78 | cursor.executemany( 79 | sql("delete", "file"), 80 | ({"filename": f} for f in dead | new.keys()), 81 | ) 82 | cursor.executemany(sql("insert", "file"), m1()) 83 | cursor.executemany(sql("insert", "tag"), m2()) 84 | cursor.execute("PRAGMA optimize", ()) 85 | 86 | def select( 87 | self, 88 | opts: MatchOptions, 89 | filename: str, 90 | line_num: int, 91 | word: str, 92 | sym: str, 93 | limit: int, 94 | ) -> Iterator[Tag]: 95 | with suppress(OperationalError): 96 | with self._conn, closing(self._conn.cursor()) as cursor: 97 | cursor.execute( 98 | sql("select", "tags"), 99 | { 100 | "cut_off": opts.fuzzy_cutoff, 101 | "look_ahead": opts.look_ahead, 102 | "limit": limit, 103 | "filename": filename, 104 | "line_num": line_num, 105 | "word": word, 106 | "sym": sym, 107 | "like_word": like_esc(word[: opts.exact_matches]), 108 | "like_sym": like_esc(sym[: opts.exact_matches]), 109 | }, 110 | ) 111 | for row in cursor: 112 | yield cast(Tag, 
{**row}) 113 | -------------------------------------------------------------------------------- /coq/snippets/loaders/ultisnip.py: -------------------------------------------------------------------------------- 1 | from difflib import get_close_matches 2 | from enum import Enum, auto 3 | from os.path import normcase 4 | from pathlib import PurePath 5 | from typing import AbstractSet, Iterable, MutableSequence, MutableSet, Sequence, Tuple 6 | 7 | from ...shared.types import SnippetGrammar 8 | from ..consts import SNIP_LINE_SEP 9 | from ..types import ParsedSnippet 10 | from .parse import raise_err 11 | 12 | _COMMENT_START = "#" 13 | _EXTENDS_START = "extends" 14 | _GLOBAL_END = "globalend" 15 | _GLOBAL_START = "global" 16 | _SNIPPET_END = "endsnippet" 17 | _SNIPPET_START = "snippet" 18 | 19 | _IGNORE_STARTS = ( 20 | "iclearsnippets", 21 | "post_expand", 22 | "post_jump", 23 | "pre_expand", 24 | "priority", 25 | ) 26 | 27 | _LEGAL_STARTS = { 28 | _EXTENDS_START, 29 | _GLOBAL_END, 30 | _GLOBAL_START, 31 | _SNIPPET_END, 32 | _SNIPPET_START, 33 | } 34 | 35 | 36 | class _State(Enum): 37 | normal = auto() 38 | snippet = auto() 39 | pglobal = auto() 40 | 41 | 42 | def _start(line: str) -> Tuple[str, str]: 43 | rest = line[len(_SNIPPET_START) :].strip() 44 | name, _, label = rest.partition(" ") 45 | if label.startswith('"') and label[1:].count('"') == 1: 46 | quoted, _, _ = label[1:].partition('"') 47 | return name, quoted 48 | else: 49 | return name, label 50 | 51 | 52 | def load_ultisnip( 53 | grammar: SnippetGrammar, path: PurePath, lines: Iterable[Tuple[int, str]] 54 | ) -> Tuple[str, AbstractSet[str], Sequence[ParsedSnippet]]: 55 | filetype = normcase(path.stem.strip()) 56 | 57 | snippets: MutableSequence[ParsedSnippet] = [] 58 | extends: MutableSet[str] = set() 59 | 60 | current_name = "" 61 | state = _State.normal 62 | current_label = "" 63 | current_lines: MutableSequence[str] = [] 64 | 65 | for lineno, line in lines: 66 | line = line.rstrip() 67 | 68 | if state == _State.normal: 69 | if line.startswith(_COMMENT_START) or line.startswith(_IGNORE_STARTS): 70 | pass 71 | 72 | elif not line or line.isspace(): 73 | current_lines.append("") 74 | 75 | elif line.startswith(_EXTENDS_START): 76 | filetypes = line[len(_EXTENDS_START) :].strip() 77 | for ft in (normcase(f.strip()) for f in filetypes.split(",")): 78 | if ft: 79 | extends.add(ft) 80 | 81 | elif line.startswith(_SNIPPET_START): 82 | state = _State.snippet 83 | 84 | current_name, current_label = _start(line) 85 | 86 | elif line.startswith(_GLOBAL_START): 87 | state = _State.pglobal 88 | 89 | else: 90 | start, _, _ = line.partition(" ") 91 | close = get_close_matches(start, _LEGAL_STARTS, n=1) 92 | if close: 93 | maybe_start, *_ = close 94 | addendum = f" :: did you mean -- {maybe_start}" 95 | else: 96 | addendum = "" 97 | 98 | reason = "Unexpected line start" + addendum 99 | raise_err(path, lineno=lineno, line=line, reason=reason) 100 | 101 | elif state == _State.snippet: 102 | if line.startswith(_SNIPPET_END): 103 | state = _State.normal 104 | 105 | content = SNIP_LINE_SEP.join(current_lines).strip() 106 | snippet = ParsedSnippet( 107 | grammar=grammar, 108 | filetype=filetype, 109 | content=content, 110 | label=current_label, 111 | doc="", 112 | matches={current_name}, 113 | ) 114 | snippets.append(snippet) 115 | current_lines.clear() 116 | 117 | else: 118 | current_lines.append(line) 119 | 120 | elif state == _State.pglobal: 121 | if line.startswith(_GLOBAL_END): 122 | state = _State.normal 123 | else: 124 | pass 125 | 126 | else: 127 
| assert False 128 | 129 | return filetype, extends, snippets 130 | -------------------------------------------------------------------------------- /coq/lsp/types.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from typing import ( 3 | AbstractSet, 4 | Any, 5 | Iterator, 6 | Literal, 7 | Optional, 8 | Sequence, 9 | TypedDict, 10 | Union, 11 | ) 12 | 13 | from ..shared.types import Completion 14 | 15 | # https://microsoft.github.io/language-server-protocol/specification 16 | 17 | 18 | @dataclass(frozen=True) 19 | class _CompletionItemLabelDetails: 20 | detail: Optional[str] = None 21 | description: Optional[str] = None 22 | 23 | 24 | @dataclass(frozen=True) 25 | class _Position: 26 | line: int 27 | character: int 28 | 29 | 30 | @dataclass(frozen=True) 31 | class _TextEdit: 32 | newText: str 33 | 34 | 35 | @dataclass(frozen=True) 36 | class _Range: 37 | start: _Position 38 | end: _Position 39 | 40 | 41 | @dataclass(frozen=True) 42 | class _InsertReplaceRange: 43 | insert: _Range 44 | replace: _Range 45 | 46 | 47 | @dataclass(frozen=True) 48 | class TextEdit(_TextEdit): 49 | range: _Range 50 | 51 | 52 | @dataclass(frozen=True) 53 | class TextEditNonStandard(_Range): 54 | new_text: str 55 | 56 | 57 | @dataclass(frozen=True) 58 | class InsertReplaceEditNonStandard(_InsertReplaceRange): 59 | new_text: str 60 | 61 | 62 | @dataclass(frozen=True) 63 | class InsertReplaceEdit(_TextEdit, _InsertReplaceRange): ... 64 | 65 | 66 | _CompletionItemKind = int 67 | 68 | 69 | @dataclass(frozen=True) 70 | class MarkupContent: 71 | kind: Union[Literal["plaintext", "markdown"], str] 72 | value: str 73 | 74 | 75 | _InsertTextFormat = int 76 | _CompletionItemTag = int 77 | _InsertTextMode = int 78 | 79 | 80 | @dataclass(frozen=True) 81 | class Command: 82 | title: str 83 | command: str 84 | arguments: Optional[Any] = None 85 | 86 | 87 | @dataclass(frozen=True) 88 | class CompletionItem: 89 | label: str 90 | labelDetails: Optional[_CompletionItemLabelDetails] = None 91 | 92 | kind: Optional[_CompletionItemKind] = None 93 | tags: Optional[Sequence[_CompletionItemTag]] = None 94 | 95 | detail: Optional[str] = None 96 | documentation: Union[str, MarkupContent, None] = None 97 | 98 | preselect: Optional[bool] = None 99 | filterText: Optional[str] = None 100 | 101 | insertText: Optional[str] = None 102 | insertTextFormat: Optional[_InsertTextFormat] = None 103 | insertTextMode: Optional[_InsertTextMode] = None 104 | 105 | textEdit: Union[ 106 | TextEditNonStandard, 107 | TextEdit, 108 | InsertReplaceEditNonStandard, 109 | InsertReplaceEdit, 110 | None, 111 | ] = None 112 | additionalTextEdits: Optional[Sequence[TextEdit]] = None 113 | 114 | command: Optional[Command] = None 115 | data: Optional[Any] = None 116 | 117 | 118 | @dataclass(frozen=True) 119 | class ItemDefaults: 120 | commitCharacters: Optional[AbstractSet[str]] = frozenset() 121 | editRange: Union[_Range, _InsertReplaceRange, None] = None 122 | insertTextFormat: Optional[_InsertTextFormat] = None 123 | insertTextMode: Optional[_InsertTextMode] = None 124 | data: Optional[Any] = None 125 | 126 | 127 | class _CompletionList(TypedDict): 128 | isIncomplete: bool 129 | items: Sequence[CompletionItem] 130 | itemDefaults: Optional[ItemDefaults] 131 | 132 | 133 | _NULL = Literal[None, False, 0] 134 | 135 | CompletionResponse = Union[_NULL, Sequence[CompletionItem], _CompletionList] 136 | 137 | 138 | @dataclass(frozen=True) 139 | class LSPcomp: 140 | client: Optional[str] 141 | 
local_cache: bool 142 | items: Iterator[Completion] 143 | 144 | 145 | @dataclass(frozen=True) 146 | class StringValue: 147 | kind: Literal["snippet"] 148 | value: str 149 | 150 | 151 | @dataclass(frozen=True) 152 | class InlineCompletionItem: 153 | insertText: Union[str, StringValue] 154 | filterText: Optional[str] = None 155 | range: Optional[_Range] = None 156 | command: Optional[Command] = None 157 | 158 | 159 | # https://microsoft.github.io/language-server-protocol/specifications/lsp/3.18/specification/#textDocument_inlineCompletion 160 | class _InLineCompletionList(TypedDict): 161 | items: Sequence[InlineCompletionItem] 162 | 163 | 164 | InLineCompletionResponse = Union[ 165 | _NULL, Sequence[InlineCompletionItem], _InLineCompletionList 166 | ] 167 | -------------------------------------------------------------------------------- /coq/snippets/loaders/neosnippet.py: -------------------------------------------------------------------------------- 1 | from difflib import get_close_matches 2 | from os.path import normcase 3 | from pathlib import PurePath 4 | from string import whitespace 5 | from textwrap import dedent 6 | from typing import AbstractSet, Iterable, MutableSequence, MutableSet, Sequence, Tuple 7 | 8 | from ...shared.types import SnippetGrammar 9 | from ..consts import SNIP_LINE_SEP 10 | from ..types import ParsedSnippet 11 | from .parse import raise_err 12 | 13 | _ALIAS_START = "alias" 14 | _COMMENT_START = "#" 15 | _EXTENDS_START = "extends" 16 | _INCLUDES_START = "include" 17 | _LABEL_START = "abbr" 18 | _SNIPPET_LINE_STARTS = {*whitespace} 19 | _SNIPPET_START = "snippet" 20 | 21 | _IGNORED_STARTS = ( 22 | "delete", 23 | "options", 24 | "regexp", 25 | "source", 26 | ) 27 | 28 | _LEGAL_STARTS = { 29 | _ALIAS_START, 30 | _EXTENDS_START, 31 | _LABEL_START, 32 | _SNIPPET_START, 33 | } 34 | 35 | 36 | def _start(line: str) -> Tuple[str, str]: 37 | rest = line[len(_SNIPPET_START) :].strip() 38 | name, _, label = rest.partition(" ") 39 | if label.startswith('"') and label[1:].count('"') == 1: 40 | quoted, _, _ = label[1:].partition('"') 41 | return name, quoted 42 | else: 43 | return name, label 44 | 45 | 46 | def load_neosnippet( 47 | grammar: SnippetGrammar, path: PurePath, lines: Iterable[Tuple[int, str]] 48 | ) -> Tuple[str, AbstractSet[str], Sequence[ParsedSnippet]]: 49 | filetype = normcase(path.stem.strip()) 50 | 51 | snippets: MutableSequence[ParsedSnippet] = [] 52 | extends: MutableSet[str] = set() 53 | 54 | current_name = "" 55 | current_label = "" 56 | current_aliases: MutableSequence[str] = [] 57 | current_lines: MutableSequence[str] = [] 58 | 59 | def push() -> None: 60 | if current_name: 61 | content = dedent(SNIP_LINE_SEP.join(current_lines)).strip() 62 | snippet = ParsedSnippet( 63 | grammar=grammar, 64 | filetype=filetype, 65 | content=content, 66 | label=current_label, 67 | doc="", 68 | matches={*current_aliases}, 69 | ) 70 | snippets.append(snippet) 71 | 72 | for lineno, line in lines: 73 | line = line.rstrip() 74 | if line.startswith(_COMMENT_START) or line.startswith(_IGNORED_STARTS): 75 | pass 76 | 77 | elif not line or line.isspace(): 78 | current_lines.append("") 79 | 80 | elif line.startswith(_EXTENDS_START): 81 | filetypes = line[len(_EXTENDS_START) :].strip() 82 | for ft in (normcase(f.strip()) for f in filetypes.split(",")): 83 | if ft: 84 | extends.add(ft) 85 | 86 | elif line.startswith(_INCLUDES_START): 87 | ft = normcase(PurePath(line[len(_INCLUDES_START) :]).stem.strip()) 88 | if ft: 89 | extends.add(ft) 90 | 91 | elif 
line.startswith(_SNIPPET_START): 92 | push() 93 | current_name, current_label = _start(line=line) 94 | current_lines.clear() 95 | current_aliases.clear() 96 | current_aliases.append(current_name) 97 | 98 | elif line.startswith(_ALIAS_START): 99 | current_aliases.append(line[len(_ALIAS_START) :].strip()) 100 | 101 | elif line.startswith(_LABEL_START): 102 | current_label = line[len(_LABEL_START) :].strip() 103 | 104 | elif any(line.startswith(c) for c in _SNIPPET_LINE_STARTS): 105 | if current_name: 106 | current_lines.append(line) 107 | else: 108 | reason = "Expected snippet name" 109 | raise_err(path, lineno=lineno, line=line, reason=reason) 110 | 111 | else: 112 | start, _, _ = line.partition(" ") 113 | close = get_close_matches(start, _LEGAL_STARTS, n=1) 114 | if close: 115 | maybe_start, *_ = close 116 | addendum = f" :: did you mean -- {maybe_start}" 117 | else: 118 | addendum = "" 119 | 120 | reason = "Unexpected line start" + addendum 121 | raise_err(path, lineno=lineno, line=line, reason=reason) 122 | 123 | push() 124 | 125 | return filetype, extends, snippets 126 | --------------------------------------------------------------------------------