├── intc
├── .root
├── LICENSE
├── README.md
├── README_zh.md
├── intc
│ ├── version.txt
│ ├── version.py
│ ├── __init__.py
│ ├── exceptions.py
│ ├── share.py
│ ├── loader.py
│ ├── register.py
│ └── utils.py
├── examples
│ ├── exp2
│ │ ├── src
│ │ │ ├── __init__.py
│ │ │ └── config.py
│ │ ├── .intc.json
│ │ ├── config
│ │ │ ├── module
│ │ │ │ └── model@sub.json
│ │ │ └── model.json
│ │ └── run.py
│ ├── exp3
│ │ ├── src
│ │ │ ├── __init__.py
│ │ │ └── config.py
│ │ ├── .intc.json
│ │ ├── config
│ │ │ ├── model.json
│ │ │ └── model_search.jsonc
│ │ └── run.py
│ └── exp1
│ │ ├── .intc.json
│ │ ├── config
│ │ │ ├── model.json
│ │ │ └── model_search.jsonc
│ │ ├── run.py
│ │ └── src
│ │ │ ├── config.py
│ │ │ └── __init__.py
├── requirements.txt
├── clean.py
├── setup.py
└── tests
│ ├── test_no_name_parser.py
│ ├── test_type_name_parser.py
│ └── test_config.py
├── lsp
├── .root
├── LICENSE
├── README.md
├── README_zh.md
├── intc_lsp
│ ├── version.txt
│ ├── version.py
│ ├── csrc
│ │ ├── yaml
│ │ │ ├── README.md
│ │ │ ├── package.json
│ │ │ ├── binding.gyp
│ │ │ ├── bindings
│ │ │ │ ├── node
│ │ │ │ │ ├── index.js
│ │ │ │ │ └── binding.cc
│ │ │ │ └── rust
│ │ │ │ │ ├── build.rs
│ │ │ │ │ └── lib.rs
│ │ │ ├── Cargo.toml
│ │ │ └── src
│ │ │ │ ├── tree_sitter
│ │ │ │ └── parser.h
│ │ │ │ ├── schema.generated.cc
│ │ │ │ └── node-types.json
│ │ └── json
│ │ │ ├── README.md
│ │ │ ├── package.json
│ │ │ ├── binding.gyp
│ │ │ ├── bindings
│ │ │ ├── node
│ │ │ │ ├── index.js
│ │ │ │ └── binding.cc
│ │ │ └── rust
│ │ │ │ ├── build.rs
│ │ │ │ └── lib.rs
│ │ │ ├── Cargo.toml
│ │ │ ├── grammar.js
│ │ │ ├── grammar_base.js
│ │ │ └── src
│ │ │ ├── node-types.json
│ │ │ └── tree_sitter
│ │ │ └── parser.h
│ ├── __init__.py
│ ├── src
│ │ ├── __init__.py
│ │ ├── edit.py
│ │ ├── parser_json.py
│ │ └── parser_yaml.py
│ ├── cli.py
│ └── server.py
├── pyproject.toml
└── setup.py
├── plugins
├── vscode
│ ├── LICENSE
│ ├── pics
│ │ └── icon.png
│ ├── .gitignore
│ ├── .vscodeignore
│ ├── .eslintrc.yml
│ ├── tsconfig.json
│ ├── .vscode
│ │ ├── launch.json
│ │ └── tasks.json
│ ├── README.md
│ ├── package.json
│ └── src
│ │ └── extension.ts
└── neovim
│ ├── readme.md
│ └── lsp.lua
├── pics
├── vsc_comp.png
├── vsc_diag.png
├── vsc_goto.png
├── vsc_main.png
├── nvim_comp.png
├── nvim_diag.png
├── nvim_goto.png
├── nvim_hover.png
├── nvim_intc.gif
├── vsc_hover.png
├── vsc_plugin.png
└── vscode_intc.gif
├── .gitignore
├── LICENSE
└── README.md
/intc/.root:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/lsp/.root:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/intc/LICENSE:
--------------------------------------------------------------------------------
1 | ../LICENSE
--------------------------------------------------------------------------------
/lsp/LICENSE:
--------------------------------------------------------------------------------
1 | ../LICENSE
--------------------------------------------------------------------------------
/intc/README.md:
--------------------------------------------------------------------------------
1 | ../README.md
--------------------------------------------------------------------------------
/lsp/README.md:
--------------------------------------------------------------------------------
1 | ../README.md
--------------------------------------------------------------------------------
/intc/README_zh.md:
--------------------------------------------------------------------------------
1 | ../README_zh.md
--------------------------------------------------------------------------------
/intc/intc/version.txt:
--------------------------------------------------------------------------------
1 | 0.1.2
2 |
--------------------------------------------------------------------------------
/lsp/README_zh.md:
--------------------------------------------------------------------------------
1 | ../README_zh.md
--------------------------------------------------------------------------------
/lsp/intc_lsp/version.txt:
--------------------------------------------------------------------------------
1 | 0.1.2
2 |
--------------------------------------------------------------------------------
/plugins/vscode/LICENSE:
--------------------------------------------------------------------------------
1 | ../../LICENSE
--------------------------------------------------------------------------------
/intc/intc/version.py:
--------------------------------------------------------------------------------
1 | __version__ = "0.1.2"
2 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/version.py:
--------------------------------------------------------------------------------
1 | __version__ = "0.1.2"
2 |
--------------------------------------------------------------------------------
/intc/examples/exp2/src/__init__.py:
--------------------------------------------------------------------------------
1 | import src.config
2 |
--------------------------------------------------------------------------------
/intc/examples/exp3/src/__init__.py:
--------------------------------------------------------------------------------
1 | import src.config
2 |
--------------------------------------------------------------------------------
/pics/vsc_comp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/pics/vsc_comp.png
--------------------------------------------------------------------------------
/pics/vsc_diag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/pics/vsc_diag.png
--------------------------------------------------------------------------------
/pics/vsc_goto.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/pics/vsc_goto.png
--------------------------------------------------------------------------------
/pics/vsc_main.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/pics/vsc_main.png
--------------------------------------------------------------------------------
/pics/nvim_comp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/pics/nvim_comp.png
--------------------------------------------------------------------------------
/pics/nvim_diag.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/pics/nvim_diag.png
--------------------------------------------------------------------------------
/pics/nvim_goto.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/pics/nvim_goto.png
--------------------------------------------------------------------------------
/pics/nvim_hover.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/pics/nvim_hover.png
--------------------------------------------------------------------------------
/pics/nvim_intc.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/pics/nvim_intc.gif
--------------------------------------------------------------------------------
/pics/vsc_hover.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/pics/vsc_hover.png
--------------------------------------------------------------------------------
/pics/vsc_plugin.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/pics/vsc_plugin.png
--------------------------------------------------------------------------------
/pics/vscode_intc.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/pics/vscode_intc.gif
--------------------------------------------------------------------------------
/plugins/vscode/pics/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cstsunfu/intc/HEAD/plugins/vscode/pics/icon.png
--------------------------------------------------------------------------------
/intc/examples/exp1/.intc.json:
--------------------------------------------------------------------------------
1 | {
2 | "entry": ["config"],
3 | "src": [
4 | "src"
5 | ]
6 | }
7 |
--------------------------------------------------------------------------------
/intc/examples/exp3/.intc.json:
--------------------------------------------------------------------------------
1 | {
2 | "entry": ["config"],
3 | "src": [
4 | "src"
5 | ]
6 | }
7 |
--------------------------------------------------------------------------------
/plugins/vscode/.gitignore:
--------------------------------------------------------------------------------
1 | out
2 | node_modules
3 | client/server
4 | .vscode-test
5 | .vscode/settings.json
6 | env
7 | d
--------------------------------------------------------------------------------
/intc/examples/exp2/.intc.json:
--------------------------------------------------------------------------------
1 | {
2 | "module": ["config/module"],
3 | "entry": ["config"],
4 | "src": [
5 | "src"
6 | ]
7 | }
8 |
--------------------------------------------------------------------------------
/intc/examples/exp2/config/module/model@sub.json:
--------------------------------------------------------------------------------
1 | {
2 | "_base": "submodule_exp",
3 | "para1": 100,
4 | "para2": 200,
5 | "para3": 300
6 | }
7 |
--------------------------------------------------------------------------------
/intc/requirements.txt:
--------------------------------------------------------------------------------
1 | pytest>=7.0
2 | attrs>=23.1
3 | cattrs>=23.1
4 | hjson>=3.0
5 | pyyaml>=6.0
6 | click>=8.0
7 | platformdirs>=4.2.0
8 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/yaml/README.md:
--------------------------------------------------------------------------------
1 | # The tree-sitter parser for YAML
2 |
3 | The grammar is mainly copied from "https://github.com/ikatyang/tree-sitter-yaml"
4 |
--------------------------------------------------------------------------------
/intc/examples/exp2/config/model.json:
--------------------------------------------------------------------------------
1 | {
2 | "@model@sub": {
3 | "para1": "@lambda @_G.lr"
4 | },
5 | "_G": {
6 | "lr": 8
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/plugins/vscode/.vscodeignore:
--------------------------------------------------------------------------------
1 | .vscode
2 | .gitignore
3 | client/out/*.map
4 | client/src/
5 | tsconfig.json
6 | tslint.json
7 | package.json
8 | package-lock.json
9 |
10 | .pytest_cache
11 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/json/README.md:
--------------------------------------------------------------------------------
1 | # The tree-sitter parser for JSON
2 |
3 | The grammar is mainly copied from "https://github.com/tree-sitter/tree-sitter-json" and "https://github.com/winston0410/tree-sitter-hjson"
4 |
--------------------------------------------------------------------------------
/intc/examples/exp2/src/config.py:
--------------------------------------------------------------------------------
1 | from intc import MISSING, Base, IntField, cregister
2 |
3 |
4 | @cregister("model", "submodule_exp")
5 | class Model(Base):
6 | para1 = IntField(1)
7 | para2 = IntField(MISSING)
8 | para3 = IntField(MISSING)
9 | para4 = IntField(500)
10 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/src/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | from intc_lsp.src.resolve import HoverType, IntcResolve
7 |
8 | __all__ = ["HoverType", "IntcResolve"]
9 |
--------------------------------------------------------------------------------
/plugins/vscode/.eslintrc.yml:
--------------------------------------------------------------------------------
1 | env:
2 | es2021: true
3 | node: true
4 | extends:
5 | - 'eslint:recommended'
6 | - 'plugin:@typescript-eslint/recommended'
7 | parser: '@typescript-eslint/parser'
8 | parserOptions:
9 | ecmaVersion: 12
10 | sourceType: module
11 | plugins:
12 | - '@typescript-eslint'
13 | rules: {}
14 |
--------------------------------------------------------------------------------
/plugins/neovim/readme.md:
--------------------------------------------------------------------------------
1 | ## Install
2 |
3 | 1. install `intc`, `intc-lsp`
4 |
5 | ```
6 | # clone the repo
7 | pip install intc
8 | cd lsp
9 | pip install . # or you can just pip install intc-lsp for some platform
10 | ```
11 |
12 | 2. configure your neovim by `lspconfig`
13 |
14 | copy the settings from `lsp.lua` into your neovim configuration
15 |
--------------------------------------------------------------------------------
/intc/clean.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | import os
3 | import shutil
4 |
5 |
6 | def rm_dirs(dirpath):
7 | if os.path.exists(dirpath) and os.path.isdir(dirpath):
8 | shutil.rmtree(dirpath)
9 |
10 |
11 | if __name__ == "__main__":
12 | rm_dirs("./build")
13 | rm_dirs("./intc.egg-info")
14 | rm_dirs("./logs")
15 | rm_dirs("./dist")
16 |
--------------------------------------------------------------------------------
/plugins/vscode/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "target": "es2019",
5 | "lib": [
6 | "ES2019"
7 | ],
8 | "rootDir": "src",
9 | "outDir": "out",
10 | "sourceMap": true
11 | },
12 | "include": [
13 | "src"
14 | ],
15 | "exclude": [
16 | "node_modules"
17 | ]
18 | }
19 |
--------------------------------------------------------------------------------
/intc/examples/exp1/config/model.json:
--------------------------------------------------------------------------------
1 | {
2 | "@model@simple_cls": {
3 | "active": "none",
4 | "embedding_size": "@$.@glove.hidden_size, @$.@bert.hidden_size @lambda x, y: x+y",
5 | "@embedding@glove": {
6 | "hidden_size": 300,
7 | "vocab_size": 5000
8 | },
9 | "@embedding@bert": {
10 | "hidden_size": 768
11 | }
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/json/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "tree-sitter-json",
3 | "version": "0.0.1",
4 | "description": "json grammar for tree-sitter",
5 | "main": "bindings/node",
6 | "keywords": [
7 | "parsing",
8 | "incremental"
9 | ],
10 | "dependencies": {
11 | "nan": "^2.12.1"
12 | },
13 | "devDependencies": {
14 | "tree-sitter-cli": "^0.20.8"
15 | },
16 | "scripts": {
17 | "test": "tree-sitter test"
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/yaml/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "tree-sitter-yaml",
3 | "version": "0.0.1",
4 | "description": "yaml grammar for tree-sitter",
5 | "main": "bindings/node",
6 | "keywords": [
7 | "parsing",
8 | "incremental"
9 | ],
10 | "dependencies": {
11 | "nan": "^2.12.1"
12 | },
13 | "devDependencies": {
14 | "tree-sitter-cli": "^0.20.8"
15 | },
16 | "scripts": {
17 | "test": "tree-sitter test"
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | **/.pytest_cache/**
2 | intc/build/**
3 | intc/dist/**
4 | intc/intc.egg-info/**
5 | intc/test.py
6 | lsp/build/**
7 | lsp/dist/**
8 | lsp/clean.py
9 | lsp/intc_lsp.egg-info/**
10 | **/*.pyc
11 | **/*.so
12 | lsp/intc.json
13 | lsp/con/**
14 | lsp/config/**
15 | plugins/vscode/out
16 | plugins/vscode/node_modules
17 | plugins/vscode/.vscode-test/
18 | plugins/vscode/*.vsix
19 | plugins/vscode/tsconfig.tsbuildinfo
20 | **/__pycache__/**
21 | **/log.log
22 | **/**.vsix
23 |
--------------------------------------------------------------------------------
/intc/examples/exp3/config/model.json:
--------------------------------------------------------------------------------
1 | {
2 | "@model@simple_cls": {
3 | "active": "none",
4 | "embedding_size": "@$.@glove.hidden_size, @$.@bert.hidden_size @lambda x, y: x+y",
5 | "@embedding@glove": {
6 | "hidden_size": 300,
7 | "vocab_size": 5000
8 | },
9 | "@embedding@bert": {
10 | "hidden_size": "@lambda @_G.hid"
11 | }
12 | },
13 | "_G": {
14 | "hid": 768
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/json/binding.gyp:
--------------------------------------------------------------------------------
1 | {
2 | "targets": [
3 | {
4 | "target_name": "tree_sitter_json_binding",
5 | "include_dirs": [
6 | "
3 | #include "nan.h"
4 |
5 | using namespace v8;
6 |
7 | extern "C" TSLanguage * tree_sitter_json();
8 |
9 | namespace {
10 |
11 | NAN_METHOD(New) {}
12 |
13 | void Init(Local exports, Local module) {
14 | Local tpl = Nan::New(New);
15 | tpl->SetClassName(Nan::New("Language").ToLocalChecked());
16 | tpl->InstanceTemplate()->SetInternalFieldCount(1);
17 |
18 | Local constructor = Nan::GetFunction(tpl).ToLocalChecked();
19 | Local instance = constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked();
20 | Nan::SetInternalFieldPointer(instance, 0, tree_sitter_json());
21 |
22 | Nan::Set(instance, Nan::New("name").ToLocalChecked(), Nan::New("json").ToLocalChecked());
23 | Nan::Set(module, Nan::New("exports").ToLocalChecked(), instance);
24 | }
25 |
26 | NODE_MODULE(tree_sitter_json_binding, Init)
27 |
28 | } // namespace
29 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/yaml/bindings/node/binding.cc:
--------------------------------------------------------------------------------
1 | #include "tree_sitter/parser.h"
2 | #include
3 | #include "nan.h"
4 |
5 | using namespace v8;
6 |
7 | extern "C" TSLanguage * tree_sitter_yaml();
8 |
9 | namespace {
10 |
11 | NAN_METHOD(New) {}
12 |
13 | void Init(Local exports, Local module) {
14 | Local tpl = Nan::New(New);
15 | tpl->SetClassName(Nan::New("Language").ToLocalChecked());
16 | tpl->InstanceTemplate()->SetInternalFieldCount(1);
17 |
18 | Local constructor = Nan::GetFunction(tpl).ToLocalChecked();
19 | Local instance = constructor->NewInstance(Nan::GetCurrentContext()).ToLocalChecked();
20 | Nan::SetInternalFieldPointer(instance, 0, tree_sitter_yaml());
21 |
22 | Nan::Set(instance, Nan::New("name").ToLocalChecked(), Nan::New("yaml").ToLocalChecked());
23 | Nan::Set(module, Nan::New("exports").ToLocalChecked(), instance);
24 | }
25 |
26 | NODE_MODULE(tree_sitter_yaml_binding, Init)
27 |
28 | } // namespace
29 |
--------------------------------------------------------------------------------
/lsp/pyproject.toml:
--------------------------------------------------------------------------------
1 |
2 | [build-system]
3 | requires = ["setuptools", "setuptools-scm"]
4 | build-backend = "setuptools.build_meta"
5 |
6 | [project]
7 | name = "intc_lsp"
8 | authors = [
9 | {name = "cstsunfu", email = "cstsunfu@gmail.com"},
10 | ]
11 | description = "intc-lsp: intc language server"
12 | readme = "README.md"
13 | requires-python = ">=3.8"
14 | keywords = ["intc", "lsp", "intc-lsp"]
15 | license = {text = "Apache License 2.0" }
16 | classifiers = [
17 | "Programming Language :: Python :: 3.8",
18 | "Programming Language :: Python :: 3.9",
19 | "License :: OSI Approved :: Apache Software License"
20 | ]
21 | dependencies = [
22 | "intc>=0.1, <0.2",
23 | "platformdirs>=4.2.0",
24 | "pygls>=1.1",
25 | "tree-sitter>=0.21.1,<0.22"
26 | ]
27 | dynamic = ["version"]
28 |
29 | [tool.setuptools]
30 | packages = ["intc_lsp", "intc_lsp.lib", "intc_lsp.src"]
31 | include-package-data = true
32 |
33 | [project.scripts]
34 | intc-lsp = "intc_lsp.cli:cli"
35 |
36 | [tool.setuptools.package-data]
37 | "intc_lsp.lib" = ["*.so"]
38 |
--------------------------------------------------------------------------------
/intc/intc/share.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | from typing import Any, Callable, Dict, Type, Union
7 |
8 | from intc.exceptions import NoModuleFoundError
9 |
10 | registry: Dict[str, Any] = {}
11 |
12 |
13 | MISSING = "???"
14 | LOAD_SUBMODULE_DONE = False
15 |
16 |
17 | def get_registed_instance(
18 | type_name: str, name: str = "", get_class=False
19 | ) -> Union[Type, Callable]:
20 | """get the module by name
21 |
22 | Args:
23 | type_name: the module type name
24 | name: the module name
25 | get_class: return the module class if True, else return the module class's _from_dict method
26 |
27 | Returns:
28 | registered module
29 |
30 | """
31 | if type_name not in registry:
32 | raise NoModuleFoundError(f"There is not a registerd type named '{type_name}'")
33 | if name not in registry[type_name]:
34 | raise NoModuleFoundError(
35 | f"In '{type_name}' register, there is not a entry named '{name}'"
36 | )
37 | if get_class:
38 | return registry[type_name][name]
39 | return registry[type_name][name]._from_dict
40 |
--------------------------------------------------------------------------------
/intc/examples/exp3/run.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 |
3 | import json
4 |
5 | from src.config import BertEmbedding, ClsDecode, GloveEmbedding, Model
6 |
7 | from intc import Parser, cregister, init_config
8 |
9 | assert cregister.get("model", "simple_cls", get_class=True) == Model
10 |
11 | parser_configs = Parser(json.load(open("./config/model.json"))).parser_init()
12 | assert len(parser_configs) == 1
13 | assert parser_configs[0]["@model"].active == "none"
14 | assert parser_configs[0]["@model"]["@bert"].hidden_size == 768
15 |
16 |
17 | parser_search_configs = Parser(
18 | json.load(open("./config/model_search.jsonc"))
19 | ).parser_init()
20 | assert len(parser_search_configs) == 4
21 |
22 | assert parser_search_configs[0]["@model"]["@glove"].dropout_rate == 0.2
23 | assert parser_search_configs[0]["@model"]["@bert"].dropout_rate == 0.0
24 | assert parser_search_configs[1]["@model"]["@glove"].dropout_rate == 0.3
25 | assert parser_search_configs[1]["@model"]["@bert"].dropout_rate == 0.0
26 |
27 | assert parser_search_configs[2]["@model"]["@glove"].dropout_rate == 0.2
28 | assert parser_search_configs[2]["@model"]["@bert"].dropout_rate == 0.1
29 | assert parser_search_configs[3]["@model"]["@glove"].dropout_rate == 0.3
30 | assert parser_search_configs[3]["@model"]["@bert"].dropout_rate == 0.1
31 |
--------------------------------------------------------------------------------
/intc/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | import os
7 |
8 | from setuptools import find_packages, setup
9 |
10 |
11 | def write_version_py():
12 | with open(os.path.join("intc", "version.txt")) as f:
13 | version = f.read().strip()
14 |
15 | # write version info to intc/version.py
16 | with open(os.path.join("intc", "version.py"), "w") as f:
17 | f.write('__version__ = "{}"\n'.format(version))
18 | return version
19 |
20 |
21 | version = write_version_py()
22 |
23 |
24 | with open("README.md", encoding="utf-8") as f:
25 | readme = f.read()
26 |
27 | with open("LICENSE", encoding="utf-8") as f:
28 | license = f.read()
29 |
30 | with open("requirements.txt", encoding="utf-8") as f:
31 | requirements = f.read()
32 |
33 | pkgs = [p for p in find_packages() if p.startswith("intc")]
34 |
35 | setup(
36 | name="intc",
37 | version=version,
38 | url="https://github.com/cstsunfu/intc",
39 | description="intc: intelligent python config toolkit",
40 | long_description=readme,
41 | long_description_content_type="text/markdown",
42 | license="Apache Software License",
43 | author="cstsunfu",
44 | author_email="cstsunfu@gmail.com",
45 | python_requires=">=3.8",
46 | include_package_data=True,
47 | packages=pkgs,
48 | install_requires=requirements.strip().split("\n"),
49 | )
50 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/json/bindings/rust/build.rs:
--------------------------------------------------------------------------------
1 | fn main() {
2 | let src_dir = std::path::Path::new("src");
3 |
4 | let mut c_config = cc::Build::new();
5 | c_config.include(&src_dir);
6 | c_config
7 | .flag_if_supported("-Wno-unused-parameter")
8 | .flag_if_supported("-Wno-unused-but-set-variable")
9 | .flag_if_supported("-Wno-trigraphs");
10 | let parser_path = src_dir.join("parser.c");
11 | c_config.file(&parser_path);
12 |
13 | // If your language uses an external scanner written in C,
14 | // then include this block of code:
15 |
16 | /*
17 | let scanner_path = src_dir.join("scanner.c");
18 | c_config.file(&scanner_path);
19 | println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
20 | */
21 |
22 | c_config.compile("parser");
23 | println!("cargo:rerun-if-changed={}", parser_path.to_str().unwrap());
24 |
25 | // If your language uses an external scanner written in C++,
26 | // then include this block of code:
27 |
28 | /*
29 | let mut cpp_config = cc::Build::new();
30 | cpp_config.cpp(true);
31 | cpp_config.include(&src_dir);
32 | cpp_config
33 | .flag_if_supported("-Wno-unused-parameter")
34 | .flag_if_supported("-Wno-unused-but-set-variable");
35 | let scanner_path = src_dir.join("scanner.cc");
36 | cpp_config.file(&scanner_path);
37 | cpp_config.compile("scanner");
38 | println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
39 | */
40 | }
41 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/yaml/bindings/rust/build.rs:
--------------------------------------------------------------------------------
1 | fn main() {
2 | let src_dir = std::path::Path::new("src");
3 |
4 | let mut c_config = cc::Build::new();
5 | c_config.include(&src_dir);
6 | c_config
7 | .flag_if_supported("-Wno-unused-parameter")
8 | .flag_if_supported("-Wno-unused-but-set-variable")
9 | .flag_if_supported("-Wno-trigraphs");
10 | let parser_path = src_dir.join("parser.c");
11 | c_config.file(&parser_path);
12 |
13 | // If your language uses an external scanner written in C,
14 | // then include this block of code:
15 |
16 | /*
17 | let scanner_path = src_dir.join("scanner.c");
18 | c_config.file(&scanner_path);
19 | println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
20 | */
21 |
22 | c_config.compile("parser");
23 | println!("cargo:rerun-if-changed={}", parser_path.to_str().unwrap());
24 |
25 | // If your language uses an external scanner written in C++,
26 | // then include this block of code:
27 |
28 | /*
29 | let mut cpp_config = cc::Build::new();
30 | cpp_config.cpp(true);
31 | cpp_config.include(&src_dir);
32 | cpp_config
33 | .flag_if_supported("-Wno-unused-parameter")
34 | .flag_if_supported("-Wno-unused-but-set-variable");
35 | let scanner_path = src_dir.join("scanner.cc");
36 | cpp_config.file(&scanner_path);
37 | cpp_config.compile("scanner");
38 | println!("cargo:rerun-if-changed={}", scanner_path.to_str().unwrap());
39 | */
40 | }
41 |
--------------------------------------------------------------------------------
/intc/examples/exp1/run.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 |
3 | import json
4 |
5 | from src import BertEmbedding, ClsDecode, GloveEmbedding, Model
6 |
7 | from intc import Parser, cregister, init_config
8 |
9 | assert cregister.get("model", "simple_cls", get_class=True) == Model
10 |
11 | parser_configs = Parser(json.load(open("./config/model.json"))).parser_init()
12 | assert len(parser_configs) == 1
13 | assert parser_configs[0]["@model"].active == "none"
14 | assert (
15 | parser_configs[0]["@model"].embedding_size
16 | == parser_configs[0]["@model"]["@bert"].hidden_size
17 | + parser_configs[0]["@model"]["@glove"].hidden_size
18 | ) # "embedding_size": "@$.@glove.hidden_size, @$.@bert.hidden_size @lambda x, y: x+y"
19 | assert parser_configs[0]["@model"]["@bert"].hidden_size == 768
20 |
21 |
22 | parser_search_configs = Parser(
23 | json.load(open("./config/model_search.jsonc"))
24 | ).parser_init()
25 | assert len(parser_search_configs) == 4
26 |
27 | assert parser_search_configs[0]["@model"]["@glove"].dropout_rate == 0.2
28 | assert parser_search_configs[0]["@model"]["@bert"].dropout_rate == 0.0
29 | assert parser_search_configs[1]["@model"]["@glove"].dropout_rate == 0.3
30 | assert parser_search_configs[1]["@model"]["@bert"].dropout_rate == 0.0
31 |
32 | assert parser_search_configs[2]["@model"]["@glove"].dropout_rate == 0.2
33 | assert parser_search_configs[2]["@model"]["@bert"].dropout_rate == 0.1
34 | assert parser_search_configs[3]["@model"]["@glove"].dropout_rate == 0.3
35 | assert parser_search_configs[3]["@model"]["@bert"].dropout_rate == 0.1
36 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/json/grammar.js:
--------------------------------------------------------------------------------
1 | const json_base = require("./grammar_base");
2 |
// intc json grammar: extends the base json grammar with comments,
// single-quoted / multiline strings, and newline-separated members.
module.exports = grammar(json_base, {
  name: "json",
  // allow comments anywhere whitespace is allowed
  extras: ($, original) => [...original, $.comment],

  rules: {
    _value: (_, original) => original,

    pair: (_, original) => original,

    // members may be separated by commas OR line breaks (HJSON-style)
    object: ($) => seq("{", lineBreakOrComma($.pair), "}"),

    string: ($, original) => choice($.quoted_string, $.multiline_string),
    // choice($.quoted_string, $.multiline_string, $.quoteless_string),

    array: ($) => seq("[", lineBreakOrComma($._value), "]"),

    // empty or non-empty, single- or double-quoted
    quoted_string: ($) =>
      choice(
        seq('"', '"'),
        seq("'", "'"),
        seq('"', $._quoted_string_content, '"'),
        seq("'", $._quoted_string_content, "'")
      ),

    // Use repeat1 here instead of repeat, as treesitter doesn't support matching with empty string
    _quoted_string_content: ($) =>
      repeat1(choice(token.immediate(/[^\\"\'\n]+/), $.escape_sequence)),

    // quoteless string is conflicting with quoted string
    // quoteless_string: ($) => repeat1(/[^\n]+/),

    // '''-delimited strings; NOTE(review): the content regex excludes "\n",
    // so the body cannot actually span multiple lines -- confirm intent
    multiline_string: ($) =>
      choice(seq("'''", "'''"), seq("'''", repeat1(/[^\\"\'\n]+/), "'''")),

    // escape_sequence: ($) => token.immediate(seq("\\", /(\"|\'|\\|\/|b|f|n|r|t|u)/)),
    escape_sequence: ($, original) => original,

    // line comments ("//" and "#") plus "/* ... */" block comments
    comment: ($) =>
      token(
        choice(seq("//", /.*/), seq("/*", /[^*]*\*+([^/*][^*]*\*+)*/, "/"), seq("#", /.*/))
      ),
  },
});
46 |
// One or more `rule`s separated by commas or line breaks; a separator may
// also be followed by nothing (dangling comma / blank line).
function lineBreakOrComma1(rule) {
  const separator = /,|\n/;
  const tail = repeat(seq(separator, optional(rule)));
  return seq(rule, tail);
}
50 |
// Zero or more `rule`s separated by commas or line breaks.
function lineBreakOrComma(rule) {
  const atLeastOne = lineBreakOrComma1(rule);
  return optional(atLeastOne);
}
54 |
--------------------------------------------------------------------------------
/intc/examples/exp1/src/config.py:
--------------------------------------------------------------------------------
1 | from intc import (
2 | MISSING,
3 | AnyField,
4 | Base,
5 | BoolField,
6 | DictField,
7 | FloatField,
8 | IntField,
9 | ListField,
10 | NestField,
11 | StrField,
12 | SubModule,
13 | cregister,
14 | )
15 |
16 |
@cregister("embedding", "bert")
class BertEmbedding(Base):
    # required (MISSING default) and declared with minimum=1
    hidden_size = IntField(
        value=MISSING, minimum=1, help="the input/output/hidden size for bert, must >= 1"
    )
    # bounds declared via minimum=0.0 / maximum=1.0
    dropout_rate = FloatField(
        value=0.0,
        minimum=0.0,
        maximum=1.0,
        help="the dropout rate for bert",
    )
27 |
28 |
@cregister("embedding", "glove")
class GloveEmbedding:
    """the glove embedding"""

    hidden_size = IntField(
        value=MISSING,
        minimum=1,
        help="the glove embedding size, must >= 1",
    )
    vocab_size = IntField(
        value=MISSING,
        minimum=1,
        help="the vocab size for glove, must >= 1",
    )
    # fix copy-paste: this help text previously said "for bert"
    dropout_rate = FloatField(
        value=0.0, minimum=0.0, maximum=1.0, help="the dropout rate for glove"
    )
46 |
47 |
@cregister("model", "simple_cls")
class Model(Base):
    # how the bert and glove embeddings are merged
    embedding_combine_method = StrField(
        value="concat",
        options=["concat", "concat_linear"],
        help="the combine method, just `concat` or use `linear` on the concated embedding",
    )
    # required (MISSING default): must be supplied by the config
    embedding_size = IntField(
        value=MISSING,
        help="the sum of bert and glove embedding size",
    )
    active = StrField(
        value="relu",
        options=["relu", "tanh", "sigmoid", "none"],
        help="the activation function",
    )
    submodule = SubModule(
        value={},
        suggestions=["embedding"],
        help="submodules for basic model",
    )
70 |
--------------------------------------------------------------------------------
/plugins/vscode/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "intc-lsp",
3 | "description": "intc language server",
4 | "author": "cstsunfu",
5 | "repository": "https://github.com/cstsunfu/intc",
6 | "icon": "pics/icon.png",
7 | "license": "Apache-2.0",
8 | "version": "0.1.0",
9 | "publisher": "cstsunfu",
10 | "engines": {
11 | "node": ">=16.17.1",
12 | "vscode": "^1.78.0"
13 | },
14 | "extensionDependencies": [
15 | "ms-python.python"
16 | ],
17 | "categories": [
18 | "Programming Languages"
19 | ],
20 | "activationEvents": [
21 | "onStartupFinished"
22 | ],
23 | "contributes": {
24 | "commands": [
25 | {
26 | "command": "intclsp.server.restart",
27 | "title": "Restart Language Server",
28 | "category": "intc"
29 | },
30 | {
31 | "command": "intclsp.server.executeCommand",
32 | "title": "Execute Command",
33 | "category": "intc"
34 | }
35 | ],
36 | "configuration": {
37 | "type": "object",
38 | "title": "Server Configuration",
39 | "properties": {}
40 | }
41 | },
42 | "main": "./out/extension",
43 | "scripts": {
44 | "vscode:prepublish": "npm run compile",
45 | "compile": "tsc -p .",
46 | "watch": "tsc -p . -w"
47 | },
48 | "devDependencies": {
49 | "@types/node": "^16.11.6",
50 | "@types/semver": "^7.5.0",
51 | "@types/vscode": "^1.78.0",
52 | "@typescript-eslint/eslint-plugin": "^5.3.0",
53 | "@typescript-eslint/parser": "^5.3.0",
54 | "eslint": "^8.2.0",
55 | "typescript": "^5.1.0"
56 | },
57 | "dependencies": {
58 | "@vscode/python-extension": "^1.0.4",
59 | "semver": "^7.5.4",
60 | "vscode-languageclient": "^8.1.0"
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/json/bindings/rust/lib.rs:
--------------------------------------------------------------------------------
1 | //! This crate provides json language support for the [tree-sitter][] parsing library.
2 | //!
3 | //! Typically, you will use the [language][language func] function to add this language to a
4 | //! tree-sitter [Parser][], and then use the parser to parse some code:
5 | //!
6 | //! ```
7 | //! let code = "";
8 | //! let mut parser = tree_sitter::Parser::new();
9 | //! parser.set_language(tree_sitter_json::language()).expect("Error loading json grammar");
10 | //! let tree = parser.parse(code, None).unwrap();
11 | //! ```
12 | //!
13 | //! [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html
14 | //! [language func]: fn.language.html
15 | //! [Parser]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Parser.html
16 | //! [tree-sitter]: https://tree-sitter.github.io/
17 |
use tree_sitter::Language;

// `tree_sitter_json` is defined in the C parser sources under csrc/json/src
// (compiled and linked by the package build -- see lsp/setup.py).
extern "C" {
    fn tree_sitter_json() -> Language;
}

/// Get the tree-sitter [Language][] for this grammar.
///
/// [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html
pub fn language() -> Language {
    // SAFETY: standard tree-sitter binding pattern; the generated parser's
    // entry point takes no arguments and returns the language descriptor.
    unsafe { tree_sitter_json() }
}

/// The content of the [`node-types.json`][] file for this grammar.
///
/// [`node-types.json`]: https://tree-sitter.github.io/tree-sitter/using-parsers#static-node-types
pub const NODE_TYPES: &'static str = include_str!("../../src/node-types.json");

// Uncomment these to include any queries that this grammar contains

// pub const HIGHLIGHTS_QUERY: &'static str = include_str!("../../queries/highlights.scm");
// pub const INJECTIONS_QUERY: &'static str = include_str!("../../queries/injections.scm");
// pub const LOCALS_QUERY: &'static str = include_str!("../../queries/locals.scm");
// pub const TAGS_QUERY: &'static str = include_str!("../../queries/tags.scm");

#[cfg(test)]
mod tests {
    #[test]
    fn test_can_load_grammar() {
        // Smoke test: the linked grammar is accepted by the parser.
        let mut parser = tree_sitter::Parser::new();
        parser
            .set_language(super::language())
            .expect("Error loading json language");
    }
}
53 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/yaml/bindings/rust/lib.rs:
--------------------------------------------------------------------------------
1 | //! This crate provides yaml language support for the [tree-sitter][] parsing library.
2 | //!
3 | //! Typically, you will use the [language][language func] function to add this language to a
4 | //! tree-sitter [Parser][], and then use the parser to parse some code:
5 | //!
6 | //! ```
7 | //! let code = "";
8 | //! let mut parser = tree_sitter::Parser::new();
9 | //! parser.set_language(tree_sitter_yaml::language()).expect("Error loading yaml grammar");
10 | //! let tree = parser.parse(code, None).unwrap();
11 | //! ```
12 | //!
13 | //! [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html
14 | //! [language func]: fn.language.html
15 | //! [Parser]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Parser.html
16 | //! [tree-sitter]: https://tree-sitter.github.io/
17 |
use tree_sitter::Language;

// `tree_sitter_yaml` is defined in the C parser sources under csrc/yaml/src
// (compiled and linked by the package build -- see lsp/setup.py).
extern "C" {
    fn tree_sitter_yaml() -> Language;
}

/// Get the tree-sitter [Language][] for this grammar.
///
/// [Language]: https://docs.rs/tree-sitter/*/tree_sitter/struct.Language.html
pub fn language() -> Language {
    // SAFETY: standard tree-sitter binding pattern; the generated parser's
    // entry point takes no arguments and returns the language descriptor.
    unsafe { tree_sitter_yaml() }
}

/// The content of the [`node-types.json`][] file for this grammar.
///
/// [`node-types.json`]: https://tree-sitter.github.io/tree-sitter/using-parsers#static-node-types
pub const NODE_TYPES: &'static str = include_str!("../../src/node-types.json");

// Uncomment these to include any queries that this grammar contains

// pub const HIGHLIGHTS_QUERY: &'static str = include_str!("../../queries/highlights.scm");
// pub const INJECTIONS_QUERY: &'static str = include_str!("../../queries/injections.scm");
// pub const LOCALS_QUERY: &'static str = include_str!("../../queries/locals.scm");
// pub const TAGS_QUERY: &'static str = include_str!("../../queries/tags.scm");

#[cfg(test)]
mod tests {
    #[test]
    fn test_can_load_grammar() {
        // Smoke test: the linked grammar is accepted by the parser.
        let mut parser = tree_sitter::Parser::new();
        parser
            .set_language(super::language())
            .expect("Error loading yaml language");
    }
}
53 |
--------------------------------------------------------------------------------
/intc/examples/exp3/src/config.py:
--------------------------------------------------------------------------------
1 | from intc import (
2 | MISSING,
3 | AnyField,
4 | Base,
5 | BoolField,
6 | DictField,
7 | FloatField,
8 | IntField,
9 | ListField,
10 | NestField,
11 | StrField,
12 | SubModule,
13 | cregister,
14 | )
15 |
16 |
@cregister("embedding", "bert")
class BertEmbedding(Base):
    # required (MISSING default); declared with minimum=1
    hidden_size = IntField(
        value=MISSING,
        minimum=1,
        help="the input/output/hidden size for bert, must >= 1",
    )
    # bounds declared via minimum=0.0 / maximum=1.0
    dropout_rate = FloatField(
        value=0.0, minimum=0.0, maximum=1.0, help="the dropout rate for bert"
    )
27 |
28 |
@cregister("embedding", "glove")
class GloveEmbedding:
    """the glove embedding"""

    hidden_size = IntField(
        value=MISSING,
        minimum=1,
        help="the glove embedding size, must >= 1",
    )
    vocab_size = IntField(
        value=MISSING,
        minimum=1,
        help="the vocab size for glove, must >= 1",
    )
    # fix copy-paste: this help text previously said "for bert"
    dropout_rate = FloatField(
        value=0.0, minimum=0.0, maximum=1.0, help="the dropout rate for glove"
    )
46 |
47 |
@cregister("decode", "classification")
class ClsDecode:
    """the classification decoder"""

    hidden_size = IntField(
        value=MISSING,
        minimum=1,
        help="the input embedding size, must >= 1",
    )
    # fix copy-paste: the help texts below previously referred to glove/bert
    vocab_size = IntField(
        value=MISSING,
        minimum=1,
        help="the vocab size for the classification decoder, must >= 1",
    )
    dropout_rate = FloatField(
        value=0.0,
        minimum=0.0,
        maximum=1.0,
        help="the dropout rate for the classification decoder",
    )
63 |
64 |
@cregister("model", "simple_cls")
class Model(Base):
    # `concat`: plain concatenation; `concat_linear`: linear layer on top
    embedding_combine_method = StrField(
        value="concat",
        options=["concat", "concat_linear"],
        help="the combine method, just `concat` or use `linear` on the concated embedding",
    )
    # required (MISSING default): must be supplied by the config
    embedding_size = IntField(
        value=MISSING, help="the sum of bert and glove embedding size"
    )
    active = StrField(
        value="relu",
        options=["relu", "tanh", "sigmoid", "none"],
        help="the activation function",
    )
    # child modules that may be plugged in under this model
    submodule = SubModule(
        value={},
        suggestions=[
            "embedding",
            "decode",
        ],
        help="submodules for basic model",
    )
88 |
--------------------------------------------------------------------------------
/intc/examples/exp1/src/__init__.py:
--------------------------------------------------------------------------------
1 | from intc import (
2 | MISSING,
3 | AnyField,
4 | Base,
5 | BoolField,
6 | DictField,
7 | FloatField,
8 | IntField,
9 | ListField,
10 | NestField,
11 | StrField,
12 | SubModule,
13 | cregister,
14 | )
15 |
16 |
@cregister("embedding", "bert")
class BertEmbedding(Base):
    # required (MISSING default); declared with minimum=1
    hidden_size = IntField(
        value=MISSING,
        minimum=1,
        help="the input/output/hidden size for bert, must >= 1",
    )
    # bounds declared via minimum=0.0 / maximum=1.0
    dropout_rate = FloatField(
        value=0.0, minimum=0.0, maximum=1.0, help="the dropout rate for bert"
    )
27 |
28 |
@cregister("embedding", "glove")
class GloveEmbedding:
    """the glove embedding"""

    hidden_size = IntField(
        value=MISSING,
        minimum=1,
        help="the glove embedding size, must >= 1",
    )
    vocab_size = IntField(
        value=MISSING,
        minimum=1,
        help="the vocab size for glove, must >= 1",
    )
    # fix copy-paste: this help text previously said "for bert"
    dropout_rate = FloatField(
        value=0.0, minimum=0.0, maximum=1.0, help="the dropout rate for glove"
    )
46 |
47 |
@cregister("decode", "classification")
class ClsDecode:
    """the classification decoder"""

    hidden_size = IntField(
        value=MISSING,
        minimum=1,
        help="the input embedding size, must >= 1",
    )
    # fix copy-paste: the help texts below previously referred to glove/bert
    vocab_size = IntField(
        value=MISSING,
        minimum=1,
        help="the vocab size for the classification decoder, must >= 1",
    )
    dropout_rate = FloatField(
        value=0.0,
        minimum=0.0,
        maximum=1.0,
        help="the dropout rate for the classification decoder",
    )
63 |
64 |
@cregister("model", "simple_cls")
class Model(Base):
    # how the embeddings are merged before decoding
    embedding_combine_method = StrField(
        value="concat",
        options=["concat", "concat_linear"],
        help="the combine method, just `concat` or use `linear` on the concated embedding",
    )
    # required (MISSING default): must be supplied by the config
    embedding_size = IntField(
        value=MISSING,
        help="the sum of bert and glove embedding size",
    )
    active = StrField(
        value="relu",
        options=["relu", "tanh", "sigmoid", "none"],
        help="the activation function",
    )
    submodule = SubModule(
        value={},
        suggestions=["embedding", "decode"],
        help="submodules for basic model",
    )
88 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/json/grammar_base.js:
--------------------------------------------------------------------------------
// Base JSON grammar shared by the intc json flavour: standard JSON values
// plus numeric keys, value-less pairs, and // or /* */ comments.
module.exports = grammar({
  name: 'json_base',

  // comments may appear anywhere whitespace is allowed
  extras: $ => [
    /\s/,
    $.comment,
  ],

  supertypes: $ => [
    $._value
  ],

  rules: {
    // a document is any number of top-level values
    document: $ => repeat($._value),

    _value: $ => choice(
      $.object,
      $.array,
      $.number,
      $.string,
      $.bool,
      $.null
    ),

    object: $ => seq(
      "{", commaSep($.pair), "}"
    ),

    // keys may be strings or numbers; the ": value" part is optional
    pair: $ => seq(
      field("key", choice($.string, $.number)),
      optional(seq(
        ":",
        field("value", $._value)
      ))
    ),

    array: $ => seq(
      "[", commaSep($._value), "]"
    ),

    // empty string needs its own branch: string_content uses repeat1
    string: $ => choice(
      seq('"', '"'),
      seq('"', $.string_content, '"')
    ),

    string_content: $ => repeat1(choice(
      token.immediate(prec(1, /[^\\"\n]+/)),
      $.escape_sequence
    )),

    escape_sequence: $ => token.immediate(seq(
      '\\',
      /(\"|\\|\/|b|f|n|r|t|u)/
    )),

    // hex/binary/octal literals and signed decimals with exponents
    number: $ => {
      const hex_literal = seq(
        choice('0x', '0X'),
        /[\da-fA-F]+/
      )

      const decimal_digits = /\d+/
      const signed_integer = seq(optional(choice('-', '+')), decimal_digits)
      const exponent_part = seq(choice('e', 'E'), signed_integer)

      const binary_literal = seq(choice('0b', '0B'), /[0-1]+/)

      const octal_literal = seq(choice('0o', '0O'), /[0-7]+/)

      const decimal_integer_literal = seq(
        optional(choice('-', '+')),
        choice(
          '0',
          seq(/[1-9]/, optional(decimal_digits))
        )
      )

      const decimal_literal = choice(
        seq(decimal_integer_literal, '.', optional(decimal_digits), optional(exponent_part)),
        seq('.', decimal_digits, optional(exponent_part)),
        seq(decimal_integer_literal, optional(exponent_part))
      )

      return token(choice(
        hex_literal,
        decimal_literal,
        binary_literal,
        octal_literal
      ))
    },

    bool: $ => choice("true", "false"),

    null: $ => "null",

    // "//" line comments and "/* ... */" block comments
    comment: $ => token(choice(
      seq('//', /.*/),
      seq(
        '/*',
        /[^*]*\*+([^/*][^*]*\*+)*/,
        '/'
      )
    )),
  }
});
106 |
// One or more comma-separated `rule`s.
function commaSep1(rule) {
  const rest = repeat(seq(",", rule))
  return seq(rule, rest)
}
110 |
// Zero or more comma-separated `rule`s.
function commaSep(rule) {
  const atLeastOne = commaSep1(rule)
  return optional(atLeastOne)
}
114 |
115 |
--------------------------------------------------------------------------------
/intc/tests/test_no_name_parser.py:
--------------------------------------------------------------------------------
1 | # Copyright cstsunfu.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | import json
7 | import os
8 | import sys
9 |
10 | import pytest
11 |
12 | from intc import (
13 | FloatField,
14 | ListField,
15 | NestField,
16 | Parser,
17 | StrField,
18 | SubModule,
19 | cregister,
20 | ic_repo,
21 | init_config,
22 | )
23 | from intc.exceptions import ValueOutOfRangeError
24 |
25 |
@pytest.fixture(scope="module", autouse=True)
def ChildConfigForTestParser():
    # Register a child config (no type name) for the duration of this test
    # module; the teardown wipes the registry so other modules start clean.
    @cregister("child_module_for_test_parser")
    class ChildConfigForTestParser:
        """child config"""

        i_am_child = StrField(value="child value", help="child value")
        i_am_float_child = FloatField(value=0, help="child value")

    yield ChildConfigForTestParser
    # teardown: remove every registration made while this module ran
    cregister.registry.clear()
    ic_repo.clear()
38 |
39 |
@pytest.fixture(scope="module", autouse=True)
def ConfigAForTestParser():
    # Register a parent config that embeds two instances ("#1" and "#2") of
    # the child module plus a nested field group; torn down after the module.
    @cregister("module_for_test_parser")
    class ConfigAForTestParser:
        """module_for_test_parser"""

        # additions=[-2] permits -2 even though it is below minimum
        epsilon = FloatField(value=1.0, minimum=0.0, additions=[-2], help="epsilon")
        list_test = ListField(value=["name"], help="list checker")
        submodule = SubModule(
            {
                "child_module_for_test_parser#1": {"i_am_child": "child value1"},
                "child_module_for_test_parser#2": {"i_am_child": "child value2"},
            }
        )

        class NestedConfig:
            nest_key = StrField(value="nest value", help="nest value")
            nest_key2 = FloatField(value=0, help="nest value2")

        nested = NestField(NestedConfig)

    yield ConfigAForTestParser
    # teardown: remove every registration made while this module ran
    cregister.registry.clear()
    ic_repo.clear()
64 |
65 |
66 | # Test simple config
def test_simple_parser(ChildConfigForTestParser):
    """A plain override of `epsilon` survives parsing and initialization."""
    raw = {
        "@module_for_test_parser": {
            "epsilon": 8.0,
        }
    }
    parsed = Parser(raw).parser()[0]
    config = init_config(parsed)
    assert config["@module_for_test_parser"].epsilon == 8.0
76 |
77 |
def test_reference_parser(ConfigAForTestParser, ChildConfigForTestParser):
    """Exercise anchors, cross-module references, @lambda, and `_search`."""
    assert (
        cregister.get("child_module_for_test_parser")
        == ChildConfigForTestParser._from_dict
    )
    config = {
        "@module_for_test_parser": {
            "_anchor": "module",
            # "epsilon": 8.0,
            # nest_key concatenates child #2's and child #new's `i_am_child`
            # values (verified by the assertion below)
            "nested": {
                "nest_key": "@$.#2.i_am_child, @$.#new.i_am_child @lambda x, y: x+y",
            },
            # references the anchored module's epsilon and adds 1
            "@child_module_for_test_parser#1": {
                "i_am_float_child": "@module.epsilon @lambda x: x+1"
            },
            "@child_module_for_test_parser#new": {"i_am_child": "new child value"},
        },
        # one parsed config is produced per candidate epsilon value
        "_search": {
            "@module_for_test_parser.epsilon": [3, 4, 8.0],
        },
    }
    configs = Parser(config).parser()
    assert len(configs) == 3
    assert init_config(configs[0])["@module_for_test_parser"].epsilon == 3
    assert (
        init_config(configs[0])["@module_for_test_parser"].nested.nest_key
        == "child value2" + "new child value"
    )
    assert init_config(configs[1])["@module_for_test_parser"].epsilon == 4
    assert init_config(configs[2])["@module_for_test_parser"].epsilon == 8
108 |
109 |
# Allow running this file directly (`python test_no_name_parser.py`)
if __name__ == "__main__":
    pytest.main()
113 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/json/src/node-types.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "type": "_value",
4 | "named": true,
5 | "subtypes": [
6 | {
7 | "type": "array",
8 | "named": true
9 | },
10 | {
11 | "type": "bool",
12 | "named": true
13 | },
14 | {
15 | "type": "null",
16 | "named": true
17 | },
18 | {
19 | "type": "number",
20 | "named": true
21 | },
22 | {
23 | "type": "object",
24 | "named": true
25 | },
26 | {
27 | "type": "string",
28 | "named": true
29 | }
30 | ]
31 | },
32 | {
33 | "type": "array",
34 | "named": true,
35 | "fields": {},
36 | "children": {
37 | "multiple": true,
38 | "required": false,
39 | "types": [
40 | {
41 | "type": "_value",
42 | "named": true
43 | }
44 | ]
45 | }
46 | },
47 | {
48 | "type": "bool",
49 | "named": true,
50 | "fields": {}
51 | },
52 | {
53 | "type": "document",
54 | "named": true,
55 | "fields": {},
56 | "children": {
57 | "multiple": true,
58 | "required": false,
59 | "types": [
60 | {
61 | "type": "_value",
62 | "named": true
63 | }
64 | ]
65 | }
66 | },
67 | {
68 | "type": "multiline_string",
69 | "named": true,
70 | "fields": {}
71 | },
72 | {
73 | "type": "object",
74 | "named": true,
75 | "fields": {},
76 | "children": {
77 | "multiple": true,
78 | "required": false,
79 | "types": [
80 | {
81 | "type": "pair",
82 | "named": true
83 | }
84 | ]
85 | }
86 | },
87 | {
88 | "type": "pair",
89 | "named": true,
90 | "fields": {
91 | "key": {
92 | "multiple": false,
93 | "required": true,
94 | "types": [
95 | {
96 | "type": "number",
97 | "named": true
98 | },
99 | {
100 | "type": "string",
101 | "named": true
102 | }
103 | ]
104 | },
105 | "value": {
106 | "multiple": false,
107 | "required": false,
108 | "types": [
109 | {
110 | "type": "_value",
111 | "named": true
112 | }
113 | ]
114 | }
115 | }
116 | },
117 | {
118 | "type": "quoted_string",
119 | "named": true,
120 | "fields": {},
121 | "children": {
122 | "multiple": true,
123 | "required": false,
124 | "types": [
125 | {
126 | "type": "escape_sequence",
127 | "named": true
128 | }
129 | ]
130 | }
131 | },
132 | {
133 | "type": "string",
134 | "named": true,
135 | "fields": {},
136 | "children": {
137 | "multiple": false,
138 | "required": true,
139 | "types": [
140 | {
141 | "type": "multiline_string",
142 | "named": true
143 | },
144 | {
145 | "type": "quoted_string",
146 | "named": true
147 | }
148 | ]
149 | }
150 | },
151 | {
152 | "type": "\"",
153 | "named": false
154 | },
155 | {
156 | "type": "'",
157 | "named": false
158 | },
159 | {
160 | "type": "'''",
161 | "named": false
162 | },
163 | {
164 | "type": ":",
165 | "named": false
166 | },
167 | {
168 | "type": "[",
169 | "named": false
170 | },
171 | {
172 | "type": "]",
173 | "named": false
174 | },
175 | {
176 | "type": "comment",
177 | "named": true
178 | },
179 | {
180 | "type": "escape_sequence",
181 | "named": true
182 | },
183 | {
184 | "type": "false",
185 | "named": false
186 | },
187 | {
188 | "type": "null",
189 | "named": true
190 | },
191 | {
192 | "type": "number",
193 | "named": true
194 | },
195 | {
196 | "type": "true",
197 | "named": false
198 | },
199 | {
200 | "type": "{",
201 | "named": false
202 | },
203 | {
204 | "type": "}",
205 | "named": false
206 | }
207 | ]
--------------------------------------------------------------------------------
/lsp/intc_lsp/src/edit.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | import time
7 |
8 | from tree_sitter import Language, Parser
9 |
10 |
def get_change(old_source_byte: bytes, new_source_byte: bytes) -> dict:
    """Compute the changed region between two byte sources, line-aligned.

    Scans matching lines from the top and from the bottom and returns the
    minimal line-aligned span that differs, in the shape expected by
    `tree_sitter.Tree.edit` (byte offsets plus (row, column) points).

    Args:
        old_source_byte: the old source
        new_source_byte: the new source
    Returns:
        a dict with start/old_end/new_end byte offsets and points
    """
    old_byte_lines = old_source_byte.split(b"\n")
    new_byte_lines = new_source_byte.split(b"\n")
    common = min(len(old_byte_lines), len(new_byte_lines))

    # Forward scan: the change starts at the first differing line.
    start_line = 0
    start_byte = 0
    for i in range(common):
        if old_byte_lines[i] != new_byte_lines[i]:
            break
        # bug fix: this used to add len(old_byte_lines) -- the total line
        # COUNT -- instead of the length of the matched line i.  The +1
        # accounts for the "\n" that split() removed.  start_line is the
        # first line that may differ (was off by one vs start_byte).
        start_line = i + 1
        start_byte += len(old_byte_lines[i]) + 1

    # Backward scan: trim matching trailing lines off both ends.
    old_end_line = len(old_byte_lines)
    new_end_line = len(new_byte_lines)
    old_end_byte = len(old_source_byte)
    new_end_byte = len(new_source_byte)
    for i in range(1, common):
        if old_byte_lines[-i] != new_byte_lines[-i]:
            old_end_line -= i
            new_end_line -= i
            break
        cur_line_byte = len(old_byte_lines[-i]) + 1
        old_end_byte -= cur_line_byte
        new_end_byte -= cur_line_byte
    return {
        "start_byte": start_byte,
        "old_end_byte": old_end_byte,
        "new_end_byte": new_end_byte,
        "start_point": (start_line, 0),
        "old_end_point": (old_end_line, 0),
        "new_end_point": (new_end_line, 0),
    }
49 |
50 |
if __name__ == "__main__":
    # Ad-hoc benchmark: measures diff computation and incremental re-parsing
    # against full parses, using the compiled json tree-sitter grammar.
    # Statement order matters here -- each timing loop isolates one stage.
    HJSON_LANGUAGE = Language("intc_lsp/lib/json_ts.so", "json")
    parser = Parser()
    parser.set_language(HJSON_LANGUAGE)

    old_source_str = """
{
    "processor": {
        "_base": "basic@span_cls#pretrained",
        "config": {
            "feed_order": ["load", "seq_lab_loader", "tokenizer", "label_gather", "span_cls_relabel", "save"]
            "tokenizer_config_path": "./data/bert/tokenizer.json", // the tokenizer config path (the tokenizer.json path)
            "data_dir": "./bert/output/", // save load data base dir
            "size": 3, // save load data base dir
            "drop": 0.3, // save load data base dir
        },
    }
}
"""

    new_source_str = """
{
    "processor": {
        "_base": "basic@span_cls#pretrained",
        "drop": 0.3, // save load data base dir
    },
}
}
"""

    old_source = old_source_str.encode("utf8")
    new_source = new_source_str.encode("utf8")

    old_tree = parser.parse(old_source)

    # Stage 1: cost of computing the change range + applying Tree.edit only
    # (the incremental parse outside the timed section keeps the tree valid).
    ctime = 0
    for _ in range(1000):
        ss = time.time()
        changes = get_change(old_source, new_source)
        old_tree.edit(**changes)
        ctime += time.time() - ss
        new_tree = parser.parse(new_source, old_tree)
    print(f"get diff: {ctime}")

    # Stage 2: diff + edit + incremental parse, all timed together.
    start = time.time()
    for _ in range(1000):
        changes = get_change(old_source, new_source)
        old_tree.edit(**changes)
        new_tree = parser.parse(new_source, old_tree)
    end = time.time()
    print(f"get diff + add parse: {end - start}")

    # Stage 3: baseline -- full (non-incremental) parse of the new source.
    start = time.time()
    for _ in range(1000):
        # old_tree.edit(**get_change(old_source_str, new_source_str))
        new_tree = parser.parse(new_source)
    end = time.time()
    print(f"full parse new: {end - start}")

    # Stage 4: baseline -- full parse of the old source.
    start = time.time()
    for _ in range(1000):
        # old_tree.edit(**get_change(old_source_str, new_source_str))
        new_tree = parser.parse(old_source)
    end = time.time()
    print(f"full parse old: {end - start}")
--------------------------------------------------------------------------------
/lsp/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | import os
7 | import sys
8 | from os import PathLike, fspath, path
9 | from platform import system
10 | from tempfile import TemporaryDirectory
11 | from typing import List
12 |
13 | from setuptools import find_packages, setup
14 |
# Map the running interpreter's platform to the suffix used in the compiled
# grammar library file names (win / mac / linux).
_PLATFORM_SUFFIXES = {"win32": "win", "darwin": "mac"}
sys_post_fix = _PLATFORM_SUFFIXES.get(sys.platform, "linux")
22 |
def write_version_py():
    """Read intc_lsp/version.txt and materialize it as intc_lsp/version.py.

    Returns:
        the version string read from version.txt
    """
    with open(os.path.join("intc_lsp", "version.txt")) as f:
        version = f.read().strip()

    # write version info to intc_lsp/version.py (the old comment said
    # "fairseq/version.py" -- a copy-paste leftover)
    with open(os.path.join("intc_lsp", "version.py"), "w") as f:
        f.write('__version__ = "{}"\n'.format(version))
    return version
32 |
# Side effect: (re)generates intc_lsp/version.py before setup() runs.
version = write_version_py()
34 |
35 |
def build_library(output_path: str, repo_paths: List[str]) -> bool:
    """
    Build a dynamic library at the given path, based on the parser
    repositories at the given paths.

    Returns `True` if the dynamic library was compiled and `False` if
    the library already existed and was modified more recently than
    any of the source files.

    Raises:
        ValueError: if `repo_paths` is empty.
    """
    output_mtime = path.getmtime(output_path) if path.exists(output_path) else 0

    if not repo_paths:
        raise ValueError("Must provide at least one language folder")

    # Collect parser/scanner sources; remember whether any C++ scanner is
    # present so the link step can pick the right target language.
    cpp = False
    source_paths = []
    for repo_path in repo_paths:
        src_path = path.join(repo_path, "src")
        source_paths.append(path.join(src_path, "parser.c"))
        if path.exists(path.join(src_path, "scanner.cc")):
            cpp = True
            source_paths.append(path.join(src_path, "scanner.cc"))
        elif path.exists(path.join(src_path, "scanner.c")):
            source_paths.append(path.join(src_path, "scanner.c"))
    source_mtimes = [path.getmtime(__file__)] + [
        path.getmtime(path_) for path_ in source_paths
    ]

    # Skip the rebuild when the existing library is newer than every source.
    if max(source_mtimes) <= output_mtime:
        return False

    # local import saves import time in the common case that nothing is compiled
    try:
        from distutils.ccompiler import new_compiler
        from distutils.unixccompiler import UnixCCompiler
    except ImportError as err:
        raise RuntimeError(
            "Failed to import distutils. You may need to install setuptools."
        ) from err

    compiler = new_compiler()
    if isinstance(compiler, UnixCCompiler):
        compiler.set_executables(compiler_cxx="c++")

    with TemporaryDirectory(suffix="tree_sitter_language") as out_dir:
        object_paths = []
        for source_path in source_paths:
            # bug fix: this used to be `flags = None` on Windows, which made
            # the ".c" branch below crash with AttributeError on None.append;
            # an empty list means "no extra flags" to distutils as well.
            if system() == "Windows":
                flags = []
            else:
                flags = ["-fPIC"]
            if source_path.endswith(".c"):
                flags.append("-std=c11")
            object_paths.append(
                compiler.compile(
                    [source_path],
                    output_dir=out_dir,
                    include_dirs=[path.dirname(source_path)],
                    extra_preargs=flags,
                )[0]
            )
        compiler.link_shared_object(
            object_paths,
            output_path,
            target_lang="c++" if cpp else "c",
        )
    return True
104 |
# Compile the bundled tree-sitter grammars into platform-suffixed shared
# libraries that intc_lsp loads at runtime.
build_library(
    os.path.join("intc_lsp", "lib", f"json_{sys_post_fix}_ts.so"),
    [os.path.join("intc_lsp", "csrc", "json")],
)
build_library(
    os.path.join("intc_lsp", "lib", f"yaml_{sys_post_fix}_ts.so"),
    [os.path.join("intc_lsp", "csrc", "yaml")],
)

# NOTE(review): `readme` and `license` are read but never passed to setup();
# presumably the metadata comes from pyproject.toml -- confirm.
with open("README.md", encoding="utf-8") as f:
    readme = f.read()

with open("LICENSE", encoding="utf-8") as f:
    license = f.read()

setup(
    version=version,
    url="https://github.com/cstsunfu/intc",
)
124 |
--------------------------------------------------------------------------------
/intc/tests/test_type_name_parser.py:
--------------------------------------------------------------------------------
1 | # Copyright cstsunfu.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | import json
7 | import os
8 | import sys
9 |
10 | import pytest
11 |
12 | from intc import (
13 | Base,
14 | FloatField,
15 | ListField,
16 | NestField,
17 | Parser,
18 | StrField,
19 | SubModule,
20 | cregister,
21 | ic_repo,
22 | init_config,
23 | )
24 | from intc.exceptions import ValueOutOfRangeError
25 |
26 |
@pytest.fixture(scope="module", autouse=True)
def ChildConfigForTestParser():
    # Register a child config under module type "child_module_for_test_parser"
    # with type name "child_a"; the teardown wipes the registry so other test
    # modules start clean.
    @cregister("child_module_for_test_parser", "child_a")
    class ChildConfigForTestParser:
        """child config"""

        i_am_child = StrField(value="child value", help="child value")
        i_am_float_child = FloatField(value=0, help="child value")

    yield ChildConfigForTestParser
    # teardown: remove every registration made while this module ran
    cregister.registry.clear()
    ic_repo.clear()
39 |
40 |
@pytest.fixture(scope="module", autouse=True)
def ConfigAForTestParser():
    # Register a parent config variant "config_a"; its submodules select the
    # child variant explicitly via "_base". Torn down after the module.
    @cregister("module_for_test_parser", "config_a")
    class ConfigAForTestParser(Base):
        """module_for_test_parser config a"""

        # additions=[-2] permits -2 even though it is below minimum
        epsilon = FloatField(value=1.0, minimum=0.0, additions=[-2], help="epsilon")
        list_test = ListField(value=["name"], help="list checker")
        submodule = SubModule(
            {
                "child_module_for_test_parser#1": {
                    "i_am_child": "child value1",
                    "_base": "child_a",
                },
                "child_module_for_test_parser#2": {
                    "i_am_child": "child value2",
                    "_base": "child_a",
                },
            }
        )

        class NestedConfig:
            nest_key = StrField(value="nest value", help="nest value")
            nest_key2 = FloatField(value=0, help="nest value2")

        nested = NestField(NestedConfig)

    yield ConfigAForTestParser
    # teardown: remove every registration made while this module ran
    cregister.registry.clear()
    ic_repo.clear()
71 |
72 |
@pytest.fixture(scope="module", autouse=True)
def ConfigA1ForTestParser(ConfigAForTestParser):
    """Register a derived config that relaxes epsilon's minimum to -10."""

    @cregister("module_for_test_parser", "config_a_1")
    class ConfigA1ForTestParser(ConfigAForTestParser):
        """module_for_test_parser config a_1"""

        epsilon = FloatField(value=2.0, minimum=-10, help="epsilon")

    yield ConfigA1ForTestParser
    # Teardown: drop all registrations so later test modules start clean.
    cregister.registry.clear()
    ic_repo.clear()
84 |
85 |
# Minimal end-to-end check: parse a flat config and read one field back.
def test_simple_parser(ChildConfigForTestParser):
    raw = {
        "@module_for_test_parser": {
            "_base": "config_a_1",
            "epsilon": 8.0,
        }
    }
    parsed = Parser(raw).parser()[0]
    initialized = init_config(parsed)
    assert initialized["@module_for_test_parser"].epsilon == 8.0
97 |
98 |
def test_reference_parser(
    ConfigAForTestParser, ChildConfigForTestParser, ConfigA1ForTestParser
):
    """References, anchors and `_search` expansion working together."""
    assert (
        cregister.get("child_module_for_test_parser", "child_a")
        == ChildConfigForTestParser._from_dict
    )
    raw = {
        "@module_for_test_parser": {
            "_base": "config_a",
            "_anchor": "module",
            "nested": {
                "nest_key": "@$.#1.i_am_child, @$.#2.i_am_child @lambda x, y: x+y",
            },
            "@child_module_for_test_parser#1": {
                "i_am_float_child": "@module.epsilon @lambda x: x+1"
            },
            "@child_module_for_test_parser#child_new": {
                "i_am_child": "new child value",
                "_name": "child_a",
            },
        },
        "_search": {
            "@module_for_test_parser.epsilon": [3, 4, 8.0],
        },
    }
    results = Parser(raw).parser()
    expected_epsilons = [3, 4, 8]
    # `_search` must expand into one config per candidate value.
    assert len(results) == len(expected_epsilons)
    for result, epsilon in zip(results, expected_epsilons):
        assert init_config(result)["@module_for_test_parser"].epsilon == epsilon
131 |
132 |
# Run the tests
if __name__ == "__main__":
    # Allow invoking this file directly instead of through the pytest CLI.
    pytest.main()
136 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/cli.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | """Intc Language Server command line interface."""
7 |
8 | import argparse
9 | import logging
10 | import os
11 | import sys
12 | from logging.handlers import RotatingFileHandler
13 | from textwrap import dedent
14 |
15 | from platformdirs import user_log_dir
16 |
17 | from intc_lsp.server import intc_server
18 | from intc_lsp.version import __version__
19 |
20 | log_dir = user_log_dir("intc_lsp")
21 |
22 |
def get_version() -> str:
    """Return the intc-lsp package version string."""
    return __version__
26 |
27 |
def cli() -> None:
    """intc language server cli entrypoint.

    Parses command line arguments, configures logging (file-based by
    default, stderr otherwise) and starts the server over stdio
    (default), TCP or websockets.
    """
    parser = argparse.ArgumentParser(
        prog="intc-language-server",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        description="intc language server: an LSP wrapper for intc.",
        epilog=dedent(
            """\
            Examples:

                Run over stdio : intc-language-server
                Run over tcp : intc-language-server --tcp
                Run over websockets:
                # only need to pip install once per env
                pip install pygls[ws]
                intc-language-server --ws

            Notes:

                For use with web sockets, user must first run
                'pip install pygls[ws]' to install the correct
                version of the websockets library.
            """
        ),
    )
    parser.add_argument(
        "--version",
        help="display version information and exit",
        action="store_true",
    )
    parser.add_argument(
        "--tcp",
        help="use TCP web server instead of stdio",
        action="store_true",
    )
    parser.add_argument(
        "--ws",
        help="use web socket server instead of stdio",
        action="store_true",
    )
    parser.add_argument(
        "--host",
        help="host for web server (default 127.0.0.1)",
        type=str,
        default="127.0.0.1",
    )
    parser.add_argument(
        "--port",
        help="port for web server (default 9999)",
        type=int,
        default=9999,
    )
    parser.add_argument(
        "--log-file",
        default=os.path.join(log_dir, "log"),
        help="redirect logs to file specified",
        type=str,
    )
    parser.add_argument(
        "--log_level",
        help="log verbosity: 0=error, 1=warning, 2=info, 3=debug (default 2)",
        # BUG FIX: without type=int the CLI value is parsed as a str, never
        # matches the int keys of the level table below, and silently falls
        # back to WARNING regardless of what the user asked for.
        type=int,
        default=2,
    )
    args = parser.parse_args()
    if args.version:
        print(get_version())
        sys.exit(0)
    if args.tcp and args.ws:
        print(
            "Error: --tcp and --ws cannot both be specified",
            file=sys.stderr,
        )
        sys.exit(1)
    # logging.WARNING is the non-deprecated spelling of logging.WARN.
    log_level = {
        0: logging.ERROR,
        1: logging.WARNING,
        2: logging.INFO,
        3: logging.DEBUG,
    }.get(
        args.log_level,
        logging.WARNING,
    )
    logger = logging.getLogger()
    logger_intc = logging.getLogger("intc_lsp")

    if args.log_file:
        log_parent = os.path.dirname(args.log_file)
        # BUG FIX: the original called os.path.isfile on the *directory*
        # path, which is always False for a directory; test for the
        # directory itself before creating it.
        if log_parent and not os.path.isdir(log_parent):
            os.makedirs(log_parent, exist_ok=True)
        logger.setLevel(log_level)
        logger_intc.setLevel(log_level)
        # Rotate at 1 MiB so the log cannot grow without bound.
        file_handler = RotatingFileHandler(
            filename=args.log_file,
            mode="w",
            encoding="utf8",
            maxBytes=1 * 1024 * 1024,
        )
        file_formatter = logging.Formatter(
            fmt="%(asctime)s - %(levelname)s - %(name)s - %(message)s",
            datefmt="%m/%d/%Y %H:%M:%S",
        )
        file_handler.setFormatter(file_formatter)
        logger.addHandler(file_handler)
    else:
        logging.basicConfig(stream=sys.stderr, level=log_level)

    # Transport selection: TCP, websockets, or stdio (default).
    if args.tcp:
        intc_server.start_tcp(host=args.host, port=args.port)
    elif args.ws:
        intc_server.start_ws(host=args.host, port=args.port)
    else:
        intc_server.start_io()
138 |
139 |
# Script entry point: run the language server CLI directly.
if __name__ == "__main__":
    cli()
142 |
--------------------------------------------------------------------------------
/intc/tests/test_config.py:
--------------------------------------------------------------------------------
1 | # Copyright cstsunfu.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | import json
7 | import os
8 | import sys
9 |
10 | import pytest
11 |
12 | from intc import (
13 | FloatField,
14 | ListField,
15 | NestField,
16 | StrField,
17 | SubModule,
18 | cregister,
19 | ic_repo,
20 | )
21 | from intc.exceptions import ValueOutOfRangeError
22 |
23 |
@pytest.fixture(scope="module", autouse=True)
def ChildConfigForTestConfig():
    """Register a child config under ("child_module_for_test_config", "child_a")."""

    @cregister("child_module_for_test_config", "child_a")
    class ChildConfigForTestConfig:
        """child_a config"""

        i_am_child = StrField(value="child value", help="child value")

    yield ChildConfigForTestConfig
    # Teardown: drop all registrations so later test modules start clean.
    cregister.registry.clear()
    ic_repo.clear()
35 |
36 |
@pytest.fixture(scope="module", autouse=True)
def ConfigAForTestConfig():
    """Register the parent config under ("module_for_test_config", "config_a")."""

    @cregister("module_for_test_config", "config_a")
    class ConfigAForTestConfig:
        """config_a config"""

        # minimum is 0.0, but -2.0 is whitelisted via `additions`
        epsilon = FloatField(value=1.0, minimum=0.0, additions=[-2.0], help="epsilon")
        # Two child-module instances, distinguished by the "#<id>" suffix.
        submodule = SubModule(
            {
                "child_module_for_test_config#1": {
                    "i_am_child": "child value1",
                    "_base": "child_a",
                },
                "child_module_for_test_config#2": {
                    "i_am_child": "child value2",
                    "_base": "child_a",
                },
            }
        )

        # Inline nested config consumed by the `nested` field below.
        class NestedConfig:
            nest_key = StrField(value="nest value", help="nest value")

        nested = NestField(NestedConfig)

    yield ConfigAForTestConfig
    # Teardown: drop all registrations so later test modules start clean.
    cregister.registry.clear()
    ic_repo.clear()
65 |
66 |
@pytest.fixture(scope="module", autouse=True)
def ConfigA1ForTestConfig(ConfigAForTestConfig):
    # Derived config: relaxes epsilon's minimum and adds a list field.
    @cregister("module_for_test_config", "config_a_1")
    class ConfigA1ForTestConfig(ConfigAForTestConfig):
        epsilon = FloatField(value=2.0, minimum=-10, help="epsilon")
        list_test = ListField(value=["name"], help="list checker")

    yield ConfigA1ForTestConfig
    # CONSISTENCY FIX: every sibling fixture in this module clears the
    # global registries on teardown; this one previously leaked its
    # registration into later test modules. Clearing twice is harmless.
    cregister.registry.clear()
    ic_repo.clear()
75 |
76 |
@pytest.fixture(scope="module", autouse=True)
def config_dict():
    """Expected serialized (_to_dict) form of a default ConfigA1ForTestConfig."""
    return {
        "_name": "config_a_1",
        "nested": {"nest_key": "nest value"},
        "epsilon": 2.0,
        "list_test": ["name"],
        "@child_module_for_test_config#1": {
            "_name": "child_a",
            "i_am_child": "child value1",
        },
        "@child_module_for_test_config#2": {
            "_name": "child_a",
            "i_am_child": "child value2",
        },
    }
95 |
96 |
# Registration must expose the config class's _from_dict constructor.
def test_module_registration_and_retrieval(ChildConfigForTestConfig):
    registered = cregister.get("child_module_for_test_config", "child_a")
    assert registered == ChildConfigForTestConfig._from_dict
103 |
104 |
# Serialization: _to_dict output must match the expected literal.
def test_config_dumps(ConfigA1ForTestConfig, config_dict):
    config = ConfigA1ForTestConfig()
    print(json.dumps(config._to_dict(), indent=4))

    expected = json.dumps(config_dict, sort_keys=True)
    actual = json.dumps(config._to_dict(), sort_keys=True)
    assert actual == expected
113 |
114 |
# A config must compare equal to its dict round-trip reconstruction.
def test_config_equality(ConfigA1ForTestConfig):
    original = ConfigA1ForTestConfig()
    rebuilt = ConfigA1ForTestConfig._from_dict(original._to_dict())
    assert original == rebuilt
120 |
121 |
# Range checking: config_a restricts epsilon, config_a_1 relaxes it.
def test_float_check(ConfigAForTestConfig, ConfigA1ForTestConfig):
    assert ConfigAForTestConfig._from_dict({"epsilon": 2.0}).epsilon == 2.0

    # -2.0 is below the minimum but allowed through the `additions` whitelist.
    assert ConfigAForTestConfig._from_dict({"epsilon": -2.0}).epsilon == -2.0

    # -3.0 is neither in range nor whitelisted: must be rejected.
    with pytest.raises(ValueOutOfRangeError) as exc_info:
        ConfigAForTestConfig._from_dict({"epsilon": -3.0})
    assert "Value -3.0 is not in range [0.0" in str(exc_info.value)

    # config_a_1 lowers the minimum to -10, so -3.0 becomes legal there.
    assert ConfigA1ForTestConfig._from_dict({"epsilon": -3.0}).epsilon == -3.0
137 |
138 |
# Default field values for both registered configs.
def test_config_field_values(ConfigAForTestConfig, ConfigA1ForTestConfig):
    assert ConfigAForTestConfig().epsilon == 1.0
    derived = ConfigA1ForTestConfig()
    assert derived.list_test == ["name"]
    assert derived.epsilon == 2.0
146 |
147 |
# Nested config defaults are reachable through attribute access.
def test_nested_config(ConfigAForTestConfig):
    assert ConfigAForTestConfig().nested.nest_key == "nest value"
152 |
153 |
# Submodule children materialize as config instances and are reachable
# both through the submodule dict and through "@"-prefixed item access.
def test_children_field(ConfigAForTestConfig, ChildConfigForTestConfig):
    config = ConfigAForTestConfig()
    child = config.submodule["child_module_for_test_config#1"]
    assert isinstance(child, ChildConfigForTestConfig)
    assert config["@child_module_for_test_config#1"].i_am_child == "child value1"
    assert child.i_am_child == "child value1"
165 |
166 |
# Run the tests
if __name__ == "__main__":
    # Allow invoking this file directly instead of through the pytest CLI.
    pytest.main()
170 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/json/src/tree_sitter/parser.h:
--------------------------------------------------------------------------------
#ifndef TREE_SITTER_PARSER_H_
#define TREE_SITTER_PARSER_H_

#ifdef __cplusplus
extern "C" {
#endif

/* BUG FIX: the three include directives had lost their targets; these are
 * the standard headers the generated tree-sitter parser depends on
 * (bool, fixed-width integer types, stdio). */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ts_builtin_sym_error ((TSSymbol)-1)
#define ts_builtin_sym_end 0
#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024

typedef uint16_t TSStateId;

#ifndef TREE_SITTER_API_H_
typedef uint16_t TSSymbol;
typedef uint16_t TSFieldId;
typedef struct TSLanguage TSLanguage;
#endif

typedef struct {
  TSFieldId field_id;
  uint8_t child_index;
  bool inherited;
} TSFieldMapEntry;

typedef struct {
  uint16_t index;
  uint16_t length;
} TSFieldMapSlice;

typedef struct {
  bool visible;
  bool named;
  bool supertype;
} TSSymbolMetadata;

typedef struct TSLexer TSLexer;

struct TSLexer {
  int32_t lookahead;
  TSSymbol result_symbol;
  void (*advance)(TSLexer *, bool);
  void (*mark_end)(TSLexer *);
  uint32_t (*get_column)(TSLexer *);
  bool (*is_at_included_range_start)(const TSLexer *);
  bool (*eof)(const TSLexer *);
};

typedef enum {
  TSParseActionTypeShift,
  TSParseActionTypeReduce,
  TSParseActionTypeAccept,
  TSParseActionTypeRecover,
} TSParseActionType;

typedef union {
  struct {
    uint8_t type;
    TSStateId state;
    bool extra;
    bool repetition;
  } shift;
  struct {
    uint8_t type;
    uint8_t child_count;
    TSSymbol symbol;
    int16_t dynamic_precedence;
    uint16_t production_id;
  } reduce;
  uint8_t type;
} TSParseAction;

typedef struct {
  uint16_t lex_state;
  uint16_t external_lex_state;
} TSLexMode;

typedef union {
  TSParseAction action;
  struct {
    uint8_t count;
    bool reusable;
  } entry;
} TSParseActionEntry;

struct TSLanguage {
  uint32_t version;
  uint32_t symbol_count;
  uint32_t alias_count;
  uint32_t token_count;
  uint32_t external_token_count;
  uint32_t state_count;
  uint32_t large_state_count;
  uint32_t production_id_count;
  uint32_t field_count;
  uint16_t max_alias_sequence_length;
  const uint16_t *parse_table;
  const uint16_t *small_parse_table;
  const uint32_t *small_parse_table_map;
  const TSParseActionEntry *parse_actions;
  const char * const *symbol_names;
  const char * const *field_names;
  const TSFieldMapSlice *field_map_slices;
  const TSFieldMapEntry *field_map_entries;
  const TSSymbolMetadata *symbol_metadata;
  const TSSymbol *public_symbol_map;
  const uint16_t *alias_map;
  const TSSymbol *alias_sequences;
  const TSLexMode *lex_modes;
  bool (*lex_fn)(TSLexer *, TSStateId);
  bool (*keyword_lex_fn)(TSLexer *, TSStateId);
  TSSymbol keyword_capture_token;
  struct {
    const bool *states;
    const TSSymbol *symbol_map;
    void *(*create)(void);
    void (*destroy)(void *);
    bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist);
    unsigned (*serialize)(void *, char *);
    void (*deserialize)(void *, const char *, unsigned);
  } external_scanner;
  const TSStateId *primary_state_ids;
};

/*
 * Lexer Macros
 */

#define START_LEXER()           \
  bool result = false;          \
  bool skip = false;            \
  bool eof = false;             \
  int32_t lookahead;            \
  goto start;                   \
  next_state:                   \
  lexer->advance(lexer, skip);  \
  start:                        \
  skip = false;                 \
  lookahead = lexer->lookahead;

#define ADVANCE(state_value) \
  {                          \
    state = state_value;     \
    goto next_state;         \
  }

#define SKIP(state_value)  \
  {                        \
    skip = true;           \
    state = state_value;   \
    goto next_state;       \
  }

#define ACCEPT_TOKEN(symbol_value)     \
  result = true;                       \
  lexer->result_symbol = symbol_value; \
  lexer->mark_end(lexer);

#define END_STATE() return result;

/*
 * Parse Table Macros
 */

#define SMALL_STATE(id) id - LARGE_STATE_COUNT

#define STATE(id) id

#define ACTIONS(id) id

#define SHIFT(state_value)            \
  {{                                  \
    .shift = {                        \
      .type = TSParseActionTypeShift, \
      .state = state_value            \
    }                                 \
  }}

#define SHIFT_REPEAT(state_value)     \
  {{                                  \
    .shift = {                        \
      .type = TSParseActionTypeShift, \
      .state = state_value,           \
      .repetition = true              \
    }                                 \
  }}

#define SHIFT_EXTRA()                 \
  {{                                  \
    .shift = {                        \
      .type = TSParseActionTypeShift, \
      .extra = true                   \
    }                                 \
  }}

#define REDUCE(symbol_val, child_count_val, ...) \
  {{                                             \
    .reduce = {                                  \
      .type = TSParseActionTypeReduce,           \
      .symbol = symbol_val,                      \
      .child_count = child_count_val,            \
      __VA_ARGS__                                \
    },                                           \
  }}

#define RECOVER()                    \
  {{                                 \
    .type = TSParseActionTypeRecover \
  }}

#define ACCEPT_INPUT()              \
  {{                                \
    .type = TSParseActionTypeAccept \
  }}

#ifdef __cplusplus
}
#endif

#endif  // TREE_SITTER_PARSER_H_
225 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/yaml/src/tree_sitter/parser.h:
--------------------------------------------------------------------------------
#ifndef TREE_SITTER_PARSER_H_
#define TREE_SITTER_PARSER_H_

#ifdef __cplusplus
extern "C" {
#endif

/* BUG FIX: the three include directives had lost their targets; these are
 * the standard headers the generated tree-sitter parser depends on
 * (bool, fixed-width integer types, stdio). */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define ts_builtin_sym_error ((TSSymbol)-1)
#define ts_builtin_sym_end 0
#define TREE_SITTER_SERIALIZATION_BUFFER_SIZE 1024

typedef uint16_t TSStateId;

#ifndef TREE_SITTER_API_H_
typedef uint16_t TSSymbol;
typedef uint16_t TSFieldId;
typedef struct TSLanguage TSLanguage;
#endif

typedef struct {
  TSFieldId field_id;
  uint8_t child_index;
  bool inherited;
} TSFieldMapEntry;

typedef struct {
  uint16_t index;
  uint16_t length;
} TSFieldMapSlice;

typedef struct {
  bool visible;
  bool named;
  bool supertype;
} TSSymbolMetadata;

typedef struct TSLexer TSLexer;

struct TSLexer {
  int32_t lookahead;
  TSSymbol result_symbol;
  void (*advance)(TSLexer *, bool);
  void (*mark_end)(TSLexer *);
  uint32_t (*get_column)(TSLexer *);
  bool (*is_at_included_range_start)(const TSLexer *);
  bool (*eof)(const TSLexer *);
};

typedef enum {
  TSParseActionTypeShift,
  TSParseActionTypeReduce,
  TSParseActionTypeAccept,
  TSParseActionTypeRecover,
} TSParseActionType;

typedef union {
  struct {
    uint8_t type;
    TSStateId state;
    bool extra;
    bool repetition;
  } shift;
  struct {
    uint8_t type;
    uint8_t child_count;
    TSSymbol symbol;
    int16_t dynamic_precedence;
    uint16_t production_id;
  } reduce;
  uint8_t type;
} TSParseAction;

typedef struct {
  uint16_t lex_state;
  uint16_t external_lex_state;
} TSLexMode;

typedef union {
  TSParseAction action;
  struct {
    uint8_t count;
    bool reusable;
  } entry;
} TSParseActionEntry;

struct TSLanguage {
  uint32_t version;
  uint32_t symbol_count;
  uint32_t alias_count;
  uint32_t token_count;
  uint32_t external_token_count;
  uint32_t state_count;
  uint32_t large_state_count;
  uint32_t production_id_count;
  uint32_t field_count;
  uint16_t max_alias_sequence_length;
  const uint16_t *parse_table;
  const uint16_t *small_parse_table;
  const uint32_t *small_parse_table_map;
  const TSParseActionEntry *parse_actions;
  const char * const *symbol_names;
  const char * const *field_names;
  const TSFieldMapSlice *field_map_slices;
  const TSFieldMapEntry *field_map_entries;
  const TSSymbolMetadata *symbol_metadata;
  const TSSymbol *public_symbol_map;
  const uint16_t *alias_map;
  const TSSymbol *alias_sequences;
  const TSLexMode *lex_modes;
  bool (*lex_fn)(TSLexer *, TSStateId);
  bool (*keyword_lex_fn)(TSLexer *, TSStateId);
  TSSymbol keyword_capture_token;
  struct {
    const bool *states;
    const TSSymbol *symbol_map;
    void *(*create)(void);
    void (*destroy)(void *);
    bool (*scan)(void *, TSLexer *, const bool *symbol_whitelist);
    unsigned (*serialize)(void *, char *);
    void (*deserialize)(void *, const char *, unsigned);
  } external_scanner;
  const TSStateId *primary_state_ids;
};

/*
 * Lexer Macros
 */

#define START_LEXER()           \
  bool result = false;          \
  bool skip = false;            \
  bool eof = false;             \
  int32_t lookahead;            \
  goto start;                   \
  next_state:                   \
  lexer->advance(lexer, skip);  \
  start:                        \
  skip = false;                 \
  lookahead = lexer->lookahead;

#define ADVANCE(state_value) \
  {                          \
    state = state_value;     \
    goto next_state;         \
  }

#define SKIP(state_value)  \
  {                        \
    skip = true;           \
    state = state_value;   \
    goto next_state;       \
  }

#define ACCEPT_TOKEN(symbol_value)     \
  result = true;                       \
  lexer->result_symbol = symbol_value; \
  lexer->mark_end(lexer);

#define END_STATE() return result;

/*
 * Parse Table Macros
 */

#define SMALL_STATE(id) id - LARGE_STATE_COUNT

#define STATE(id) id

#define ACTIONS(id) id

#define SHIFT(state_value)            \
  {{                                  \
    .shift = {                        \
      .type = TSParseActionTypeShift, \
      .state = state_value            \
    }                                 \
  }}

#define SHIFT_REPEAT(state_value)     \
  {{                                  \
    .shift = {                        \
      .type = TSParseActionTypeShift, \
      .state = state_value,           \
      .repetition = true              \
    }                                 \
  }}

#define SHIFT_EXTRA()                 \
  {{                                  \
    .shift = {                        \
      .type = TSParseActionTypeShift, \
      .extra = true                   \
    }                                 \
  }}

#define REDUCE(symbol_val, child_count_val, ...) \
  {{                                             \
    .reduce = {                                  \
      .type = TSParseActionTypeReduce,           \
      .symbol = symbol_val,                      \
      .child_count = child_count_val,            \
      __VA_ARGS__                                \
    },                                           \
  }}

#define RECOVER()                    \
  {{                                 \
    .type = TSParseActionTypeRecover \
  }}

#define ACCEPT_INPUT()              \
  {{                                \
    .type = TSParseActionTypeAccept \
  }}

#ifdef __cplusplus
}
#endif

#endif  // TREE_SITTER_PARSER_H_
225 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/src/parser_json.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | import os
7 | import sys
8 | from typing import Dict, List, Optional, Union
9 |
10 | from tree_sitter import Language, Node, Parser
11 |
# Map sys.platform onto the suffix used to name the per-OS tree-sitter
# shared library; everything that is not Windows/macOS counts as linux.
sys_post_fix = {"win32": "win", "darwin": "mac"}.get(sys.platform, "linux")
18 |
19 |
class JsonParser(object):
    """Parse a JSON/JSONC document into a lightweight AST.

    Every AST node is a dict carrying at least ``__type`` and ``__range``
    (a ``(start_point, end_point)`` pair of tree-sitter coordinates);
    most nodes also carry ``__value``.
    """

    def __init__(self):
        super(JsonParser, self).__init__()
        self._parser = Parser()
        # The grammar ships as a prebuilt shared library next to the
        # package, one per platform (see sys_post_fix above).
        dynamic_lib_path = os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            "lib",
            f"json_{sys_post_fix}_ts.so",
        )
        self._parser.set_language(Language(dynamic_lib_path, "json"))
        # Node types that carry no semantic content of their own.
        self.skip = {"comment", '"', "[", "]", "}", "{", ":"}

    def parser(self, doc: str) -> Optional[Union[Dict, List]]:
        """parser the json string to AST

        Args:
            doc: the source document

        Returns:
            the parser tree
        """
        return self.parser_object(self._parser.parse(bytes(doc, "utf8")).root_node)

    @staticmethod
    def print_parser_tree(node: Node, deep=0) -> None:
        """Debug helper: print the named-node tree indented by depth."""
        for children in node.named_children:
            print(deep * " " + children.type)
            JsonParser.print_parser_tree(children, deep + 1)

    def drop_quote(self, _text: bytes) -> str:
        """Strip one matching pair of surrounding quotes, if present."""
        text = _text.decode()
        # BUG FIX: the original condition was
        #     len(text) >= 2 and (double-quoted) or (single-quoted)
        # which, by operator precedence, applied the length guard only to
        # the double-quote branch — a lone "'" was sliced down to "".
        if len(text) >= 2 and (
            (text[0] == '"' and text[-1] == '"')
            or (text[0] == "'" and text[-1] == "'")
        ):
            return text[1:-1]
        return text

    def parser_pair(self, node: Node, deep: int) -> Dict:
        """Build the AST node for a ``"key": value`` pair."""
        key = self.parser_object(node.named_children[0])

        # A pair being edited may not have its value yet.
        if len(node.named_children) == 2:
            value = self.parser_object(node.named_children[1])
        else:
            value = None
        result = {}
        result["__type"] = "pair"
        result["__range"] = (node.start_point, node.end_point)
        result["__value"] = value
        result["__key"] = key
        return result

    def parser_string(self, node: Node, deep: int) -> Dict:
        """Build the AST node for a string literal (quotes removed)."""
        return {
            "__type": "string",
            "__value": self.drop_quote(node.text),
            "__range": (node.start_point, node.end_point),
        }

    def parser_number(self, node: Node, deep: int) -> Dict:
        """Build the AST node for a number literal."""
        # SECURITY FIX: the original used eval() on document text; parse
        # JSON numbers explicitly instead (int first, then float).
        text = node.text.decode()
        try:
            value = int(text)
        except ValueError:
            value = float(text)
        return {
            "__type": "number",
            "__value": value,
            "__range": (node.start_point, node.end_point),
        }

    def parser_array(self, node: Node, deep: int) -> Dict:
        """Build the AST node for an array, recursing into its elements."""
        return {
            "__type": "array",
            "__value": [self.parser_object(child) for child in node.named_children],
            "__range": (node.start_point, node.end_point),
        }

    def parser_null_true_false(self, node: Node, deep: int):
        """Build the AST node for ``null``/``true``/``false`` keywords."""
        return {
            "__type": node.type,
            "__value": node.text.decode(),
            "__range": (node.start_point, node.end_point),
        }

    def _wrap_children(self, node, deep):
        """Wrap each meaningful child as a top-level 'document' entry."""
        result = []
        for child in node.named_children:
            if child.type in self.skip:
                continue
            result.append(
                {
                    "__type": "document",
                    "__value": self.parser_object(child, deep + 1),
                    "__range": (child.start_point, child.end_point),
                }
            )
        return result

    def parser_object(self, node, deep=0):
        """Dispatch on the tree-sitter node type and build an AST fragment."""
        if node.type in self.skip:
            return None
        if node.type == "pair":
            return self.parser_pair(node, deep + 1)
        if node.type == "string":
            return self.parser_string(node, deep + 1)

        if node.type == "number":
            return self.parser_number(node, deep + 1)

        if node.type == "array":
            return self.parser_array(node, deep + 1)

        if node.type in {"null", "bool"}:
            return self.parser_null_true_false(node, deep + 1)

        if node.type == "object":
            values = []
            for child in node.named_children:
                if child.type in self.skip:
                    continue
                value = self.parser_object(child)
                if value:
                    values.append(value)
            return values
        # "document" and any other non-error container are handled the same
        # way (the original duplicated this loop verbatim for both cases).
        if node.type != "ERROR":
            return self._wrap_children(node, deep)
        return {
            "__type": "error",
            "__range": (node.start_point, node.end_point),
            "__meta": {"type": node.type, "text": node.text.decode()},
        }
161 |
162 |
if __name__ == "__main__":
    # Manual smoke test: requires the platform tree-sitter shared library
    # to be present under intc_lsp/lib (built by setup.py).
    parser = JsonParser()
    print(
        parser.parser(
            """
{
    "_global_":{
        "insert_0": "insert_0",
    },
    "@module_for_test_parser@config_a": {
        "_name_": "config_b",
        "_anchor_": "config_a",
        "@child_module_for_test_parser@child_new": {
            "_name_": "child_module_for_test_parser@child_a",
            "i_am_float_child": 0.0,
            "i_am_child": "new child value"
        },
        "@child_module_for_test_parser@child_b#2": {
            "_name_": "child_module_for_test_parser@child_a",
            "i_am_float_child": 0.0,
            "i_am_child": "child value2"
        }
    }
}
"""
        )
    )
190 |
--------------------------------------------------------------------------------
/intc/intc/loader.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | import copy
7 | import json
8 | import os
9 | from typing import Dict, List, Union
10 |
11 | import hjson
12 | import yaml
13 |
14 | import intc.share as G
15 | from intc.exceptions import NameError, RepeatRegisterError
16 | from intc.register import cregister, ic_help, ic_repo
17 |
18 |
def load_submodule(cur_dir: Union[str, None] = None):
    """Discover and load intc config modules declared in `.intc.json(c)`.

    Looks for a `.intc.json` / `.intc.jsonc` meta config in ``cur_dir``
    (default: the current working directory) and loads every module
    directory it lists. Runs at most once per process.

    Args:
        cur_dir: directory to search for the meta config; defaults to cwd.

    Returns:
        None
    """
    if G.LOAD_SUBMODULE_DONE:
        return
    # Mark done up-front so a failing load is not retried on every call.
    G.LOAD_SUBMODULE_DONE = True
    if not cur_dir:
        cur_dir = os.getcwd()
    intc_rc_config_path = ""
    for meta_config in [".intc.json", ".intc.jsonc"]:
        if os.path.isfile(os.path.join(cur_dir, meta_config)):
            intc_rc_config_path = os.path.join(cur_dir, meta_config)
            break
    if not intc_rc_config_path:
        return
    # RESOURCE FIX: the original leaked the file handle via
    # hjson.load(open(...)); close it deterministically.
    with open(intc_rc_config_path, "r") as meta_file:
        intc_rc_config = hjson.load(meta_file, object_pairs_hook=dict)
    modules = intc_rc_config.get("module", [])
    loader = Loader(ignore_error=True)
    for module in modules:
        module_path = os.path.join(cur_dir, module)
        loader.load_files(module_path)
38 |
39 |
40 | class Loader(object):
41 | """load config from repo paths"""
42 |
43 | def __init__(self, ignore_error=False):
44 | super(Loader, self).__init__()
45 | self.ignore_error = ignore_error
46 | self.stashed = {}
47 |
48 | def resolve(self):
49 | """resolve dependency
50 | Returns:
51 | None
52 | """
53 |
54 | while self.stashed:
55 | circle_flag = True
56 | for key in self.stashed:
57 | if self.stashed[key]["base"] in ic_repo:
58 | self.store(key, self.stashed[key])
59 | del self.stashed[key]
60 | circle_flag = False
61 | break
62 | if circle_flag:
63 | raise NameError(f"Unresolved dependency {self.stashed}")
64 |
    def store(self, key: tuple, config: dict):
        """Commit a stashed entry into the global config/help repositories.

        Args:
            key: ``(module_type, module_name)`` tuple identifying the config.
            config: stash record with ``config`` (the raw config dict),
                ``path`` (list of source file paths) and ``base`` (the
                parent module's key).

        Returns:
            None
        """
        ic_repo[key] = copy.deepcopy(config["config"])
        # Inherit the help metadata of the base module and extend its list
        # of contributing files with this config's source path(s).
        base_help = copy.deepcopy(ic_help.get(config["base"], {}))
        base_help["inter_files"] = base_help.get("inter_files", []) + config["path"]
        ic_help[key] = base_help
77 |
78 | def stash(self, config: Dict, file_path, base: tuple, key: tuple):
79 | """stash the config to the repo, and wait for resolve the dependency
80 | Returns:
81 | None
82 | """
83 | assert (
84 | key not in ic_repo and key not in self.stashed
85 | ), f"Module {key} has been registered"
86 | if base not in ic_repo:
87 | self.stashed[key] = {"config": config, "path": [file_path], "base": base}
88 | else:
89 | self.store(key, {"config": config, "path": [file_path], "base": base})
90 | return None
91 |
92 | @staticmethod
93 | def get_base(config):
94 | """
95 |
96 | Args:
97 | config:
98 | The config to save.
99 |
100 | Returns:
101 | module_base
102 | """
103 | module_name = ""
104 | if not config:
105 | return module_name
106 | assert "_base" in config, f"Invalid config {config}"
107 | module_name = config.get("_base", None) or ""
108 | return module_name
109 |
110 | @staticmethod
111 | def get_key(path):
112 | """
113 | Args:
114 | path:
115 | The path which locates the config file.
116 | Returns:
117 | module_type, module_name
118 | """
119 |
120 | module_type, module_name = "", ""
121 | if not path:
122 | return module_type, module_name
123 | path = ".".join(path.split("/")[-1].split(".")[:-1])
124 | keys = path.split("@")
125 | assert len(keys) <= 2, f"Invalid module name {path}"
126 | module_type = keys[0]
127 | if len(keys) == 2:
128 | module_name = keys[1]
129 | return module_type, module_name
130 |
131 | def load_files(self, config_dir: str, exclude: List[str] = []):
132 | """Recursively load config from config_dir
133 |
134 | Args:
135 | config_dir:
136 | The config directory that contains the config files.
137 |
138 | Kwargs:
139 | exclude:
140 | The list of files to exclude from loading.
141 |
142 | Returns:
143 | None
144 | """
145 |
146 | for root, dirs, files in os.walk(config_dir):
147 | for file in files:
148 | try:
149 | if file.startswith("_") or file in exclude:
150 | continue
151 |
152 | file_path = os.path.join(root, file)
153 | file_ext = os.path.splitext(file)[1].lower()
154 | data = None
155 |
156 | # Load JSON file
157 | if file_ext == ".json":
158 | data = self.load_json(file_path)
159 |
160 | # Load HJSON file
161 | elif file_ext == ".hjson" or file_ext == ".jsonc":
162 | data = self.load_hjson(file_path)
163 |
164 | # Load YAML file
165 | elif file_ext == ".yaml" or file_ext == ".yml":
166 | data = self.load_yaml(file_path)
167 | if data:
168 | key_module_type, key_module_name = self.get_key(file_path)
169 | base_module_name = self.get_base(data)
170 | self.stash(
171 | data,
172 | file_path,
173 | (key_module_type, base_module_name),
174 | (key_module_type, key_module_name),
175 | )
176 | except Exception as e:
177 | if not self.ignore_error:
178 | raise e
179 | return
180 |
181 | @staticmethod
182 | def load_json(file_path: str) -> Dict:
183 | """Load JSON file"""
184 | with open(file_path, "r") as f:
185 | data = json.load(f)
186 | return data
187 |
188 | @staticmethod
189 | def load_hjson(file_path: str) -> Dict:
190 | """Load HJSON/jsonc file"""
191 | with open(file_path, "r") as f:
192 | data = hjson.load(f)
193 | return data
194 |
195 | @staticmethod
196 | def load_yaml(file_path: str) -> Dict:
197 | """Load YAML file"""
198 | with open(file_path, "r") as f:
199 | data = yaml.safe_load(f)
200 | return data
201 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/yaml/src/schema.generated.cc:
--------------------------------------------------------------------------------
#include <cstdint>
#include <cassert>
2 |
namespace tree_sitter_yaml {

// Sentinel state: the schema DFA is "frozen" -- no further transitions.
const int8_t SCH_STT_FRZ = -1;

// Resolved scalar kind: string, integer, null, boolean, float.
enum ResultSchema {
  RS_STR,
  RS_INT,
  RS_NUL,
  RS_BOL,
  RS_FLT,
};

// Advance the scalar schema-detection state machine by one character.
//
// NOTE(review): this file appears to be generated (see the file name);
// do not hand-edit the transition table.
//
// `sch_stt` is the current DFA state, `cur_chr` the next character, and
// `*rlt_sch` receives the schema resolved so far. Returns the next state,
// or -1 when the DFA freezes. The character-by-character transitions
// recognise the YAML core-schema literals visible below: `~`/NULL spellings
// (RS_NUL), true/false spellings (RS_BOL), decimal/0o/0x integers (RS_INT)
// and floats including `.inf`/`.nan` spellings (RS_FLT); anything else is
// demoted to RS_STR by the fall-through check after the switch.
int8_t adv_sch_stt(int8_t sch_stt, int32_t cur_chr, ResultSchema *rlt_sch) {
  switch (sch_stt) {
    case -1:
      break;
    // Initial state: dispatch on the first character of the scalar.
    case 0:
      if (cur_chr == '.') {*rlt_sch = RS_STR; return 6;}
      if (cur_chr == '0') {*rlt_sch = RS_INT; return 37;}
      if (cur_chr == 'F') {*rlt_sch = RS_STR; return 2;}
      if (cur_chr == 'N') {*rlt_sch = RS_STR; return 16;}
      if (cur_chr == 'T') {*rlt_sch = RS_STR; return 13;}
      if (cur_chr == 'f') {*rlt_sch = RS_STR; return 17;}
      if (cur_chr == 'n') {*rlt_sch = RS_STR; return 29;}
      if (cur_chr == 't') {*rlt_sch = RS_STR; return 26;}
      if (cur_chr == '~') {*rlt_sch = RS_NUL; return 35;}
      if (cur_chr == '+' ||
          cur_chr == '-') {*rlt_sch = RS_STR; return 1;}
      if (('1' <= cur_chr && cur_chr <= '9')) {*rlt_sch = RS_INT; return 38;}
      break;
    case 1:
      if (cur_chr == '.') {*rlt_sch = RS_STR; return 7;}
      if (('0' <= cur_chr && cur_chr <= '9')) {*rlt_sch = RS_INT; return 38;}
      break;
    case 2:
      if (cur_chr == 'A') {*rlt_sch = RS_STR; return 9;}
      if (cur_chr == 'a') {*rlt_sch = RS_STR; return 22;}
      break;
    case 3:
      if (cur_chr == 'A') {*rlt_sch = RS_STR; return 12;}
      if (cur_chr == 'a') {*rlt_sch = RS_STR; return 12;}
      break;
    case 4:
      if (cur_chr == 'E') {*rlt_sch = RS_BOL; return 36;}
      break;
    case 5:
      if (cur_chr == 'F') {*rlt_sch = RS_FLT; return 41;}
      break;
    case 6:
      if (cur_chr == 'I') {*rlt_sch = RS_STR; return 11;}
      if (cur_chr == 'N') {*rlt_sch = RS_STR; return 3;}
      if (cur_chr == 'i') {*rlt_sch = RS_STR; return 24;}
      if (cur_chr == 'n') {*rlt_sch = RS_STR; return 18;}
      if (('0' <= cur_chr && cur_chr <= '9')) {*rlt_sch = RS_FLT; return 42;}
      break;
    case 7:
      if (cur_chr == 'I') {*rlt_sch = RS_STR; return 11;}
      if (cur_chr == 'i') {*rlt_sch = RS_STR; return 24;}
      if (('0' <= cur_chr && cur_chr <= '9')) {*rlt_sch = RS_FLT; return 42;}
      break;
    case 8:
      if (cur_chr == 'L') {*rlt_sch = RS_NUL; return 35;}
      break;
    case 9:
      if (cur_chr == 'L') {*rlt_sch = RS_STR; return 14;}
      break;
    case 10:
      if (cur_chr == 'L') {*rlt_sch = RS_STR; return 8;}
      break;
    case 11:
      if (cur_chr == 'N') {*rlt_sch = RS_STR; return 5;}
      if (cur_chr == 'n') {*rlt_sch = RS_STR; return 20;}
      break;
    case 12:
      if (cur_chr == 'N') {*rlt_sch = RS_FLT; return 41;}
      break;
    case 13:
      if (cur_chr == 'R') {*rlt_sch = RS_STR; return 15;}
      if (cur_chr == 'r') {*rlt_sch = RS_STR; return 28;}
      break;
    case 14:
      if (cur_chr == 'S') {*rlt_sch = RS_STR; return 4;}
      break;
    case 15:
      if (cur_chr == 'U') {*rlt_sch = RS_STR; return 4;}
      break;
    case 16:
      if (cur_chr == 'U') {*rlt_sch = RS_STR; return 10;}
      if (cur_chr == 'u') {*rlt_sch = RS_STR; return 23;}
      break;
    case 17:
      if (cur_chr == 'a') {*rlt_sch = RS_STR; return 22;}
      break;
    case 18:
      if (cur_chr == 'a') {*rlt_sch = RS_STR; return 25;}
      break;
    case 19:
      if (cur_chr == 'e') {*rlt_sch = RS_BOL; return 36;}
      break;
    case 20:
      if (cur_chr == 'f') {*rlt_sch = RS_FLT; return 41;}
      break;
    case 21:
      if (cur_chr == 'l') {*rlt_sch = RS_NUL; return 35;}
      break;
    case 22:
      if (cur_chr == 'l') {*rlt_sch = RS_STR; return 27;}
      break;
    case 23:
      if (cur_chr == 'l') {*rlt_sch = RS_STR; return 21;}
      break;
    case 24:
      if (cur_chr == 'n') {*rlt_sch = RS_STR; return 20;}
      break;
    case 25:
      if (cur_chr == 'n') {*rlt_sch = RS_FLT; return 41;}
      break;
    case 26:
      if (cur_chr == 'r') {*rlt_sch = RS_STR; return 28;}
      break;
    case 27:
      if (cur_chr == 's') {*rlt_sch = RS_STR; return 19;}
      break;
    case 28:
      if (cur_chr == 'u') {*rlt_sch = RS_STR; return 19;}
      break;
    case 29:
      if (cur_chr == 'u') {*rlt_sch = RS_STR; return 23;}
      break;
    case 30:
      if (cur_chr == '+' ||
          cur_chr == '-') {*rlt_sch = RS_STR; return 32;}
      if (('0' <= cur_chr && cur_chr <= '9')) {*rlt_sch = RS_FLT; return 43;}
      break;
    case 31:
      if (('0' <= cur_chr && cur_chr <= '7')) {*rlt_sch = RS_INT; return 39;}
      break;
    case 32:
      if (('0' <= cur_chr && cur_chr <= '9')) {*rlt_sch = RS_FLT; return 43;}
      break;
    case 33:
      if (('0' <= cur_chr && cur_chr <= '9') ||
          ('A' <= cur_chr && cur_chr <= 'F') ||
          ('a' <= cur_chr && cur_chr <= 'f')) {*rlt_sch = RS_INT; return 40;}
      break;
    // State 34 is not expected to be reachable via the generated table.
    case 34:
      assert(false);
      break;
    // States 35-43 are accepting: latch the resolved schema, then fall
    // through to the string-demotion check below the switch.
    case 35:
      *rlt_sch = RS_NUL;
      break;
    case 36:
      *rlt_sch = RS_BOL;
      break;
    case 37:
      *rlt_sch = RS_INT;
      if (cur_chr == '.') {*rlt_sch = RS_FLT; return 42;}
      if (cur_chr == 'o') {*rlt_sch = RS_STR; return 31;}
      if (cur_chr == 'x') {*rlt_sch = RS_STR; return 33;}
      if (cur_chr == 'E' ||
          cur_chr == 'e') {*rlt_sch = RS_STR; return 30;}
      if (('0' <= cur_chr && cur_chr <= '9')) {*rlt_sch = RS_INT; return 38;}
      break;
    case 38:
      *rlt_sch = RS_INT;
      if (cur_chr == '.') {*rlt_sch = RS_FLT; return 42;}
      if (cur_chr == 'E' ||
          cur_chr == 'e') {*rlt_sch = RS_STR; return 30;}
      if (('0' <= cur_chr && cur_chr <= '9')) {*rlt_sch = RS_INT; return 38;}
      break;
    case 39:
      *rlt_sch = RS_INT;
      if (('0' <= cur_chr && cur_chr <= '7')) {*rlt_sch = RS_INT; return 39;}
      break;
    case 40:
      *rlt_sch = RS_INT;
      if (('0' <= cur_chr && cur_chr <= '9') ||
          ('A' <= cur_chr && cur_chr <= 'F') ||
          ('a' <= cur_chr && cur_chr <= 'f')) {*rlt_sch = RS_INT; return 40;}
      break;
    case 41:
      *rlt_sch = RS_FLT;
      break;
    case 42:
      *rlt_sch = RS_FLT;
      if (cur_chr == 'E' ||
          cur_chr == 'e') {*rlt_sch = RS_STR; return 30;}
      if (('0' <= cur_chr && cur_chr <= '9')) {*rlt_sch = RS_FLT; return 42;}
      break;
    case 43:
      *rlt_sch = RS_FLT;
      if (('0' <= cur_chr && cur_chr <= '9')) {*rlt_sch = RS_FLT; return 43;}
      break;
    default:
      *rlt_sch = RS_STR;
      return -1;
  }
  // Any further non-terminator character demotes the scalar to a string.
  if (cur_chr != '\r' && cur_chr != '\n' && cur_chr != ' ' && cur_chr != 0) *rlt_sch = RS_STR;
  return -1;
}

}
--------------------------------------------------------------------------------
/lsp/intc_lsp/src/parser_yaml.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | import os
7 | import sys
8 | from typing import Dict, List, Optional, Union
9 |
10 | from tree_sitter import Language, Node, Parser
11 |
# Platform tag used to pick the prebuilt tree-sitter shared library
# (yaml_<tag>_ts.so): "win", "mac" or "linux".
sys_post_fix = {"win32": "win", "darwin": "mac"}.get(sys.platform, "linux")
18 |
19 |
class YamlParser(object):
    """Parse YAML source into a simplified AST of plain dicts.

    Uses the bundled tree-sitter YAML grammar. Every produced node carries
    "__type", "__value" and "__range" ((start_point, end_point)) entries so
    the LSP side can map values back to document positions.
    """

    def __init__(self):
        super(YamlParser, self).__init__()
        self._parser = Parser()
        # The prebuilt grammar shared library ships with the package under
        # `lib/`, one build per platform (see `sys_post_fix`).
        dynamic_lib_path = os.path.join(
            os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
            "lib",
            f"yaml_{sys_post_fix}_ts.so",
        )
        self._parser.set_language(Language(dynamic_lib_path, "yaml"))
        # Node types ignored while walking the tree.
        self.skip = {"comment"}

    def parser(self, doc: str) -> Optional[Union[Dict, List]]:
        """parser the yaml string to AST
        Args:
            doc: the source document

        Returns:
            the parser tree
        """
        return self.parser_object(self._parser.parse(bytes(doc, "utf8")).root_node)

    @staticmethod
    def print_parser_tree(node: Node, deep=0) -> None:
        """Debug helper: print the named-node tree, indented by depth."""
        for children in node.named_children:
            print(deep * " " + children.type)
            YamlParser.print_parser_tree(children, deep + 1)

    def drop_quote(self, _text: bytes) -> str:
        """Decode bytes and strip one matching pair of surrounding quotes."""
        text = _text.decode()
        # The quote checks must be grouped together: without the grouping a
        # 1-char string consisting of a single quote character would pass
        # the `or` branch and be stripped to "".
        if len(text) >= 2 and (
            (text[0] == '"' and text[-1] == '"')
            or (text[0] == "'" and text[-1] == "'")
        ):
            return text[1:-1]
        return text

    def parser_pair(self, node: Node, deep: int) -> Dict:
        """Parse a block_mapping_pair node into a pair dict."""
        # A single parse of the key is enough (the previous duplicate call
        # only wasted work; its result was immediately overwritten).
        key = self.parser_object(node.named_children[0], deep + 1)
        if len(node.named_children) == 2:
            value = self.parser_object(node.named_children[1], deep + 1)
        else:
            # A key without a value (e.g. "key:") yields value None.
            assert len(node.named_children) == 1
            value = None
        result = {}
        result["__type"] = "pair"
        result["__range"] = (node.start_point, node.end_point)
        result["__value"] = value
        result["__key"] = key
        return result

    def parser_flow_node(self, node: Node, deep: int):
        """Unwrap a flow_node, which always has exactly one named child."""
        assert len(node.named_children) == 1, node.named_children
        return self.parser_object(node.named_children[0], deep + 1)

    def parser_plain_scalar(self, node: Node, deep: int):
        """Unwrap a plain_scalar, which always has exactly one named child."""
        assert len(node.named_children) == 1, node.named_children
        return self.parser_object(node.named_children[0], deep + 1)

    def parser_string(self, node: Node, deep: int) -> Dict:
        """Parse a string scalar node (plain, single- or double-quoted)."""
        assert len(node.named_children) == 0, node.named_children
        return {
            "__type": "string",
            "__value": self.drop_quote(node.text),
            "__range": (node.start_point, node.end_point),
        }

    def parser_number(self, node: Node, deep: int) -> Dict:
        """Parse an integer/float scalar node."""
        assert len(node.named_children) == 0, node.named_children
        return {
            "__type": "number",
            # NOTE(security): eval on the scalar text covers the numeric
            # notations; do not feed untrusted documents through here.
            "__value": eval(node.text.decode()),
            "__range": (node.start_point, node.end_point),
        }

    def parser_array(self, node: Node, deep: int) -> Dict:
        """Parse a flow_sequence node into an array dict."""
        return {
            "__type": "array",
            # Propagate the incremented depth like every other recursive
            # call; the original omitted it, resetting children to depth 0
            # (which would re-trigger the top-level ERROR handling).
            "__value": [
                self.parser_object(child, deep + 1) for child in node.named_children
            ],
            "__range": (node.start_point, node.end_point),
        }

    def parser_block_node(self, node: Node, deep: int):
        """Unwrap a block_node, which always has exactly one named child."""
        assert len(node.named_children) == 1, node.named_children
        return self.parser_object(node.named_children[0], deep + 1)

    def parser_null_true_false(self, node: Node, deep: int):
        """Parse a null/boolean scalar node (the literal value is not kept)."""
        type_map = {
            "null_scalar": "null",
            "boolean_scalar": "bool",
        }
        return {
            "__type": type_map[node.type],
            "__value": "null_or_bool",
            "__range": (node.start_point, node.end_point),
        }

    def parser_object(self, node: Node, deep: int = 0):
        """Dispatch a tree-sitter node to the matching parser_* handler."""
        # A top-level ERROR node (malformed document) is parsed best-effort:
        # return the list of whatever children could be parsed.
        if deep == 0 and node.type == "ERROR":
            parser_childs = [
                self.parser_object(child)
                for child in node.named_children
                if child.type not in self.skip
            ]
            return parser_childs
        if node.type in self.skip:
            return None
        if node.type == "block_mapping_pair":
            return self.parser_pair(node, deep + 1)

        if node.type == "flow_sequence":
            return self.parser_array(node, deep + 1)
        if node.type == "plain_scalar":
            return self.parser_plain_scalar(node, deep + 1)
        if node.type == "flow_node":
            return self.parser_flow_node(node, deep + 1)
        if node.type in {"string_scalar", "double_quote_scalar", "single_quote_scalar"}:
            return self.parser_string(node, deep + 1)

        if node.type in {"integer_scalar", "float_scalar"}:
            return self.parser_number(node, deep + 1)

        if node.type in {"boolean_scalar", "null_scalar"}:
            return self.parser_null_true_false(node, deep + 1)

        if node.type == "stream":
            # A stream yields its first parseable child.
            for child in node.named_children:
                result = self.parser_object(child, deep + 1)
                if result is not None:
                    return result
            return {}

        if node.type == "block_node":
            return self.parser_block_node(node, deep + 1)
        if node.type == "block_mapping":
            values = []
            for child in node.named_children:
                if child.type in self.skip:
                    continue
                value = self.parser_object(child, deep + 1)
                if value:
                    values.append(value)
            return values

        if node.type == "block_sequence":
            return {
                "__type": "array",
                # NOTE(review): `deep` (not deep + 1) is kept as in the
                # original -- confirm before changing.
                "__value": [
                    self.parser_object(child, deep) for child in node.named_children
                ],
                "__range": (node.start_point, node.end_point),
            }
        if node.type == "block_sequence_item":
            return self.parser_object(node.named_children[0], deep + 1)

        if node.type == "document":
            result = []
            for child in node.named_children:
                if child.type in self.skip:
                    continue
                result.append(
                    {
                        "__type": "document",
                        "__value": self.parser_object(child, deep + 1),
                        "__range": (child.start_point, child.end_point),
                    }
                )
            return result
        # Unknown/unhandled node types collapse to an empty dict.
        return {}
193 |
194 |
if __name__ == "__main__":
    # Manual smoke test: parse a small intc-style YAML snippet and dump the
    # resulting AST to stdout.
    demo_source = """
# type=dlk

"@config":
    "_name_": basic
    "@optimizer@adam": 1
name:
    - a
    - b
"""
    yaml_parser = YamlParser()
    print(yaml_parser.parser(demo_source))
211 |
--------------------------------------------------------------------------------
/intc/intc/register.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | import inspect
7 | from typing import Any, Callable, Dict, Type, TypeVar
8 |
9 | from attrs import asdict, define, field, fields, fields_dict
10 |
11 | from intc.config import Base
12 | from intc.exceptions import (
13 | InConsistentNameError,
14 | NameError,
15 | NoModuleFoundError,
16 | RepeatRegisterError,
17 | )
18 | from intc.share import get_registed_instance, registry
19 | from intc.utils import module_name_check
20 |
# (module_type, module_name) -> the module's default config dict (lazy form).
ic_repo = {}
# (module_type, module_name) -> help metadata (source position, properties, docs).
ic_help = {}
# module_type -> set of module names registered under that type.
type_module_map = {}

# Type variable so register decorators preserve the decorated class's type.
SpecificConfig = TypeVar("SpecificConfig")
26 |
27 |
class Register(object):
    """Registry of intc config modules.

    A module is registered under a ``(type_name, name)`` pair. Registration
    also records help metadata in ``ic_help`` (source position, properties,
    docs) and the lazily-serialized default config in ``ic_repo``, both used
    by the language server.
    """

    registry = registry

    def __init__(self):
        # Fields implicitly accepted by every registered config module;
        # merged into each module's help "properties".
        self.built_in_field = {
            "_base": {
                "description": "The inherit base module name",
                "type_name": "BaseField",
                "type": "string",
            },
            "_G": {
                "description": "The global parameters\nYou can reference them anywhere start with `~` or `_G` ",
                "type": "object",
                "type_name": "Global",
            },
            "_anchor": {
                "description": "The reference anchor.",
                "type_name": "Anchor",
                "type": "object",
            },
            "_search": {
                "description": "Search the parameters(cartesian product).",
                "type_name": "Search",
                "type": "object",
            },
        }

    def register(self, type_name: str = "", name: str = "") -> Callable:
        """register the named module, you can only provide the type name, or type name and module name

        Args:
            type_name: the type name
            name: the specific module name in the type

        Returns:
            the module

        """
        skip_regist = False  # if the type_name is not provided, we will skip register the module to the registry
        if not type_name:
            assert (
                not name
            ), "You must provide the type name if you want to register a module"
            skip_regist = True
        if not skip_regist:
            module_name_check(type_name)
            if name:
                module_name_check(name)

        def get_help(wrap_module, is_nest=False):
            # Build the help metadata for an attrs-wrapped config class:
            # source position, raw source lines and a per-field property map.
            help_dict = {}
            lines, line_no = inspect.getsourcelines(wrap_module)
            source_file = inspect.getabsfile(wrap_module)
            help_dict["position"] = {
                "file_path": source_file,
                "line_no": line_no,
            }
            help_dict["lines"] = lines
            help_dict["properties"] = {}

            for key in fields(wrap_module):
                if (not is_nest) and key.name == "submodule":
                    assert key.metadata["type_name"] == "SubModule"
                    module_types = []

                    if key.metadata.get("suggestions", None):
                        # Copy the metadata list: the appends below must not
                        # mutate the field's own metadata, which would leak
                        # defaults into "suggestions" across registrations.
                        module_types = list(key.metadata["suggestions"])
                    if key.default and isinstance(key.default, dict):
                        for module_type in key.default:
                            module_types.append(module_type)
                    module_types = [
                        module_type.lstrip("@") for module_type in module_types
                    ]
                    for child in module_types:
                        help_dict["properties"][f"@{child}"] = {
                            "type": "object",
                            "type_name": "SubModule",
                            "properties": {},
                            "default": {},
                        }
                        # Record the submodule's default base when its
                        # default dict names one via `_base` or `_name`.
                        if (
                            isinstance(key.default, dict)
                            and child in key.default
                            and isinstance(key.default[child], dict)
                            and (
                                "_base" in key.default[child]
                                or "_name" in key.default[child]
                            )
                        ):
                            base = key.default[child].get("_base", "") or key.default[
                                child
                            ].get("_name", "")
                            help_dict["properties"][f"@{child}"]["default"] = {
                                "_base": base
                            }
                elif key.metadata["type_name"] == "NestField":
                    # Recurse into the nested object type for its properties.
                    nest_help = get_help(
                        define(key.metadata["object_type"]), is_nest=True
                    ).get("properties", {})
                    metadata_dict = dict(key.metadata)
                    metadata_dict.pop("object_type")
                    help_dict["properties"][key.name] = metadata_dict
                    help_dict["properties"][key.name]["properties"] = nest_help
                else:
                    # Plain field: expose its metadata (minus internals).
                    help_dict["properties"][key.name] = {
                        k: key.metadata[k]
                        for k in key.metadata
                        if k not in {"object_type"}
                    }
            return help_dict

        def decorator(module) -> Base:
            if not skip_regist and type_name not in registry:
                registry[type_name] = {}
            if not skip_regist and name in registry[type_name]:
                raise RepeatRegisterError(
                    f"The {name} is already registered in {type_name}. Registed: {registry[type_name][name]}"
                )

            module_doc = module.__doc__
            # Plain classes are rebased onto `Base` before attrs-wrapping so
            # they gain the config machinery (e.g. `_to_dict`).
            if module.__bases__ == (object,):
                module = type(module.__name__, (Base,), dict(module.__dict__))
            wrap_module = define(module)

            field_help = get_help(wrap_module)
            field_help["properties"] = field_help.get("properties", {})
            for built_in in self.built_in_field:
                field_help["properties"][built_in] = self.built_in_field[built_in]
            field_help["_name"] = name
            field_help["description"] = (
                module_doc if module_doc else "No Module Document"
            )
            wrap_module.__meta__ = field_help
            if not skip_regist:
                registry[type_name][name] = wrap_module
                ic_help[(type_name, name)] = field_help
                type_modules = type_module_map.get(type_name, set())
                type_modules.add(name)
                type_module_map[type_name] = type_modules
                ic_repo[(type_name, name)] = wrap_module()._to_dict(lazy=True)
            return wrap_module

        return decorator

    def get(self, type_name: str, name: str = "", get_class=False) -> Any:
        """get the module by name

        Args:
            type_name: the module type name
            name: the module name
            get_class: return the module class if True, else return the module class's _from_dict method

        Returns:
            registered module

        """
        return get_registed_instance(type_name, name, get_class)

    def __call__(
        self, type_name: str = "", name: str = ""
    ) -> Callable[[Type[SpecificConfig]], Type[SpecificConfig]]:
        """you can directly call the object, the behavior is the same as object.register(name)"""

        return self.register(type_name, name)

    def __getitem__(self, type_and_name: tuple) -> Any:
        """wrap for object.get(name)"""
        type_name, name = type_and_name
        return get_registed_instance(type_name, name)
199 |
200 |
# The shared, global register instance used across intc.
cregister = Register()
# Bare `@dataclass` wraps a config class with the attrs machinery without
# adding it to the registry (register() with no type_name skips registration).
dataclass = cregister.register()
203 |
--------------------------------------------------------------------------------
/lsp/intc_lsp/csrc/yaml/src/node-types.json:
--------------------------------------------------------------------------------
1 | [
2 | {
3 | "type": "alias",
4 | "named": true,
5 | "fields": {},
6 | "children": {
7 | "multiple": false,
8 | "required": true,
9 | "types": [
10 | {
11 | "type": "alias_name",
12 | "named": true
13 | }
14 | ]
15 | }
16 | },
17 | {
18 | "type": "anchor",
19 | "named": true,
20 | "fields": {},
21 | "children": {
22 | "multiple": false,
23 | "required": true,
24 | "types": [
25 | {
26 | "type": "anchor_name",
27 | "named": true
28 | }
29 | ]
30 | }
31 | },
32 | {
33 | "type": "block_mapping",
34 | "named": true,
35 | "fields": {},
36 | "children": {
37 | "multiple": true,
38 | "required": true,
39 | "types": [
40 | {
41 | "type": "block_mapping_pair",
42 | "named": true
43 | }
44 | ]
45 | }
46 | },
47 | {
48 | "type": "block_mapping_pair",
49 | "named": true,
50 | "fields": {
51 | "key": {
52 | "multiple": false,
53 | "required": false,
54 | "types": [
55 | {
56 | "type": "block_node",
57 | "named": true
58 | },
59 | {
60 | "type": "flow_node",
61 | "named": true
62 | }
63 | ]
64 | },
65 | "value": {
66 | "multiple": false,
67 | "required": false,
68 | "types": [
69 | {
70 | "type": "block_node",
71 | "named": true
72 | },
73 | {
74 | "type": "flow_node",
75 | "named": true
76 | }
77 | ]
78 | }
79 | }
80 | },
81 | {
82 | "type": "block_node",
83 | "named": true,
84 | "fields": {},
85 | "children": {
86 | "multiple": true,
87 | "required": true,
88 | "types": [
89 | {
90 | "type": "anchor",
91 | "named": true
92 | },
93 | {
94 | "type": "block_mapping",
95 | "named": true
96 | },
97 | {
98 | "type": "block_scalar",
99 | "named": true
100 | },
101 | {
102 | "type": "block_sequence",
103 | "named": true
104 | },
105 | {
106 | "type": "tag",
107 | "named": true
108 | }
109 | ]
110 | }
111 | },
112 | {
113 | "type": "block_scalar",
114 | "named": true,
115 | "fields": {}
116 | },
117 | {
118 | "type": "block_sequence",
119 | "named": true,
120 | "fields": {},
121 | "children": {
122 | "multiple": true,
123 | "required": true,
124 | "types": [
125 | {
126 | "type": "block_sequence_item",
127 | "named": true
128 | }
129 | ]
130 | }
131 | },
132 | {
133 | "type": "block_sequence_item",
134 | "named": true,
135 | "fields": {},
136 | "children": {
137 | "multiple": false,
138 | "required": false,
139 | "types": [
140 | {
141 | "type": "block_node",
142 | "named": true
143 | },
144 | {
145 | "type": "flow_node",
146 | "named": true
147 | }
148 | ]
149 | }
150 | },
151 | {
152 | "type": "document",
153 | "named": true,
154 | "fields": {},
155 | "children": {
156 | "multiple": true,
157 | "required": false,
158 | "types": [
159 | {
160 | "type": "block_node",
161 | "named": true
162 | },
163 | {
164 | "type": "flow_node",
165 | "named": true
166 | },
167 | {
168 | "type": "reserved_directive",
169 | "named": true
170 | },
171 | {
172 | "type": "tag_directive",
173 | "named": true
174 | },
175 | {
176 | "type": "yaml_directive",
177 | "named": true
178 | }
179 | ]
180 | }
181 | },
182 | {
183 | "type": "double_quote_scalar",
184 | "named": true,
185 | "fields": {},
186 | "children": {
187 | "multiple": true,
188 | "required": false,
189 | "types": [
190 | {
191 | "type": "escape_sequence",
192 | "named": true
193 | }
194 | ]
195 | }
196 | },
197 | {
198 | "type": "flow_mapping",
199 | "named": true,
200 | "fields": {},
201 | "children": {
202 | "multiple": true,
203 | "required": false,
204 | "types": [
205 | {
206 | "type": "flow_node",
207 | "named": true
208 | },
209 | {
210 | "type": "flow_pair",
211 | "named": true
212 | }
213 | ]
214 | }
215 | },
216 | {
217 | "type": "flow_node",
218 | "named": true,
219 | "fields": {},
220 | "children": {
221 | "multiple": true,
222 | "required": true,
223 | "types": [
224 | {
225 | "type": "alias",
226 | "named": true
227 | },
228 | {
229 | "type": "anchor",
230 | "named": true
231 | },
232 | {
233 | "type": "double_quote_scalar",
234 | "named": true
235 | },
236 | {
237 | "type": "flow_mapping",
238 | "named": true
239 | },
240 | {
241 | "type": "flow_sequence",
242 | "named": true
243 | },
244 | {
245 | "type": "plain_scalar",
246 | "named": true
247 | },
248 | {
249 | "type": "single_quote_scalar",
250 | "named": true
251 | },
252 | {
253 | "type": "tag",
254 | "named": true
255 | }
256 | ]
257 | }
258 | },
259 | {
260 | "type": "flow_pair",
261 | "named": true,
262 | "fields": {
263 | "key": {
264 | "multiple": false,
265 | "required": false,
266 | "types": [
267 | {
268 | "type": "flow_node",
269 | "named": true
270 | }
271 | ]
272 | },
273 | "value": {
274 | "multiple": false,
275 | "required": false,
276 | "types": [
277 | {
278 | "type": "flow_node",
279 | "named": true
280 | }
281 | ]
282 | }
283 | }
284 | },
285 | {
286 | "type": "flow_sequence",
287 | "named": true,
288 | "fields": {},
289 | "children": {
290 | "multiple": true,
291 | "required": false,
292 | "types": [
293 | {
294 | "type": "flow_node",
295 | "named": true
296 | },
297 | {
298 | "type": "flow_pair",
299 | "named": true
300 | }
301 | ]
302 | }
303 | },
304 | {
305 | "type": "plain_scalar",
306 | "named": true,
307 | "fields": {},
308 | "children": {
309 | "multiple": false,
310 | "required": true,
311 | "types": [
312 | {
313 | "type": "boolean_scalar",
314 | "named": true
315 | },
316 | {
317 | "type": "float_scalar",
318 | "named": true
319 | },
320 | {
321 | "type": "integer_scalar",
322 | "named": true
323 | },
324 | {
325 | "type": "null_scalar",
326 | "named": true
327 | },
328 | {
329 | "type": "string_scalar",
330 | "named": true
331 | }
332 | ]
333 | }
334 | },
335 | {
336 | "type": "reserved_directive",
337 | "named": true,
338 | "fields": {},
339 | "children": {
340 | "multiple": true,
341 | "required": true,
342 | "types": [
343 | {
344 | "type": "directive_name",
345 | "named": true
346 | },
347 | {
348 | "type": "directive_parameter",
349 | "named": true
350 | }
351 | ]
352 | }
353 | },
354 | {
355 | "type": "single_quote_scalar",
356 | "named": true,
357 | "fields": {},
358 | "children": {
359 | "multiple": true,
360 | "required": false,
361 | "types": [
362 | {
363 | "type": "escape_sequence",
364 | "named": true
365 | }
366 | ]
367 | }
368 | },
369 | {
370 | "type": "stream",
371 | "named": true,
372 | "fields": {},
373 | "children": {
374 | "multiple": true,
375 | "required": false,
376 | "types": [
377 | {
378 | "type": "document",
379 | "named": true
380 | }
381 | ]
382 | }
383 | },
384 | {
385 | "type": "tag_directive",
386 | "named": true,
387 | "fields": {},
388 | "children": {
389 | "multiple": true,
390 | "required": true,
391 | "types": [
392 | {
393 | "type": "tag_handle",
394 | "named": true
395 | },
396 | {
397 | "type": "tag_prefix",
398 | "named": true
399 | }
400 | ]
401 | }
402 | },
403 | {
404 | "type": "yaml_directive",
405 | "named": true,
406 | "fields": {},
407 | "children": {
408 | "multiple": false,
409 | "required": true,
410 | "types": [
411 | {
412 | "type": "yaml_version",
413 | "named": true
414 | }
415 | ]
416 | }
417 | },
418 | {
419 | "type": "\"",
420 | "named": false
421 | },
422 | {
423 | "type": "&",
424 | "named": false
425 | },
426 | {
427 | "type": "'",
428 | "named": false
429 | },
430 | {
431 | "type": "*",
432 | "named": false
433 | },
434 | {
435 | "type": ",",
436 | "named": false
437 | },
438 | {
439 | "type": "-",
440 | "named": false
441 | },
442 | {
443 | "type": "---",
444 | "named": false
445 | },
446 | {
447 | "type": "...",
448 | "named": false
449 | },
450 | {
451 | "type": ":",
452 | "named": false
453 | },
454 | {
455 | "type": ">",
456 | "named": false
457 | },
458 | {
459 | "type": "?",
460 | "named": false
461 | },
462 | {
463 | "type": "[",
464 | "named": false
465 | },
466 | {
467 | "type": "]",
468 | "named": false
469 | },
470 | {
471 | "type": "alias_name",
472 | "named": true
473 | },
474 | {
475 | "type": "anchor_name",
476 | "named": true
477 | },
478 | {
479 | "type": "boolean_scalar",
480 | "named": true
481 | },
482 | {
483 | "type": "comment",
484 | "named": true
485 | },
486 | {
487 | "type": "directive_name",
488 | "named": true
489 | },
490 | {
491 | "type": "directive_parameter",
492 | "named": true
493 | },
494 | {
495 | "type": "escape_sequence",
496 | "named": true
497 | },
498 | {
499 | "type": "float_scalar",
500 | "named": true
501 | },
502 | {
503 | "type": "integer_scalar",
504 | "named": true
505 | },
506 | {
507 | "type": "null_scalar",
508 | "named": true
509 | },
510 | {
511 | "type": "string_scalar",
512 | "named": true
513 | },
514 | {
515 | "type": "tag",
516 | "named": true
517 | },
518 | {
519 | "type": "tag_handle",
520 | "named": true
521 | },
522 | {
523 | "type": "tag_prefix",
524 | "named": true
525 | },
526 | {
527 | "type": "yaml_version",
528 | "named": true
529 | },
530 | {
531 | "type": "{",
532 | "named": false
533 | },
534 | {
535 | "type": "|",
536 | "named": false
537 | },
538 | {
539 | "type": "}",
540 | "named": false
541 | }
542 | ]
--------------------------------------------------------------------------------
/lsp/intc_lsp/server.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | import importlib
7 | import io
8 | import json
9 | import logging
10 | import os
11 | import re
12 | import sys
13 | from pathlib import Path
14 | from typing import Any, Callable, Dict, List, Optional, Tuple
15 | from urllib.parse import urlparse
16 |
17 | import hjson
18 | from intc import ic_repo
19 | from intc.loader import load_submodule
20 | from lsprotocol.types import (
21 | ALL_TYPES_MAP,
22 | INITIALIZE,
23 | INITIALIZED,
24 | TEXT_DOCUMENT_CODE_ACTION,
25 | TEXT_DOCUMENT_COMPLETION,
26 | TEXT_DOCUMENT_DEFINITION,
27 | TEXT_DOCUMENT_DID_CHANGE,
28 | TEXT_DOCUMENT_DID_OPEN,
29 | TEXT_DOCUMENT_DID_SAVE,
30 | TEXT_DOCUMENT_FORMATTING,
31 | TEXT_DOCUMENT_HOVER,
32 | CodeAction,
33 | CodeActionKind,
34 | CodeActionOptions,
35 | CodeActionParams,
36 | Command,
37 | CompletionItem,
38 | CompletionItemKind,
39 | CompletionList,
40 | CompletionOptions,
41 | CompletionParams,
42 | CompletionTriggerKind,
43 | Diagnostic,
44 | DiagnosticSeverity,
45 | DocumentFormattingParams,
46 | Hover,
47 | InitializeParams,
48 | InitializeResult,
49 | InitializeResultServerInfoType,
50 | Location,
51 | MarkupContent,
52 | MarkupKind,
53 | Position,
54 | Range,
55 | SaveOptions,
56 | ServerCapabilities,
57 | TextDocumentPositionParams,
58 | TextDocumentSaveRegistrationOptions,
59 | TextDocumentSyncKind,
60 | TextEdit,
61 | WorkspaceEdit,
62 | )
63 | from pygls.protocol import lsp_method
64 | from pygls.server import LanguageServer
65 |
66 | from intc_lsp.src import HoverType, IntcResolve
67 | from intc_lsp.version import __version__
68 |
# module-level logger for the whole language server
logger = logging.getLogger("intc_lsp")


# Register "TextDocumentSaveOptions" in lsprotocol's type map.
# NOTE(review): looks like a compatibility shim so that save-options payloads
# deserialize as TextDocumentSaveRegistrationOptions -- confirm against the
# lsprotocol version in use.
ALL_TYPES_MAP["TextDocumentSaveOptions"] = TextDocumentSaveRegistrationOptions
73 |
74 |
class IntcLanguageServer(LanguageServer):
    """The intc LanguageServer.

    Lazily initializes per-workspace state the first time a document is
    opened: it locates the nearest ``.intc.json``/``.intc.jsonc`` meta
    config, loads it, builds the entry/module file patterns, creates the
    ``IntcResolve`` helper, and imports the user's source packages.
    """

    def __init__(
        self,
        name,
        version,
        text_document_sync_kind=TextDocumentSyncKind.Full,
        max_workers=4,
    ) -> None:
        super().__init__(
            name=name,
            version=version,
            text_document_sync_kind=text_document_sync_kind,
            max_workers=max_workers,
        )
        # Paths of files already initialized by init_new_file (name kept for
        # backward compatibility; "opend" is a historical typo).
        self.did_opend_files = set()
        # Created in init_new_file once a workspace config has been found.
        self.resolve: Optional[IntcResolve] = None
        # Parsed contents of the .intc.json meta config.
        self.options: Dict[str, Any] = {}
        self.modules_pattern = ""
        self.support_file_types = ["json", "yaml", "yml", "jsonc", "hjson", "json5"]
        self.entry_pattern = ""

    def _alternation_pattern(self, root_path, sub_dirs) -> str:
        """Build one '|'-joined regex matching supported config files under
        each of *sub_dirs* relative to *root_path*."""
        suffix = r"[^/]*\." + f"({'|'.join(self.support_file_types)})"
        return "|".join(os.path.join(root_path, sub, suffix) for sub in sub_dirs)

    def update_config_partern(self, root_path):
        """update the pattern of the entry and modules
        Args:
            root_path:
                the root path of the workspace
        Returns:
            None
        """
        if not self.options:
            return
        entry_pattern = self._alternation_pattern(
            root_path, self.options.get("entry", [])
        )
        modules_pattern = self._alternation_pattern(
            root_path, self.options.get("module", [])
        )
        logger.info(
            f"update_config_partern: entry_pattern : {entry_pattern}, modules_pattern : {modules_pattern}"
        )
        # NOTE: when no entry/module dirs are configured this compiles the
        # empty pattern (matches everything) -- preserved original behavior.
        self.entry_pattern = re.compile(entry_pattern)
        self.modules_pattern = re.compile(modules_pattern)

    def init_new_file(self, params):
        """One-time workspace initialization for a newly opened document.

        Args:
            params:
                a didOpen-style params object carrying ``text_document.uri``
        Returns:
            None
        """
        uri = urlparse(params.text_document.uri).path
        if uri in self.did_opend_files:
            return
        logger.info(f"init_new_file: {uri}")
        try:
            root_path = Path(self.workspace.root_path)
        except Exception:
            # no workspace root (single-file mode): anchor at the file's dir
            root_path = Path(uri).parent

        # Walk upward from the file's directory looking for the intc meta
        # config, stopping once we would pass the workspace root.
        intc_setting = ""
        real_root = Path(uri).parent
        while (root_path.parent != real_root) and not intc_setting:
            for meta_config in (".intc.json", ".intc.jsonc"):
                candidate = os.path.join(real_root, meta_config)
                if os.path.isfile(candidate):
                    intc_setting = candidate
                    break
            if not intc_setting:
                real_root = real_root.parent

        if not intc_setting:
            logger.info(f"self: can not found init setting for {uri}")
            return
        try:
            # use a context manager so the handle is closed deterministically
            with open(intc_setting, "r") as setting_file:
                self.options = dict(hjson.load(setting_file))
            self.update_config_partern(real_root)
            logger.info("init: load .intc.json done")
            logger.info(json.dumps(self.options, indent=4))
        except Exception as e:
            logger.error(f"init: load .intc.json error : {e}")
            self.show_message_log("intc_server: load .intc.json error")

        try:
            # `self` IS the singleton server instance; avoid depending on the
            # module-level `intc_server` global here
            self.resolve = IntcResolve(self)
            logger.info("init: init the intc_resolve")
        except Exception as e:
            logger.error(f"init: init intc_resolve error : {e}")
            self.show_message_log("intc_server: init intc_resolve error")

        logger.info("init: before dynamic import")
        logger.info(f"init: ic_repo: {ic_repo}")

        # Silence stdout/stderr while importing user code so arbitrary prints
        # cannot corrupt the LSP stdio channel; restore them in `finally` so a
        # failing import cannot leave the streams redirected.
        null_buffer = io.StringIO()
        original_stdout = sys.stdout
        original_stderr = sys.stderr
        sys.stdout = null_buffer
        sys.stderr = null_buffer
        try:
            try:
                sys.path.append(str(real_root))
            except Exception:
                logger.error(f"init: add root path {real_root} error")
            os.environ["IN_INTC"] = "1"
            for package in self.options.get("src", []):
                try:
                    importlib.import_module(package)
                    logger.info(f"init: import {package}")
                except Exception as e:
                    logger.error(f"init: on import package `{package}` error : {e}")

            try:
                logger.info(load_submodule(str(real_root)))
            except Exception as e:
                logger.info(f"loader sub module error: {e}")
        finally:
            sys.stdout = original_stdout
            sys.stderr = original_stderr
        null_buffer.seek(0)
        output = null_buffer.read()
        logger.info(f"init import output: {output}")
        logger.info(f"init: after dynamic import")
        logger.info(f"init: ic_repo: {ic_repo}")
        logger.info(f"init: init the intc_lsp done")
        self.did_opend_files.add(uri)
208 |
209 |
# Module-level server singleton; the @intc_server.feature handlers below
# register themselves on this instance.
intc_server = IntcLanguageServer("intc-language-server", __version__)
211 |
212 |
@intc_server.feature(INITIALIZED)
def initialize(params: InitializeParams) -> Optional[InitializeResult]:
    """Handle the `initialized` notification sent by the client.

    Args:
        params:
            the init parameters provide by the client
    Returns:
        Optional[InitializeResult]
    """
    logger.info(f"init: paras: {params}")
    intc_server.show_message_log("intc_server: start init")
    root = intc_server.workspace.root_path
    if not root:
        intc_server.show_message_log("init: no root path")
        return None
    logger.info(f"init: root path: {root}")
    return None
229 |
230 |
@intc_server.feature(
    TEXT_DOCUMENT_COMPLETION,
    CompletionOptions(trigger_characters=['"', ":", " ", "@"]),
)
def completions(params: CompletionParams) -> CompletionList:
    """Provide a completion list when one of the trigger characters fires.

    Args:
        params:
            the parameters provide by the client, provide the completion position and the source uri
    Returns:
        a list of completion items
    """
    logger.info(f"completions: paras: {params}")
    # delegate straight to the resolver attached to the server singleton
    return intc_server.resolve.completions(
        params.position, params.text_document.uri, params.context.trigger_character
    )
248 |
249 |
@intc_server.feature(TEXT_DOCUMENT_HOVER)
def hover(params: TextDocumentPositionParams) -> Optional[Hover]:
    """Provide markdown help information for the hovered position.

    Args:
        params:
            the parameters provide by the client, provide the hover position and the source uri
    Returns:
        a Hover with markdown content, or None when resolution failed
    """
    intc_server.show_message_log(f"on hover: {params}")

    logger.info(f"hover: paras: {params}")
    result = intc_server.resolve.hover(params.position, params.text_document.uri)
    # these resolver outcomes mean there is nothing useful to show
    failure_types = {
        HoverType.RESOLVE_ERROR,
        HoverType.UN_COVER_ERROR,
        HoverType.CURSOR_WORD_NOT_FOUND,
    }
    if result["type"] in failure_types:
        return None
    content = MarkupContent(kind=MarkupKind.Markdown, value=result["message"])
    return Hover(contents=content, range=result["range"])
278 |
279 |
@intc_server.feature(TEXT_DOCUMENT_DEFINITION)
def definition(params: TextDocumentPositionParams) -> Optional[List[Location]]:
    """Provide the definition locations for the symbol at the cursor.

    Args:
        params:
            the parameters provide by the client, provide the cursor position and the source uri
    Returns:
        a list of definition locations, or None when nothing was found
    """

    logger.info(f"definition: paras: {params}")
    locations = intc_server.resolve.definition(
        params.position, params.text_document.uri
    )
    # an empty result is reported as None, matching the LSP convention
    return locations or None
298 |
299 |
def diagnostics(params: TextDocumentPositionParams) -> None:
    """Compute and publish diagnostics for the given document.

    Args:
        params:
            the parameters provide by the client, provide the source uri
    Returns:
        None
    """

    logger.info(f"diagnostics: paras: {params}")
    uri = params.text_document.uri
    try:
        display_diagnostics = intc_server.resolve.diagnostics(uri)
    except Exception as e:
        # resolver failures must not crash the handler; just log and bail
        logger.error(f"diagnostics: error : {e}")
        return
    logger.info(f"display: {display_diagnostics}")
    intc_server.publish_diagnostics(uri, display_diagnostics)
319 |
320 |
@intc_server.feature(TEXT_DOCUMENT_DID_OPEN)
def did_open(params):
    """Handle textDocument/didOpen: lazily initialize the workspace for the
    opened file, then publish its diagnostics."""
    logger.info(f"did_open: paras: {params}")
    intc_server.init_new_file(params)
    diagnostics(params)
326 |
327 |
@intc_server.feature(TEXT_DOCUMENT_DID_CHANGE)
def did_change(params):
    """Handle textDocument/didChange: re-publish diagnostics on every edit."""
    logger.info(f"did_change: paras: {params}")
    diagnostics(params)
332 |
333 |
if __name__ == "__main__":
    # run the language server over stdio when executed directly
    intc_server.start_io()
336 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright cstsunfu
2 |
3 | Apache License
4 | Version 2.0, January 2004
5 | http://www.apache.org/licenses/
6 |
7 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
8 |
9 | 1. Definitions.
10 |
11 | "License" shall mean the terms and conditions for use, reproduction,
12 | and distribution as defined by Sections 1 through 9 of this document.
13 |
14 | "Licensor" shall mean the copyright owner or entity authorized by
15 | the copyright owner that is granting the License.
16 |
17 | "Legal Entity" shall mean the union of the acting entity and all
18 | other entities that control, are controlled by, or are under common
19 | control with that entity. For the purposes of this definition,
20 | "control" means (i) the power, direct or indirect, to cause the
21 | direction or management of such entity, whether by contract or
22 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
23 | outstanding shares, or (iii) beneficial ownership of such entity.
24 |
25 | "You" (or "Your") shall mean an individual or Legal Entity
26 | exercising permissions granted by this License.
27 |
28 | "Source" form shall mean the preferred form for making modifications,
29 | including but not limited to software source code, documentation
30 | source, and configuration files.
31 |
32 | "Object" form shall mean any form resulting from mechanical
33 | transformation or translation of a Source form, including but
34 | not limited to compiled object code, generated documentation,
35 | and conversions to other media types.
36 |
37 | "Work" shall mean the work of authorship, whether in Source or
38 | Object form, made available under the License, as indicated by a
39 | copyright notice that is included in or attached to the work
40 | (an example is provided in the Appendix below).
41 |
42 | "Derivative Works" shall mean any work, whether in Source or Object
43 | form, that is based on (or derived from) the Work and for which the
44 | editorial revisions, annotations, elaborations, or other modifications
45 | represent, as a whole, an original work of authorship. For the purposes
46 | of this License, Derivative Works shall not include works that remain
47 | separable from, or merely link (or bind by name) to the interfaces of,
48 | the Work and Derivative Works thereof.
49 |
50 | "Contribution" shall mean any work of authorship, including
51 | the original version of the Work and any modifications or additions
52 | to that Work or Derivative Works thereof, that is intentionally
53 | submitted to Licensor for inclusion in the Work by the copyright owner
54 | or by an individual or Legal Entity authorized to submit on behalf of
55 | the copyright owner. For the purposes of this definition, "submitted"
56 | means any form of electronic, verbal, or written communication sent
57 | to the Licensor or its representatives, including but not limited to
58 | communication on electronic mailing lists, source code control systems,
59 | and issue tracking systems that are managed by, or on behalf of, the
60 | Licensor for the purpose of discussing and improving the Work, but
61 | excluding communication that is conspicuously marked or otherwise
62 | designated in writing by the copyright owner as "Not a Contribution."
63 |
64 | "Contributor" shall mean Licensor and any individual or Legal Entity
65 | on behalf of whom a Contribution has been received by Licensor and
66 | subsequently incorporated within the Work.
67 |
68 | 2. Grant of Copyright License. Subject to the terms and conditions of
69 | this License, each Contributor hereby grants to You a perpetual,
70 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
71 | copyright license to reproduce, prepare Derivative Works of,
72 | publicly display, publicly perform, sublicense, and distribute the
73 | Work and such Derivative Works in Source or Object form.
74 |
75 | 3. Grant of Patent License. Subject to the terms and conditions of
76 | this License, each Contributor hereby grants to You a perpetual,
77 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
78 | (except as stated in this section) patent license to make, have made,
79 | use, offer to sell, sell, import, and otherwise transfer the Work,
80 | where such license applies only to those patent claims licensable
81 | by such Contributor that are necessarily infringed by their
82 | Contribution(s) alone or by combination of their Contribution(s)
83 | with the Work to which such Contribution(s) was submitted. If You
84 | institute patent litigation against any entity (including a
85 | cross-claim or counterclaim in a lawsuit) alleging that the Work
86 | or a Contribution incorporated within the Work constitutes direct
87 | or contributory patent infringement, then any patent licenses
88 | granted to You under this License for that Work shall terminate
89 | as of the date such litigation is filed.
90 |
91 | 4. Redistribution. You may reproduce and distribute copies of the
92 | Work or Derivative Works thereof in any medium, with or without
93 | modifications, and in Source or Object form, provided that You
94 | meet the following conditions:
95 |
96 | (a) You must give any other recipients of the Work or
97 | Derivative Works a copy of this License; and
98 |
99 | (b) You must cause any modified files to carry prominent notices
100 | stating that You changed the files; and
101 |
102 | (c) You must retain, in the Source form of any Derivative Works
103 | that You distribute, all copyright, patent, trademark, and
104 | attribution notices from the Source form of the Work,
105 | excluding those notices that do not pertain to any part of
106 | the Derivative Works; and
107 |
108 | (d) If the Work includes a "NOTICE" text file as part of its
109 | distribution, then any Derivative Works that You distribute must
110 | include a readable copy of the attribution notices contained
111 | within such NOTICE file, excluding those notices that do not
112 | pertain to any part of the Derivative Works, in at least one
113 | of the following places: within a NOTICE text file distributed
114 | as part of the Derivative Works; within the Source form or
115 | documentation, if provided along with the Derivative Works; or,
116 | within a display generated by the Derivative Works, if and
117 | wherever such third-party notices normally appear. The contents
118 | of the NOTICE file are for informational purposes only and
119 | do not modify the License. You may add Your own attribution
120 | notices within Derivative Works that You distribute, alongside
121 | or as an addendum to the NOTICE text from the Work, provided
122 | that such additional attribution notices cannot be construed
123 | as modifying the License.
124 |
125 | You may add Your own copyright statement to Your modifications and
126 | may provide additional or different license terms and conditions
127 | for use, reproduction, or distribution of Your modifications, or
128 | for any such Derivative Works as a whole, provided Your use,
129 | reproduction, and distribution of the Work otherwise complies with
130 | the conditions stated in this License.
131 |
132 | 5. Submission of Contributions. Unless You explicitly state otherwise,
133 | any Contribution intentionally submitted for inclusion in the Work
134 | by You to the Licensor shall be under the terms and conditions of
135 | this License, without any additional terms or conditions.
136 | Notwithstanding the above, nothing herein shall supersede or modify
137 | the terms of any separate license agreement you may have executed
138 | with Licensor regarding such Contributions.
139 |
140 | 6. Trademarks. This License does not grant permission to use the trade
141 | names, trademarks, service marks, or product names of the Licensor,
142 | except as required for reasonable and customary use in describing the
143 | origin of the Work and reproducing the content of the NOTICE file.
144 |
145 | 7. Disclaimer of Warranty. Unless required by applicable law or
146 | agreed to in writing, Licensor provides the Work (and each
147 | Contributor provides its Contributions) on an "AS IS" BASIS,
148 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
149 | implied, including, without limitation, any warranties or conditions
150 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
151 | PARTICULAR PURPOSE. You are solely responsible for determining the
152 | appropriateness of using or redistributing the Work and assume any
153 | risks associated with Your exercise of permissions under this License.
154 |
155 | 8. Limitation of Liability. In no event and under no legal theory,
156 | whether in tort (including negligence), contract, or otherwise,
157 | unless required by applicable law (such as deliberate and grossly
158 | negligent acts) or agreed to in writing, shall any Contributor be
159 | liable to You for damages, including any direct, indirect, special,
160 | incidental, or consequential damages of any character arising as a
161 | result of this License or out of the use or inability to use the
162 | Work (including but not limited to damages for loss of goodwill,
163 | work stoppage, computer failure or malfunction, or any and all
164 | other commercial damages or losses), even if such Contributor
165 | has been advised of the possibility of such damages.
166 |
167 | 9. Accepting Warranty or Additional Liability. While redistributing
168 | the Work or Derivative Works thereof, You may choose to offer,
169 | and charge a fee for, acceptance of support, warranty, indemnity,
170 | or other liability obligations and/or rights consistent with this
171 | License. However, in accepting such obligations, You may act only
172 | on Your own behalf and on Your sole responsibility, not on behalf
173 | of any other Contributor, and only if You agree to indemnify,
174 | defend, and hold each Contributor harmless for any liability
175 | incurred by, or claims asserted against, such Contributor by reason
176 | of your accepting any such warranty or additional liability.
177 |
178 | END OF TERMS AND CONDITIONS
179 |
180 | APPENDIX: How to apply the Apache License to your work.
181 |
182 | To apply the Apache License to your work, attach the following
183 | boilerplate notice, with the fields enclosed by brackets "[]"
184 | replaced with your own identifying information. (Don't include
185 | the brackets!) The text should be enclosed in the appropriate
186 | comment syntax for the file format. We also recommend that a
187 | file or class name and description of purpose be included on the
188 | same "printed page" as the copyright notice for easier
189 | identification within third-party archives.
190 |
191 | Copyright [yyyy] [name of copyright owner]
192 |
193 | Licensed under the Apache License, Version 2.0 (the "License");
194 | you may not use this file except in compliance with the License.
195 | You may obtain a copy of the License at
196 |
197 | http://www.apache.org/licenses/LICENSE-2.0
198 |
199 | Unless required by applicable law or agreed to in writing, software
200 | distributed under the License is distributed on an "AS IS" BASIS,
201 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
202 | See the License for the specific language governing permissions and
203 | limitations under the License.
204 |
--------------------------------------------------------------------------------
/plugins/vscode/src/extension.ts:
--------------------------------------------------------------------------------
1 | /* -------------------------------------------------------------------------
2 | * Copyright the author(s) of intc.
3 | *
4 | * This source code is licensed under the Apache license found in the
5 | * LICENSE file in the root directory of this source tree.
6 | *
7 | * There are some code copy from the pygls,
8 | * The code copied Copyright (c) Microsoft Corporation and Open Law Library.
9 | * ----------------------------------------------------------------------- */
10 | "use strict";
11 |
12 | import * as net from "net";
13 | import * as path from "path";
14 | import * as vscode from "vscode";
15 | import * as semver from "semver";
16 | import { workspace, commands, languages, window } from "vscode"
17 |
18 | import { PythonExtension } from "@vscode/python-extension";
19 | import { LanguageClient, LanguageClientOptions, ServerOptions, State, integer } from "vscode-languageclient/node";
20 |
21 | const MIN_PYTHON = semver.parse("3.8.0")
22 |
23 | let client: LanguageClient;
24 | let clientStarting = false
25 | let python: PythonExtension;
26 | let logger: vscode.LogOutputChannel
27 | // opend document set
28 | let openDocuments = new Set()
29 |
/**
 * This is the main entry point.
 * Called when vscode first activates the extension
 */
export async function activate(context: vscode.ExtensionContext) {
    logger = vscode.window.createOutputChannel('intclsp', { log: true })
    logger.info("Extension activated.")

    // Resolve the official Python extension; without it the server cannot
    // be launched, so bail out early.
    await getPythonExtension();
    if (!python) {
        return
    }

    // Restart language server command
    context.subscriptions.push(
        vscode.commands.registerCommand("intclsp.server.restart", async () => {
            logger.info('restarting server...')
            await startLangServer(null)
        })
    )

    // Execute command... command
    context.subscriptions.push(
        vscode.commands.registerCommand("intclsp.server.executeCommand", async () => {
            await executeServerCommand()
        })
    )

    // Restart the language server if the user switches Python envs...
    context.subscriptions.push(
        python.environments.onDidChangeActiveEnvironmentPath(async () => {
            logger.info('python env modified, restarting server...')
            // only restart if the server is already running
            if (client) {
                await startLangServer(null)
            }
        })
    )

    // ... or if they change a relevant config option
    context.subscriptions.push(
        vscode.workspace.onDidChangeConfiguration(async (event) => {
            if (event.affectsConfiguration("intclsp.server") || event.affectsConfiguration("intclsp.client")) {
                if (client) {
                    logger.info('config modified, restarting server...')
                    await startLangServer(null)
                }
            }
        })
    )
    // remove the document from the openDocuments when the document is closed
    context.subscriptions.push(
        vscode.workspace.onDidCloseTextDocument(
            async (event) => {
                if (event.languageId === "json" || event.languageId === "jsonc") {
                    if (openDocuments.has(event.fileName)) {
                        openDocuments.delete(event.fileName)
                    }
                }
            }
        )
    )

    // Start (or restart) the language server once the user opens a json/jsonc
    // document that belongs to an intc workspace.
    context.subscriptions.push(
        vscode.workspace.onDidOpenTextDocument(
            async (event: vscode.TextDocument) => {
                await onOpenOrActiveDocument(event)
            }
        )
    )
}
102 |
103 |
104 | /**
105 | * This function is called when the extension is activated.
106 | * It will be called the first time a command is executed.
107 | * @param event The event that triggered this function.
108 | * @returns
109 | */
110 | async function onOpenOrActiveDocument(event: vscode.TextDocument) {
111 | if (event.languageId === "json" || event.languageId === "jsonc") {
112 | // if client is running and the file is in the openDocuments
113 | if (client && openDocuments.has(event.fileName)) {
114 | return
115 | }
116 | const rootPath = vscode.workspace.workspaceFolders[0].uri.fsPath
117 | let realRoot = path.dirname(event.fileName)
118 | // if the filePath is not in the workspace root
119 | if (!realRoot.startsWith(rootPath)) {
120 | return
121 | }
122 | if (path.dirname(rootPath) === rootPath) {
123 | return
124 | }
125 | while (realRoot !== path.dirname(rootPath)) {
126 | let intcConfig = path.join(realRoot, ".intc.json")
127 | let intcConfigc = path.join(realRoot, ".intc.jsonc")
128 | if (await workspace.fs.stat(vscode.Uri.file(intcConfig)).then(() => true, () => false)) {
129 | await startLangServer(event.fileName)
130 | return
131 | }
132 | if (await workspace.fs.stat(vscode.Uri.file(intcConfigc)).then(() => true, () => false)) {
133 | await startLangServer(event.fileName)
134 | return
135 | }
136 | realRoot = path.dirname(realRoot)
137 | }
138 | }
139 | return
140 | }
141 |
142 |
143 | export function deactivate(): Thenable {
144 | return stopLangServer()
145 | }
146 | /**
147 | * Start (or restart) the language server.
148 | *
149 | * @param command The executable to run
150 | * @param args Arguments to pass to the executable
151 | * @param cwd The working directory in which to run the executable
152 | * @returns
153 | */
154 | async function startLangServer(filePath: string | null) {
155 |
156 | // Don't interfere if we are already in the process of launching the server.
157 | if (clientStarting) {
158 | return
159 | }
160 |
161 | clientStarting = true
162 | if (client) {
163 | await stopLangServer()
164 | }
165 | const config = vscode.workspace.getConfiguration("intclsp.server")
166 |
167 | const cwd = getCwd()
168 |
169 | const resource = vscode.Uri.joinPath(vscode.Uri.file(cwd))
170 | const pythonCommand = await getPythonCommand(resource)
171 | if (!pythonCommand) {
172 | clientStarting = false
173 | return
174 | }
175 |
176 | logger.debug(`python: ${pythonCommand.join(" ")}`)
177 | const serverOptions: ServerOptions = {
178 | command: pythonCommand[0],
179 | args: [...pythonCommand.slice(1), "-m", "intc_lsp.cli"],
180 | //options: { cwd: "/home/sun/workspace/intc/intc/examples/exp1" },
181 | };
182 |
183 | client = new LanguageClient('intclsp', serverOptions, getClientOptions());
184 | const promises = [client.start()]
185 |
186 | if (config.get("debug")) {
187 | promises.push(startDebugging())
188 | }
189 |
190 | const results = await Promise.allSettled(promises)
191 | clientStarting = false
192 | let success = true
193 |
194 | for (const result of results) {
195 | if (result.status === "rejected") {
196 | logger.error(`There was a error starting the server: ${result.reason}`)
197 | success = false
198 | }
199 | }
200 | if (success && filePath) {
201 | openDocuments.add(filePath)
202 | }
203 | }
204 |
205 | async function stopLangServer(): Promise {
206 | if (!client) {
207 | return
208 | }
209 |
210 | if (client.state === State.Running) {
211 | await client.stop()
212 | }
213 |
214 | client.dispose()
215 | client = undefined
216 | openDocuments.clear()
217 | }
218 |
219 | function startDebugging(): Promise {
220 | if (!vscode.workspace.workspaceFolders) {
221 | logger.error("Unable to start debugging, there is no workspace.")
222 | return Promise.reject("Unable to start debugging, there is no workspace.")
223 | }
224 | // TODO: Is there a more reliable way to ensure the debug adapter is ready?
225 | setTimeout(async () => {
226 | await vscode.debug.startDebugging(vscode.workspace.workspaceFolders[0], "intclsp: Debug Server")
227 | }, 2000)
228 | }
229 |
230 | function getClientOptions(): LanguageClientOptions {
231 | const options = {
232 | documentSelector: [{
233 | "scheme": "file",
234 | "language": "jsonc"
235 | }, {
236 | "scheme": "file",
237 | "language": "json"
238 | }],
239 | outputChannel: logger,
240 | connectionOptions: {
241 | maxRestartCount: 0 // don't restart on server failure.
242 | },
243 | };
244 | logger.info(`client options: ${JSON.stringify(options, undefined, 2)}`)
245 | return options
246 | }
247 |
// Connect to an already-running language server over TCP (development helper).
// NOTE(review): `new Promise(...)` below carries no explicit type argument --
// presumably it should be Promise<StreamInfo> from vscode-languageclient;
// confirm this type-checks with the project's tsconfig.
function startLangServerTCP(addr: number): LanguageClient {
    const serverOptions: ServerOptions = () => {
        return new Promise((resolve /*, reject */) => {
            const clientSocket = new net.Socket();
            clientSocket.connect(addr, "127.0.0.1", () => {
                // hand the socket to the client as both reader and writer
                resolve({
                    reader: clientSocket,
                    writer: clientSocket,
                });
            });
        });
    };


    return new LanguageClient(
        `tcp lang server (port ${addr})`,
        serverOptions,
        getClientOptions()
    );
}
268 |
269 | /**
270 | * Execute a command provided by the language server.
271 | */
272 | async function executeServerCommand() {
273 | if (!client || client.state !== State.Running) {
274 | await vscode.window.showErrorMessage("There is no language server running.")
275 | return
276 | }
277 |
278 | const knownCommands = client.initializeResult.capabilities.executeCommandProvider?.commands
279 | if (!knownCommands || knownCommands.length === 0) {
280 | const info = client.initializeResult.serverInfo
281 | const name = info?.name || "Server"
282 | const version = info?.version || ""
283 |
284 | await vscode.window.showInformationMessage(`${name} ${version} does not implement any commands.`)
285 | return
286 | }
287 |
288 | const commandName = await vscode.window.showQuickPick(knownCommands, { canPickMany: false })
289 | if (!commandName) {
290 | return
291 | }
292 | logger.info(`executing command: '${commandName}'`)
293 |
294 | const result = await vscode.commands.executeCommand(commandName /* if your command accepts arguments you can pass them here */)
295 | logger.info(`${commandName} result: ${JSON.stringify(result, undefined, 2)}`)
296 | }
297 |
298 | /**
299 | * If the user has explicitly provided a src directory use that.
300 | * Otherwise, fallback to the examples/servers directory.
301 | *
302 | * @returns The working directory from which to launch the server
303 | */
304 | function getCwd(): string {
305 | const config = vscode.workspace.getConfiguration("intclsp.server")
306 | const cwd = config.get('cwd')
307 | if (cwd) {
308 | return cwd
309 | }
310 |
311 | return "."
312 | }
313 |
314 | /**
315 | * Return the python command to use when starting the server.
316 | *
317 | * If debugging is enabled, this will also included the arguments to required
318 | * to wrap the server in a debug adapter.
319 | *
320 | * @returns The full python command needed in order to start the server.
321 | */
322 | async function getPythonCommand(resource?: vscode.Uri): Promise {
323 | const config = vscode.workspace.getConfiguration("intclsp.server", resource)
324 | const pythonPath = await getPythonInterpreter(resource)
325 | if (!pythonPath) {
326 | return
327 | }
328 | const command = [pythonPath]
329 |
330 | return command
331 | }
332 |
333 | /**
334 | * Return the python interpreter to use when starting the server.
335 | *
336 | * This uses the official python extension to grab the user's currently
337 | * configured environment.
338 | *
339 | * @returns The python interpreter to use to launch the server
340 | */
341 | async function getPythonInterpreter(resource?: vscode.Uri): Promise {
342 | const config = vscode.workspace.getConfiguration("intclsp.server", resource)
343 | const pythonPath = config.get('pythonPath')
344 | if (pythonPath) {
345 | logger.info(`Using user configured python environment: '${pythonPath}'`)
346 | return pythonPath
347 | }
348 |
349 | if (!python) {
350 | return
351 | }
352 |
353 | if (resource) {
354 | logger.info(`Looking for environment in which to execute: '${resource.toString()}'`)
355 | }
356 | // Use whichever python interpreter the user has configured.
357 | const activeEnvPath = python.environments.getActiveEnvironmentPath(resource)
358 | logger.info(`Found environment: ${activeEnvPath.id}: ${activeEnvPath.path}`)
359 |
360 | const activeEnv = await python.environments.resolveEnvironment(activeEnvPath)
361 | if (!activeEnv) {
362 | logger.error(`Unable to resolve envrionment: ${activeEnvPath}`)
363 | return
364 | }
365 |
366 | const v = activeEnv.version
367 | const pythonVersion = semver.parse(`${v.major}.${v.minor}.${v.micro}`)
368 |
369 | // Check to see if the environment satisfies the min Python version.
370 | if (semver.lt(pythonVersion, MIN_PYTHON)) {
371 | const message = [
372 | `Your currently configured environment provides Python v${pythonVersion} `,
373 | `but intclsp requires v${MIN_PYTHON}.\n\nPlease choose another environment.`
374 | ].join('')
375 |
376 | const response = await vscode.window.showErrorMessage(message, "Change Environment")
377 | if (!response) {
378 | return
379 | } else {
380 | await vscode.commands.executeCommand('python.setInterpreter')
381 | return
382 | }
383 | }
384 |
385 | const pythonUri = activeEnv.executable.uri
386 | if (!pythonUri) {
387 | logger.error(`URI of Python executable is undefined!`)
388 | return
389 | }
390 |
391 | return pythonUri.fsPath
392 | }
393 |
394 | async function getPythonExtension() {
395 | try {
396 | python = await PythonExtension.api();
397 | } catch (err) {
398 | logger.error(`Unable to load python extension: ${err}`)
399 | }
400 | }
401 |
--------------------------------------------------------------------------------
/intc/intc/utils.py:
--------------------------------------------------------------------------------
1 | # Copyright the author(s) of intc.
2 | #
3 | # This source code is licensed under the Apache license found in the
4 | # LICENSE file in the root directory of this source tree.
5 |
6 | import copy
7 | import inspect
8 | import re
9 | from typing import Any, Callable, Dict, List, Tuple, Type, Union
10 |
11 | from intc.exceptions import KeyNotFoundError, NameError, ValueMissingError
12 |
# Valid module names: an identifier, optionally followed by a single '-' and an identifier tail.
_module_name_pattern = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*-?[a-zA-Z0-9_]*$")
14 |
15 |
def get_meta_rep(metadata):
    """Build a one-line human-readable representation of a field's metadata.

    Every key except ``description`` is rendered as ``key: value. ``; the
    ``description`` value (if present and non-empty) is appended last as
    ``help: <description>``.

    Args:
        metadata: mapping of metadata keys to values.

    Returns:
        The concatenated representation string.
    """
    rep = ""
    # Renamed from `help`, which shadowed the builtin of the same name.
    description = ""
    for key, value in metadata.items():
        if key == "description":
            description = value  # deferred: help text always goes last
        else:
            rep += f"{key}: {value}. "
    if description:
        rep += f"help: {description}"

    return rep
28 |
29 |
def split_trace(trace_str: str) -> List[str]:
    """Split a dotted trace path into tokens, keeping special '.xxx' tokens.

    A ``.`` only terminates a token when the token buffer is non-empty, so a
    doubled dot produces a token that itself starts with a dot:
    ``a.b.c`` -> ``['a', 'b', 'c']``
    ``a.b.c..base`` -> ``['a', 'b', 'c', '.base']``

    Args:
        trace_str: the trace path like ``a.b.c``

    Returns:
        trace list
    """
    tokens: List[str] = []
    buffer = ""
    for ch in trace_str:
        if ch != "." or not buffer:
            # accumulate: either an ordinary char, or a dot opening a
            # special token such as '.base'
            buffer += ch
        else:
            tokens.append(buffer)
            buffer = ""
    if buffer:
        tokens.append(buffer)
    return tokens
54 |
55 |
def get_position():
    """Return the source location of the caller's caller.

    Walks two frames up from this function (``f_back.f_back``), so it is
    intended to be called from a helper that is itself invoked at the
    parameter-definition site.

    Returns:
        dict with ``file_path`` (absolute path) and ``line_no`` (int)
    """
    target_frame = inspect.currentframe().f_back.f_back
    return {
        "file_path": inspect.getabsfile(target_frame),
        "line_no": inspect.getlineno(target_frame),
    }
65 |
66 |
def module_name_check(name):
    """Validate ``name`` as a module name.

    A valid name is an identifier optionally followed by a single ``-`` and
    an identifier tail (same pattern as the module-level
    ``_module_name_pattern``, inlined here).

    Args:
        name: module name

    Returns:
        None

    Raises:
        NameError: if the name is invalid
    """
    valid = re.compile(r"^[a-zA-Z_][a-zA-Z0-9_]*-?[a-zA-Z0-9_]*$")
    if valid.match(name) is None:
        raise NameError(f"Module name '{name}' is invalid")
80 |
81 |
82 | def _inplace_update_dict(_base: Dict, _new: Dict):
83 | """use the _new dict inplace update the _base dict, recursively
84 |
85 | if the _base['_name'] != _new["_name"], we will use _new cover the _base and logger a warning
86 | otherwise, use _new update the _base recursively
87 |
88 | Args:
89 | _base: will be updated dict
90 | _new: use _new update _base
91 |
92 | Returns:
93 | None
94 |
95 | """
96 | for item in _new:
97 | if (item not in _base) or (not isinstance(_base[item], Dict)):
98 | _base[item] = _new[item]
99 | elif isinstance(_base[item], Dict) and isinstance(_new[item], Dict):
100 | _inplace_update_dict(_base[item], _new[item])
101 | else:
102 | raise AttributeError(
103 | "The base config and update config is not match. base: {}, new: {}. ".format(
104 | _base, _new
105 | )
106 | )
107 |
108 |
def do_update_config(config: dict, update_config: dict) -> Dict:
    """Return a deep copy of ``config`` recursively updated with ``update_config``.

    See ``_inplace_update_dict`` for the merge semantics; the input
    ``config`` is never mutated.

    Args:
        config: will be updated dict
        update_config: use this dict to update ``config``

    Returns:
        updated_config
    """
    updated = copy.deepcopy(config)
    # Treat a falsy update (None, {}) as "no changes".
    _inplace_update_dict(updated, update_config or {})
    return updated
127 |
128 |
def fix_trace(trace: str, root_config: Dict):
    """Resolve ``trace`` against ``root_config`` and return the normalized path.

    Walks the config following each token of ``trace``; list levels expect an
    integer index, dict levels resolve possibly-abbreviated module names via
    ``UniModuleName``. The returned path uses the fully-expanded tokens.

    Raises:
        KeyNotFoundError: when a dict key is ambiguous (propagated as-is)
        KeyError: when the trace cannot be resolved for any other reason
    """
    node = root_config
    resolved = []
    try:
        for step in split_trace(trace):
            if isinstance(node, list):
                # list levels take an (optionally negative) integer index
                is_index = str.isdigit(step) or (step[0] == "-" and str.isdigit(step[1:]))
                assert is_index, "list index must be int"
                step = int(step)
            else:
                assert isinstance(node, dict), f"trace {trace} is invalid"
                # expand abbreviated module names to their full key
                step = UniModuleName(node.keys())[step]
            node = node[step]
            resolved.append(str(step))
    except KeyNotFoundError as e:
        raise e
    except Exception as e:
        raise KeyError(f"Can not find the link trace '{trace}' in config, {e}")
    return ".".join(resolved)
152 |
153 |
class UniModuleName(object):
    """Resolve full or unambiguously-abbreviated submodule names.

    Builds a map from every abbreviation that identifies exactly one of the
    given keys to that key, so lookups like ``@li`` can stand in for
    ``@children@li`` when no other key shares that prefix/suffix form.
    """

    def __init__(self, keys):
        super(UniModuleName, self).__init__()
        # Keep the original key list for error reporting in __getitem__.
        self.origin_keys = list(keys)
        # abbreviation (or full key) -> full key, ambiguous entries removed
        self.keys = self.update_uni_keys(keys)

    def update_uni_keys(self, keys: List[str]) -> Dict[str, str]:
        """add the clear key which could represent the origin key to the uni_keys
        case 1: the original keys are {"@A#a", "@B#a", "@B#c", "@CD#a"}, we can use "@A" represent "@A#a", "#c" represent "@B#c" and "@CD" represent "@CD#a" without ambiguous, but "#a" is ambiguous for "@A#a" and "@CD#a", "@B" is ambiguous for "@B#a" and "@B#c", so we can not use them to represent the origin key. The final update keys are {"@A#a", "@B#a", "@B#c", "@CD#a", "@A", "#c", "@CD"}.
        case 2: the original keys are {"@A@B#c"}, we can use "@A", "@A@B", "#c" represent the "@A@B#c", we can also omit the "A" or "B", only use the "@@B", "@@B#c", "@A@#c", "@@#c" to represent the "@A@B#c", so the final update keys are {"@A@B#c", "@A", "@B", "@A@B", "@@B", "#c", "@A@#c", "@@#c"}.

        Args:
            keys: origin keys

        Returns:
            update keys those are no conflict
        """
        uni_key_map = {}  # candidate abbreviation -> full key
        extend_key_count = {}  # candidate abbreviation -> how many keys generated it
        reserved_keys = set(keys)  # full keys always win over abbreviations
        for key in keys:
            uni_key_map[key] = key
        for key in keys:
            # Only module-style keys (starting with '@') get abbreviations.
            if key.startswith("@"):
                # `prefixes` accumulates every abbreviation of the part of the
                # key processed so far; each '@'/'#'-delimited segment may be
                # kept whole or collapsed to just its marker character.
                prefixes = {""}
                cur = ""  # the segment currently being scanned, incl. its marker
                for c in key:
                    if c in {"@", "#"}:
                        if cur:
                            assert cur[0] in {"@", "#"}
                            update = set()
                            for prefix in prefixes:
                                # keep the full segment, or collapse it to its marker
                                update.add(f"{prefix}{cur}")
                                update.add(f"{prefix}{cur[0]}")
                            prefixes.update(update)
                        cur = c
                    else:
                        cur = f"{cur}{c}"
                if cur:
                    # flush the trailing segment
                    update = set()
                    for prefix in prefixes:
                        update.add(f"{prefix}{cur}")
                        update.add(f"{prefix}{cur[0]}")
                    prefixes.update(update)
                for prefix in prefixes:
                    if prefix in reserved_keys:
                        # never let an abbreviation shadow a real key
                        continue
                    cset = set(prefix)
                    cset.discard("@")
                    cset.discard("#")
                    if not cset:
                        # only marker characters, no real content
                        continue
                    if prefix[-1] in {"@", "#"}:
                        # dangling marker is not a usable abbreviation
                        continue
                    extend_key_count[prefix] = extend_key_count.get(prefix, 0) + 1
                    if extend_key_count[prefix] > 1:
                        # generated by more than one key -> ambiguous, drop it
                        uni_key_map.pop(prefix, "")
                    else:
                        uni_key_map[prefix] = key

        return uni_key_map

    def __getitem__(self, key):
        # Full keys and unambiguous abbreviations resolve to the full key;
        # anything else (unknown or ambiguous) raises.
        if key in self.keys:
            return self.keys[key]
        raise KeyNotFoundError(f"Key {key} is ambiguous in {self.origin_keys}")
222 |
223 |
def search_lambda_eval(search_para: Any) -> Any:
    """Recursively evaluate ``@lambda`` strings inside a search config.

    Strings of the form ``"@lambda _: <expr>"`` are evaluated (only the
    ``_`` parameter form is supported); dicts and lists are processed
    element-wise; everything else is returned unchanged.

    Args:
        search_para:
            the value of search_para or the whole(dict) config
    Returns:
        processed search_para
    """
    if isinstance(search_para, str):
        if not search_para.startswith("@lambda"):
            return search_para
        # Everything after the first ':' is the expression body; the
        # placeholder argument 0 is ignored by the '_' parameter.
        body = ":".join(search_para.split(":")[1:])
        return eval(f"lambda _:{body}")(0)
    if isinstance(search_para, dict):
        return {key: search_lambda_eval(val) for key, val in search_para.items()}
    if isinstance(search_para, list):
        return [search_lambda_eval(item) for item in search_para]
    return search_para
245 |
246 |
def parser_lambda_key_value_pair(
    key: str, lambda_value: str, ref_anchor_maps: Dict, root_config: Dict
) -> Dict:
    """parser the lambda representation to key, paras, lambda
    Args:
        key:
            the key of a dict, just a string
        lambda_value: like
            1. "value" for the vanilla value
            2. "@lambda _: list(range(4))" for the eval value
            3. "@anchor1.key1.value1, @anchor2.key2.value2 @lambda x, y: x+y" for the lambda value
            4. "@lambda @anchor.value" for just reference

        ref_anchor_maps:
            the anchor maps like
            {
                "global": "root.global",
                "$": "root.module.module",
                "module_root": "root.module",
            }
        root_config: the whole config, used to resolve reference traces
    Returns:
        the parser result(Dict) with "key"/"paras"/"lambda" entries, or an
        empty dict when ``lambda_value`` is not a lambda expression
    """

    def anchor_parser(para):
        # "_" is the placeholder parameter of a pure-eval lambda.
        if para.strip() == "_":
            return "_"
        assert para.startswith(
            "@"
        ), f"the reference {para} is invalid, the reference must be like '@anchor.key.value'"
        para_split = split_trace(para[1:])

        assert (
            len(para_split) >= 2
        ), f"the reference {para} is invalid, the reference must be like '@anchor.key.value'"
        # Expand the anchor alias (e.g. "$") to its full root trace, then
        # normalize the whole trace against the config.
        para_split[0] = ref_anchor_maps[para_split[0]]
        return fix_trace(".".join(para_split), root_config)

    paras = []
    lambda_fun = "lambda x: x"
    lambda_value = lambda_value.strip()
    if lambda_value.startswith("@lambda"):
        # BUGFIX: the previous code used lambda_value.lstrip("@lambda"),
        # but str.lstrip strips a *set of characters* ('@', 'l', 'a', 'm',
        # 'b', 'd'), not a prefix — e.g. "@lambda@abc.d" was mangled to
        # ".d". Slice the literal prefix off instead.
        lambda_value = lambda_value[len("@lambda"):].strip()
        assert (
            len(lambda_value.split(":")) == 2
            and (lambda_value.split(":")[0].strip() == "_")
        ) or (
            ":" not in lambda_value and "," not in lambda_value
        ), f"the lambda value 'lambda {lambda_value}' is invalid, only support 'lambda _: evaluable_expr' for evaluation value or 'lambda @anchor.key.value' for reference value."
        if lambda_value.split(":")[0].strip() == "_":
            # Pure evaluation lambda: keep the "_" placeholder parameter.
            return {
                "key": key,
                "paras": ["_"],
                "lambda": f"lambda {lambda_value}",
            }
        else:
            # Bare reference: identity lambda over the referenced value.
            return {
                "key": key,
                "paras": [anchor_parser(lambda_value.strip())],
                "lambda": "lambda x: x",
            }
    elif "@lambda" in lambda_value:
        # References followed by a lambda expression over them.
        value_splits = lambda_value.split("@lambda")
        assert (
            len(value_splits) == 2
        ), f"the lambda value {lambda_value} is invalid, the lambda value must be like '@anchor.value @lambda x: x+1'"
        _paras, lambda_fun = value_splits
        for para in _paras.split(","):
            paras.append(anchor_parser(para.strip()))
        lambda_fun = f"lambda {lambda_fun.strip()}"
        return {
            "key": key,
            "paras": paras,
            "lambda": lambda_fun,
        }
    else:
        # Plain value: nothing to parse.
        return {}
324 |
325 |
class TrieNode:
    """A single node of the trace trie (see ``TryTrie``)."""

    def __init__(self):
        # child token -> TrieNode
        self.children = dict()
        # True when an inserted trace terminates at this node
        self.is_end_of_word = False
330 |
331 |
class TryTrie:
    """A trie over trace tokens, used to fuzzily resolve abbreviated traces."""

    def __init__(self):
        # Root node; its children are the first tokens of inserted traces.
        self.root = TrieNode()

    def insert(self, traces: list):
        """Insert one trace (a list of tokens) into the trie.

        Args:
            traces: the token list of a full trace path
        """
        current_node = self.root
        for token in traces:
            if token not in current_node.children:
                current_node.children[token] = TrieNode()
            current_node = current_node.children[token]
        current_node.is_end_of_word = True

    def try_fix_traces(self, traces, fixed_traces, node):
        """try search the traces in the trie

        Recursively resolves ``traces`` (whose tokens may be abbreviated or
        may skip trie levels entirely) against the subtree rooted at
        ``node``. Returns the fully expanded token list when exactly one
        resolution exists, ``None`` when none does, and raises ``ValueError``
        when several candidate children lead to distinct resolutions.

        Args:
            traces: remaining (possibly abbreviated) tokens to resolve
            fixed_traces: the already-resolved full tokens
            node: the trie node the resolution has reached so far

        Returns:
            the resolved token list, or None if no match was found

        Raises:
            ValueError: if more than one resolution is possible
        """
        # All tokens consumed: the accumulated path is a resolution.
        if not traces:
            return fixed_traces
        # Tokens remain but the subtree is exhausted: dead end.
        if traces and not node:
            return None
        token = traces[0]
        traces = traces[1:]
        all_possible_traces = []
        if token in node.children:
            # Exact child match; note this branch is taken INSTEAD of the
            # fuzzy matching below (an exact token is never also expanded).
            one_possible_traces = self.try_fix_traces(
                traces, fixed_traces + [token], node.children[token]
            )
            if one_possible_traces:
                # all_possible_traces is still empty at this point (only one
                # candidate is tried in this branch), so this check cannot
                # fire here; it mirrors the structure of the branches below.
                if all_possible_traces:
                    raise ValueError(
                        f"more than one possible traces: \none:\n{str(all_possible_traces[0])}\nother:\n{str(one_possible_traces)}"
                    )
                all_possible_traces.append(one_possible_traces)
        elif token.startswith("@") or token.startswith("#"):
            # Module-style token: try every child whose name the token is a
            # strict prefix or suffix of (i.e. an abbreviation).
            for child_token in node.children:
                if len(child_token) > len(token) and (
                    child_token.startswith(token) or child_token.endswith(token)
                ):
                    one_possible_traces = self.try_fix_traces(
                        traces, fixed_traces + [child_token], node.children[child_token]
                    )
                    if one_possible_traces:
                        # A second successful candidate means the input is ambiguous.
                        if all_possible_traces:
                            raise ValueError(
                                f"more than one possible traces: \none:\n{str(all_possible_traces[0])}\nother:\n{str(one_possible_traces)}"
                            )
                        all_possible_traces.append(one_possible_traces)
        else:
            # skip current tree
            # Plain token with no match at this level: assume the input trace
            # omitted a level, descend into every child keeping the token.
            for child_token in node.children:
                one_possible_traces = self.try_fix_traces(
                    [token] + traces,
                    fixed_traces + [child_token],
                    node.children[child_token],
                )
                if one_possible_traces:
                    if all_possible_traces:
                        raise ValueError(
                            f"more than one possible traces: \none:\n{str(all_possible_traces[0])}\nother:\n{str(one_possible_traces)}"
                        )
                    all_possible_traces.append(one_possible_traces)
        if all_possible_traces:
            return all_possible_traces[0]
        return None
394 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
Python Intelligence Config Manager
3 |
4 |
5 |
6 | A Python Config Manager for Humans.
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 | 简体中文 |
18 | English
19 |
20 |
21 |
22 | 整个项目包含三个部分:
23 |
24 | * `intc`: intc的核心功能部分, config manager和dataclass的核心,项目实际运行中只会依赖这一部分
25 | * `intc-lsp`: 方便人类使用的lsp的server端,在你编辑或阅读由`intc`进行解析的`json`文件时,分析相关python代码和相关的config文件并提供语义补全、代码跳转、参数文档、纠错等功能
26 | * `plugins`: 适配各类编辑器、IDE与`intc-lsp`进行交互的Client端
27 |
28 | ```
29 | ├── intc -- intc package
30 | │ ├── clean.py --
31 | │ ├── examples -- intc examples
32 | │ │ └── exp** --
33 | │ ├── intc -- core or intc
34 | │ │ ├── config.py --
35 | │ │ ├── exceptions.py --
36 | │ │ ├── loader.py --
37 | │ │ ├── parser.py --
38 | │ │ ├── register.py --
39 | │ │ ├── share.py --
40 | │ │ ├── utils.py --
41 | │ │ └── version.txt --
42 | │ └── setup.py --
43 | ├── lsp -- lsp package
44 | │ ├── intc_lsp --
45 | │ │ ├── cli.py -- entry of lsp
46 | │ │ ├── csrc -- c based parser
47 | │ │ │ ├── json --
48 | │ │ │ │ └── src --
49 | │ │ │ │ └── **.cc --
50 | │ │ │ └── yaml --
51 | │ │ │ └── src --
52 | │ │ │ └── **.cc --
53 | │ │ ├── __init__.py --
54 | │ │ ├── server.py -- the server api
55 | │ │ ├── src -- core of lsp
56 | │ │ │ ├── edit.py --
57 | │ │ │ ├── __init__.py --
58 | │ │ │ ├── parser_json.py --
59 | │ │ │ ├── parser_yaml.py --
60 | │ │ │ ├── resolve.py --
61 | │ │ │ └── trace.py --
62 | │ │ └── version.txt --
63 | │ └── setup.py --
64 | ├── pics --
65 | │ └── ... --
66 | ├── plugins --
67 | │ ├── neovim -- neovim extension for lsp
68 | │ │ ├── lsp.lua --
69 | │ │ └── readme.md --
70 | │ └── vscode -- vscode extension for lsp
71 | │ ├── package.json --
72 | │ ├── package-lock.json --
73 | │ ├── pics --
74 | │ │ └── icon.png --
75 | │ ├── README.md --
76 | │ ├── src --
77 | │ │ └── extension.ts --
78 | │ └── tsconfig.json --
79 | └── README.md --
80 | ```
81 |
82 |
83 | * [安装](#安装)
84 | * [Feature List](#feature-list)
85 | * [Intc Use Case](#intc-use-case)
86 | * [参数引用及lambda语法](#参数引用及lambda语法)
87 | * [参数搜索](#参数搜索)
88 | * [DataClass && Json Schema](#dataclass-json-schema)
89 | * [Intc-LSP](#intc-lsp)
90 | * [Hover Document](#hover-document)
91 | * [Diagnostic](#diagnostic)
92 | * [Completion](#completion)
93 | * [Goto/Peek Definition](#gotopeek-definition)
94 | * [Used By](#used-by)
95 |
96 |
97 | `intc`是一个功能强大的智能config配置管理工具,它不仅为我们的配置文件提供模块继承、模块嵌套、参数引用、超参搜索,还支持基于`lambda`表达式的复杂参数动态计算等功能。
98 |
99 | 而`intc`的配套Language Server Protocol(`intc-lsp`)则让我们的编辑浏览体验更加丝滑,它将配置文件与`python`代码紧密联合,`intc-lsp`可以帮助你在书写和阅读`intc`文件时方便的获取`python`的语义信息,提供错误提示、参数补全、智能跳转和参数帮助文档展示的功能。
100 |
101 | 除了用于`config`,`intc`还可以直接作为`dataclass`使用,它可以将我们使用`intc`定义的`dataclass`转化为`json schema`用于如`LLM`的数据类型约束,还可以对如`LLM`返回值在内的`json`数据进行数据检查并生成错误提示用于`LLM`的迭代优化。
102 |
103 | ### 安装
104 |
105 | 1. 准备`python`环境,要求`python>=3.8`,且目前只在`Linux`和`Mac`上面进行了测试,`Windows`的支持可能存在问题
106 | 2. 安装`intc`和`intc-lsp`,如果不需要使用智能补全等功能可以只安装`intc`
107 |
108 | ```bash
109 | # install from pypi
110 | pip install intc
111 | pip install intc-lsp
112 | ```
113 |
114 | 或直接源码安装
115 | ```bash
116 | cd intc/
117 | pip install .
118 | cd ../lsp
119 | pip install .
120 | ```
121 |
122 | 3. 安装智能补全插件(Plugins)
123 |
124 | `intc`提供了一个通用的Language Server Protocol(`LSP`),理论上只要你使用的编辑器或`IDE`支持`LSP`就可以使用,但是我平时主要使用`(neo)vim`,偶尔用一下`vscode`,因此目前只在这两种编辑器上做了适配
125 |
126 | * neovim
127 |
128 | `neovim`功能强大且易于扩展,社区已经对`LSP`提供了非常友好的支持,具体参考[nvim config](plugins/neovim)
129 |
130 | * vscode
131 |
132 | `vscode`也可以通过安装`intc-lsp`对应的插件来获取`lsp`的支持,`vscode`插件市场搜索`intc-lsp`进行安装
133 | 
134 |
135 | * 其他IDE与编辑器
136 |
137 | 需要有相关经验的同学帮忙完善文档
138 |
139 | ### Feature List
140 |
141 | 下面是一些主要功能的简单介绍,具体用法可以跳转到对应的use case进行查看
142 |
143 | * 模块继承
144 | * intc的python类之间可以和正常的python类一样进行继承
145 | * intc的config文件则是python类的实例化参数,同样可以看做是继承自Python类
146 | * 除此之外在一些复杂的config文件中config之间也可以存在继承关系.
147 | * 模块嵌套,模块之间可以进行嵌套,一个更高级别的模块除了自己的参数之外还可以包含其他子模块,如在训练神经网络的任务中一个trainer除了自己的一些参数之外还可以包含model、optimizer和schedule等子模块
148 | * 参数引用,一个参数可以依赖一个或多个其他参数的值,支持lambda动态计算参数的值
149 | * 参数搜索,在很多任务中我们有多种参数组合,intc以笛卡尔积的形式展开所有的参数组合
150 | * dataclass,作为dataclass可以直接作为某个模块的参数类、可以生成json schema、可以对参数进行检查
151 |
152 | * config帮助文档,intc-lsp提供参数hover提示,当指针放到参数上时展示参数的帮助文档
153 | * config错误提示,intc-lsp检查你的参数填写是否正确
154 | * config参数补全,intc-lsp在你编辑config文件时进行语义补全
* config参数跳转,intc-lsp为你在浏览或编辑config文件时提供跳转到python源码的`goto/peek definition`支持
156 |
157 | * etc.
158 |
159 |
160 |
161 | ### Intc Use Case
162 |
163 | 我们将以`intc/examples/exp1`中的例子开始来介绍intc的基本用法
164 |
165 | 样例结构:
166 |
167 | ```
168 | ├── config -- config文件,目前支持json和jsonc文件,需要在.intc.json中指定config所在目录
169 | │ ├── model.json
170 | │ └── model_search.jsonc
171 | ├── .intc.json -- intc需要的meta数据
172 | ├── run.py -- your own code
173 | └── src -- your project
174 | └── __init__.py
175 | ```
176 |
177 | 和一个普通的python项目相比,intc项目需要一个`.intc.json` 文件来描述项目的一些`meta`数据, 下面是这个exp中的配置:
178 |
179 | ```.intc.json
180 | {
181 | // "module": ["config/module"], // submodule所在目录, 相对于当前目录, 这个exp中没有submodule
182 | "entry": ["config"], // config所在目录, 相对于当前目录
183 | "src": [ // config文件中用到的python模块,需要可以直接通过python import
184 | "src"
185 | ]
186 | }
187 |
188 | ```
189 |
190 | 而使用intc的python代码则与dataclass非常相似,相比于python自带的dataclass提供了如数值检查、模型注册、生成json schema等功能
191 |
192 | ```python
193 | from intc import (
194 | MISSING, # MISSING是一个常量(在intc中的值为`???`)表示参数缺失,需要在实例化config时提供
195 | Base, # 所有的intc dataclass 都继承该类(可以隐式继承)
196 | BoolField, # 这些bool field
197 | DictField, # dict field
198 | FloatField, # ...
199 | IntField,
200 | AnyField,
201 | ListField,
202 | NestField, # 嵌套field,相比dict可以提供更多的类型检查、智能补全等功能
203 | StrField,
204 | SubModule,
    cregister,  # cregister注册intc类(支持二级命名),用于实例的索引(可通过注册名直接获取对应的model类),config中也会使用注册名来定位对应的model类
206 | )
207 |
208 |
209 | @cregister("model", "simple_cls") # cregister将Model注册为intc类,`module_type`为`model`, `module_name`为`simple_cls`
210 | class Model(Base): # 显式继承自Base,这里也可以不显式继承,register过程会自动执行这个过程,显式的继承可以提供更好的语义补全
211 | embedding_combine_method = StrField( # dataclass的属性定义
212 | value="concat", # 默认值
213 | options=["concat", "concat_linear"], # options 表示值必须是这几个里面的一个
214 | help="the combine method, just `concat` or use `linear` on the concated embedding",
215 | )
216 | embedding_size = IntField(
217 | value=MISSING, help="the sum of bert and glove embedding size" # 这里的default value为MISSING,需要在实例化时提供
218 | )
219 | active = StrField(
220 | value="relu",
221 | options=["relu", "tanh", "sigmoid", "none"],
222 | help="the activation function",
223 | )
224 | submodule = SubModule( # 子模型,可以嵌套其他模型的定义,这里的子模型可以有多个,引用子模型需要用到这些子模型的注册名
225 | value={},
226 | suggestions=[ # suggestions 表示推荐的一些值, 对于代码阅读和intc-lsp的语义解析有帮助
227 | "embedding",
228 | "decode",
229 | ],
230 | help="submodules for basic model",
231 | )
232 |
233 | @cregister("embedding", "bert")
234 | class BertEmbedding:
235 | hidden_size = IntField(
236 | value=MISSING,
237 | minimum=1,
238 | help="the input/output/hidden size for bert, must >= 1",
239 | )
240 | dropout_rate = FloatField(
241 | value=0.0, minimum=0.0, maximum=1.0, help="the dropout rate for bert" #
242 | )
243 | ....
244 | ```
245 | 实际开发过程中,我们往往使用config文件对业务逻辑进行配置,而json(及其衍生格式,如jsonc)非常适合用来编辑配置文件,`intc`配合`intc-lsp`对此提供了非常好的支持, 下面是针对已有的dataclass进行配置的例子:
246 |
247 | ```jsonc
248 | // 文件 config/model.jsonc
249 | {
250 | "@model@simple_cls": { // 表明是对谁进行配置,格式为@module_type@module_name @model@simple_cls 对应被注册为这个名称的`Model`
251 | "active": "none",
        "embedding_size": "@$.@glove.hidden_size, @$.@bert.hidden_size @lambda x, y: x+y", // 这里的值为动态的lambda计算的,也就是说 embedding_size的值为@embedding@glove.hidden_size和@embedding@bert.hidden_size 的和, 关于lambda的语法请看本说明里面关于lambda的介绍
253 | "@embedding@glove": { // submodule, submodule 同样以@module_type@module_name作为标识
254 | "hidden_size": 300,
255 | "vocab_size": 5000
256 | },
257 | "@embedding@bert": {
258 | "hidden_size": 768
259 | }
260 | }
261 | }
262 | ```
263 | ```python
264 | # 文件 main.py
265 | import exp
266 |
267 | ```
268 | #### 参数引用及lambda语法
269 |
270 | 我们经常会遇到如一个encode模块的输出和decode模块的输入的维度相同,在配置文件中我们希望这两个参数的值始终保持一致,intc支持一个参数是另一个参数的引用,这样我们只需要修改其中的一个参数另一个参数的值也同步被修改了。
271 |
272 |
273 |
274 | 有时我们的一个参数的值依赖于多个其他参数,如在一个多encode的模型中,decode模块的输入维度是所有encode模型输出的维度的和,针对这种复杂的引用,intc提供`lambda`支持复杂的动态值计算.
275 |
276 | 在介绍`lambda`表达式之前,我们先对参数的引用规则进行介绍:
277 |
278 | 我们以下面的config为例:
279 | ```json
280 | {
281 | "@parent@p": {
282 | "para_p_a": "value_p_a"
283 | "@wang@lao": {
284 | "para_lao": "value_lao"
285 | },
286 | "@children@wang": {
287 | "_anchor": "cwang",
288 | "para_wang_a": "value_wang_a",
289 | },
290 | "@children@li": {
291 | "para_li_a": "value_li_a",
292 | "para_li_b": "..."
293 | },
294 | "para_p_b": "..."
295 | },
296 | "para_a": "value_a"
297 | }
298 | ```
299 |
300 | 我们想要在计算`para_p_b`时引用其他位置的值:
301 |
302 | 朴素的方式:
303 |
304 | * 如果我们想要引用`para_p_a`的值,`para_p_a`与当前位置处于同级,我们用`$`来表示同级,那`para_p_a`的值在`para_p_b`位置的引用应该写为`$.para_p_a`
305 | * 如果我们想要引用`para_a`的值,`para_a`处于当前位置的上一级,我们用`$$`来表示上一个级别(相信聪明的你已经发现每多一个`$`则表示往上多回溯一个级别),那`para_a`的值在`para_p_b`位置的引用应该写为`$$.para_a`
306 | * 如果我们想要引用`para_li_a`的值,则我们可以发现`para_li_a`位于当前位置同级的`@children@li`的下一级,所以`para_li_a`的值在`para_p_b`位置的引用应该写为`$.@children@li.para_li_a`
307 |
308 | 简化表达:
309 |
由于作为模块名的如`@children@li`的表达往往很长,书写起来不方便,而往往我们仅需要这个模块名的前缀或后缀就能区分一个模块,因此上面的最后例子在`para_p_b`处引用`para_li_a`的值还可以写为`$.@li.para_li_a`, 这里将`@children@li`简化成了`@li`而不带来歧义,需要注意的是这里的简化必须为原表达的前缀或后缀,且只能用于模块名(也就是说只能以`@`等特殊符号进行截断),这么做是为了降低阅读难度,减少歧义的发生。
311 |
312 | 锚点:
313 |
而如果我们在`para_p_b`处想要引用`para_wang_a`的值,这里的路径同样要经过一个模块名`@children@wang`我们就不能使用上面的简化表达的技巧,这是因为无论我们选择前缀`@children`(`@children@li`的前缀)还是后缀`@wang`(`@wang@lao`的前缀是`@wang`)都会产生歧义,那我们只能老老实实的写下全名了吗? 并非如此,intc为了让一些长距离的引用更加方便,还支持`全局锚点`来为远程依赖提供方便,在这个例子中,我们看到 `@children@wang`内部有一个`_anchor`关键词,我们可以在任意位置通过`_anchor`的值来引用它所在位置的同级元素,因此,在`para_p_b`处我们可以通过`cwang.para_wang_a`来引用`para_wang_a`的值。
315 |
需要注意的是`_anchor`可以有多个,但是不得出现同名的`_anchor`,每一个`_anchor`的值必须是全局唯一的,因此不要在子模块中设置`_anchor`
317 |
318 | 值引用的语法:
319 | intc的引用是通过`@lambda`表达式来实现的,引用规则为:
320 | ```
321 | {
322 | "para1": "value1",
323 | "para2": "@lambda @$.para1"
324 | }
325 | ```
326 |
327 |
328 | `lambda`除了用于值引用以外,还可以用于非常复杂的情况,下面是intc支持的`lambda`语法类型及使用示例:
329 |
330 | 1. 通用语法
331 |
332 | intc 的`lambda`最通用的语法是
333 |
334 | ```
335 | @para_1, @para_2, ...@para_n @lambda x1, x2, ...xn: use x1 to xn calc the value
336 | |__________________________| |________________________________________________|
337 | │ │
338 | 这里是要传个lambda的参数 这里的lambda表达式遵循python的lambda语法规则
339 | 与后面lambda表达式的参数 传入的参数就是前面对应的参数名
340 | 一一对应 x1>..
341 | 这里的每个para的表示遵循
342 | 引用规则
343 | ```
344 |
345 | ```json
346 | {
347 | "para1": 1,
348 | "para2": 2,
349 | "para3": "@$.para1, @$.para2 @lambda x, y: x+y"
350 | }
351 | ```
这里的`para3`是一个需要`lambda`计算的值,计算结果为`para1`与`para2`的值之和,即`3`
353 |
354 | 2. `lambda` 计算
355 | 有些时候我们单纯的只是想通过`lambda`来计算一个值,而不需要引用其他参数,那我们可以这么写:
356 | ```json
357 | {
358 | "para1": "@lambda _: list(range(100))"
359 | }
360 | ```
361 |
362 | 此时`para1`的值仍然是一个`lambda`表达式,但是这个表达式的输入参数是空的,这个表达式的值为`[0, 1, 2..., 98, 99]`
363 |
364 | 3. 通过lambda进行值引用
365 |
366 | 语法已在参数引用部分进行介绍
367 |
368 |
369 | #### 参数搜索
370 |
371 | 在做实验时,我们需要验证多种参数的组合,`intc`为我们提供了参数grid search的能力,针对每个搜索条件以笛卡尔积的形式进行组合,返回一个config list
372 |
373 | ```jsonc
374 | // data.json
375 | {
376 | "para1": 1,
377 | "para2": 100,
378 | "@children":{
379 | "cpara1": 1000,
380 | "cpara2": "a",
381 | "_search": {
382 | "cpara1": "@lambda _: list(range(1000, 1003))"
383 | }
384 | },
385 | "_search": {
386 | "para1": [1, 2, 3],
387 | "para2": "@lambda _: [100, 200, 300]",
388 | "@children.cpara2": ['a', 'b', 'c']
389 | }
390 | }
391 |
392 | ```
393 |
394 | ```python
395 | import json
396 | from intc import Parser
assert len(Parser(json.load(open('data.json'))).parser()) == 81
398 | ```
399 |
400 | 如示例中所示,`intc`的参数搜索的值可以是一个`list`也可以是一个`lambda`表达式返回一个`list`,但是目前在`_search`中使用的`lambda`表达式目前只支持值计算,不可以引用其他的参数参与计算,有这个限制的原因是`_search`本身有可能改变config的结构,而引用必须在config结构固定时才可以。所以实际引用的计算是在`_search`生成固定的config之后发生
401 |
402 | #### DataClass && Json Schema
403 |
404 | `intc`除了可以作为config管理工具使用之外,也可以当做`dataclass`来使用,特别是`intc`除了支持一般的`json`数据的导入导出之外,还可以根据定义导出`json schema`,这对于一些特定的场景如约定大模型的输入输出格式时非常有用
405 |
406 |
407 | ```python
408 | import json
409 |
410 | from intc import MISSING, Base, IntField, NestField, StrField, dataclass
411 |
412 |
413 | @dataclass
414 | class LLMOutput(Base):
415 | """The output of the LLM model"""
416 |
417 | user_name = StrField(value=MISSING, help="Name of the person")
418 |
419 | class Info:
420 | age = IntField(value=MISSING, minimum=1, maximum=150, help="Age of the person")
421 | blood_type = StrField(
422 | value=MISSING, options=["A", "B", "AB", "O"], help="Blood type"
423 | )
424 |
425 | user_info = NestField(value=Info, help="User information")
426 | lines = IntField(value=MISSING, help="Number of lines in the output")
427 | print(json.dumps(LLMOutput._json_schema(), indent=4))
428 | ```
429 |
430 | Json Schema Output:
431 | ```json
432 | {
433 | "properties": {
434 | "user_name": {
435 | "description": "Name of the person",
436 | "type": "string",
437 | "deprecated": false
438 | },
439 | "user_info": {
440 | "description": "User information",
441 | "type": "object",
442 | "properties": {
443 | "age": {
444 | "description": "Age of the person",
445 | "type": "integer",
446 | "deprecated": false,
447 | "minimum": 1,
448 | "maximum": 150
449 | },
450 | "blood_type": {
451 | "description": "Blood type",
452 | "type": "string",
453 | "enum": [
454 | "A",
455 | "B",
456 | "AB",
457 | "O"
458 | ],
459 | "deprecated": false
460 | }
461 | }
462 | }
463 | },
464 | "type": "object",
465 | "description": "The output of the LLM model",
466 | "$schema": "https://json-schema.org/draft/2020-12/schema"
467 | }
468 | ```
469 |
470 | ### Intc-LSP(left vscode, right nvim)
471 | #### Hover Document
472 |
473 |
474 |
475 |
476 |
477 |
478 | #### Diagnostic
479 |
480 |
481 |
482 |
483 |
484 | #### Completion
485 |
486 |
487 |
488 |
489 |
490 | #### Goto/Peek Definition
491 |
492 |
493 |
494 |
495 |
496 | ## Used By
497 |
498 | [DLK](https://github.com/cstsunfu/dlk) 是一个深度学习开发框架,一站式训练、预测、部署和展示PyTorch模型(Intc原本是DLK的一个组件)
499 |
500 | ## Related
501 |
[attrs](https://github.com/python-attrs/attrs) Python Classes Without Boilerplate. (the `dataclass` of `intc` is based on `attrs`)
503 |
504 | [hydra](https://github.com/facebookresearch/hydra) Hydra is a framework for elegantly configuring complex applications. (`intc` is similar to `hydra` but more powerful)
505 |
506 | [pydantic](https://github.com/pydantic/pydantic) A tool for data validation using Python type hints. (`intc` provide the data validation and json schema too.)
507 |
--------------------------------------------------------------------------------