├── .eslintrc.json ├── .github ├── ISSUE_TEMPLATE │ ├── proposal.md │ └── question.md └── pull_request_template.md ├── .gitignore ├── .vscode ├── extensions.json └── launch.json ├── .vscodeignore ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── DescribeML-0.1.1.vsix ├── DescribeML-1.0.0.vsix ├── DescribeML-1.1.0.vsix ├── DescribeML-1.2.0.vsix ├── DescribeML-1.2.1.vsix ├── GOVERNANCE.md ├── MIT-LICENSE ├── README.md ├── bin └── cli ├── documentation └── language-reference-guide.md ├── examples ├── Gender.descml ├── Melanoma.descml ├── Polarity.descml ├── Whales-minimal.descml ├── Whales.descml ├── data │ ├── Melanoma.csv │ └── WineQT.csv └── evaluation │ ├── Gender.descml │ ├── Melanoma.descml │ ├── Polarity.descml │ ├── Whales.descml │ ├── empirical │ ├── .DS_Store │ ├── Resources │ │ ├── Empirical-evaluation-instructions.pdf │ │ ├── Empirical-evaluation-instructions.zip │ │ ├── Melanoma.descml │ │ └── ReadingExerciseAnonimizedResults.xlsx │ ├── imdb_video_game_rating.csv │ ├── test.descml │ └── videogames.descML │ ├── videogames.descml │ └── videogames_full.descml ├── fileicons ├── Autcomplete.gif ├── File.svg ├── cloud-computing.png ├── dataset-descriptor.icon-theme.json ├── html.png ├── presentation.png ├── requisito.png └── save.png ├── langium-config.json ├── langium-quickstart.md ├── language-configuration.json ├── out ├── cli │ ├── cli-util.js │ ├── cli-util.js.map │ ├── generator.js │ ├── generator.js.map │ ├── index.js │ └── index.js.map ├── extension.js ├── extension.js.map ├── generator-service │ ├── dataset-descriptor-documentation.js │ ├── dataset-descriptor-documentation.js.map │ └── templates │ │ ├── document.pug │ │ ├── document.twig │ │ └── document2.twig ├── hints-service │ ├── hints-service.js │ └── hints-service.js.map ├── language-server │ ├── dataset-descriptor-module.js │ ├── dataset-descriptor-module.js.map │ ├── dataset-descriptor-naming.js │ ├── dataset-descriptor-naming.js.map │ ├── dataset-descriptor-scope.js │ ├── 
dataset-descriptor-scope.js.map │ ├── dataset-descriptor-validator.js │ ├── dataset-descriptor-validator.js.map │ ├── generated │ │ ├── ast.js │ │ ├── ast.js.map │ │ ├── grammar.js │ │ ├── grammar.js.map │ │ ├── module.js │ │ └── module.js.map │ ├── main.js │ └── main.js.map └── uploader-service │ ├── dataset-descriptor-uploader.js │ ├── dataset-descriptor-uploader.js.map │ ├── dataset-metrics.js │ └── dataset-metrics.js.map ├── package-lock.json ├── package.json ├── snippets.json ├── src ├── cli │ ├── cli-util.ts │ ├── generator.ts │ └── index.ts ├── extension.ts ├── generator-service │ └── dataset-descriptor-documentation.ts ├── hints-service │ └── hints-service.ts ├── language-server │ ├── dataset-descriptor-module.ts │ ├── dataset-descriptor-naming.ts │ ├── dataset-descriptor-scope.ts │ ├── dataset-descriptor-validator.ts │ ├── dataset-descriptor.langium │ ├── generated │ │ ├── ast.ts │ │ ├── grammar.ts │ │ └── module.ts │ └── main.ts └── uploader-service │ ├── dataset-descriptor-uploader.ts │ └── dataset-metrics.ts ├── syntaxes ├── dataset-descriptor.tmLanguage.json └── static-tmLang.json └── tsconfig.json /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "root": true, 3 | "parser": "@typescript-eslint/parser", 4 | "parserOptions": { 5 | "ecmaVersion": 6, 6 | "sourceType": "module" 7 | }, 8 | "plugins": [ 9 | "@typescript-eslint" 10 | ], 11 | "rules": { 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/proposal.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Proposal & Issue Report 3 | about: Suggest an idea for improving 4 | labels: '' 5 | --- 6 | 7 | 💣 Describe the issue or problem you detected 8 | 9 | (Write your answer here.) 10 | 11 | 📋 Provide the solution you'd like 12 | 13 | (Describe your proposed solution here.) 
14 | 15 | 🤔 If any, describe alternatives you've considered 16 | 17 | (Write your answer here.) 18 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/question.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Question 3 | about: Ask something 4 | labels: 'question' 5 | --- 6 | 7 | ❓ Ask us anything 8 | 9 | (Write your answer here.) 10 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 💣 Describe the issue or problem you are addressing with this Pull Request 2 | 3 | (Write your answer here.) 4 | 5 | 📋 Describe the solution you implemented 6 | 7 | (Describe your proposed solution here.) 8 | 9 | 🤔 If any, describe alternatives you've considered 10 | 11 | (Write your answer here.) -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | // See https://go.microsoft.com/fwlink/?LinkId=827846 to learn about workspace recommendations. 3 | // Extension identifier format: ${publisher}.${name}. Example: vscode.csharp 4 | 5 | // List of extensions which should be recommended for users of this workspace. 6 | "recommendations": [ 7 | "langium.langium-vscode" 8 | ] 9 | } -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | // A launch configuration that launches the extension inside a new window 2 | // Use IntelliSense to learn about possible attributes. 
3 | // Hover to view descriptions of existing attributes. 4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | { 6 | "version": "0.2.0", 7 | "configurations": [ 8 | { 9 | "name": "Run Extension", 10 | "type": "extensionHost", 11 | "request": "launch", 12 | "runtimeExecutable": "${execPath}", 13 | "args": [ 14 | "${workspaceFolder}/examples/", 15 | "--extensionDevelopmentPath=${workspaceFolder}" 16 | ] 17 | }, 18 | { 19 | "name": "Attach to Language Server", 20 | "type": "node", 21 | "port": 6009, 22 | "request": "attach", 23 | "skipFiles": [ 24 | "/**" 25 | ], 26 | "sourceMaps": true, 27 | "outFiles": [ 28 | "${workspaceFolder}/out/**/*.js", 29 | "${workspaceFolder}/node_modules/langium" 30 | ] 31 | } 32 | ] 33 | } 34 | -------------------------------------------------------------------------------- /.vscodeignore: -------------------------------------------------------------------------------- 1 | .vscode/** 2 | .vscode-test/** 3 | .gitignore 4 | langium-quickstart.md 5 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Code of Conduct 2 | 3 | 4 | ## Short Version 5 | We as members, contributors, and leaders pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, visible or invisible disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, caste, color, religion, or sexual identity and orientation. 6 | 7 | We pledge to act and interact in ways that contribute to an open, welcoming, diverse, inclusive, and healthy community. 8 | 9 | ## Need Help? 
10 | If ANY behavior makes you uncomfortable, or you believe it breaches the intent of this code of conduct, please contact a project maintainer: 11 | 12 | * [Joan Giner](https://github.com/JoanGi/) (he/him) 13 | * [Abel Gómez](https://github.com/abelgomez/) (he/him) 14 | * [Jordi Cabot](https://github.com/jcabot/) (he/him) 15 | 16 | --- 17 | 18 | ## Long Version 19 | 20 | As contributors and maintainers in this project, and in the interest of fostering an open and welcoming community, we pledge to respect all people who contribute through reporting issues, posting feature requests, updating documentation, submitting pull requests or patches, and other activities. 21 | 22 | We are committed to making participation in our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation. 23 | 24 | ## Our Standards 25 | 26 | Examples of behavior that contributes to creating a positive environment 27 | include: 28 | 29 | * Demonstrating empathy and kindness toward other people 30 | * Using welcoming and inclusive language 31 | * Being respectful of differing viewpoints and experiences 32 | * Giving and gracefully accepting constructive feedback 33 | * Focusing on what is best for the community 34 | * Showing empathy towards other community members 35 | 36 | Examples of unacceptable behavior by participants include: 37 | 38 | * Violent threats or language directed against another person. 39 | * Sexist, racist, or otherwise discriminatory jokes and language. 
40 | * Trolling, insulting/derogatory comments, and personal or political attacks 41 | * Public or private harassment 42 | * Publishing others' private information, such as a physical or electronic 43 | address, without explicit permission 44 | * Other conduct which could reasonably be considered inappropriate in a 45 | professional setting 46 | 47 | ## Enforcement Responsibilities 48 | 49 | Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. 50 | 51 | Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. 52 | 53 | ## Scope 54 | 55 | This Code of Conduct applies within all project spaces, including contributors, maintainers, administrators, and any kind of participant in the project. 56 | 57 | Project maintainers will enforce this code at all times. We expect cooperation from all participants to ensure a safe environment for everyone. 58 | 59 | The Code of Conduct, and the project leaders, can only address behavior in the present, not past behavior or fears of what someone might do based on past behavior. 60 | 61 | ## Enforcement 62 | Instances of abusive, harassing, or otherwise unacceptable behavior may be 63 | reported by contacting the project maintainers at: 64 | 65 | * [Joan Giner](https://github.com/JoanGi/) (he/him) 66 | * [Abel Gómez](https://github.com/abelgomez/) (he/him) 67 | * [Jordi Cabot](https://github.com/jcabot/) (he/him) 68 | 69 | Complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. 
70 | 71 | The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. 72 | 73 | Project maintainers who do not follow or enforce the Code of Conduct in good 74 | faith may face temporary or permanent repercussions as determined by other 75 | members of the project's leadership. 76 | 77 | ## Enforcement Guidelines 78 | Project maintainers will follow these Community Impact Guidelines in determining the consequences for any action they deem in violation of this Code of Conduct: 79 | 80 | ### 1. Correction 81 | **Community Impact**: Use of inappropriate language or other behavior deemed unprofessional or unwelcome in the community. 82 | 83 | **Consequence**: A private, written warning from community leaders, providing clarity around the nature of the violation and an explanation of why the behavior was inappropriate. A public apology may be requested. 84 | 85 | ### 2. Warning 86 | **Community Impact**: A violation through a single incident or series of actions. 87 | 88 | **Consequence**: A warning with consequences for continued behavior. No interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, for a specified period of time. This includes avoiding interactions in community spaces as well as external channels like social media. Violating these terms may lead to a temporary or permanent ban. 89 | 90 | ### 3. Temporary Ban 91 | **Community Impact**: A serious violation of community standards, including sustained inappropriate behavior. 92 | 93 | **Consequence**: A temporary ban from any sort of interaction or public communication with the community for a specified period of time. No public or private interaction with the people involved, including unsolicited interaction with those enforcing the Code of Conduct, is allowed during this period. Violating these terms may lead to a permanent ban. 
94 | 95 | ### 4. Permanent Ban 96 | **Community Impact**: Demonstrating a pattern of violation of community standards, including sustained inappropriate behavior, harassment of an individual, or aggression toward or disparagement of classes of individuals. 97 | 98 | **Consequence**: A permanent ban from any sort of public interaction within the community. 99 | 100 | ## Attribution 101 | 102 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 2.1, available at https://www.contributor-covenant.org/version/2/1/code_of_conduct/ 103 | 104 | Community Impact Guidelines were inspired by [Mozilla’s code of conduct enforcement ladder](https://github.com/mozilla/diversity). 105 | 106 | [homepage]: https://www.contributor-covenant.org 107 | 108 | For answers to common questions about this code of conduct, see 109 | https://www.contributor-covenant.org/faq -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing to DescribeML 2 | 3 | First of all, thank you for your interest in contributing to our project! 4 | 5 | There are several ways in which you can contribute, beyond writing code. The goal of this document is to provide a high-level overview of how you can get involved and how your contribution will be considered. 6 | 7 | ## Asking Questions 8 | 9 | Do you have a question? Feel free to [open an issue](https://github.com/SOM-Research/DescribeML/issues/new?assignees=&labels=question&template=question.md). 10 | 11 | Project maintainers will be eager to listen from you and help you out. Please, try to compose a clear and concise question. The more information you provide, the better we will be able to help you. 12 | 13 | ## Reporting Issues 14 | 15 | Do you have you identified a reproducible problem in our code? or have a feature request? We want to hear about it! 
Please follow the next steps: 16 | 17 | ### Look For an Existing Issue 18 | 19 | Sometimes the issue you want to report is being already addressed, or is planned to be addressed soon. Before you create a new issue, please do a search in [open issues](https://github.com/SOM-Research/DescribeML/issues) to see if the issue or feature request has already been filed. 20 | 21 | If you find your issue already exists, do not hesittate to make relevant comments and add your [reaction](https://github.com/blog/2119-add-reactions-to-pull-requests-issues-and-comments). Please, use a reaction in place of a "+1" comment, we believe it's easy: 👍 for upvoting and 👎 for downvoting. 22 | 23 | If you cannot find an existing issue that describes your bug or feature, [create a new issue](https://github.com/SOM-Research/DescribeML/issues/new?assignees=&labels=&template=proposal.md). The template will guide you on the issue reporting. 24 | 25 | ### Writing Good Bug Reports and Feature Requests 26 | 27 | Whenever possible, we ask you to file a single issue per problem and feature request. Please do not enumerate multiple bugs or feature requests in the same issue, as it may be hard to track the progress. 28 | 29 | As you can imagine, the more information you can provide, the more likely someone will be successful at reproducing the issue and finding a fix. 30 | 31 | ### Creating Pull Requests 32 | 33 | Please refer to the article on [creating pull requests](https://github.com/microsoft/vscode/wiki/How-to-Contribute#pull-requests) before contributing to this project. 34 | 35 | ### Governance 36 | 37 | Any contribution you send to us will be addressed by the project maintainers following the governance rules described in the [GOVERNANCE.md](GOVERNANCE.md) 38 | 39 | # Thank You! 40 | 41 | Your contributions to open source, large or small, make great projects like this possible. Thank you for taking the time to contribute. 
42 | -------------------------------------------------------------------------------- /DescribeML-0.1.1.vsix: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/DescribeML-0.1.1.vsix -------------------------------------------------------------------------------- /DescribeML-1.0.0.vsix: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/DescribeML-1.0.0.vsix -------------------------------------------------------------------------------- /DescribeML-1.1.0.vsix: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/DescribeML-1.1.0.vsix -------------------------------------------------------------------------------- /DescribeML-1.2.0.vsix: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/DescribeML-1.2.0.vsix -------------------------------------------------------------------------------- /DescribeML-1.2.1.vsix: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/DescribeML-1.2.1.vsix -------------------------------------------------------------------------------- /GOVERNANCE.md: -------------------------------------------------------------------------------- 1 | # Project Governance 2 | 3 | The development and community management of the project will follow the governance rules described in this document. 4 | 5 | ## Project Maintainers 6 | 7 | Project maintainers have admin access to the GitHub repository. 
The team of project maintainers is the following: 8 | 9 | * [Joan Giner](https://github.com/JoanGi/) (he/him) 10 | * [Abel Gómez](https://github.com/abelgomez/) (he/him) 11 | * [Jordi Cabot](https://github.com/jcabot/) (he/him) 12 | 13 | ## Project Collaborators & Contributors 14 | 15 | Any member willing to participate in the development of the project will be considered as a collaborator. 16 | 17 | Collaborators may propose changes to the project's source code. The mechanism to propose such a change is a GitHub pull request. A collaborator proposing a pull request is considered a contributor. Project maintainers will review and merge pull requests. 18 | 19 | ## Issue governance 20 | 21 | Both collaborators and project maintainers may propose issues. Participation in the issue discussion is open and must follow the [Code of Conduct](CODE_OF_CONDUCT.md). 22 | 23 | The group of project maintainers will be responsible for assigning labels to issues, as well as assign the issue to a project maintainer or contributor. 24 | 25 | The group of project maintainers commits to answer to any issue in a period of time of 3 weeks. 26 | 27 | ## Pull Request governance 28 | 29 | Both collaborators and project maintainers may propose pull requests. When a collaborator proposes a pull request is considered a contributor. 30 | 31 | Pull requests should comply with the template provided. The assignment of labels and assignees to the pull request is the responsibility of the project maintainers. 32 | 33 | The group of project maintainers commis to answer to any pull request in a period of time of 3 weeks. 34 | 35 | The decision of accepting (or rejecting) a pull request will be taken by the group of project maintainers. The decision will be based on the following criteria: 36 | 37 | * One project maintainer must approve a pull request before the pull request can be merged. 38 | * One project maintainer approval is enough if the pull request has been open for more than 14 days. 
39 | * Approving a pull request indicates that the contributor accepts 40 | responsibility for the change. 41 | * If a project maintainer opposes a pull request, the pull request cannot be merged (i.e., _veto_ behavior). Often, discussions or further changes result in collaborators removing their opposition. -------------------------------------------------------------------------------- /MIT-LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2022 SOM Research Group and others 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining 4 | a copy of this software and associated documentation files (the 5 | "Software"), to deal in the Software without restriction, including 6 | without limitation the rights to use, copy, modify, merge, publish, 7 | distribute, sublicense, and/or sell copies of the Software, and to 8 | permit persons to whom the Software is furnished to do so, subject to 9 | the following conditions: 10 | 11 | The above copyright notice and this permission notice shall be 12 | included in all copies or substantial portions of the Software. 13 | 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 15 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 16 | MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 17 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE 18 | LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION 19 | OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION 20 | WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 | # DescribeML ![GitHub tag (latest by date)](https://img.shields.io/github/v/tag/SOM-Research/DescribeML?label=Version&style=for-the-badge) 4 | 5 | DescribeML is a VSCode language plugin to describe machine-learning datasets.
6 | 7 | Precisely describe your data's provenance, composition, and social concerns in a structured format. 8 | 9 | 10 | Make it easy to **reproduce your experiments** to others when you cannot share your data.
11 |
Check out the quick video [presentation](https://www.youtube.com/watch?v=Bf3bhWB-UJY) of the tool, and the [tutorial](https://www.youtube.com/watch?v=1Of1qfuJKvY) presented at the MODELS '22 Conference
15 | 16 | ## Installation 17 | 18 | ### Via marketplace 19 | 20 | The easiest way to install the plugin is by using the **Visual Studio Code Market**. Just type "describeML" in the extension tab, and that's it! 21 | 22 | ### Manually 23 | 24 | Instead, you can install it manually using the packaged release of the plugin in this [repository](https://github.com/SOM-Research/DescribeML) that can be found at the root of the project. 25 | 26 | The file is **DescribeML-1.2.1.vsix** 27 | 28 | Open your terminal (or the terminal inside the VSCode) and write this: 29 | 30 | ``` 31 | 32 | git clone https://github.com/SOM-Research/DescribeML.git datasets 33 | cd datasets 34 | code --install-extension DescribeML-1.2.1.vsix 35 | ``` 36 | 37 | *Troubles: If you cannot see the syntax highlight in the examples files (p.e. *Melanoma.descml*) as the image below. Please, reload the VSCode editor and write the code --install command again* 38 | 39 | Great! That's it. 40 | 41 | 42 | 43 | ## Getting Started 44 | 45 | 1) The first step is to create a *.descml* file 46 | 47 | 2) The easy way to start using our tool is to use the *preloader data service*, located at the top left of your editor, clicking at: preloader service 52 | 53 | 3) Select your dataset file (*.csv*), and the tool will generate a draft of your description file. 54 | 55 | 4) To help you, look to the [Language Reference Guide](https://github.com/SOM-Research/DescribeML/blob/main/documentation/language-reference-guide.md) and follow the examples in the **examples/evaluation** [folders](https://github.com/SOM-Research/DescribeML/tree/main/examples/evaluation) to get a sense of the tool's possibilities. Take a look at the *Melanoma.descml* file, for example. 56 | 5) During the documentation process, hitting CTRL + Space (equivalent in other OS) gives you auto-completion help. 
In addition, the part marked with the points below gives you hints to complete the documentation, and the outline in the right part shows you the document structure. 57 | 58 |
59 | 60 | ![Autocompletion feature](https://github.com/SOM-Research/DescribeML/blob/main/fileicons/Autcomplete.gif?raw=true) 61 | 62 |
63 | 64 | 6) Once you are happy with your documentation, you can generate HTML documentation by clicking the generator button next to the prealoder service: HTML generator 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | For more information, check out the **quick [presentation](https://www.youtube.com/watch?v=Bf3bhWB-UJY) video** and the [**tutorial**](https://www.youtube.com/watch?v=1Of1qfuJKvY) presented in the MODELS '22 Conference 77 | 78 | 79 | 80 | 81 | ## Contributing 82 | 83 | This project is being development as part of a research line of the [SOM Research Lab](https://som-research.github.io/), but we are open to contributions from the community. If you are interested in contributing to this project, please first read the [CONTRIBUTING.md](CONTRIBUTING.md) guidelines file. 84 | 85 | ### Repository structure 86 | 87 | The following tree shows the list of the repository's relevant sections: 88 | 89 | - The *documentation* and *examples* folders contains the mentioend examples and the language reference guide. 90 | - The *out* folder contains the executable plugin in JS. You may not want to dive in as it is generated by the TypeScrpit compiler 91 | - The *src* folder contains the project's source code 92 | - The *cli* folder is the generated grammar and AST from Langium. You may not want to dive in it as it is a generated asset 93 | - The *generator-service* folder contains all the code of the generation service. Could be a good place to start if you want to improve the generation of the tool. 94 | - The *uploader-service* folder contains all the code of the uploader service. Could be a good place to contribute new statistical metrics, or ML techniques to do dataset reverse engineering 95 | - The *language-server* folder contains all the language features, and the grammar declaration. 
If you want to improve the grammar, or some of the features the plugin offers here is the place you may want to start 96 | - The *dataset-description.langium* file contains the main grammar declaration. This grammar is developed using the [Langium Grammar Language](https://langium.org/docs/grammar-language/). Please refer to the linked documentation to more insights on how to develop the grammar. 97 | 98 | 99 | 100 | 101 | ``` 102 | ├── documentation 103 | │ └── language-reference-guide.md // The language reference guide 104 | ├── examples 105 | │ ├── evaluation 106 | │ ├── Gender.descml // Gender dataset example 107 | | ├── Melanoma.descml // Melanoma dataset example 108 | | └── Polarity.descml // Polarity dataset example 109 | ├── out // The generated JS from the src folder 110 | └── src // The source code of the project 111 | ├── cli // Langium framework utils 112 | ├── generator-service // The tool's HTML generator service 113 | ├── uploader-service // The tool's HTML uploader service 114 | └── language-server // The tool's language features 115 | ├── generated // Generated grammar and AST from Langium 116 | ├── dataset-description-index.ts // Custom index feature 117 | ├── dataset-description-module.ts // Declaration of the custom language features 118 | ├── dataset-description-validator.ts // Custom language features 119 | └── dataset-description.langium // The main grammar file of the tool 120 | 121 | ``` 122 | 123 | 124 | 125 | 126 | #### Debugging the extensions 127 | 128 | This repo comes with an already built-in config to debug. Just go to Debug in VSCode, and launch the Extension config. Please check your port 6009 is free. 
129 | 130 | For more information about how the framework works and how the language can be extended, please refer to https://github.com/langium/langium or the VSCode extension API documentation https://code.visualstudio.com/api 131 | 132 | ## Research background and citation 133 | 134 | DescribeML is part of an ongoing research project to improve dataset documentation for machine learning. The core of our proposal is a domain-specific language published in the [Journal of Computer Languages](https://www.sciencedirect.com/science/article/pii/S2590118423000199) that allows data creators to describe relevant aspects of their data for the machine learning field and beyond. The [Critical Dataset Studios](https://knowingmachines.org/reading-list#dataset_documentation_practices) of the [Knowing Machines](https://knowingmachines.org) project have compiled an excellent list of current documentation practices. 135 | 136 | To cite the domain-specific language: 137 | ``` 138 | Giner-Miguelez, J., Gómez, A., & Cabot, J. (2023). A domain-specific language for describing machine learning datasets. Journal of Computer Languages, 76, 101209. 139 | ``` 140 | 141 | The tool has been presented at the [ACM/IEEE 25th International Conference on Model Driven Engineering Languages and Systems](https://conf.researchr.org/details/models-2022/models-2022-tools---demonstrations/5/DescribeML-a-tool-for-describing-machine-learning-datasets) and published as an Original Software Publication in the [Science of Computer Programming](https://www.sciencedirect.com/science/article/pii/S0167642323001120) journal. 142 | 143 | To cite the tool: 144 | ``` 145 | Giner-Miguelez, J., Gómez, A., & Cabot, J. (2023). DescribeML: A dataset description tool for machine learning. Science of Computer Programming, 2023, 103030, ISSN 0167-6423, https://doi.org/10.1016/j.scico.2023.103030. 
146 | ``` 147 | 148 | 149 | 150 | # Code of Conduct 151 | 152 | At SOM Research Lab we are dedicated to creating and maintaining welcoming, inclusive, safe, and harassment-free development spaces. Anyone participating will be subject to and agrees to sign on to our [Code of Conduct](CODE_OF_CONDUCT.md). 153 | 154 | ## License 155 | 156 | Shield: [![License: MIT](https://img.shields.io/badge/License-MIT-yellow.svg)](https://opensource.org/licenses/MIT) 157 | 158 | 159 | The source code for the site is licensed under the MIT license, which you can find in the MIT-LICENSE file. 160 | 161 | All graphical assets are licensed under the 162 | [Creative Commons Attribution 3.0 Unported License](https://creativecommons.org/licenses/by/3.0/). 163 | -------------------------------------------------------------------------------- /bin/cli: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | require("../out/cli").default(); -------------------------------------------------------------------------------- /examples/Whales-minimal.descml: -------------------------------------------------------------------------------- 1 | Dataset: Whales 2 | Metadata: 3 | Title: "Whales from space dataset, an annotated satellite image dataset of whales for training machine learning models" 4 | Unique-identifier: whales 5 | Version: v1 6 | Dates: 7 | Release Date: 27-05-2022 8 | Citation: 9 | Raw Citation: "Cubaynes, H.C., Fretwell, P.T. Whales from space dataset, an annotated satellite image dataset of whales for training machine learning models. Sci Data 9, 245 (2022). https://doi.org/10.1038/s41597-022-01377-4" 10 | Main Description: 11 | Purposes: "A dataset of 633 annotated whale objects, created by surveying 6,300 km2 of satellite imagery captured by various very high-resolution satellites (i.e. WorldView-3, WorldView-2, GeoEye-1 and Quickbird-2) in various regions across the globe (e.g. 
Argentina, New Zealand, South Africa, United States, Mexico). The dataset covers four different species: southern right whale (Eubalaena australis), humpback whale (Megaptera novaeangliae), fin whale (Balaenoptera physalus), and grey whale (Eschrichtius robustus)." 12 | Tasks: [ image-classification ] 13 | Areas: Wildlife 14 | Tags: Whales satellite detection 15 | Distribution: 16 | Licences: CC BY 3.0 (Attribution 3.0 Unported) 17 | Additional Conditions: "To fulfil the End User Licence Agreement with Maxar Technologies18, these image chips are shared in a png format, and access to the dataset is available upon request from the NERC UK Polar Data Centre that can be contacted at PDCServiceDesk@bas.ac.uk. Data access requires user name and email address, which will be shared with Maxar Technologies. Anyone using any of the image chips is also required to attribute the images properly (See Usage Notes)." 18 | Authoring: 19 | Authors: 20 | Name "Hannah C. Cubayanes" email example@example.com 21 | Name "Peter T. Fretwell" email example@example.com 22 | Composition: 23 | Rationale: "The “Whales from space dataset” is separated in two sub-datasets: a dataset that contains the whale annotations (box and point shapefiles with associated csv files) named “Whales from space dataset: Box and point shapefiles”16; and a dataset with the image chips named “Whales from space dataset: Image chips”17. " 24 | 25 | Data Provenance: 26 | Curation Rationale: "The “Whales from space dataset” is separated in two sub-datasets: a dataset that contains the whale annotations (box and point shapefiles with associated csv files) named “Whales from space dataset: Box and point shapefiles”16; and a dataset with the image chips named “Whales from space dataset: Image chips”17. " 27 | Gathering Processes: 28 | Process: SatellitImages 29 | Description: "The satellite images were acquired from Maxar Technologies, a satellite imagery company. 
The images were acquired from various very high-resolution satellites (i.e. WorldView-3, WorldView-2, GeoEye-1 and Quickbird-2) in various regions across the globe (e.g. Argentina, New Zealand, South Africa, United States, Mexico)." 30 | Source: Maxar_Technologies_satellites 31 | Description: "Satellite company" 32 | How data is collected: Sensors 33 | 34 | LabelingProcesses: 35 | Labeling process: WahlesAnnotation 36 | Description: "Images were annotated using ArGis 10.4" 37 | Type: Bounding boxes 38 | Infrastructure: 39 | Tool "ArcGis 10.4 ESRI 2017" 40 | Version "v10.4" 41 | 42 | Social Concerns: 43 | Social Issue: SpeciesDifferentitation 44 | IssueType: Bias 45 | Description: "As species differentiation has not been tested when analysing satellite images, we reference the most likely species in this database. The most likely species was assigned based on the scientific literature, hence our decision to acquire images of specific areas when only one large whale species was expected to be present" 46 | Social Issue: AnnotationConfidence 47 | IssueType: Bias 48 | Description: "We recommend that only the whales with a “definite” certainty level be used to train automated detection systems." 
-------------------------------------------------------------------------------- /examples/evaluation/empirical/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/examples/evaluation/empirical/.DS_Store -------------------------------------------------------------------------------- /examples/evaluation/empirical/Resources/Empirical-evaluation-instructions.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/examples/evaluation/empirical/Resources/Empirical-evaluation-instructions.pdf -------------------------------------------------------------------------------- /examples/evaluation/empirical/Resources/Empirical-evaluation-instructions.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/examples/evaluation/empirical/Resources/Empirical-evaluation-instructions.zip -------------------------------------------------------------------------------- /examples/evaluation/empirical/Resources/ReadingExerciseAnonimizedResults.xlsx: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/examples/evaluation/empirical/Resources/ReadingExerciseAnonimizedResults.xlsx -------------------------------------------------------------------------------- /examples/evaluation/empirical/videogames.descML: -------------------------------------------------------------------------------- 1 | Dataset: videoGames 2 | Metadata: 3 | Title: "Video game ratings from imdb" 4 | Unique-identifier: videoGames 5 | Version: v0000 6 | Dates: 7 | Release Date: 00-0000-00 8 | 
Description: 9 | Tasks: [Automatic-Speech-Recognition,Code Generation,Evaluation of language models] 10 | Areas: videoGames 11 | Tags: games imbd 12 | Applications: 13 | Past Uses:"Used in Kaggle libraries" 14 | Distribution: 15 | Licences: Non-declared 16 | Authoring: 17 | Authors: 18 | Name "David Nyagam" contact "https://nyagami.disha.page/" 19 | 20 | Composition: 21 | Rationale: "Data scrapped from Imbd of video games rating" 22 | Total size: 12600 23 | Data Instances: 24 | Instance: imdb_video_game_rating 25 | Description: "A CSV with 8 attributes scrapped from IMDB" 26 | Type: Record-Data 27 | Attribute number: 8 28 | Attributes: 29 | attribute: index 30 | description: "Index of the data" 31 | count: 12635 32 | ofType: Numerical 33 | attribute: title 34 | description: "Name of the game" 35 | count: 12309 36 | ofType: Categorical 37 | Statistics: 38 | Completeness: 90 39 | attribute: year 40 | description: "Year the game was launched" 41 | count: 63 42 | ofType: Categorical 43 | attribute: genre 44 | description: "XXXXXXXXXXXXX" 45 | count: 000 46 | ofType: Categorical 47 | Statistics: 48 | Mode: "" 49 | attribute: rating 50 | description: "Rating of the game on a scale of 1 to 10" 51 | count: 88 52 | ofType: Numerical 53 | Statistics: 54 | Mean: 6.5 55 | attribute: votes 56 | description: "Numer of the votes" 57 | count: 1638 58 | ofType: Numerical 59 | attribute: directors 60 | description: "Name of the directors" 61 | count: 4160 62 | ofType: Categorical 63 | Statistics: 64 | Completeness: 48 65 | attribute: plot 66 | description: "Description of the game" 67 | count: 9016 68 | ofType: Categorical 69 | Statistics: 70 | Completeness: 72 71 | 72 | Data Provenance: 73 | Curation Rationale: "..." 
74 | Gathering Processes: 75 | Process: ProcessID 76 | Description: "XXXXXXXXXXXXXXXXXXXXXX" 77 | Source: SourceName 78 | Description: "XXXXXXXXXXXXXXXXXXXXX" 79 | How data is collected: Scrapping 80 | Social Concerns: 81 | 82 | 83 | -------------------------------------------------------------------------------- /examples/evaluation/videogames.descml: -------------------------------------------------------------------------------- 1 | Dataset: ei 2 | Metadata: 3 | Title: "the priors" 4 | Unique-identifier: xe2 5 | Version: v1 6 | Main Description: 7 | Purposes: "" 8 | Authoring: 9 | Authors: Name "Joan" contact "joan" 10 | Composition: -------------------------------------------------------------------------------- /fileicons/Autcomplete.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/fileicons/Autcomplete.gif -------------------------------------------------------------------------------- /fileicons/File.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /fileicons/cloud-computing.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/fileicons/cloud-computing.png -------------------------------------------------------------------------------- /fileicons/dataset-descriptor.icon-theme.json: -------------------------------------------------------------------------------- 1 | { 2 | "iconDefinitions": { 3 | "_datadesc": { 4 | "iconPath": "./File.svg", 5 | } 6 | }, 7 | "languageIds":{ 8 | "dataset-descriptor":"_datadesc" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /fileicons/html.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/fileicons/html.png -------------------------------------------------------------------------------- /fileicons/presentation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/fileicons/presentation.png -------------------------------------------------------------------------------- /fileicons/requisito.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/fileicons/requisito.png -------------------------------------------------------------------------------- /fileicons/save.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/SOM-Research/DescribeML/994e105f626e295ddfb0c28c2c380ecea945d790/fileicons/save.png -------------------------------------------------------------------------------- /langium-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "projectName": "DatasetDescriptor", 3 | "languages": [{ 4 | "id": "dataset-descriptor", 5 | "grammar": "src/language-server/dataset-descriptor.langium", 6 | "fileExtensions": [".descml"], 7 | "textMate": { 8 | "out": "syntaxes/dataset-descriptor.tmLanguage.json" 9 | } 10 | }], 11 | "out": "src/language-server/generated" 12 | } 13 | -------------------------------------------------------------------------------- /langium-quickstart.md: -------------------------------------------------------------------------------- 1 | # Welcome to your Langium VS Code Extension 2 | 3 | ## What's in the folder 4 | 5 | This folder contains all necessary files for your language extension. 
6 | * `package.json` - the manifest file in which you declare your language support. 7 | * `language-configuration.json` - the language configuration used in the VS Code editor, defining the tokens that are used for comments and brackets. 8 | * `src/extension.ts` - the main code of the extension, which is responsible for launching a language server and client. 9 | * `src/language-server/dataset-descriptor.langium` - the grammar definition of your language. 10 | * `src/language-server/main.ts` - the entry point of the language server process. 11 | * `src/language-server/dataset-descriptor-module.ts` - the dependency injection module of your language implementation. Use this to register overridden and added services. 12 | * `src/language-server/dataset-descriptor-validator.ts` - an example validator. You should change it to reflect the semantics of your language. 13 | * `src/cli/index.ts` - the entry point of the command line interface (CLI) of your language. 14 | * `src/cli/generator.ts` - the code generator used by the CLI to write output files from DSL documents. 15 | * `src/cli/cli-util.ts` - utility code for the CLI. 16 | 17 | ## Get up and running straight away 18 | 19 | * Run `npm run langium:generate` to generate TypeScript code from the grammar definition. 20 | * Run `npm run build` to compile all TypeScript code. 21 | * Press `F5` to open a new window with your extension loaded. 22 | * Create a new file with a file name suffix matching your language. 23 | * Verify that syntax highlighting, validation, completion etc. are working as expected. 24 | * Run `./bin/cli` to see options for the CLI; `./bin/cli generate ` generates code for a given DSL file. 25 | 26 | ## Make changes 27 | 28 | * Run `npm run watch` to have the TypeScript compiler run automatically after every change of the source files. 29 | * Run `npm run langium:watch` to have the Langium generator run automatically after every change of the grammar declaration.
30 | * You can relaunch the extension from the debug toolbar after making changes to the files listed above. 31 | * You can also reload (`Ctrl+R` or `Cmd+R` on Mac) the VS Code window with your extension to load your changes. 32 | 33 | ## Install your extension 34 | 35 | * To start using your extension with VS Code, copy it into the `/.vscode/extensions` folder and restart Code. 36 | * To share your extension with the world, read the [VS Code documentation](https://code.visualstudio.com/api/working-with-extensions/publishing-extension) about publishing an extension. 37 | 38 | ## To Go Further 39 | 40 | Documentation about the Langium framework is available at https://langium.org 41 | -------------------------------------------------------------------------------- /language-configuration.json: -------------------------------------------------------------------------------- 1 | { 2 | "comments": { 3 | // symbol used for single line comment. Remove this entry if your language does not support line comments 4 | "lineComment": "//", 5 | // symbols used for start and end a block comment. 
Remove this entry if your language does not support block comments 6 | "blockComment": [ "/*", "*/" ] 7 | }, 8 | // symbols used as brackets 9 | "brackets": [ 10 | ["{", "}"], 11 | ["[", "]"], 12 | ["(", ")"] 13 | ], 14 | // symbols that are auto closed when typing 15 | "autoClosingPairs": [ 16 | ["{", "}"], 17 | ["[", "]"], 18 | ["(", ")"], 19 | ["\"", "\""], 20 | ["'", "'"] 21 | ], 22 | // symbols that can be used to surround a selection 23 | "surroundingPairs": [ 24 | ["{", "}"], 25 | ["[", "]"], 26 | ["(", ")"], 27 | ["\"", "\""], 28 | ["'", "'"] 29 | ], 30 | "indentationRules": { 31 | "increaseIndentPattern": "^((?!\\/\\/).)*(\\{[^}\"'`]*|\\([^)\"'`]*|\\[[^\\]\"'`]*)$", 32 | "decreaseIndentPattern": "^((?!.*?\\/\\*).*\\*/)?\\s*[\\)\\}\\]].*$" 33 | }, 34 | "onEnterRules": [ 35 | { 36 | "beforeText": "^\\s*(?:Authors:|Founders:|Maintainer:).*?\\s*$", 37 | "action": { "indent": "indent", "appendText": "Name " } 38 | }, 39 | { 40 | "beforeText": "^\\s*(?:Definition:).*?\\s*$", 41 | "action": { "indent": "indent", "appendText": "General description: \"\"" } 42 | }, 43 | { 44 | "beforeText": "^\\s*(?:Instances:).*?\\s*$", 45 | "action": { "indent": "indent", "appendText": "composedBy: " } 46 | }, 47 | { 48 | "beforeText": "^\\s*(?:composedBy:).*?\\s*$", 49 | "action": { "indent": "indent", "appendText": "Instance: InstanceId " } 50 | }, 51 | { 52 | "beforeText": "^\\s*(?:Instance:).*?\\s*$", 53 | "action": { "indent": "indent", "appendText": "description \"\"" } 54 | }, 55 | { 56 | "beforeText": "^\\s*(?:withAttributes:).*?\\s*$", 57 | "action": { "indent": "indent", "appendText": "attribute attributeId" } 58 | }, 59 | ] 60 | } -------------------------------------------------------------------------------- /out/cli/cli-util.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { 3 | function adopt(value) { return value 
instanceof P ? value : new P(function (resolve) { resolve(value); }); } 4 | return new (P || (P = Promise))(function (resolve, reject) { 5 | function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } 6 | function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } 7 | function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } 8 | step((generator = generator.apply(thisArg, _arguments || [])).next()); 9 | }); 10 | }; 11 | var __importDefault = (this && this.__importDefault) || function (mod) { 12 | return (mod && mod.__esModule) ? mod : { "default": mod }; 13 | }; 14 | Object.defineProperty(exports, "__esModule", { value: true }); 15 | exports.extractDestinationAndName = exports.extractAstNode = exports.extractDocument = void 0; 16 | const chalk_1 = __importDefault(require("chalk")); 17 | const path_1 = __importDefault(require("path")); 18 | const fs_1 = __importDefault(require("fs")); 19 | const vscode_uri_1 = require("vscode-uri"); 20 | function extractDocument(fileName, services) { 21 | var _a; 22 | return __awaiter(this, void 0, void 0, function* () { 23 | const extensions = services.LanguageMetaData.fileExtensions; 24 | if (!extensions.includes(path_1.default.extname(fileName))) { 25 | console.error(chalk_1.default.yellow(`Please choose a file with one of these extensions: ${extensions}.`)); 26 | process.exit(1); 27 | } 28 | if (!fs_1.default.existsSync(fileName)) { 29 | console.error(chalk_1.default.red(`File ${fileName} does not exist.`)); 30 | process.exit(1); 31 | } 32 | const document = services.shared.workspace.LangiumDocuments.getOrCreateDocument(vscode_uri_1.URI.file(path_1.default.resolve(fileName))); 33 | yield services.shared.workspace.DocumentBuilder.build([document], { validationChecks: 'all' }); 34 | const validationErrors = ((_a = document.diagnostics) !== null && _a !== void 0 ? 
_a : []).filter(e => e.severity === 1); 35 | if (validationErrors.length > 0) { 36 | console.error(chalk_1.default.red('There are validation errors:')); 37 | for (const validationError of validationErrors) { 38 | console.error(chalk_1.default.red(`line ${validationError.range.start.line + 1}: ${validationError.message} [${document.textDocument.getText(validationError.range)}]`)); 39 | } 40 | process.exit(1); 41 | } 42 | return document; 43 | }); 44 | } 45 | exports.extractDocument = extractDocument; 46 | function extractAstNode(fileName, services) { 47 | var _a; 48 | return __awaiter(this, void 0, void 0, function* () { 49 | return (_a = (yield extractDocument(fileName, services)).parseResult) === null || _a === void 0 ? void 0 : _a.value; 50 | }); 51 | } 52 | exports.extractAstNode = extractAstNode; 53 | function extractDestinationAndName(filePath, destination) { 54 | filePath = path_1.default.basename(filePath, path_1.default.extname(filePath)).replace(/[.-]/g, ''); 55 | return { 56 | destination: destination !== null && destination !== void 0 ? 
destination : path_1.default.join(path_1.default.dirname(filePath), 'generated'), 57 | name: path_1.default.basename(filePath) 58 | }; 59 | } 60 | exports.extractDestinationAndName = extractDestinationAndName; 61 | //# sourceMappingURL=cli-util.js.map -------------------------------------------------------------------------------- /out/cli/cli-util.js.map: -------------------------------------------------------------------------------- 1 | {"version":3,"file":"cli-util.js","sourceRoot":"","sources":["../../src/cli/cli-util.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;AAAA,kDAA0B;AAC1B,gDAAwB;AACxB,4CAAoB;AAEpB,2CAAiC;AAEjC,SAAsB,eAAe,CAAC,QAAgB,EAAE,QAAyB;;;QAC7E,MAAM,UAAU,GAAG,QAAQ,CAAC,gBAAgB,CAAC,cAAc,CAAC;QAC5D,IAAI,CAAC,UAAU,CAAC,QAAQ,CAAC,cAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,EAAE;YAC9C,OAAO,CAAC,KAAK,CAAC,eAAK,CAAC,MAAM,CAAC,sDAAsD,UAAU,GAAG,CAAC,CAAC,CAAC;YACjG,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;SACnB;QAED,IAAI,CAAC,YAAE,CAAC,UAAU,CAAC,QAAQ,CAAC,EAAE;YAC1B,OAAO,CAAC,KAAK,CAAC,eAAK,CAAC,GAAG,CAAC,QAAQ,QAAQ,kBAAkB,CAAC,CAAC,CAAC;YAC7D,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;SACnB;QAED,MAAM,QAAQ,GAAG,QAAQ,CAAC,MAAM,CAAC,SAAS,CAAC,gBAAgB,CAAC,mBAAmB,CAAC,gBAAG,CAAC,IAAI,CAAC,cAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC;QAClH,MAAM,QAAQ,CAAC,MAAM,CAAC,SAAS,CAAC,eAAe,CAAC,KAAK,CAAC,CAAC,QAAQ,CAAC,EAAE,EAAE,gBAAgB,EAAE,KAAK,EAAE,CAAC,CAAC;QAE/F,MAAM,gBAAgB,GAAG,CAAC,MAAA,QAAQ,CAAC,WAAW,mCAAI,EAAE,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,KAAK,CAAC,CAAC,CAAC;QACpF,IAAI,gBAAgB,CAAC,MAAM,GAAG,CAAC,EAAE;YAC7B,OAAO,CAAC,KAAK,CAAC,eAAK,CAAC,GAAG,CAAC,8BAA8B,CAAC,CAAC,CAAC;YACzD,KAAK,MAAM,eAAe,IAAI,gBAAgB,EAAE;gBAC5C,OAAO,CAAC,KAAK,CAAC,eAAK,CAAC,GAAG,CACnB,QAAQ,eAAe,CAAC,KAAK,CAAC,KAAK,CAAC,IAAI,GAAG,CAAC,KAAK,eAAe,CAAC,OAAO,KAAK,QAAQ,CAAC,YAAY,CAAC,OAAO,CAAC,eAAe,CAAC,KAAK,CAAC,GAAG,CACvI,CAAC,CAAC;aACN;YACD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;SACnB;QAED,OAAO,QAAQ,CAAC;;CACnB;AA3BD,0CA2BC;AAED,SAAsB,cAAc,CAAoB,QAAgB,EAAE,QAAyB;;;QAC/F,OAAO,MAAA,CAAC,MAAM,eAAe,CAAC,QAAQ
,EAAE,QAAQ,CAAC,CAAC,CAAC,WAAW,0CAAE,KAAU,CAAC;;CAC9E;AAFD,wCAEC;AAOD,SAAgB,yBAAyB,CAAC,QAAgB,EAAE,WAA+B;IACvF,QAAQ,GAAG,cAAI,CAAC,QAAQ,CAAC,QAAQ,EAAE,cAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,EAAE,EAAE,CAAC,CAAC;IAChF,OAAO;QACH,WAAW,EAAE,WAAW,aAAX,WAAW,cAAX,WAAW,GAAI,cAAI,CAAC,IAAI,CAAC,cAAI,CAAC,OAAO,CAAC,QAAQ,CAAC,EAAE,WAAW,CAAC;QAC1E,IAAI,EAAE,cAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC;KAChC,CAAC;AACN,CAAC;AAND,8DAMC"} -------------------------------------------------------------------------------- /out/cli/generator.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | /* 3 | import fs from 'fs'; 4 | import { CompositeGeneratorNode, NL, processGeneratorNode } from 'langium'; 5 | import path from 'path'; 6 | import { Model } from '../language-server/generated/ast'; 7 | import { extractDestinationAndName } from './cli-util'; 8 | 9 | export function generateJavaScript(model: Model, filePath: string, destination: string | undefined): string { 10 | const data = extractDestinationAndName(filePath, destination); 11 | const generatedFilePath = `${path.join(data.destination, data.name)}.js`; 12 | 13 | const fileNode = new CompositeGeneratorNode(); 14 | fileNode.append('"use strict";', NL, NL); 15 | model.greetings.forEach(greeting => fileNode.append(`console.log('Hello, ${greeting.person.ref?.name}!');`, NL)); 16 | 17 | if (!fs.existsSync(data.destination)) { 18 | fs.mkdirSync(data.destination, { recursive: true }); 19 | } 20 | fs.writeFileSync(generatedFilePath, processGeneratorNode(fileNode)); 21 | return generatedFilePath; 22 | } 23 | */ 24 | //# sourceMappingURL=generator.js.map -------------------------------------------------------------------------------- /out/cli/generator.js.map: -------------------------------------------------------------------------------- 1 | 
{"version":3,"file":"generator.js","sourceRoot":"","sources":["../../src/cli/generator.ts"],"names":[],"mappings":";AAAA;;;;;;;;;;;;;;;;;;;;;EAqBE"} -------------------------------------------------------------------------------- /out/cli/index.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { 3 | function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } 4 | return new (P || (P = Promise))(function (resolve, reject) { 5 | function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } 6 | function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } 7 | function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } 8 | step((generator = generator.apply(thisArg, _arguments || [])).next()); 9 | }); 10 | }; 11 | Object.defineProperty(exports, "__esModule", { value: true }); 12 | exports.generateAction = void 0; 13 | //import chalk from 'chalk'; 14 | const commander_1 = require("commander"); 15 | //import { Model } from '../language-server/generated/ast'; 16 | const module_1 = require("../language-server/generated/module"); 17 | //import { createDatasetDescriptorServices } from '../language-server/dataset-descriptor-module'; 18 | //import { extractAstNode } from './cli-util'; 19 | //import { generateJavaScript } from './generator'; 20 | //import { NodeFileSystem } from 'langium/node'; 21 | const generateAction = (fileName, opts) => __awaiter(void 0, void 0, void 0, function* () { 22 | // const services = createDatasetDescriptorServices(NodeFileSystem).DatasetDescriptor; 23 | //const model = await extractAstNode(fileName, services); 24 | // const generatedFilePath = generateJavaScript(model, fileName, opts.destination); 25 | //console.log(chalk.green(`JavaScript code 
generated successfully: ${generatedFilePath}`)); 26 | }); 27 | exports.generateAction = generateAction; 28 | function default_1() { 29 | const program = new commander_1.Command(); 30 | program 31 | // eslint-disable-next-line @typescript-eslint/no-var-requires 32 | .version(require('../../package.json').version); 33 | const fileExtensions = module_1.DatasetDescriptorLanguageMetaData.fileExtensions.join(', '); 34 | program 35 | .command('generate') 36 | .argument('', `source file (possible file extensions: ${fileExtensions})`) 37 | .option('-d, --destination ', 'destination directory of generating') 38 | .description('generates JavaScript code that prints "Hello, {name}!" for each greeting in a source file') 39 | .action(exports.generateAction); 40 | program.parse(process.argv); 41 | } 42 | exports.default = default_1; 43 | //# sourceMappingURL=index.js.map -------------------------------------------------------------------------------- /out/cli/index.js.map: -------------------------------------------------------------------------------- 1 | {"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/cli/index.ts"],"names":[],"mappings":";;;;;;;;;;;;AAAA,4BAA4B;AAC5B,yCAAoC;AACpC,2DAA2D;AAC3D,gEAAwF;AACxF,iGAAiG;AACjG,8CAA8C;AAC9C,mDAAmD;AACnD,gDAAgD;AAEzC,MAAM,cAAc,GAAG,CAAO,QAAgB,EAAE,IAAqB,EAAiB,EAAE;IAC5F,sFAAsF;IACrF,gEAAgE;IACnE,qFAAqF;IAClF,2FAA2F;AAC/F,CAAC,CAAA,CAAC;AALW,QAAA,cAAc,kBAKzB;AAMF;IACI,MAAM,OAAO,GAAG,IAAI,mBAAO,EAAE,CAAC;IAE9B,OAAO;QACH,8DAA8D;SAC7D,OAAO,CAAC,OAAO,CAAC,oBAAoB,CAAC,CAAC,OAAO,CAAC,CAAC;IAEpD,MAAM,cAAc,GAAG,0CAAiC,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;IACnF,OAAO;SACF,OAAO,CAAC,UAAU,CAAC;SACnB,QAAQ,CAAC,QAAQ,EAAE,0CAA0C,cAAc,GAAG,CAAC;SAC/E,MAAM,CAAC,yBAAyB,EAAE,qCAAqC,CAAC;SACxE,WAAW,CAAC,2FAA2F,CAAC;SACxG,MAAM,CAAC,sBAAc,CAAC,CAAC;IAE5B,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC;AAChC,CAAC;AAhBD,4BAgBC"} -------------------------------------------------------------------------------- /out/extension.js.map: 
-------------------------------------------------------------------------------- 1 | {"version":3,"file":"extension.js","sourceRoot":"","sources":["../src/extension.ts"],"names":[],"mappings":";;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;AAAA,+CAAiC;AACjC,2CAA6B;AAC7B,4CAAoB;AACpB,qDAEoC;AACpC,2GAA8F;AAC9F,gGAAiF;AACjF,iEAA6D;AAI7D,IAAI,MAAsB,CAAC;AAC3B,IAAI,YAAkC,CAAC;AAEvC,2DAA2D;AAC3D,SAAgB,QAAQ,CAAC,OAAgC;IACrD,MAAM,GAAG,mBAAmB,CAAC,OAAO,CAAC,CAAC;IACtC,sCAAsC;IACtC,OAAO,CAAC,aAAa,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,sBAAsB,EAAE,GAAS,EAAE;QAC1F,MAAM,CAAC,MAAM,CAAC,YAAY,CACtB;YACI,QAAQ,EAAE,MAAM,CAAC,gBAAgB,CAAC,YAAY;YAC9C,KAAK,EAAE,kCAAkC;SAC5C,EACD,CAAM,QAAQ,EAAC,EAAE;YACb,MAAM,QAAQ,GAAG,MAAM,MAAM,CAAC,MAAM,CAAC,cAAc,CAAC,EAAE,gBAAgB,EAAE,KAAK,EAAE,cAAc,EAAE,IAAI,EAAE,aAAa,EAAE,IAAI,EAAE,SAAS,EAAE,wBAAwB,EAAE,CAAC,CAAC;YACjK,IAAI,QAAQ,EAAC;gBACT,MAAM,eAAe,CAAC,OAAO,EAAE,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;aAC/C;QACL,CAAC,CAAA,CAAC,CAAC;IACX,CAAC,CAAA,CAAC,CAAC,CAAC;IAEJ,OAAO,CAAC,aAAa,CAAC,IAAI,CACtB,MAAM,CAAC,SAAS,CAAC,qBAAqB,CAClC,oBAAoB,EAAE;QAClB,YAAY,CAAC,QAAQ,EAAE,QAAQ,EAAE,KAAK;YAClC,IAAI,KAAK,GAAG,IAAI,4BAAY,EAAE,CAAC;YAC/B,IAAI,OAAO,GAAG,KAAK,CAAC,aAAa,CAAC,QAAQ,EAAE,QAAQ,CAAC,CAAC;YAEtD,IAAI,OAAO,IAAI,OAAO;gBAAE,OAAO,IAAI,MAAM,CAAC,KAAK,CAAC,IAAI,MAAM,CAAC,cAAc,CAAC,OAAO,CAAC,CAAC,CAAC;YACpF,OAAO,IAAI,CAAC;QACpB,CAAC;KACN,CAAC,CAAC,CAAC;IAEN,+CAA+C;IAC/C,OAAO,CAAC,aAAa,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,gCAAgC,EAAE,GAAS,EAAE;QACpG,MAAM,oBAAoB,CAAC,OAAO,CAAC,CAAC;IACvC,CAAC,CAAA,CAAC,CAAC,CAAC;IAEJ,6DAA6D;IAC7D,OAAO,CAAC,aAAa,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,CAAC,eAAe,CAAC,2BAA2B,EAAE,GAAS,EAAE;QAC/F,MAAM,gBAAgB,CAAC,OAAO,CAAC,CAAC;IACnC,CAAC,CAAA,CAAC,CAAC,CAAC;AACV,CAAC;AAtCD,4BAsCC;AAED,6DAA6D;AAC7D,SAAgB,UAAU;IACtB,IAAI,MAAM,EAAE;QACR,OAAO,MAAM,CAAC,IAAI,EAAE,CAAC;KACxB;IACD,OAAO,SAAS,CAAC;AACrB,CAAC;AALD,gCAKC;AAED,SAAS,mBAAmB,CAAC,OAAgC;IACzD,MAAM,YAAY,GAAG,OAAO,CAAC,cAAc,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,EAAE,iBAAiB,EAAE,MAAM,CAAC,CAAC,CAAC;IACzF,
mCAAmC;IACnC,8GAA8G;IAC9G,sHAAsH;IACtH,MAAM,YAAY,GAAG,EAAE,QAAQ,EAAE,CAAC,UAAU,EAAE,YAAY,OAAO,CAAC,GAAG,CAAC,WAAW,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,IAAI,OAAO,CAAC,GAAG,CAAC,YAAY,IAAI,MAAM,EAAE,CAAC,EAAE,CAAC;IAE3I,oFAAoF;IACpF,qCAAqC;IACrC,MAAM,aAAa,GAAkB;QACjC,GAAG,EAAE,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,EAAE,oBAAa,CAAC,GAAG,EAAE;QAC3D,KAAK,EAAE,EAAE,MAAM,EAAE,YAAY,EAAE,SAAS,EAAE,oBAAa,CAAC,GAAG,EAAE,OAAO,EAAE,YAAY,EAAE;KACvF,CAAC;IAEF,MAAM,iBAAiB,GAAG,MAAM,CAAC,SAAS,CAAC,uBAAuB,CAAC,aAAa,CAAC,CAAC;IAClF,OAAO,CAAC,aAAa,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC;IAE9C,yCAAyC;IACzC,MAAM,aAAa,GAA0B;QACzC,gBAAgB,EAAE,CAAC,EAAE,MAAM,EAAE,MAAM,EAAE,QAAQ,EAAE,oBAAoB,EAAE,CAAC;QACtE,WAAW,EAAE;YACT,2EAA2E;YAC3E,UAAU,EAAE,iBAAiB;SAChC;KACJ,CAAC;IAEF,mDAAmD;IACnD,MAAM,MAAM,GAAG,IAAI,qBAAc,CAC7B,oBAAoB,EACpB,oBAAoB,EACpB,aAAa,EACb,aAAa,CAChB,CAAC;IAEF,qDAAqD;IACrD,MAAM,CAAC,KAAK,EAAE,CAAC;IACf,OAAO,MAAM,CAAC;AAClB,CAAC;AAED,SAAe,eAAe,CAAC,OAAgC,EAAE,QAAoB;;QACjF,OAAO,CAAC,GAAG,CAAC,OAAO,CAAC,CAAC;QACrB,IAAI,QAAQ,GAAG,IAAI,6CAAe,EAAE,CAAC;QACrC,MAAM,IAAI,GAAU,MAAM,QAAQ,CAAC,aAAa,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC;QAClE,IAAI,OAAO,GAAG,IAAI,MAAM,CAAC,aAAa,EAAE,CAAC;QACzC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC;QACzB,2FAA2F;QAC3F,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,gBAAgB,CAAC;QAC/C,sDAAsD;QACrD,IAAI,MAAM,EAAE;YACR,MAAM,QAAQ,GAAG,MAAM,CAAC,QAAQ,CAAC;YACjC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE;gBACtB,wEAAwE;gBACxE,MAAM,MAAM,GAAG,IAAI,MAAM,CAAC,gBAAgB,CAAC,CAAC;gBAC5C,IAAI,eAAe,GAAG,IAAI,MAAM,CAAC,QAAQ,CAAC,QAAQ,CAAC,SAAS,EAAE,CAAC,CAAC,CAAC;gBACjE,KAAK,IAAI,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,QAAQ,CAAC,SAAS,EAAE,KAAK,EAAE,EAAE;oBACrD,IAAI,UAAU,GAAG,MAAM,CAAC,QAAQ,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;oBAC/C,IAAI,IAAI,GAAG,UAAU,CAAC,IAAI,CAAC;oBAC3B,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;oBACjB,IAAG,UAAU,CAAC,IAAI,CAAC,KAAK,CAAC,MAAM,CAAC,EAAE;wBAC9B,eAAe,GAAG,IAAI,MAAM,CAAC,QAAQ,CAAC,KAAK,GAAC,CAAC,EAAE,CAAC,CAAC,CAAC;qBACrD;iBAEJ;gBACD,MAAM,CAAC,aAAa,CAAC,OAAO,EAAE,eAAe,CAAC,CAAC;YACnD,CAAC,CAAC,CAAC;SACN;QACD,MAAM,CAAC,M
AAM,CAAC,sBAAsB,CAAC,qDAAqD,CAAC,CAAC;IAChG,CAAC;CAAA;AAED,SAAe,oBAAoB,CAAC,OAAgC;;;QAChE,IAAI,KAAK,GAAU,uBAAuB,CAAC;QAC3C,YAAY,GAAG,MAAM,CAAC,MAAM,CAAC,kBAAkB;QAC3C,aAAa;QACb,mBAAmB;QACnB,gBAAgB;QAChB,KAAK;QACL,6DAA6D;QAC7D,CAAC,EACD;YACI,gCAAgC;YAChC,aAAa,EAAE,KAAK;YACpB,uBAAuB,EAAE,KAAK;YAC9B,4FAA4F;YAC5F,kBAAkB,EAAE,CAAC,MAAM,CAAC,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,aAAa,EAAE,QAAQ,CAAC,CAAC,CAAC;SAEpF,CACJ,CAAA;QACD,uBAAuB,CAAC,IAAI,CAAC,CAAC;QAC9B,MAAM,SAAS,GAAI,IAAI,yDAAsB,EAAE,CAAC;QAChD,MAAM,IAAI,GAAG,MAAA,MAAM,CAAC,MAAM,CAAC,gBAAgB,0CAAE,QAAQ,CAAC,OAAO,EAAE,CAAC;QAChE,IAAI,IAAI,EAAE;YACN,MAAM,QAAQ,GAAG,SAAS,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC;YAC1C,iBAAiB,CAAC,QAAQ,CAAC,CAAC;YAC5B,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,CAAC;SACzB;QACD,YAAY,CAAC,YAAY,CAAC,GAAG,EAAE;YAC3B,uBAAuB,CAAC,KAAK,CAAC,CAAC;QACnC,CAAC,CAAC,CAAA;;CACL;AAED,SAAS,iBAAiB,CAAC,IAAoB;IAC3C,IAAI,YAAY,IAAI,IAAI,EAAE;QACtB,YAAY,CAAC,OAAO,CAAC,IAAI,GAAG,IAAI,CAAC;KACpC;AACL,CAAC;AAED,SAAS,uBAAuB,CAAC,KAAc;IAC3C,MAAM,CAAC,QAAQ,CAAC,cAAc,CAAC,YAAY,EAAE,mBAAmB,EAAE,KAAK,CAAC,CAAC;AAC7E,CAAC;AAED,SAAS,gBAAgB,CAAC,OAAgC;;IACtD,MAAM,IAAI,GAAG,YAAY,CAAC,OAAO,CAAC,IAAI,CAAC;IACvC,MAAM,KAAK,GAAG,YAAY,CAAC,KAAK,CAAC;IACjC,IAAI,IAAI,EAAE;QACN,+DAA+D;QAC/D,MAAA,MAAM,CAAC,SAAS,CAAC,gBAAgB,0CAAE,OAAO,CAAC,SAAS,CAAC,EAAE;YACnD,MAAM,QAAQ,GAAG,SAAS,CAAC,GAAG,CAAC,MAAM,GAAG,GAAG,GAAG,KAAK,GAAG,OAAO,CAAC;YAC9D,YAAE,CAAC,aAAa,CAAC,QAAQ,EAAE,IAAI,EAAE,MAAM,CAAC,CAAC;YACzC,oCAAoC;YAC1C,MAAM,CAAC,MAAM,CAAC,sBAAsB,CAAC,uBAAuB,GAAC,KAAK,GAAC,4DAA4D,CAAC,CAAC;QAC/H,CAAC,CAAC,CAAC;KACN;AACL,CAAC"} -------------------------------------------------------------------------------- /out/generator-service/dataset-descriptor-documentation.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | exports.DocumentationGenerator = void 0; 4 | const dataset_descriptor_module_1 = 
require("../language-server/dataset-descriptor-module"); 5 | const ast_1 = require("../language-server/generated/ast"); 6 | const node_1 = require("langium/node"); 7 | /** 8 | * Generator HTML service main class 9 | * To generate the HTML we parse the description and we use PUG as a engine teamplate to build the HTML 10 | */ 11 | class DocumentationGenerator { 12 | constructor() { 13 | let services = (0, dataset_descriptor_module_1.createDatasetDescriptorServices)(node_1.NodeFileSystem); 14 | this.parser = services.DatasetDescriptor.parser.LangiumParser; 15 | } 16 | generate(DescriptionDataset) { 17 | const astNode = (typeof (DescriptionDataset) == 'string' ? this.parser.parse(DescriptionDataset).value : DescriptionDataset); 18 | return ((0, ast_1.isDescriptionDataset)(astNode) ? this.declaration2Html(astNode) : undefined); 19 | } 20 | // Generation of the HTML 21 | declaration2Html(DescriptionDataset) { 22 | const description = { 23 | title: DescriptionDataset.elements[0].name, 24 | metadata: DescriptionDataset.elements[0].generalinfo, 25 | composition: DescriptionDataset.elements[0].composition, 26 | provenance: DescriptionDataset.elements[0].provenance, 27 | socialConcerns: DescriptionDataset.elements[0].socialConcerns, 28 | }; 29 | let head = ` 30 | 31 | 32 | ${description.title} 33 | `; 34 | head = this.addSchemaOrg(description, head, description.title); 35 | head = this.addStyles(head); 36 | let body = this.buildBody(description); 37 | head = head + 38 | ``; 39 | const html = head + body; 40 | return html; 41 | } 42 | addSchemaOrg(description, head, title) { 43 | // Add Authors 44 | let authors = ""; 45 | description.metadata.authoring.authors[0].authors.forEach(function (author) { 46 | authors = authors + 47 | `"creator":{ 48 | "@type":"Author", 49 | "url": "", 50 | "name":"${author.name}", 51 | "contactPoint":{ 52 | "@type":"ContactPoint", 53 | "contactType": "email, 54 | "email":${author.email} 55 | } 56 | }, 57 | `; 58 | }); 59 | // Add funders 60 | let 
funders = ""; 61 | if (description.metadata.authoring.founding.lenght > 0) { 62 | description.metadata.authoring.founding[0].funders.forEach(function (funder) { 63 | funders = funders + 64 | `"funder":{ 65 | "@type":"Funder", 66 | "name":"${funder.name}", 67 | "sameAs":"${funder.type}" 68 | }, 69 | `; 70 | }); 71 | } 72 | let areas = ''; 73 | let tags = ''; 74 | description.metadata.desc.tags.tags.forEach(function (tag) { 75 | tags = tags + tag.name + ','; 76 | }); 77 | description.metadata.desc.area.areas.forEach(function (area) { 78 | areas = areas + area.name + ','; 79 | }); 80 | head = head + ` 81 | `; 123 | return head; 124 | } 125 | addStyles(head) { 126 | return head + ` 127 | `; 145 | } 146 | // The PUG file is located inside the "out" folder. As needs to be released in the executable plugin 147 | // So, you may need to go to /out/templates/document.pug to customize the template. 148 | buildBody(description) { 149 | const pug = require('pug'); 150 | const path = require('path'); 151 | let sep = path.sep; 152 | let dirname = __dirname; 153 | // Compile the source code using PUG 154 | console.log(dirname + sep + 'templates' + sep + 'document.pug'); 155 | const compiledFunction = pug.compileFile(dirname + sep + 'templates' + sep + 'document.pug'); 156 | // Compile the source code 157 | return compiledFunction({ 158 | description: description 159 | }); 160 | } 161 | } 162 | exports.DocumentationGenerator = DocumentationGenerator; 163 | //# sourceMappingURL=dataset-descriptor-documentation.js.map -------------------------------------------------------------------------------- /out/generator-service/dataset-descriptor-documentation.js.map: -------------------------------------------------------------------------------- 1 | 
{"version":3,"file":"dataset-descriptor-documentation.js","sourceRoot":"","sources":["../../src/generator-service/dataset-descriptor-documentation.ts"],"names":[],"mappings":";;;AAMA,4FAA+F;AAC9F,0DAA4F;AAC5F,uCAA8C;AAY9C;;;GAGG;AACH,MAAa,sBAAsB;IAI/B;QAEG,IAAI,QAAQ,GAAG,IAAA,2DAA+B,EAAC,qBAAc,CAAC,CAAC;QAC/D,IAAI,CAAC,MAAM,GAAG,QAAQ,CAAC,iBAAiB,CAAC,MAAM,CAAC,aAAa,CAAC;IAClE,CAAC;IAEA,QAAQ,CAAC,kBAAqC;QAC1C,MAAM,OAAO,GAAG,CAAC,OAAM,CAAC,kBAAkB,CAAC,IAAI,QAAQ,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,kBAAkB,CAAC,CAAC;QAC5H,OAAO,CAAC,IAAA,0BAAoB,EAAC,OAAO,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,gBAAgB,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC;IACxF,CAAC;IAED,yBAAyB;IACzB,gBAAgB,CAAC,kBAAuC;QACpD,MAAM,WAAW,GAAG;YAChB,KAAK,EAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,IAAI;YAC3C,QAAQ,EAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,WAAW;YACrD,WAAW,EAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,WAAW;YACxD,UAAU,EAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,UAAU;YACtD,cAAc,EAAG,kBAAkB,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,cAAc;SACjE,CAAA;QACD,IAAI,IAAI,GAAG;;;sBAGE,WAAW,CAAC,KAAK;qCACF,CAAC;QAC7B,IAAI,GAAG,IAAI,CAAC,YAAY,CAAC,WAAW,EAAE,IAAI,EAAE,WAAW,CAAC,KAAK,CAAC,CAAA;QAC9D,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC;QAC5B,IAAI,IAAI,GAAG,IAAI,CAAC,SAAS,CAAC,WAAW,CAAC,CAAC;QACvC,IAAI,GAAG,IAAI;YACf,SAAS,CAAA;QACL,MAAM,IAAI,GAAG,IAAI,GAAG,IAAI,CAAA;QACxB,OAAO,IAAI,CAAA;IACf,CAAC;IAED,YAAY,CAAC,WAAgB,EAAE,IAAY,EAAE,KAAa;QAEtD,cAAc;QACd,IAAI,OAAO,GAAG,EAAE,CAAA;QAChB,WAAW,CAAC,QAAQ,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,UAAU,MAAW;YAE3E,OAAO,GAAG,OAAO;gBACrB;;;uBAGc,MAAM,CAAC,IAAI;;;;2BAIP,MAAM,CAAC,KAAK;;;MAGjC,CAAA;QACG,CAAC,CAAC,CAAA;QAEF,cAAc;QACd,IAAI,OAAO,GAAG,EAAE,CAAA;QAChB,IAAI,WAAW,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,GAAG,CAAC,EAAE;YACpD,WAAW,CAAC,QAAQ,CAAC,SAAS,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,OAAO,CAAC,OAAO,CAAC,UAAU,MAAW;gBACxE,OAAO,GAAG,OAAO;oBACrB;;+BAEc,MAAM,CAAC,IAAI;iCACT,MAAM,CAAC,IAAI;;cAE9B,CAAA;YACD,CAAC,CAAC,CAAA;SACL;QAED,I
AAI,KAAK,GAAG,EAAE,CAAA;QACd,IAAI,IAAI,GAAG,EAAE,CAAA;QACb,WAAW,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,UAAU,GAAQ;YAC1D,IAAI,GAAG,IAAI,GAAG,GAAG,CAAC,IAAI,GAAG,GAAG,CAAA;QAChC,CAAC,CAAC,CAAA;QACF,WAAW,CAAC,QAAQ,CAAC,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,UAAU,IAAS;YAC5D,KAAK,GAAG,KAAK,GAAG,IAAI,CAAC,IAAI,GAAG,GAAG,CAAA;QACnC,CAAC,CAAC,CAAA;QAEF,IAAI,GAAG,IAAI,GAAG;;;;;mBAKJ,KAAK;yBACC,WAAW,CAAC,QAAQ,CAAC,IAAI,CAAC,kBAAkB;;;0BAG3C,WAAW,CAAC,QAAQ,CAAC,KAAK;;uBAE7B,KAAK;uBACL,IAAI;;uBAEJ,WAAW,CAAC,QAAQ,CAAC,YAAY,CAAC,OAAO;;;;uBAIzC,WAAW,CAAC,WAAW,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,IAAI;8BAC5C,WAAW,CAAC,WAAW,CAAC,SAAS,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,QAAQ;;;;;;;;;;;;;;;;;;;+BAmBtD,WAAW,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM,IAAI,WAAW,CAAC,QAAQ,CAAC,KAAK,CAAC,MAAM;OAC9F,CAAC;QAEC,IAAI,GAAG,IAAI,GAAG,OAAO,GAAG,OAAO,GAAG;;mBAExB,CAAA;QACV,OAAO,IAAI,CAAC;IAChB,CAAC;IAED,SAAS,CAAC,IAAY;QAClB,OAAO,IAAI,GAAG;;;;;;;;;;;;;;;;;;cAkBT,CAAA;IACT,CAAC;IAED,oGAAoG;IACpG,mFAAmF;IACnF,SAAS,CAAC,WAAgB;QACtB,MAAM,GAAG,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC;QAC3B,MAAM,IAAI,GAAG,OAAO,CAAC,MAAM,CAAC,CAAA;QAC5B,IAAI,GAAG,GAAG,IAAI,CAAC,GAAG,CAAA;QAClB,IAAI,OAAO,GAAG,SAAS,CAAC;QACxB,oCAAoC;QACpC,OAAO,CAAC,GAAG,CAAC,OAAO,GAAC,GAAG,GAAC,WAAW,GAAC,GAAG,GAAC,cAAc,CAAC,CAAC;QACxD,MAAM,gBAAgB,GAAG,GAAG,CAAC,WAAW,CAAC,OAAO,GAAC,GAAG,GAAC,WAAW,GAAC,GAAG,GAAC,cAAc,CAAC,CAAC;QACrF,0BAA0B;QAC1B,OAAO,gBAAgB,CAAC;YACpB,WAAW,EAAE,WAAW;SACzB,CAAC,CAAC;IACT,CAAC;CACJ;AArKD,wDAqKC"} -------------------------------------------------------------------------------- /out/generator-service/templates/document.pug: -------------------------------------------------------------------------------- 1 | body 2 | h1 Documentation of #{description.title}'s dataset 3 | h2 Metadata: 4 | div 5 | p identifier: #{ description.metadata.ident } 6 | p version: #{ description.metadata.version } 7 | if (description.metadata.datesR) 8 | p Release date: #{ description.metadata.dates.datesR } 9 | if (description.metadata.datesU) 10 | 
p Update date: #{ description.metadata.dates.datesU } 11 | if (description.metadata.datesP) 12 | p Published date: #{ description.metadata.dates.datesP } 13 | if description.metadata.citation 14 | p Citation: #{ description.metadata.citation.citeText } 15 | end 16 | div 17 | h3 For what prupose was the dataset created? 18 | p #{ description.metadata.desc.descriptionpurpose} 19 | div 20 | h3 Was there any specific tasks? 21 | each element in description.metadata.desc.descriptionTasks 22 | p #{element.name} 23 | 24 | div 25 | h3 Was there a specific gap that needed to be filled? 26 | p #{ description.metadata.desc.descriptionGaps} 27 | div 28 | h3 Distribution, licences and applications 29 | if description.metadata.distribution.licence 30 | p Licence: #{ description.metadata.distribution.licence} 31 | if description.metadata.distribution.public 32 | p Is public? #{ description.metadata.distribution.public} 33 | if description.metadata.distribution.addCond 34 | p Additional Conditions? #{ description.metadata.distribution.addCond} 35 | if description.metadata.uses 36 | if description.metadata.uses.past 37 | p Past applications: #{ description.metadata.uses.past} 38 | if description.metadata.uses.recommend 39 | p Recommended applications: #{ description.metadata.uses.recommend} 40 | if description.metadata.uses.future 41 | p Non-recommended: #{ description.metadata.uses.future} 42 | div 43 | h3 Keywords 44 | p Area: 45 | each element in description.metadata.desc.area.areas 46 | div #{element.name} 47 | p Tags: 48 | each element in description.metadata.desc.tags.tags 49 | div #{element.name} 50 | div 51 | h2 Authoring: 52 | div 53 | if (description.metadata.authoring.authors[0].authors.length > 0) 54 | h3 Who created the dataset? 
55 | table 56 | tr 57 | th Name 58 | th email 59 | 60 | each author in description.metadata.authoring.authors[0].authors 61 | tr 62 | td #{author.name} 63 | td #{author.email} 64 | div 65 | if (description.metadata.authoring.founding.length > 0) 66 | h3 Who funded the dataset? 67 | table 68 | tr 69 | th Name 70 | th type 71 | th Grant 72 | 73 | each funder in description.metadata.authoring.founding[0].funders 74 | tr 75 | td #{funder.name} 76 | td #{funder.type} 77 | if funder.grantId 78 | td #{funder.grantId} 79 | else 80 | td Not Provided 81 | div 82 | if (description.metadata.authoring.maintainers.length > 0) 83 | h3 Who Maintains the dataset? 84 | table 85 | tr 86 | th Name 87 | th Email 88 | each mantainer in description.metadata.authoring.maintainers 89 | tr 90 | td #{mantainer.name} 91 | td #{mantainer.email} 92 | if description.metadata.erratum 93 | p Erratum: #{ description.metadata.erratum} 94 | if description.metadata.dataRetention 95 | 96 | p Data Retention: #{ description.metadata.dataRetention} 97 | if description.metadata.support 98 | p Version lifecycle: #{ description.metadata.support} 99 | if description.metadata.contribGuides 100 | p Contribution guidelines: #{ description.metadata.contribGuides} 101 | 102 | h2 Composition: 103 | div 104 | if description.composition.compodesc 105 | p Rationale: #{description.composition.compodesc} 106 | if description.composition.numberInst 107 | p Total Size: #{description.composition.numberInst} 108 | div 109 | if description.composition.instances 110 | p Instance: #{description.composition.instances.instances[0].name} 111 | p Description: #{description.composition.instances.instances[0].descrip} 112 | p Size: #{description.composition.instances.instances[0].numIns} 113 | p Number of attributes: #{description.composition.instances.instances[0].attrnum} 114 | p Type: #{description.composition.instances.instances[0].type} 115 | h3 Attributes: 116 | if description.composition.instances.instances[0].attributes 117 
| each attribute in description.composition.instances.instances[0].attributes 118 | div 119 | p Name: #{attribute.name} 120 | if attribute.attdesc 121 | p Description: #{attribute.attdesc} 122 | if attribute.unique 123 | p Unique Values: #{attribute.unique} 124 | if attribute.count 125 | p Count: #{attribute.count} 126 | if attribute.attType 127 | p Type: #{attribute.attType.name} 128 | 129 | h2 Provenance: 130 | p Curation Rationale: #{description.provenance.curation} 131 | 132 | if description.socialConcerns 133 | h2 Social Concerns 134 | p Rationale: #{description.socialConcerns.desc} 135 | each issue in description.socialConcerns.socialIssues 136 | div 137 | p Issue: #{issue.name} 138 | p Issue Type: #{issue.IssueType} 139 | p Description: #{issue.desc} 140 | if issue.senseAtt 141 | p Related Attributes: #{issue.senseAtt.$refText} 142 | 143 | 144 | 145 | 146 | 147 | 148 | -------------------------------------------------------------------------------- /out/generator-service/templates/document.twig: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 |

Documentation of {{ description.title }}

5 |
6 | 7 |

Description:

8 |
9 |

identifier: {{ description.metadata.ident }}

10 |

version: {{ description.metadata.version }}

11 |

Release date: {{ description.metadata.dateR }}

12 |

Update date: {{ description.metadata.dateU }}

13 |

Published date: {{ description.metadata.dateP }}

14 | {% if description.metadata.citation %} 15 |

Citation: {{ description.metadata.citation}}

16 | {% endif %} 17 |
18 |

For what prupose was the dataset created?

19 |

{{ description.metadata.descriptionpurpose}}

20 |
21 |
22 |

Was there any specific tasks?

23 |

{{ description.metadata.descriptionTasks}}

24 |
25 |
26 |

Was there a specific gap that needed to be filled?

27 |

{{ description.metadata.descriptionGaps}}

28 |
29 |
30 |

Distribution, licences and applications

31 |

Licence: {{ description.metadata.licence}}

32 |

Is public? {{ description.metadata.distribution.public}}

33 |

How is distributed? {{ description.metadata.distribution.past}}

34 |

Past applications: {{ description.metadata.uses.past}}

35 |

Recommended applications: {{ description.metadata.uses.recommend}}

36 |

Non-recommended: {{ description.metadata.uses.future}}

37 |
38 |
39 |

Keywords

40 |

Area: {{ description.metadata.area}}

41 |

Tags: {{ description.metadata.tags}}

42 |
43 | 44 |
45 | 46 |
47 |

Authoring:

48 |
49 |

Who created the dataset?

50 | 51 | 52 | 53 | 54 | 55 | {% for author in description.metadata.authoring.authors[0] %} 56 | 57 | 58 | 59 | 60 | {% endfor %} 61 |
Nameemail
{{author.name}}{{author.email}}
62 |
63 |
64 |

Who funded the dataset?

65 | 66 | 67 | 68 | 69 | 70 | 71 | {% for funders in description.metadata.authoring.founding[0] %} 72 | 73 | 74 | 75 | 76 | 77 | {% endfor %} 78 |
NametypeGrant
{{funder.name}}{{funder.type}}{{funder.grantId}}
79 |
80 |
81 |

Maintenance:

82 | 83 |
84 |
85 | -------------------------------------------------------------------------------- /out/generator-service/templates/document2.twig: -------------------------------------------------------------------------------- 1 | 2 |

Documentation of {{ foo }}

3 | 4 | -------------------------------------------------------------------------------- /out/hints-service/hints-service.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | exports.HintsService = void 0; 4 | /** 5 | */ 6 | class HintsService { 7 | constructor() { 8 | } 9 | populateHints(document, position) { 10 | const wordRange = document.getWordRangeAtPosition(position); 11 | const word = document.getText(wordRange); 12 | switch (word) { 13 | case "Metadata": 14 | return `## Metadata 15 | In this section authors are expected to provide the metadata of the dataset 16 | `; 17 | case "Dates": 18 | return "Set the release, published and last updated date in DD-MM-YYYY format"; 19 | case "Citation": 20 | return "Set the citations of the dataset"; 21 | // Description 22 | case "Description": 23 | return `### Description 24 | In this section authors are expected to provide a description of the dataset 25 | 26 | #### Purposes: 27 | For what propose was the dataser created? 28 | 29 | #### Tasks: 30 | For what tasks this dataset is inteded for 31 | 32 | #### Gaps: 33 | Was there specific gap that needed to be filled? Please provide a description 34 | `; 35 | case "Purposes": 36 | return "For what propose was the dataser created?"; 37 | case "Tasks": 38 | return "For what tasks this dataset is inteded for"; 39 | case "Gaps": 40 | return "Was there specific gap that needed to be filled?\nPlease provide a description"; 41 | case "Tags": 42 | return "Set the tags separated by a whitespace"; 43 | case "Areas": 44 | return "Set the areas separated by a whitespace"; 45 | // Distribution 46 | case "Distribution": 47 | return `## Distribution 48 | In this section authors are expected to indicate the distribution of the dataset 49 | 50 | ### Licenses: 51 | Set the licence of the dataset. 
52 | 53 | ## Rights stand-alone: 54 | Choose the level of distribution of the stand-alone data. 55 | 56 | ## Rights of the models: 57 | Choose the level of distribution of the models trained with the data. 58 | `; 59 | case "Licences": 60 | return "If any listed license fill your use-case, please provide a STRING with the description of the license"; 61 | // Applications 62 | case "Applications": 63 | return `## Applications 64 | In this section authors are expected to indicate the recommneded and non-recommneded uses of the dataset 65 | 66 | ### Benchmarking 67 | If the dataset have been used in the past, authors are expected to indicate the benchmarking results 68 | Models names, and results should be provided (accuracy, precision, recall, F1-score) 69 | `; 70 | // Authoring 71 | case "Authoring": 72 | return `## Authoring 73 | In this section authors are expected to indicate who created the dataset and who funded the dataset 74 | Please provide information about the organization grating the work 75 | 76 | ### Maintenance 77 | Who maintains the dataset, but also the contribution policies, if theere is any erratum, and the data life cycle should be informed in this chapter 78 | `; 79 | case "Funders": 80 | return "Who founded the creation of the dataset?\n2 - If is there any associated grant, please provide the number and the name of the grantor and the gran name and number \n Set a `_` or a `-` as a white spaces in the name e.g: 'John_Smith'? "; 81 | case "Authors": 82 | return "Who is the author of the dataset?"; 83 | case "Maintainers": 84 | return "Who maintan the dataset? How can be contacted?"; 85 | // Composition 86 | case "Composition": 87 | return `## Composition 88 | Please provide information about the composition of the dataset. The type of files (data instances), it's number, and information regarding attributes 89 | 90 | ### Statistics 91 | A set of statistics can be provided for each attribute and at a data instance level. 
Please provide only the statistics that are relevant for the specific dataset use case. 92 | 93 | ### Consistency rules 94 | The Consistency rules can be expressed following OCL. OCL is a language for expressing constraints on models. It is based on the Object Constraint Language (OCL) defined by OMG. OCL is a language for expressing constraints on models. It is based on the Object Constraint Language (OCL) defined by OMG. \n 95 | 96 | `; 97 | // Provenance 98 | case "Provenance": 99 | return `## Provenance 100 | In this section authors are expected to fill information about the process applied to create the dataset 101 | 102 | ### Curation Rationale 103 | This explanation intend to be a shor and comprhensive enumartion of the processes applied over the data, and to provide specific use-case details for this dataset 104 | 105 | ### Gathering 106 | How the dataset has been gathered? Who gathered the dataset? Which are the sources of the data? 107 | 108 | ### Annotation 109 | How the dataset has been annotated? Who annotated the dataset? Which are the infrastructure used to annotate the data? 110 | 111 | ### Data preparation 112 | Indicate the process done to prepare the data, and it's type 113 | 114 | `; 115 | // Social Concers 116 | case "Concerns": 117 | return ` 118 | ## Social Concerns 119 | In this section authors are expected to fill information about the social concerns of the data. Is expected to inform 4 types of social concerns \n 120 | 121 | ### Bias concers 122 | Whether the dataset may be biased against a specific social group 123 | 124 | ### Representativeness concerns 125 | Whether the dataset could misrepresent any specific social group 126 | 127 | ### Sensitivity concerns 128 | Does the dataset contains data that can offend a social group? 129 | 130 | ### Privacy Concerns 131 | Is there any privacy concerns on the data? 
132 | 133 | `; 134 | default: 135 | return "empty"; 136 | } 137 | } 138 | } 139 | exports.HintsService = HintsService; 140 | //# sourceMappingURL=hints-service.js.map -------------------------------------------------------------------------------- /out/hints-service/hints-service.js.map: -------------------------------------------------------------------------------- 1 | {"version":3,"file":"hints-service.js","sourceRoot":"","sources":["../../src/hints-service/hints-service.ts"],"names":[],"mappings":";;;AAeC;GACG;AACH,MAAa,YAAY;IAGrB;IAED,CAAC;IAEA,aAAa,CAAC,QAAuB,EAAE,QAAa;QACjD,MAAM,SAAS,GAAG,QAAQ,CAAC,sBAAsB,CAAC,QAAQ,CAAC,CAAC;QAC5D,MAAM,IAAI,GAAG,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;QACzC,QAAQ,IAAI,EAAE;YACV,KAAK,UAAU;gBACX,OAAO;;iBAEN,CAAA;YACL,KAAK,OAAO;gBACR,OAAO,uEAAuE,CAAC;YACnF,KAAK,UAAU;gBACX,OAAO,kCAAkC,CAAC;YAC1C,cAAc;YAClB,KAAK,aAAa;gBACd,OAAO;;;;;;;;;;;6BAWM,CAAC;YACF,KAAK,UAAU;gBACX,OAAO,2CAA2C,CAAC;YACvD,KAAK,OAAO;gBACR,OAAO,4CAA4C,CAAC;YACxD,KAAK,MAAM;gBACP,OAAO,gFAAgF,CAAC;YAC5G,KAAK,MAAM;gBACP,OAAO,wCAAwC,CAAC;YACpD,KAAK,OAAO;gBACR,OAAO,yCAAyC,CAAC;YACzD,eAAe;YACX,KAAK,cAAc;gBACf,OAAO;;;;;;;;;;;yBAWE,CAAC;YACd,KAAK,UAAU;gBACX,OAAO,uGAAuG,CAAC;YACvH,eAAe;YACX,KAAK,cAAc;gBACf,OAAO;;;;;;iBAMN,CAAC;YACV,YAAY;YACZ,KAAK,WAAW;gBACZ,OAAO;;;;;;aAMN,CAAC;YACF,KAAK,SAAS;gBACV,OAAO,4OAA4O,CAAC;YACxP,KAAK,SAAS;gBACV,OAAO,mCAAmC,CAAC;YAC/C,KAAK,aAAa;gBACd,OAAO,gDAAgD,CAAC;YAChE,cAAc;YACV,KAAK,aAAa;gBAClB,OAAO;;;;;;;;;aASN,CAAC;YACL,aAAa;YACb,KAAK,YAAY;gBACd,OAAO;;;;;;;;;;;;;;;aAeN,CAAC;YACN,iBAAiB;YACjB,KAAK,UAAU;gBACf,OAAO;;;;;;;;;;;;;;;;SAgBN,CAAC;YACC;gBACC,OAAO,OAAO,CAAA;SACjB;IACJ,CAAC;CACJ;AAxID,oCAwIC"} -------------------------------------------------------------------------------- /out/language-server/dataset-descriptor-module.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | exports.createDatasetDescriptorServices = 
exports.DatasetDescriptorModule = void 0; 4 | const langium_1 = require("langium"); 5 | const module_1 = require("./generated/module"); 6 | const dataset_descriptor_validator_1 = require("./dataset-descriptor-validator"); 7 | const dataset_descriptor_scope_1 = require("./dataset-descriptor-scope"); 8 | const dataset_descriptor_naming_1 = require("./dataset-descriptor-naming"); 9 | const dataset_descriptor_documentation_1 = require("../generator-service/dataset-descriptor-documentation"); 10 | /** 11 | * Dependency injection module that overrides Langium default services and contributes the 12 | * declared custom services. The Langium defaults can be partially specified to override only 13 | * selected services, while the custom services must be fully specified. 14 | */ 15 | exports.DatasetDescriptorModule = { 16 | validation: { 17 | ValidationRegistry: (services) => new dataset_descriptor_validator_1.DatasetDescriptorValidationRegistry(services), 18 | DatasetDescriptorValidator: () => new dataset_descriptor_validator_1.DatasetDescriptorValidator() 19 | }, 20 | references: { 21 | ScopeComputation: (services) => new dataset_descriptor_scope_1.DatasetDescriptorScopeComputation(services), 22 | NameProvider: () => new dataset_descriptor_naming_1.DatasetDescriptorNameProvider() 23 | }, 24 | generation: { 25 | DocumentationGenerator: (services) => new dataset_descriptor_documentation_1.DocumentationGenerator() 26 | }, 27 | }; 28 | /** 29 | * Create the full set of services required by Langium. 
30 | * 31 | * First inject the shared services by merging two modules: 32 | * - Langium default shared services 33 | * - Services generated by langium-cli 34 | * 35 | * Then inject the language-specific services by merging three modules: 36 | * - Langium default language-specific services 37 | * - Services generated by langium-cli 38 | * - Services specified in this file 39 | * 40 | * @param context Optional module context with the LSP connection 41 | * @returns An object wrapping the shared services and the language-specific services 42 | */ 43 | function createDatasetDescriptorServices(context) { 44 | const shared = (0, langium_1.inject)((0, langium_1.createDefaultSharedModule)(context), module_1.DatasetDescriptorGeneratedSharedModule); 45 | const DatasetDescriptor = (0, langium_1.inject)((0, langium_1.createDefaultModule)({ shared }), module_1.DatasetDescriptorGeneratedModule, exports.DatasetDescriptorModule); 46 | shared.ServiceRegistry.register(DatasetDescriptor); 47 | return { shared, DatasetDescriptor }; 48 | } 49 | exports.createDatasetDescriptorServices = createDatasetDescriptorServices; 50 | //# sourceMappingURL=dataset-descriptor-module.js.map -------------------------------------------------------------------------------- /out/language-server/dataset-descriptor-module.js.map: -------------------------------------------------------------------------------- 1 | 
{"version":3,"file":"dataset-descriptor-module.js","sourceRoot":"","sources":["../../src/language-server/dataset-descriptor-module.ts"],"names":[],"mappings":";;;AAAA,qCAGiB;AACjB,+CAA8G;AAC9G,iFAAiH;AACjH,yEAA+E;AAC/E,2EAA4E;AAC5E,4GAAyG;AAoBzG;;;;GAIG;AACU,QAAA,uBAAuB,GAA+F;IAC/H,UAAU,EAAE;QACR,kBAAkB,EAAE,CAAC,QAAQ,EAAE,EAAE,CAAC,IAAI,kEAAmC,CAAC,QAAQ,CAAC;QACnF,0BAA0B,EAAE,GAAG,EAAE,CAAC,IAAI,yDAA0B,EAAE;KACrE;IACD,UAAU,EAAE;QACR,gBAAgB,EAAE,CAAC,QAAQ,EAAE,EAAE,CAAC,IAAI,4DAAiC,CAAC,QAAQ,CAAC;QAC/E,YAAY,EAAE,GAAG,EAAE,CAAC,IAAI,yDAA6B,EAAE;KAC1D;IACD,UAAU,EAAE;QACR,sBAAsB,EAAE,CAAC,QAAQ,EAAE,EAAE,CAAC,IAAI,yDAAsB,EAAE;KACrE;CACJ,CAAC;AAEF;;;;;;;;;;;;;;GAcG;AACH,SAAgB,+BAA+B,CAAC,OAAmC;IAI/E,MAAM,MAAM,GAAG,IAAA,gBAAM,EACjB,IAAA,mCAAyB,EAAC,OAAO,CAAC,EAClC,+CAAsC,CACzC,CAAC;IACF,MAAM,iBAAiB,GAAG,IAAA,gBAAM,EAC5B,IAAA,6BAAmB,EAAC,EAAE,MAAM,EAAE,CAAC,EAC/B,yCAAgC,EAChC,+BAAuB,CAC1B,CAAC;IACF,MAAM,CAAC,eAAe,CAAC,QAAQ,CAAC,iBAAiB,CAAC,CAAC;IACnD,OAAO,EAAE,MAAM,EAAE,iBAAiB,EAAE,CAAC;AACzC,CAAC;AAfD,0EAeC"} -------------------------------------------------------------------------------- /out/language-server/dataset-descriptor-naming.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | /****************************************************************************** 3 | * Copyright 2021 TypeFox GmbH 4 | * This program and the accompanying materials are made available under the 5 | * terms of the MIT License, which is available in the project root. 6 | ******************************************************************************/ 7 | Object.defineProperty(exports, "__esModule", { value: true }); 8 | exports.DatasetDescriptorNameProvider = exports.toQualifiedName = void 0; 9 | const langium_1 = require("langium"); 10 | const ast_1 = require("./generated/ast"); 11 | function toQualifiedName(pack, childName) { 12 | return ((0, ast_1.isDeclaration)(pack.$container) ? 
toQualifiedName(pack.$container, pack.name) : pack.name) + '.' + childName; 13 | } 14 | exports.toQualifiedName = toQualifiedName; 15 | class DatasetDescriptorNameProvider extends langium_1.DefaultNameProvider { 16 | /** 17 | * @param qualifier if the qualifier is a `string`, simple string concatenation is done: `qualifier.name`. 18 | * if the qualifier is a `PackageDeclaration` fully qualified name is created: `package1.package2.name`. 19 | * @param name simple name 20 | * @returns qualified name separated by `.` 21 | */ 22 | getQualifiedName(qualifier, name) { 23 | let prefix = qualifier; 24 | if ((0, ast_1.isDeclaration)(prefix)) { 25 | prefix = ((0, ast_1.isDeclaration)(prefix.$container) 26 | ? this.getQualifiedName(prefix.$container, prefix.name) : prefix.name); 27 | } 28 | return (prefix ? prefix + '.' : '') + name; 29 | } 30 | } 31 | exports.DatasetDescriptorNameProvider = DatasetDescriptorNameProvider; 32 | //# sourceMappingURL=dataset-descriptor-naming.js.map -------------------------------------------------------------------------------- /out/language-server/dataset-descriptor-naming.js.map: -------------------------------------------------------------------------------- 1 | 
{"version":3,"file":"dataset-descriptor-naming.js","sourceRoot":"","sources":["../../src/language-server/dataset-descriptor-naming.ts"],"names":[],"mappings":";AACA;;;;gFAIgF;;;AAE/E,qCAA8C;AAC9C,yCAA6D;AAE7D,SAAgB,eAAe,CAAC,IAAiB,EAAE,SAAiB;IAChE,OAAO,CAAC,IAAA,mBAAa,EAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,CAAC,eAAe,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,GAAG,SAAS,CAAC;AACxH,CAAC;AAFD,0CAEC;AAED,MAAa,6BAA8B,SAAQ,6BAAmB;IAElE;;;;;OAKG;IACH,gBAAgB,CAAC,SAA+B,EAAE,IAAY;QAC1D,IAAI,MAAM,GAAG,SAAS,CAAC;QACvB,IAAI,IAAA,mBAAa,EAAC,MAAM,CAAC,EAAE;YACvB,MAAM,GAAG,CAAC,IAAA,mBAAa,EAAC,MAAM,CAAC,UAAU,CAAC;gBACtC,CAAC,CAAC,IAAI,CAAC,gBAAgB,CAAC,MAAM,CAAC,UAAU,EAAE,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;SAC9E;QACD,OAAO,CAAC,MAAM,CAAC,CAAC,CAAC,MAAM,GAAG,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC;IAC/C,CAAC;CAEJ;AAjBD,sEAiBC"} -------------------------------------------------------------------------------- /out/language-server/dataset-descriptor-scope.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | /****************************************************************************** 3 | * Copyright 2022 SOM Research 4 | * This program and the accompanying materials are made available under the 5 | * terms of the MIT License, which is available in the project root. 6 | ******************************************************************************/ 7 | var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { 8 | function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } 9 | return new (P || (P = Promise))(function (resolve, reject) { 10 | function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } 11 | function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } 12 | function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } 13 | step((generator = generator.apply(thisArg, _arguments || [])).next()); 14 | }); 15 | }; 16 | Object.defineProperty(exports, "__esModule", { value: true }); 17 | exports.DatasetDescriptorScopeComputation = void 0; 18 | const langium_1 = require("langium"); 19 | const vscode_jsonrpc_1 = require("vscode-jsonrpc"); 20 | const ast_1 = require("./generated/ast"); 21 | class DatasetDescriptorScopeComputation extends langium_1.DefaultScopeComputation { 22 | constructor(services) { 23 | super(services); 24 | } 25 | /** 26 | * Exports only types (`DataType or `Entity`) with their qualified names. 27 | */ 28 | computeExports(document, cancelToken = vscode_jsonrpc_1.CancellationToken.None) { 29 | return __awaiter(this, void 0, void 0, function* () { 30 | const descr = []; 31 | for (const modelNode of (0, langium_1.streamAllContents)(document.parseResult.value)) { 32 | yield (0, langium_1.interruptAndCheck)(cancelToken); 33 | let name = this.nameProvider.getName(modelNode); 34 | let container = modelNode.$container; 35 | if (name) { 36 | if ((0, ast_1.isAttribute)(modelNode) || (0, ast_1.isDataInstance)(modelNode) || (0, ast_1.isLabels)(modelNode) || (0, ast_1.isSocialIssue)(modelNode)) { 37 | descr.push(this.descriptions.createDescription(modelNode, container.name + '.' 
+ name, document)); 38 | //name = (this.nameProvider as DomainModelNameProvider).getQualifiedName(modelNode.$container as PackageDeclaration, name); 39 | } 40 | descr.push(this.descriptions.createDescription(modelNode, name, document)); 41 | } 42 | } 43 | return descr; 44 | }); 45 | } 46 | } 47 | exports.DatasetDescriptorScopeComputation = DatasetDescriptorScopeComputation; 48 | //# sourceMappingURL=dataset-descriptor-scope.js.map -------------------------------------------------------------------------------- /out/language-server/dataset-descriptor-scope.js.map: -------------------------------------------------------------------------------- 1 | {"version":3,"file":"dataset-descriptor-scope.js","sourceRoot":"","sources":["../../src/language-server/dataset-descriptor-scope.ts"],"names":[],"mappings":";AAAA;;;;gFAIgF;;;;;;;;;;;;AAE/E,qCAA8I;AAC9I,mDAAmD;AACnD,yCAAoG;AAEpG,MAAa,iCAAkC,SAAQ,iCAAuB;IAE1E,YAAY,QAAyB;QACjC,KAAK,CAAC,QAAQ,CAAC,CAAC;IACpB,CAAC;IAED;;OAEG;IACG,cAAc,CAAC,QAAyB,EAAE,WAAW,GAAG,kCAAiB,CAAC,IAAI;;YAChF,MAAM,KAAK,GAAyB,EAAE,CAAC;YACvC,KAAK,MAAM,SAAS,IAAI,IAAA,2BAAiB,EAAC,QAAQ,CAAC,WAAW,CAAC,KAAK,CAAC,EAAE;gBACnE,MAAM,IAAA,2BAAiB,EAAC,WAAW,CAAC,CAAC;gBAEjC,IAAI,IAAI,GAAG,IAAI,CAAC,YAAY,CAAC,OAAO,CAAC,SAAS,CAAC,CAAC;gBAChD,IAAI,SAAS,GAAG,SAAS,CAAC,UAA0B,CAAC;gBACrD,IAAI,IAAI,EAAE;oBACP,IAAI,IAAA,iBAAW,EAAC,SAAS,CAAC,IAAI,IAAA,oBAAc,EAAC,SAAS,CAAC,IAAI,IAAA,cAAQ,EAAE,SAAS,CAAC,IAAI,IAAA,mBAAa,EAAC,SAAS,CAAC,EAAE;wBACzG,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,iBAAiB,CAAC,SAAS,EAAE,SAAS,CAAC,IAAI,GAAC,GAAG,GAAE,IAAI,EAAE,QAAQ,CAAC,CAAC,CAAC;wBAC/F,2HAA2H;qBAC9H;oBACD,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,YAAY,CAAC,iBAAiB,CAAC,SAAS,EAAE,IAAI,EAAE,QAAQ,CAAC,CAAC,CAAC;iBAC7E;aAER;YACD,OAAO,KAAK,CAAC;QACjB,CAAC;KAAA;CAEJ;AA5BD,8EA4BC"} -------------------------------------------------------------------------------- /out/language-server/dataset-descriptor-validator.js: -------------------------------------------------------------------------------- 1 | "use 
strict"; 2 | /****************************************************************************** 3 | * Copyright 2022 SOM Research 4 | * This program and the accompanying materials are made available under the 5 | * terms of the MIT License, which is available in the project root. 6 | ******************************************************************************/ 7 | Object.defineProperty(exports, "__esModule", { value: true }); 8 | exports.DatasetDescriptorValidator = exports.DatasetDescriptorValidationRegistry = void 0; 9 | const langium_1 = require("langium"); 10 | /** 11 | * In this class we implement the custom validation services for the tool 12 | */ 13 | //type DatasetDescriptorChecks = { [type in DatasetDescriptorAstType ]: ValidationCheck | ValidationCheck[] } 14 | /** 15 | * Registry for validation checks. 16 | */ 17 | class DatasetDescriptorValidationRegistry extends langium_1.ValidationRegistry { 18 | constructor(services) { 19 | super(services); 20 | const validator = services.validation.DatasetDescriptorValidator; 21 | const checks = { 22 | Description: validator.hintsOfDescription, 23 | Author: validator.authorValidator, 24 | Funder: validator.hintsOfFunder, 25 | Authoring: validator.hintsOfAuthoring, 26 | Composition: validator.hintOfComposition, 27 | Areas: validator.hintsOfAreas, 28 | Tags: validator.hintsofTags, 29 | Distribution: validator.hintsOfDistribution 30 | }; 31 | this.register(checks, validator); 32 | } 33 | } 34 | exports.DatasetDescriptorValidationRegistry = DatasetDescriptorValidationRegistry; 35 | /** 36 | * Implementation of custom validations. 
37 | */ 38 | class DatasetDescriptorValidator { 39 | hintsofTags(type, accept) { 40 | accept('hint', 'Set the tags separated by a whitespace', { node: type, property: 'tags' }); 41 | } 42 | hintsOfAreas(type, accept) { 43 | accept('hint', 'Set the areas separated by a whitespace', { node: type, property: 'areas' }); 44 | } 45 | hintsOfDistribution(type, accept) { 46 | accept('hint', 'Set the licence of the dataset. Indicate in `others:` if any other policy is applied to the data', { node: type, property: 'name' }); 47 | accept('hint', 'Stand-alone: Choose the level of distribution of the stand-alone data.', { node: type, property: 'rights' }); 48 | accept('hint', 'Rights-model: Choose the level of distribution of the models trained with the data.', { node: type, property: 'rightsModels' }); 49 | } 50 | hintsOfDescription(type, accept) { 51 | //new MultilineCommentHoverProvider(services: DatasetDescriptorServices).getHoverContent(type, params); 52 | accept('hint', 'For what propose was the dataser created? \nPlease provide a description', { node: type, property: 'descriptionpurpose' }); 53 | accept('hint', 'For what tasks this dataset is inteded for', { node: type, property: 'tasks' }); 54 | accept('hint', 'Was there specific gap that needed to be filled?\nPlease provide a description', { node: type, property: 'descriptionGaps' }); 55 | } 56 | hintsOfTasks(type, accept) { 57 | accept('hint', 'Was there a specific task in mind?\nPlease provide a description', { node: type, property: 'name' }); 58 | } 59 | hintsOfFunder(type, accept) { 60 | accept('hint', '1 - Who founded the creation of the dataset?\n2 - If is there any associated grant, please provide the number and the name of the grantor and the gran name and number \n Set a `_` or a `-` as a white spaces in the name e.g: "John_Smith"? 
', { node: type, property: 'name' }); 61 | } 62 | hintsOfAuthoring(type, accept) { 63 | accept('hint', 'Who is the author of the dataset?', { node: type, property: 'name' }); 64 | accept('hint', 'Who maintan the dataset? How can be contacted?', { node: type, property: 'maintainers' }); 65 | accept('hint', 'Is there an erratum? If so, please provide a link or other access point?', { node: type, property: 'erratum' }); 66 | accept('hint', 'If the dataset belongs to people, are there applicable limits on the retention of the data associated with them? If so, please describre how. If not, please describre how its obsolescence will be communicated to the dataset', { node: type, property: 'dataRetention' }); 67 | accept('hint', '1 - Will the dataset by updated (p.e: to correct labels, add or delete new instances)? If so, please describre how \n2 - Will older version of the dataset continue to be supported/hosted/maintained?', { node: type, property: 'support' }); 68 | accept('hint', 'Please describre the mechanism for contribution here', { node: type, property: 'contribGuides' }); 69 | } 70 | hintOfComposition(type, accept) { 71 | accept('hint', 'What do the instances that comprise the dataset represent(for example, documents, photos, people, countries)', { node: type, property: 'compodesc' }); 72 | accept('hint', 'How many instances are there in total?', { node: type, property: 'numberInst' }); 73 | } 74 | authorValidator(type, accept) { 75 | accept('hint', 'Please, set a `_` or a `-` as a white spaces in the name e.g: "John_Smith"?', { node: type, property: 'name' }); 76 | if (type.name) { 77 | const firstChar = type.name.substring(0, 1); 78 | if (firstChar.toUpperCase() !== firstChar) { 79 | accept('warning', 'Type name should start with a capital.', { node: type, property: 'name' }); 80 | } 81 | } 82 | } 83 | } 84 | exports.DatasetDescriptorValidator = DatasetDescriptorValidator; 85 | //# sourceMappingURL=dataset-descriptor-validator.js.map 
-------------------------------------------------------------------------------- /out/language-server/dataset-descriptor-validator.js.map: -------------------------------------------------------------------------------- 1 | {"version":3,"file":"dataset-descriptor-validator.js","sourceRoot":"","sources":["../../src/language-server/dataset-descriptor-validator.ts"],"names":[],"mappings":";AAAA;;;;gFAIgF;;;AAE/E,qCAAmF;AAInF;;GAEG;AACH,6GAA6G;AAE7G;;GAEG;AACH,MAAa,mCAAoC,SAAQ,4BAAkB;IACvE,YAAY,QAAmC;QAC3C,KAAK,CAAC,QAAQ,CAAC,CAAC;QAChB,MAAM,SAAS,GAAG,QAAQ,CAAC,UAAU,CAAC,0BAA0B,CAAC;QACjE,MAAM,MAAM,GAA+C;YACvD,WAAW,EAAE,SAAS,CAAC,kBAAkB;YACzC,MAAM,EAAE,SAAS,CAAC,eAAe;YACjC,MAAM,EAAE,SAAS,CAAC,aAAa;YAC/B,SAAS,EAAE,SAAS,CAAC,gBAAgB;YACrC,WAAW,EAAC,SAAS,CAAC,iBAAiB;YACvC,KAAK,EAAE,SAAS,CAAC,YAAY;YAC7B,IAAI,EAAE,SAAS,CAAC,WAAW;YAC3B,YAAY,EAAC,SAAS,CAAC,mBAAmB;SAC7C,CAAC;QACF,IAAI,CAAC,QAAQ,CAAC,MAAM,EAAE,SAAS,CAAC,CAAC;IACrC,CAAC;CACJ;AAhBD,kFAgBC;AAED;;GAEG;AACH,MAAa,0BAA0B;IAEnC,WAAW,CAAC,IAAU,EAAE,MAA0B;QAC9C,MAAM,CAAC,MAAM,EAAE,wCAAwC,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAC,CAAC,CAAC;IAC9F,CAAC;IACD,YAAY,CAAC,IAAW,EAAE,MAA0B;QAChD,MAAM,CAAC,MAAM,EAAE,yCAAyC,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAC,CAAC,CAAC;IAChG,CAAC;IAED,mBAAmB,CAAC,IAAkB,EAAE,MAA0B;QAC9D,MAAM,CAAC,MAAM,EAAE,kGAAkG,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAC,CAAC,CAAC;QACpJ,MAAM,CAAC,MAAM,EAAE,wEAAwE,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,QAAQ,EAAC,CAAC,CAAC;QAC5H,MAAM,CAAC,MAAM,EAAE,qFAAqF,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,cAAc,EAAC,CAAC,CAAC;IACnJ,CAAC;IAED,kBAAkB,CAAC,IAAgB,EAAE,MAA0B;QACvD,uGAAuG;QACvG,MAAM,CAAC,MAAM,EAAE,0EAA0E,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,oBAAoB,EAAE,CAAC,CAAC;QAC3I,MAAM,CAAC,MAAM,EAAE,4CAA4C,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,CAAC,CAAC;QAChG,MAAM,CAAC,MAAM,EAAE,gFAAgF,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,iBAAiB,EAAC,CAAC,CAAC;IACrJ,CAAC;IAGD,YAAY,CAAC,IAAW,EAAE,MAA0B;QAChD,MAAM,CAAC,MAAM,EAAE,kEAAkE,EAAE,EAAE,IAAI,EAA
E,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAC,CAAC,CAAC;IAExH,CAAC;IAED,aAAa,CAAC,IAAY,EAAE,MAA0B;QAClD,MAAM,CAAC,MAAM,EAAE,gPAAgP,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAC,MAAM,EAAE,CAAC,CAAC;IACtS,CAAC;IACD,gBAAgB,CAAC,IAAe,EAAE,MAA0B;QAEvD,MAAM,CAAC,MAAM,EAAE,mCAAmC,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAC,MAAM,EAAE,CAAC,CAAC;QACrF,MAAM,CAAC,MAAM,EAAE,gDAAgD,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,aAAa,EAAE,CAAC,CAAC;QAC1G,MAAM,CAAC,MAAM,EAAE,0EAA0E,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,SAAS,EAAE,CAAC,CAAC;QAChI,MAAM,CAAC,MAAM,EAAE,iOAAiO,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,eAAe,EAAE,CAAC,CAAC;QAC7R,MAAM,CAAC,MAAM,EAAE,wMAAwM,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,SAAS,EAAE,CAAC,CAAC;QAC9P,MAAM,CAAC,MAAM,EAAE,sDAAsD,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,eAAe,EAAE,CAAC,CAAC;IACtH,CAAC;IAGF,iBAAiB,CAAC,IAAiB,EAAE,MAA0B;QAC3D,MAAM,CAAC,MAAM,EAAE,8GAA8G,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,WAAW,EAAE,CAAC,CAAC;QACtK,MAAM,CAAC,MAAM,EAAE,wCAAwC,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,YAAY,EAAE,CAAC,CAAC;IACrG,CAAC;IAGD,eAAe,CAAC,IAAY,EAAE,MAA0B;QAEpD,MAAM,CAAC,MAAM,EAAE,6EAA6E,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAC,MAAM,EAAE,CAAC,CAAC;QAC/H,IAAI,IAAI,CAAC,IAAI,EAAE;YACX,MAAM,SAAS,GAAG,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC;YAC5C,IAAI,SAAS,CAAC,WAAW,EAAE,KAAK,SAAS,EAAE;gBACvC,MAAM,CAAC,SAAS,EAAE,wCAAwC,EAAE,EAAE,IAAI,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,EAAE,CAAC,CAAC;aACjG;SACJ;IACL,CAAC;CAEJ;AA3DD,gEA2DC"} -------------------------------------------------------------------------------- /out/language-server/generated/grammar.js.map: -------------------------------------------------------------------------------- 1 | 
{"version":3,"file":"grammar.js","sourceRoot":"","sources":["../../../src/language-server/generated/grammar.ts"],"names":[],"mappings":";AAAA;;;gFAGgF;;;AAEhF,qCAAuD;AAEvD,IAAI,8BAAmD,CAAC;AACjD,MAAM,wBAAwB,GAAG,GAAY,EAAE,CAAC,8BAA8B,aAA9B,8BAA8B,cAA9B,8BAA8B,GAAI,CAAC,8BAA8B,GAAG,IAAA,6BAAmB,EAAC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAguQ7I,CAAC,CAAC,CAAC;AAhuQQ,QAAA,wBAAwB,4BAguQhC"} -------------------------------------------------------------------------------- /out/language-server/generated/module.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | /****************************************************************************** 3 | * This file was generated by langium-cli 1.1.0. 4 | * DO NOT EDIT MANUALLY! 
5 | ******************************************************************************/ 6 | Object.defineProperty(exports, "__esModule", { value: true }); 7 | exports.DatasetDescriptorGeneratedModule = exports.DatasetDescriptorGeneratedSharedModule = exports.DatasetDescriptorLanguageMetaData = void 0; 8 | const ast_1 = require("./ast"); 9 | const grammar_1 = require("./grammar"); 10 | exports.DatasetDescriptorLanguageMetaData = { 11 | languageId: 'dataset-descriptor', 12 | fileExtensions: ['.descml'], 13 | caseInsensitive: false 14 | }; 15 | exports.DatasetDescriptorGeneratedSharedModule = { 16 | AstReflection: () => new ast_1.DatasetDescriptorAstReflection() 17 | }; 18 | exports.DatasetDescriptorGeneratedModule = { 19 | Grammar: () => (0, grammar_1.DatasetDescriptorGrammar)(), 20 | LanguageMetaData: () => exports.DatasetDescriptorLanguageMetaData, 21 | parser: {} 22 | }; 23 | //# sourceMappingURL=module.js.map -------------------------------------------------------------------------------- /out/language-server/generated/module.js.map: -------------------------------------------------------------------------------- 1 | {"version":3,"file":"module.js","sourceRoot":"","sources":["../../../src/language-server/generated/module.ts"],"names":[],"mappings":";AAAA;;;gFAGgF;;;AAGhF,+BAAuD;AACvD,uCAAqD;AAExC,QAAA,iCAAiC,GAAqB;IAC/D,UAAU,EAAE,oBAAoB;IAChC,cAAc,EAAE,CAAC,SAAS,CAAC;IAC3B,eAAe,EAAE,KAAK;CACzB,CAAC;AAEW,QAAA,sCAAsC,GAAkE;IACjH,aAAa,EAAE,GAAG,EAAE,CAAC,IAAI,oCAA8B,EAAE;CAC5D,CAAC;AAEW,QAAA,gCAAgC,GAAsD;IAC/F,OAAO,EAAE,GAAG,EAAE,CAAC,IAAA,kCAAwB,GAAE;IACzC,gBAAgB,EAAE,GAAG,EAAE,CAAC,yCAAiC;IACzD,MAAM,EAAE,EAAE;CACb,CAAC"} -------------------------------------------------------------------------------- /out/language-server/main.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | Object.defineProperty(exports, "__esModule", { value: true }); 3 | const langium_1 = require("langium"); 4 | const node_1 = 
require("langium/node"); 5 | const node_2 = require("vscode-languageserver/node"); 6 | const dataset_descriptor_module_1 = require("./dataset-descriptor-module"); 7 | // Create a connection to the client 8 | const connection = (0, node_2.createConnection)(node_2.ProposedFeatures.all); 9 | // Inject the shared services and language-specific services 10 | const { shared } = (0, dataset_descriptor_module_1.createDatasetDescriptorServices)(Object.assign({ connection }, node_1.NodeFileSystem)); 11 | // Start the language server with the shared services 12 | (0, langium_1.startLanguageServer)(shared); 13 | //# sourceMappingURL=main.js.map -------------------------------------------------------------------------------- /out/language-server/main.js.map: -------------------------------------------------------------------------------- 1 | {"version":3,"file":"main.js","sourceRoot":"","sources":["../../src/language-server/main.ts"],"names":[],"mappings":";;AAAA,qCAA8C;AAC9C,uCAA8C;AAC9C,qDAAgF;AAChF,2EAA8E;AAE9E,oCAAoC;AACpC,MAAM,UAAU,GAAG,IAAA,uBAAgB,EAAC,uBAAgB,CAAC,GAAG,CAAC,CAAC;AAE1D,4DAA4D;AAC5D,MAAM,EAAE,MAAM,EAAE,GAAG,IAAA,2DAA+B,kBAAG,UAAU,IAAK,qBAAc,EAAG,CAAC;AAEtF,qDAAqD;AACrD,IAAA,6BAAmB,EAAC,MAAM,CAAC,CAAC"} -------------------------------------------------------------------------------- /out/uploader-service/dataset-descriptor-uploader.js.map: -------------------------------------------------------------------------------- 1 | 
{"version":3,"file":"dataset-descriptor-uploader.js","sourceRoot":"","sources":["../../src/uploader-service/dataset-descriptor-uploader.ts"],"names":[],"mappings":";AAAA;;;;gFAIgF;;;;;;;;;;;;;;;AAE/E,yCAAuC;AACvC,4CAAoB;AACrB,gDAAwB;AACvB,uDAAmD;AASnD;;EAEE;AACF,MAAa,eAAe;IAExB;IACA,CAAC;IAED,2EAA2E;IACrE,aAAa,CAAC,QAAgB;;YAChC,mBAAmB;YACnB,MAAM,WAAW,GAAG,YAAE,CAAC,YAAY,CAAC,QAAQ,EAAE,EAAE,QAAQ,EAAE,OAAO,EAAE,CAAC,CAAC;YAErE,mBAAmB;YACnB,MAAM,MAAM,GAAe,IAAA,YAAK,EAAC,WAAW,CAAe,CAAC;YAC5D,uBAAuB;YACvB,MAAM,gBAAgB,GAAG,IAAI,CAAC,qBAAqB,CAAC,MAAM,EAAE,QAAQ,CAAC,CAAC;YACtE,OAAO,gBAAgB,CAAC;QAC5B,CAAC;KAAA;IAED,kCAAkC;IAClC,qBAAqB,CAAC,IAAgB,EAAE,QAAgB;QACpD,6BAA6B;QAC7B,IAAI,IAAI,GAAW,IAAI,CAAC,oBAAoB,EAAE,CAAC;QAC/C,4BAA4B;QAC5B,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC,uBAAuB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;QAC3D,+CAA+C;QAC/C,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC,sBAAsB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;QAC1D,+BAA+B;QAC/B,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC,yBAAyB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAC;QAC7D,OAAO,IAAI,CAAC;IAChB,CAAC;IACD,gCAAgC;IAChC,oBAAoB;QAChB,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;YAgCJ,CAAC;IACR,CAAC;IACD,+BAA+B;IAC/B,uBAAuB,CAAC,IAAgB,EAAE,QAAgB;QAEtD,MAAM,cAAc,GAAG,IAAI,gCAAc,CAAC;QAC1C,cAAc;QACd,MAAM,OAAO,GAAkB,IAAI,CAAC,CAAC,CAAC,CAAC;QACvC,qBAAqB;QACrB,MAAM,eAAe,GAAG,IAAI,CAAC,MAAM,GAAG,CAAC,CAAA;QACvC,IAAI,IAAI,GAAW;;;mBAGT,eAAe;;sBAEZ,cAAI,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC;;;iCAG1B,OAAO,CAAC,MAAM;2BACpB,CAAC;QACnB,qBAAqB;QACrB,OAAO,CAAC,OAAO,CAAC,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;YAC5B,IAAI,QAAQ,GAAG,IAAI,CAAC,GAAG,CAAC,UAAU,KAAK,EAAE,MAAM,IAAI,OAAO,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;YAC3E,MAAM,UAAU,GAAG,QAAQ,CAAC,KAAK,EAAE,CAAC;YACpC,wBAAwB;YAExB,MAAM,WAAW,GAAG,cAAc,CAAC,oBAAoB,CAAC,QAAQ,CAAC,CAAC;YAClE,0BAA0B;YAC1B,MAAM,MAAM,GAAG,cAAc,CAAC,eAAe,CAAC,QAAQ,CAAC,CAAC;YACxD,wCAAwC;YACxC,IAAI,QAAQ,GAAG,cAAc,CAAC,oBAAoB,CAAC,QAAQ,CAAC,CAAC;YAC7D,mDAAmD;YACnD,IAAI,CAAC,QAAQ,CAAC,MAAM,GAAG,EAAE,IAAI,QAAQ,CAAC,MAAM,GAAG,IAAI,GAAG,MAAM,CAAC,IAAI,CAAC,QA
AQ,IAAI,KAAK,CAAC,EAAE;gBAClF,cAAc;gBACd,iBAAiB;gBACjB,MAAM,IAAI,GAAG,cAAc,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;gBACpD,qCAAqC;gBACrC,MAAM,OAAO,GAAG,cAAc,CAAC,gBAAgB,CAAC,QAAQ,CAAC,CAAC;gBAAA,CAAC;gBAC3D,IAAI,OAAO,KAAK,KAAK,EAAE;oBACnB,IAAI,GAAG,IAAI;wBACP,uBAAuB,UAAU,CAAC,UAAU,CAAC,GAAG,EAAE,GAAG,CAAC;;kCAE7C,MAAM;;;sCAGF,IAAI;;iDAEO,WAAW,KAAK,CAAC;iBAChD;qBAAM;oBACH,IAAI,GAAG,IAAI;wBACP,uBAAuB,UAAU,CAAC,UAAU,CAAC,GAAG,EAAE,GAAG,CAAC;;kCAE7C,MAAM;;;sCAGF,IAAI;uDACa,OAAO;;iDAEb,WAAW,KAAK,CAAC;iBAChD;aAEJ;iBAAM;gBACH,YAAY;gBACZ,mBAAmB;gBACnB,MAAM,aAAa,GAAa,QAAQ,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;gBACpD,mBAAmB;gBACnB,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,aAAa,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,GAAG,aAAa,CAAC,MAAM,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,CAAC;gBAClG,qBAAqB;gBACrB,MAAM,QAAQ,GAAG,QAAQ,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,GAAG,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC;gBACtF,uBAAuB;gBACvB,MAAM,GAAG,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,GAAG,EAAE,CAAC,GAAG,EAAE,CAAC;gBACxD,qBAAqB;gBACrB,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;gBAClD,qBAAqB;gBACrB,MAAM,GAAG,GAAG,IAAI,CAAC,GAAG,CAAC,GAAG,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC;gBACnD,IAAI,GAAG,IAAI,GAAG,uBAAuB,UAAU,CAAC,UAAU,CAAC,GAAG,EAAE,GAAG,CAAC;;8BAEvD,MAAM;;;iCAGH,IAAI;gDACW,GAAG;qCACd,GAAG;qCACH,GAAG,IAAI,CAAC;aAC/B;QAKL,CAAC,CAAC,CAAC;QAEH,IAAI,GAAG,IAAI;YACP;;;;;uBAKU,CAAA;QAEd,OAAO,IAAI,CAAC;IAChB,CAAC;IACD,+BAA+B;IAC/B,sBAAsB,CAAC,IAAgB,EAAE,QAAgB;QACrD,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;EA0Bd,CAAA;IAEG,CAAC;IAED,yBAAyB,CAAC,IAAgB,EAAE,QAAgB;QACxD,OAAO;;;;;;;;;UASN,CAAC;IACN,CAAC;CAEJ;AAjND,0CAiNC"} -------------------------------------------------------------------------------- /out/uploader-service/dataset-metrics.js: -------------------------------------------------------------------------------- 1 | 
"use strict"; 2 | /****************************************************************************** 3 | * Copyright 2022 SOM Research 4 | * This program and the accompanying materials are made available under the 5 | * terms of the MIT License, which is available in the project root. 6 | ******************************************************************************/ 7 | Object.defineProperty(exports, "__esModule", { value: true }); 8 | exports.DatasetMetrics = void 0; 9 | /** 10 | * Data uploader service main class 11 | */ 12 | class DatasetMetrics { 13 | attributeUnique(attrData) { 14 | let uniques = attrData.filter((v, i, a) => a.indexOf(v) === i); 15 | return uniques.length; 16 | } 17 | attributeCompletness(attrData) { 18 | let validValues = 0; 19 | attrData.forEach(element => { 20 | if (element === undefined || element === null || element == '' || element == 'NaN') { 21 | } 22 | else { 23 | validValues = validValues + 1; 24 | } 25 | }); 26 | if (validValues != 0) 27 | return ((validValues / attrData.length) * 100).toFixed(0); 28 | else 29 | return 0; 30 | } 31 | attributeCatDist(attrData) { 32 | let percent = 0; 33 | let body = "["; 34 | let uniques = attrData.filter((v, i, a) => a.indexOf(v) === i); 35 | uniques.forEach((value, idx, array) => { 36 | percent = (attrData.filter((v) => (v === value)).length / attrData.length) * 100; 37 | if (percent >= 0.1) { 38 | body = body + '"' + value + '"' + ":" + (Math.round(percent * 10) / 10) + '%, '; 39 | } 40 | }); 41 | body = body.slice(0, -2) + "]"; 42 | if (body.length < 5) 43 | return false; 44 | else 45 | return body; 46 | } 47 | isAttributeNumerical(attrData) { 48 | let isNumber = true; 49 | attrData.forEach(value => { 50 | if (isNaN(Number(value))) 51 | isNumber = false; 52 | }); 53 | return isNumber; 54 | } 55 | attributeMode(arr) { 56 | const mode = []; 57 | let max = 0, count = 0; 58 | for (let i = 0; i < arr.length; i++) { 59 | const item = arr[i]; 60 | if (mode[item]) { 61 | mode[item]++; 62 | } 63 | else { 
64 | mode[item] = 1; 65 | } 66 | if (count < mode[item]) { 67 | max = item; 68 | count = mode[item]; 69 | } 70 | } 71 | return max; 72 | } 73 | ; 74 | } 75 | exports.DatasetMetrics = DatasetMetrics; 76 | //# sourceMappingURL=dataset-metrics.js.map -------------------------------------------------------------------------------- /out/uploader-service/dataset-metrics.js.map: -------------------------------------------------------------------------------- 1 | {"version":3,"file":"dataset-metrics.js","sourceRoot":"","sources":["../../src/uploader-service/dataset-metrics.ts"],"names":[],"mappings":";AAAA;;;;gFAIgF;;;AAehF;;EAEE;AACF,MAAa,cAAc;IAEvB,eAAe,CAAC,QAAoB;QAChC,IAAI,OAAO,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;QAC/D,OAAO,OAAO,CAAC,MAAM,CAAC;IAC1B,CAAC;IAED,oBAAoB,CAAC,QAAoB;QACrC,IAAI,WAAW,GAAG,CAAC,CAAC;QACpB,QAAQ,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE;YACvB,IAAI,OAAO,KAAK,SAAS,IAAI,OAAO,KAAK,IAAI,IAAI,OAAO,IAAI,EAAE,IAAI,OAAO,IAAI,KAAK,EAAE;aACnF;iBAAM;gBACH,WAAW,GAAG,WAAW,GAAG,CAAC,CAAC;aACjC;QACL,CAAC,CAAC,CAAC;QACH,IAAI,WAAW,IAAI,CAAC;YAAE,OAAO,CAAC,CAAC,WAAW,GAAG,QAAQ,CAAC,MAAM,CAAC,GAAG,GAAG,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC;;YAC3E,OAAO,CAAC,CAAA;IACjB,CAAC;IAED,gBAAgB,CAAC,QAAoB;QACjC,IAAI,OAAO,GAAG,CAAC,CAAC;QAChB,IAAI,IAAI,GAAG,GAAG,CAAC;QACf,IAAI,OAAO,GAAG,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC;QAC/D,OAAO,CAAC,OAAO,CAAC,CAAC,KAAK,EAAE,GAAG,EAAE,KAAK,EAAE,EAAE;YAClC,OAAO,GAAG,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,KAAK,KAAK,CAAC,CAAC,CAAC,MAAM,GAAG,QAAQ,CAAC,MAAM,CAAC,GAAG,GAAG,CAAC;YACjF,IAAI,OAAO,IAAI,GAAG,EAAE;gBAChB,IAAI,GAAG,IAAI,GAAG,GAAG,GAAG,KAAK,GAAG,GAAG,GAAG,GAAG,GAAG,CAAC,IAAI,CAAC,KAAK,CAAC,OAAO,GAAG,EAAE,CAAC,GAAG,EAAE,CAAC,GAAG,KAAK,CAAC;aACnF;QACL,CAAC,CAAC,CAAC;QACH,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,GAAG,CAAC;QAC/B,IAAI,IAAI,CAAC,MAAM,GAAG,CAAC;
YAAE,OAAO,KAAK,CAAA;;YAC5B,OAAO,IAAI,CAAA;IACpB,CAAC;IAED,oBAAoB,CAAC,QAAoB;QACrC,IAAI,QAAQ,GAAG,IAAI,CAAC;QACpB,QAAQ,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;YACrB,IAAI,KAAK,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;gBAAE,QAAQ,GAAG,KAAK,CAAC;QAC/C,CAAC,CAAC,CAAC;QACH,OAAO,QAAQ,CAAC;IACpB,CAAC;IAED,aAAa,CAAC,GAAe;QACzB,MAAM,IAAI,GAAG,EAAE,CAAC;QAChB,IAAI,GAAG,GAAG,CAAC,EAAE,KAAK,GAAG,CAAC,CAAC;QACvB,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,GAAG,CAAC,MAAM,EAAE,CAAC,EAAE,EAAE;YACjC,MAAM,IAAI,GAAG,GAAG,CAAC,CAAC,CAAC,CAAC;YACpB,IAAI,IAAI,CAAC,IAAI,CAAC,EAAE;gBACZ,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;aAChB;iBAAM;gBACH,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC;aAClB;YACD,IAAI,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,EAAE;gBACpB,GAAG,GAAG,IAAI,CAAC;gBACX,KAAK,GAAG,IAAI,CAAC,IAAI,CAAC,CAAC;aACtB;SACJ;QACD,OAAO,GAAG,CAAC;IACf,CAAC;IAAA,CAAC;CACL;AA3DD,wCA2DC"} -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "DescribeML", 3 | "displayName": "DescribeML", 4 | "description": "DescribeML is a language plug-in to describe machine-learning datasets in a standard format. 
Build better data describing the composition, provenance and social concerns of your dataset.", 5 | "version": "1.2.1", 6 | "engines": { 7 | "vscode": "^1.56.0" 8 | }, 9 | "publisher": "SOMResearchGroup", 10 | "author": { 11 | "name": "Joan Giner-Miguelez" 12 | }, 13 | "homepage": "https://github.com/SOM-Research/DescribeML", 14 | "repository": { 15 | "type": "git", 16 | "url": "https://github.com/SOM-Research/DescribeML" 17 | }, 18 | "icon": "fileicons/requisito.png", 19 | "keywords": [ 20 | "Data Science", 21 | "Datasets", 22 | "ML", 23 | "Machine Learning", 24 | "DSL" 25 | ], 26 | "license": "GPLv3", 27 | "categories": [ 28 | "Programming Languages" 29 | ], 30 | "contributes": { 31 | "languages": [ 32 | { 33 | "id": "dataset-descriptor", 34 | "aliases": [ 35 | "dataset-descriptor", 36 | "dataset-descriptor" 37 | ], 38 | "extensions": [ 39 | ".descml" 40 | ], 41 | "configuration": "./language-configuration.json" 42 | } 43 | ], 44 | "grammars": [ 45 | { 46 | "language": "dataset-descriptor", 47 | "scopeName": "source.dataset-descriptor", 48 | "path": "./syntaxes/dataset-descriptor.tmLanguage.json" 49 | } 50 | ], 51 | "commands": [ 52 | { 53 | "command": "datadesc.loadDataset", 54 | "title": "Load Dataset Files", 55 | "shortTitle": "Dataset", 56 | "category": "Dataset", 57 | "icon": { 58 | "light": "fileicons/cloud-computing.png", 59 | "dark": "fileicons/cloud-computing.png" 60 | } 61 | }, 62 | { 63 | "command": "datadesc.generateDocumentation", 64 | "title": "Generate documentation", 65 | "shortTitle": "Documentation", 66 | "category": "Dataset", 67 | "icon": { 68 | "light": "fileicons/html.png", 69 | "dark": "fileicons/html.png" 70 | } 71 | }, 72 | { 73 | "command": "datadesc.saveDocumentHTML", 74 | "title": "Save HTML", 75 | "shortTitle": "Save HTML", 76 | "category": "Dataset", 77 | "icon": { 78 | "light": "fileicons/save.png", 79 | "dark": "fileicons/save.png" 80 | } 81 | } 82 | ], 83 | "menus": { 84 | "editor/title": [ 85 | { 86 | "when": "resourceLangId == 
dataset-descriptor", 87 | "command": "datadesc.generateDocumentation", 88 | "group": "navigation" 89 | }, 90 | { 91 | "when": "resourceLangId == dataset-descriptor", 92 | "command": "datadesc.loadDataset", 93 | "group": "navigation" 94 | }, 95 | { 96 | "when": "liveHTMLPreviewer", 97 | "command": "datadesc.saveDocumentHTML", 98 | "group": "navigation" 99 | } 100 | ] 101 | }, 102 | "iconThemes": [ 103 | { 104 | "id": "datasetdescriptor", 105 | "label": "Dataset Descriptor", 106 | "path": "./fileicons/dataset-descriptor.icon-theme.json" 107 | } 108 | ] 109 | }, 110 | "activationEvents": [ 111 | "onLanguage:dataset-descriptor" 112 | ], 113 | "files": [ 114 | "bin", 115 | "out", 116 | "src" 117 | ], 118 | "bin": { 119 | "dataset-descriptor-cli": "./bin/cli" 120 | }, 121 | "main": "./out/extension.js", 122 | "scripts": { 123 | "vscode:prepublish": "npm run build && npm run lint", 124 | "build": "tsc -b tsconfig.json", 125 | "watch": "tsc -b tsconfig.json --watch", 126 | "lint": "eslint src --ext ts", 127 | "langium:generate": "langium generate", 128 | "langium:watch": "langium generate --watch" 129 | }, 130 | "dependencies": { 131 | "chalk": "^4.1.2", 132 | "chevrotain": "^9.1.0", 133 | "commander": "^8.0.0", 134 | "csv-parse": "^5.3.2", 135 | "langium": "^1.1.0", 136 | "pug": "^3.0.2", 137 | "vscode-languageclient": "^8.0.2", 138 | "vscode-languageserver": "^8.0.2", 139 | "vscode-uri": "^3.0.2" 140 | }, 141 | "devDependencies": { 142 | "@types/node": "^14.17.3", 143 | "@types/vscode": "^1.56.0", 144 | "@typescript-eslint/eslint-plugin": "^5.28.0", 145 | "@typescript-eslint/parser": "^5.28.0", 146 | "eslint": "^8.17.0", 147 | "langium-cli": "1.1.0", 148 | "typescript": "^4.7.4" 149 | } 150 | } 151 | -------------------------------------------------------------------------------- /snippets.json: -------------------------------------------------------------------------------- 1 | { 2 | "Datasets General Information": { 3 | "prefix": ["Dataset:"], 4 | "body": [ 5 | 
"Dataset: YourDatasetId", 6 | "\tGeneral information: ", 7 | "\t\tTitle: '$1' ", 8 | "\t\tUnique-identifier: uniqueId ", 9 | "\t\tVersion: yourId $2 ", 10 | "\t\tDescription: ", 11 | "\t\t\tPurpose: \"$11\" ", 12 | "\t\t\tTasks: \"$8\" ", 13 | "\t\t\tGaps: \"$9\" ", 14 | "\t\t\tCitation: '$4' ", 15 | "\t\tArea: Dataset_Area", 16 | "\t\tTags: Classification_Tags", 17 | "\t\tAuthoring:", 18 | "\t\t\tAuthors:", 19 | "\t\t\t\tName Author1Name email \"\"", 20 | "\t\t\t\tName Author2Name\temail \"\"", 21 | "\t\t\tFounders:", 22 | "\t\t\t\tName Founder1Name type mixed grantor \"EEUU\" grantId: miID", 23 | "\t\t\t\tLicense Information: \"\" ", 24 | "\t\t\tMaintenance:", 25 | "\t\t\t\tMaintainer:", 26 | "\t\t\t\t\tName Mantainer1Name email \"\"", 27 | "\t\t\t\tErratum? \"\" ", 28 | "\t\t\t\tContribution guides: \"\"", 29 | "", 30 | "\tComposition:", 31 | "\t\t Description: \"General Description of the data\"", 32 | "\t\t Total number of instances: 0 ", 33 | "\t\t Instances: ", 34 | "\t\t\t\t Instance: Instance1Id ", 35 | "\t\t\t\t\t Description: \"Instance Description\"", 36 | "\t\t\t\t\t Type: Record_Data", 37 | "\t\t\t\t\t Number of attributes: 00", 38 | "\t\t\t\t\t Completness: 100", 39 | "\t\t\t\t\t Associated Labels: \"labelId\"", 40 | "\t\t\t\t\t Composition:", 41 | "\t\t\t\t\t\t attribute: attribute1Id", 42 | "\t\t\t\t\t\t\t description: \"Describre the attribute \" ", 43 | "\t\t\t\t\t\t\t // completness: 100 ", 44 | "\t\t\t\t\t\t\t // count: 100 ", 45 | "\t\t\t\t\t\t\t // ofType: 100 ", 46 | "\t\t\t\t\t\t attribute: attribute2Id", 47 | "\t\t\t\t\t\t\t description: \"Describre the attribute \" ", 48 | "\t\t\t\t\t\t\t // completness: 100 ", 49 | "\t\t\t\t\t\t\t // count: 100 ", 50 | "\t\t\t\t\t\t\t // ofType: 100 ", 51 | "\t\t\t\t Instance: Instance2Id ", 52 | "\t\t\t\t\t Description: \"Instance Description\"", 53 | "\t\t\t\t\t Type: Record_Data", 54 | "\t\t\t\t\t Composition:", 55 | "\t\t\t\t\t\t attribute: attribute3Id", 56 | "\t\t\t\t\t\t\t description: \"Describre 
the attribute \" ", 57 | "\t\t\t\t\t\t\t // completness: 100 ", 58 | "\t\t\t\t\t\t\t // count: 100 ", 59 | "\t\t\t\t\t\t\t // ofType: 100 ", 60 | "\t Social Concerns:", 61 | "\t\t // All the social stuff ", 62 | "\t Data Provenance:", 63 | "\t\t Curation Rationale: \"the rationale of curation\"", 64 | "\t\t // All the gathering and annotation stuff " 65 | ], 66 | "description": "Dataset's general information scaffold" 67 | }, 68 | "Instance": { 69 | "prefix": "Instance:", 70 | "body":[ 71 | "Instance: $1InstanceId", 72 | "\tdescription \"$2Instance Description\"", 73 | "\ttype ${3|images,raw,features,people|}", 74 | "\ttotal number 000", 75 | "\twithAttributes:", 76 | "\t\t attribute attribute1Id $5 ofType ${6|string,int,bool|} description: \"$7Describre the attribute \"", 77 | "\t\t attribute attribute2Id ofType ${6|string,int,bool|} description: \"Describre the attribute\"" 78 | ], 79 | "description":"Instance scaffold" 80 | } 81 | } -------------------------------------------------------------------------------- /src/cli/cli-util.ts: -------------------------------------------------------------------------------- 1 | import chalk from 'chalk'; 2 | import path from 'path'; 3 | import fs from 'fs'; 4 | import { AstNode, LangiumDocument, LangiumServices } from 'langium'; 5 | import { URI } from 'vscode-uri'; 6 | 7 | export async function extractDocument(fileName: string, services: LangiumServices): Promise { 8 | const extensions = services.LanguageMetaData.fileExtensions; 9 | if (!extensions.includes(path.extname(fileName))) { 10 | console.error(chalk.yellow(`Please choose a file with one of these extensions: ${extensions}.`)); 11 | process.exit(1); 12 | } 13 | 14 | if (!fs.existsSync(fileName)) { 15 | console.error(chalk.red(`File ${fileName} does not exist.`)); 16 | process.exit(1); 17 | } 18 | 19 | const document = services.shared.workspace.LangiumDocuments.getOrCreateDocument(URI.file(path.resolve(fileName))); 20 | await 
services.shared.workspace.DocumentBuilder.build([document], { validationChecks: 'all' }); 21 | 22 | const validationErrors = (document.diagnostics ?? []).filter(e => e.severity === 1); 23 | if (validationErrors.length > 0) { 24 | console.error(chalk.red('There are validation errors:')); 25 | for (const validationError of validationErrors) { 26 | console.error(chalk.red( 27 | `line ${validationError.range.start.line + 1}: ${validationError.message} [${document.textDocument.getText(validationError.range)}]` 28 | )); 29 | } 30 | process.exit(1); 31 | } 32 | 33 | return document; 34 | } 35 | 36 | export async function extractAstNode(fileName: string, services: LangiumServices): Promise { 37 | return (await extractDocument(fileName, services)).parseResult?.value as T; 38 | } 39 | 40 | interface FilePathData { 41 | destination: string, 42 | name: string 43 | } 44 | 45 | export function extractDestinationAndName(filePath: string, destination: string | undefined): FilePathData { 46 | filePath = path.basename(filePath, path.extname(filePath)).replace(/[.-]/g, ''); 47 | return { 48 | destination: destination ?? 
path.join(path.dirname(filePath), 'generated'), 49 | name: path.basename(filePath) 50 | }; 51 | } 52 | -------------------------------------------------------------------------------- /src/cli/generator.ts: -------------------------------------------------------------------------------- 1 | /* 2 | import fs from 'fs'; 3 | import { CompositeGeneratorNode, NL, processGeneratorNode } from 'langium'; 4 | import path from 'path'; 5 | import { Model } from '../language-server/generated/ast'; 6 | import { extractDestinationAndName } from './cli-util'; 7 | 8 | export function generateJavaScript(model: Model, filePath: string, destination: string | undefined): string { 9 | const data = extractDestinationAndName(filePath, destination); 10 | const generatedFilePath = `${path.join(data.destination, data.name)}.js`; 11 | 12 | const fileNode = new CompositeGeneratorNode(); 13 | fileNode.append('"use strict";', NL, NL); 14 | model.greetings.forEach(greeting => fileNode.append(`console.log('Hello, ${greeting.person.ref?.name}!');`, NL)); 15 | 16 | if (!fs.existsSync(data.destination)) { 17 | fs.mkdirSync(data.destination, { recursive: true }); 18 | } 19 | fs.writeFileSync(generatedFilePath, processGeneratorNode(fileNode)); 20 | return generatedFilePath; 21 | } 22 | */ -------------------------------------------------------------------------------- /src/cli/index.ts: -------------------------------------------------------------------------------- 1 | //import chalk from 'chalk'; 2 | import { Command } from 'commander'; 3 | //import { Model } from '../language-server/generated/ast'; 4 | import { DatasetDescriptorLanguageMetaData } from '../language-server/generated/module'; 5 | //import { createDatasetDescriptorServices } from '../language-server/dataset-descriptor-module'; 6 | //import { extractAstNode } from './cli-util'; 7 | //import { generateJavaScript } from './generator'; 8 | //import { NodeFileSystem } from 'langium/node'; 9 | 10 | export const generateAction = async 
(fileName: string, opts: GenerateOptions): Promise => { 11 | // const services = createDatasetDescriptorServices(NodeFileSystem).DatasetDescriptor; 12 | //const model = await extractAstNode(fileName, services); 13 | // const generatedFilePath = generateJavaScript(model, fileName, opts.destination); 14 | //console.log(chalk.green(`JavaScript code generated successfully: ${generatedFilePath}`)); 15 | }; 16 | 17 | export type GenerateOptions = { 18 | destination?: string; 19 | } 20 | 21 | export default function(): void { 22 | const program = new Command(); 23 | 24 | program 25 | // eslint-disable-next-line @typescript-eslint/no-var-requires 26 | .version(require('../../package.json').version); 27 | 28 | const fileExtensions = DatasetDescriptorLanguageMetaData.fileExtensions.join(', '); 29 | program 30 | .command('generate') 31 | .argument('', `source file (possible file extensions: ${fileExtensions})`) 32 | .option('-d, --destination ', 'destination directory of generating') 33 | .description('generates JavaScript code that prints "Hello, {name}!" for each greeting in a source file') 34 | .action(generateAction); 35 | 36 | program.parse(process.argv); 37 | } 38 | -------------------------------------------------------------------------------- /src/extension.ts: -------------------------------------------------------------------------------- 1 | import * as vscode from 'vscode'; 2 | import * as path from 'path'; 3 | import fs from 'fs'; 4 | import { 5 | LanguageClient, LanguageClientOptions, ServerOptions, TransportKind 6 | } from 'vscode-languageclient/node'; 7 | import { DocumentationGenerator } from './generator-service/dataset-descriptor-documentation'; 8 | import { DatasetUploader } from './uploader-service/dataset-descriptor-uploader'; 9 | import { HintsService } from './hints-service/hints-service'; 10 | 11 | 12 | 13 | let client: LanguageClient; 14 | let previewPanel : vscode.WebviewPanel; 15 | 16 | // This function is called when the extension is activated. 
17 | export function activate(context: vscode.ExtensionContext): void { 18 | client = startLanguageClient(context); 19 | // Here we register the upload service 20 | context.subscriptions.push(vscode.commands.registerCommand('datadesc.loadDataset', async () => { 21 | vscode.window.withProgress( 22 | { 23 | location: vscode.ProgressLocation.Notification, 24 | title: "Loading your data... please wait" 25 | }, 26 | async progress => { 27 | const fileUris = await vscode.window.showOpenDialog({ canSelectFolders: false, canSelectFiles: true, canSelectMany: true, openLabel: 'Select your data files' }); 28 | if (fileUris){ 29 | await uploaderService(context, fileUris[0]); 30 | } 31 | }); 32 | })); 33 | 34 | context.subscriptions.push( 35 | vscode.languages.registerHoverProvider( 36 | 'dataset-descriptor', { 37 | provideHover(document, position, token) { 38 | let hints = new HintsService(); 39 | let content = hints.populateHints(document, position); 40 | 41 | if (content != "empty") return new vscode.Hover(new vscode.MarkdownString(content)); 42 | return null; 43 | } 44 | })); 45 | 46 | // Here we register the HTML generation service 47 | context.subscriptions.push(vscode.commands.registerCommand('datadesc.generateDocumentation', async () => { 48 | await generatorHTMLService(context); 49 | })); 50 | 51 | // Here we register the HTML generation service (save action) 52 | context.subscriptions.push(vscode.commands.registerCommand('datadesc.saveDocumentHTML', async () => { 53 | await saveDocumentHTML(context); 54 | })); 55 | } 56 | 57 | // This function is called when the extension is deactivated. 
58 | export function deactivate(): Thenable | undefined { 59 | if (client) { 60 | return client.stop(); 61 | } 62 | return undefined; 63 | } 64 | 65 | function startLanguageClient(context: vscode.ExtensionContext): LanguageClient { 66 | const serverModule = context.asAbsolutePath(path.join('out', 'language-server', 'main')); 67 | // The debug options for the server 68 | // --inspect=6009: runs the server in Node's Inspector mode so VS Code can attach to the server for debugging. 69 | // By setting `process.env.DEBUG_BREAK` to a truthy value, the language server will wait until a debugger is attached. 70 | const debugOptions = { execArgv: ['--nolazy', `--inspect${process.env.DEBUG_BREAK ? '-brk' : ''}=${process.env.DEBUG_SOCKET || '6009'}`] }; 71 | 72 | // If the extension is launched in debug mode then the debug server options are used 73 | // Otherwise the run options are used 74 | const serverOptions: ServerOptions = { 75 | run: { module: serverModule, transport: TransportKind.ipc }, 76 | debug: { module: serverModule, transport: TransportKind.ipc, options: debugOptions } 77 | }; 78 | 79 | const fileSystemWatcher = vscode.workspace.createFileSystemWatcher('**/*.descml'); 80 | context.subscriptions.push(fileSystemWatcher); 81 | 82 | // Options to control the language client 83 | const clientOptions: LanguageClientOptions = { 84 | documentSelector: [{ scheme: 'file', language: 'dataset-descriptor' }], 85 | synchronize: { 86 | // Notify the server about file changes to files contained in the workspace 87 | fileEvents: fileSystemWatcher 88 | } 89 | }; 90 | 91 | // Create the language client and start the client. 92 | const client = new LanguageClient( 93 | 'dataset-descriptor', 94 | 'dataset-descriptor', 95 | serverOptions, 96 | clientOptions 97 | ); 98 | 99 | // Start the client. 
This will also launch the server 100 | client.start(); 101 | return client; 102 | } 103 | 104 | async function uploaderService(context: vscode.ExtensionContext, filepath: vscode.Uri) { 105 | console.log('start'); 106 | let uploader = new DatasetUploader(); 107 | const text:string = await uploader.uploadDataset(filepath.fsPath); 108 | let snippet = new vscode.SnippetString(); 109 | snippet.appendText(text); 110 | //createDatasetDescriptorServices().shared.workspace.LangiumDocuments.getOrCreateDocument() 111 | const editor = vscode.window.activeTextEditor; 112 | // editor?.insertSnippet(snippet,editor.revealRange()) 113 | if (editor) { 114 | const document = editor.document; 115 | editor.edit(editBuilder => { 116 | //editBuilder.insert(new vscode.Position(document.lineCount,8),snippet); 117 | const regexp = new RegExp('(?:Instances:)'); 118 | let snippetPosition = new vscode.Position(document.lineCount, 5); 119 | for (let index = 0; index < document.lineCount; index++) { 120 | let actualLine = editor.document.lineAt(index); 121 | let text = actualLine.text; 122 | console.log(text) 123 | if(actualLine.text.match(regexp)) { 124 | snippetPosition = new vscode.Position(index+1, 4); 125 | } 126 | 127 | } 128 | editor.insertSnippet(snippet, snippetPosition); 129 | }); 130 | } 131 | vscode.window.showInformationMessage('File Loaded! Start creating your documentation :) '); 132 | } 133 | 134 | async function generatorHTMLService(context: vscode.ExtensionContext) { 135 | let title:string = 'Dataset Documentation'; 136 | previewPanel = vscode.window.createWebviewPanel( 137 | // Webview id 138 | 'liveHTMLPreviewer', 139 | // Webview title 140 | title, 141 | // This will open the second column for preview inside editor 142 | 2, 143 | { 144 | // Enable scripts in the webview 145 | enableScripts: false, 146 | retainContextWhenHidden: false, 147 | // And restrict the webview to only loading content from our extension's `assets` directory. 
148 | localResourceRoots: [vscode.Uri.file(path.join(context.extensionPath, 'assets'))] 149 | 150 | } 151 | ) 152 | setPreviewActiveContext(true); 153 | const generator = new DocumentationGenerator(); 154 | const text = vscode.window.activeTextEditor?.document.getText(); 155 | if (text) { 156 | const returner = generator.generate(text); 157 | updateHtmlPreview(returner); 158 | console.log(returner); 159 | } 160 | previewPanel.onDidDispose(() => { 161 | setPreviewActiveContext(false); 162 | }) 163 | } 164 | 165 | function updateHtmlPreview(html : string | void) { 166 | if (previewPanel && html) { 167 | previewPanel.webview.html = html; 168 | } 169 | } 170 | 171 | function setPreviewActiveContext(value: boolean) { 172 | vscode.commands.executeCommand('setContext', 'liveHTMLPreviewer', value); 173 | } 174 | 175 | function saveDocumentHTML(context: vscode.ExtensionContext) { 176 | const text = previewPanel.webview.html; 177 | const title = previewPanel.title; 178 | if (text) { 179 | // Save the file. TO DO: Ensure only in the workspace is saved 180 | vscode.workspace.workspaceFolders?.forEach(workspace => { 181 | const filePath = workspace.uri.fsPath + "/" + title + ".html"; 182 | fs.writeFileSync(filePath, text, 'utf8'); 183 | // Display a message box to the user 184 | vscode.window.showInformationMessage('Congrats! Your file, '+title+'.html, has been saved in your root folder of the workspace'); 185 | }); 186 | } 187 | } 188 | 189 | -------------------------------------------------------------------------------- /src/generator-service/dataset-descriptor-documentation.ts: -------------------------------------------------------------------------------- 1 | /****************************************************************************** 2 | * Copyright 2022 SOM Research 3 | * This program and the accompanying materials are made available under the 4 | * terms of the MIT License, which is available in the project root. 
5 | ******************************************************************************/ 6 | import { AstNode, LangiumParser} from 'langium'; 7 | import { createDatasetDescriptorServices } from '../language-server/dataset-descriptor-module'; 8 | import { DescriptionDataset, isDescriptionDataset } from '../language-server/generated/ast'; 9 | import { NodeFileSystem } from 'langium/node'; 10 | 11 | 12 | 13 | 14 | export interface Generator { 15 | // Load the Abstract Syntax Tree of the .descML active file 16 | generate(Declaration : string | AstNode) : string | undefined; 17 | // Recieves the parsed AST, generates the HTML, and returns it 18 | declaration2Html(DescriptionDataset : DescriptionDataset) : string; 19 | } 20 | 21 | /** 22 | * Generator HTML service main class 23 | * To generate the HTML we parse the description and we use PUG as a engine teamplate to build the HTML 24 | */ 25 | export class DocumentationGenerator implements Generator { 26 | 27 | private readonly parser: LangiumParser; 28 | 29 | constructor() { 30 | 31 | let services = createDatasetDescriptorServices(NodeFileSystem); 32 | this.parser = services.DatasetDescriptor.parser.LangiumParser; 33 | } 34 | 35 | generate(DescriptionDataset : string | AstNode) : string | undefined { 36 | const astNode = (typeof(DescriptionDataset) == 'string' ? this.parser.parse(DescriptionDataset).value : DescriptionDataset); 37 | return (isDescriptionDataset(astNode) ? 
this.declaration2Html(astNode) : undefined); 38 | } 39 | 40 | // Generation of the HTML 41 | declaration2Html(DescriptionDataset : DescriptionDataset) : string { 42 | const description = { 43 | title : DescriptionDataset.elements[0].name, 44 | metadata : DescriptionDataset.elements[0].generalinfo, 45 | composition : DescriptionDataset.elements[0].composition, 46 | provenance : DescriptionDataset.elements[0].provenance, 47 | socialConcerns : DescriptionDataset.elements[0].socialConcerns, 48 | } 49 | let head = ` 50 | 51 | 52 | ${description.title} 53 | `; 54 | head = this.addSchemaOrg(description, head, description.title) 55 | head = this.addStyles(head); 56 | let body = this.buildBody(description); 57 | head = head + 58 | `` 59 | const html = head + body 60 | return html 61 | } 62 | 63 | addSchemaOrg(description: any, head: string, title: string) : string { 64 | 65 | // Add Authors 66 | let authors = "" 67 | description.metadata.authoring.authors[0].authors.forEach(function (author: any) { 68 | 69 | authors = authors + 70 | `"creator":{ 71 | "@type":"Author", 72 | "url": "", 73 | "name":"${author.name}", 74 | "contactPoint":{ 75 | "@type":"ContactPoint", 76 | "contactType": "email, 77 | "email":${author.email} 78 | } 79 | }, 80 | ` 81 | }) 82 | 83 | // Add funders 84 | let funders = "" 85 | if (description.metadata.authoring.founding.lenght > 0) { 86 | description.metadata.authoring.founding[0].funders.forEach(function (funder: any) { 87 | funders = funders + 88 | `"funder":{ 89 | "@type":"Funder", 90 | "name":"${funder.name}", 91 | "sameAs":"${funder.type}" 92 | }, 93 | ` 94 | }) 95 | } 96 | 97 | let areas = '' 98 | let tags = '' 99 | description.metadata.desc.tags.tags.forEach(function (tag: any) { 100 | tags = tags + tag.name + ',' 101 | }) 102 | description.metadata.desc.area.areas.forEach(function (area: any) { 103 | areas = areas + area.name + ',' 104 | }) 105 | 106 | head = head + ` 107 | ` 150 | return head; 151 | } 152 | 153 | addStyles(head: string) : 
string { 154 | return head + ` 155 | ` 173 | } 174 | 175 | // The PUG file is located inside the "out" folder. As needs to be released in the executable plugin 176 | // So, you may need to go to /out/templates/document.pug to customize the template. 177 | buildBody(description: any) : any { 178 | const pug = require('pug'); 179 | const path = require('path') 180 | let sep = path.sep 181 | let dirname = __dirname; 182 | // Compile the source code using PUG 183 | console.log(dirname+sep+'templates'+sep+'document.pug'); 184 | const compiledFunction = pug.compileFile(dirname+sep+'templates'+sep+'document.pug'); 185 | // Compile the source code 186 | return compiledFunction({ 187 | description: description 188 | }); 189 | } 190 | } 191 | -------------------------------------------------------------------------------- /src/hints-service/hints-service.ts: -------------------------------------------------------------------------------- 1 | /****************************************************************************** 2 | * Copyright 2022 SOM Research 3 | * This program and the accompanying materials are made available under the 4 | * terms of the MIT License, which is available in the project root. 
5 | ******************************************************************************/ 6 | import { TextDocument } from 'vscode'; 7 | 8 | 9 | 10 | 11 | export interface Hints { 12 | // Load the Abstract Syntax Tree of the .descML active file 13 | populateHints(Declaration : string | TextDocument, position: any) : string | undefined; 14 | } 15 | 16 | /** 17 | */ 18 | export class HintsService implements Hints { 19 | 20 | 21 | constructor() { 22 | 23 | } 24 | 25 | populateHints(document : TextDocument, position: any) : string { 26 | const wordRange = document.getWordRangeAtPosition(position); 27 | const word = document.getText(wordRange); 28 | switch (word) { 29 | case "Metadata": 30 | return `## Metadata 31 | In this section authors are expected to provide the metadata of the dataset 32 | ` 33 | case "Dates": 34 | return "Set the release, published and last updated date in DD-MM-YYYY format"; 35 | case "Citation": 36 | return "Set the citations of the dataset"; 37 | // Description 38 | case "Description": 39 | return `### Description 40 | In this section authors are expected to provide a description of the dataset 41 | 42 | #### Purposes: 43 | For what propose was the dataser created? 44 | 45 | #### Tasks: 46 | For what tasks this dataset is inteded for 47 | 48 | #### Gaps: 49 | Was there specific gap that needed to be filled? 
Please provide a description 50 | `; 51 | case "Purposes": 52 | return "For what purpose was the dataset created?"; 53 | case "Tasks": 54 | return "For what tasks this dataset is intended for"; 55 | case "Gaps": 56 | return "Was there a specific gap that needed to be filled?\nPlease provide a description"; 57 | case "Tags": 58 | return "Set the tags separated by a whitespace"; 59 | case "Areas": 60 | return "Set the areas separated by a whitespace"; 61 | // Distribution 62 | case "Distribution": 63 | return `## Distribution 64 | In this section authors are expected to indicate the distribution of the dataset 65 | 66 | ### Licenses: 67 | Set the licence of the dataset. 68 | 69 | ## Rights stand-alone: 70 | Choose the level of distribution of the stand-alone data. 71 | 72 | ## Rights of the models: 73 | Choose the level of distribution of the models trained with the data. 74 | `; 75 | case "Licences": 76 | return "If any listed license fits your use-case, please provide a STRING with the description of the license"; 77 | // Applications 78 | case "Applications": 79 | return `## Applications 80 | In this section authors are expected to indicate the recommended and non-recommended uses of the dataset 81 | 82 | ### Benchmarking 83 | If the dataset has been used in the past, authors are expected to indicate the benchmarking results 84 | Model names, and results should be provided (accuracy, precision, recall, F1-score) 85 | `; 86 | // Authoring 87 | case "Authoring": 88 | return `## Authoring 89 | In this section authors are expected to indicate who created the dataset and who funded the dataset 90 | Please provide information about the organization granting the work 91 | 92 | ### Maintenance 93 | Who maintains the dataset, but also the contribution policies, if there is any erratum, and the data life cycle should be informed in this chapter 94 | `; 95 | case "Funders": 96 | return "Who funded the creation of the dataset?\n2 - If there is any associated grant, please
provide the number and the name of the grantor and the grant name and number \n Set a `_` or a `-` as a white space in the name e.g: 'John_Smith'? "; 97 | case "Authors": 98 | return "Who is the author of the dataset?"; 99 | case "Maintainers": 100 | return "Who maintains the dataset? How can they be contacted?"; 101 | // Composition 102 | case "Composition": 103 | return `## Composition 104 | Please provide information about the composition of the dataset. The type of files (data instances), its number, and information regarding attributes 105 | 106 | ### Statistics 107 | A set of statistics can be provided for each attribute and at a data instance level. Please provide only the statistics that are relevant for the specific dataset use case. 108 | 109 | ### Consistency rules 110 | The Consistency rules can be expressed following OCL. OCL is a language for expressing constraints on models. It is based on the Object Constraint Language (OCL) defined by OMG. \n 111 | 112 | `; 113 | // Provenance 114 | case "Provenance": 115 | return `## Provenance 116 | In this section authors are expected to fill information about the process applied to create the dataset 117 | 118 | ### Curation Rationale 119 | This explanation intends to be a short and comprehensive enumeration of the processes applied over the data, and to provide specific use-case details for this dataset 120 | 121 | ### Gathering 122 | How has the dataset been gathered? Who gathered the dataset? Which are the sources of the data? 123 | 124 | ### Annotation 125 | How has the dataset been annotated? Who annotated the dataset? Which infrastructure was used to annotate the data?
126 | 127 | ### Data preparation 128 | Indicate the process done to prepare the data, and it's type 129 | 130 | `; 131 | // Social Concers 132 | case "Concerns": 133 | return ` 134 | ## Social Concerns 135 | In this section authors are expected to fill information about the social concerns of the data. Is expected to inform 4 types of social concerns \n 136 | 137 | ### Bias concers 138 | Whether the dataset may be biased against a specific social group 139 | 140 | ### Representativeness concerns 141 | Whether the dataset could misrepresent any specific social group 142 | 143 | ### Sensitivity concerns 144 | Does the dataset contains data that can offend a social group? 145 | 146 | ### Privacy Concerns 147 | Is there any privacy concerns on the data? 148 | 149 | `; 150 | default: 151 | return "empty" 152 | } 153 | } 154 | } 155 | -------------------------------------------------------------------------------- /src/language-server/dataset-descriptor-module.ts: -------------------------------------------------------------------------------- 1 | import { 2 | createDefaultModule, createDefaultSharedModule, DefaultSharedModuleContext, inject, 3 | LangiumServices, LangiumSharedServices, Module, PartialLangiumServices 4 | } from 'langium'; 5 | import { DatasetDescriptorGeneratedModule, DatasetDescriptorGeneratedSharedModule } from './generated/module'; 6 | import { DatasetDescriptorValidationRegistry, DatasetDescriptorValidator } from './dataset-descriptor-validator'; 7 | import { DatasetDescriptorScopeComputation } from './dataset-descriptor-scope'; 8 | import { DatasetDescriptorNameProvider } from './dataset-descriptor-naming'; 9 | import { Generator, DocumentationGenerator } from '../generator-service/dataset-descriptor-documentation' 10 | 11 | /** 12 | * Declaration of custom services - add your own service classes here. 
13 | */ 14 | export type DatasetDescriptorAddedServices = { 15 | validation: { 16 | DatasetDescriptorValidator: DatasetDescriptorValidator 17 | }, 18 | generation: { 19 | DocumentationGenerator: Generator 20 | } 21 | } 22 | 23 | /** 24 | * Union of Langium default services and your custom services - use this as constructor parameter 25 | * of custom service classes. 26 | */ 27 | export type DatasetDescriptorServices = LangiumServices & DatasetDescriptorAddedServices 28 | 29 | /** 30 | * Dependency injection module that overrides Langium default services and contributes the 31 | * declared custom services. The Langium defaults can be partially specified to override only 32 | * selected services, while the custom services must be fully specified. 33 | */ 34 | export const DatasetDescriptorModule: Module = { 35 | validation: { 36 | ValidationRegistry: (services) => new DatasetDescriptorValidationRegistry(services), 37 | DatasetDescriptorValidator: () => new DatasetDescriptorValidator() 38 | }, 39 | references: { 40 | ScopeComputation: (services) => new DatasetDescriptorScopeComputation(services), 41 | NameProvider: () => new DatasetDescriptorNameProvider() 42 | }, 43 | generation: { 44 | DocumentationGenerator: (services) => new DocumentationGenerator() 45 | }, 46 | }; 47 | 48 | /** 49 | * Create the full set of services required by Langium. 
50 | * 51 | * First inject the shared services by merging two modules: 52 | * - Langium default shared services 53 | * - Services generated by langium-cli 54 | * 55 | * Then inject the language-specific services by merging three modules: 56 | * - Langium default language-specific services 57 | * - Services generated by langium-cli 58 | * - Services specified in this file 59 | * 60 | * @param context Optional module context with the LSP connection 61 | * @returns An object wrapping the shared services and the language-specific services 62 | */ 63 | export function createDatasetDescriptorServices(context: DefaultSharedModuleContext): { 64 | shared: LangiumSharedServices, 65 | DatasetDescriptor: DatasetDescriptorServices 66 | } { 67 | const shared = inject( 68 | createDefaultSharedModule(context), 69 | DatasetDescriptorGeneratedSharedModule 70 | ); 71 | const DatasetDescriptor = inject( 72 | createDefaultModule({ shared }), 73 | DatasetDescriptorGeneratedModule, 74 | DatasetDescriptorModule 75 | ); 76 | shared.ServiceRegistry.register(DatasetDescriptor); 77 | return { shared, DatasetDescriptor }; 78 | } 79 | -------------------------------------------------------------------------------- /src/language-server/dataset-descriptor-naming.ts: -------------------------------------------------------------------------------- 1 | 2 | /****************************************************************************** 3 | * Copyright 2021 TypeFox GmbH 4 | * This program and the accompanying materials are made available under the 5 | * terms of the MIT License, which is available in the project root. 6 | ******************************************************************************/ 7 | 8 | import { DefaultNameProvider } from 'langium'; 9 | import { isDeclaration, Declaration } from './generated/ast'; 10 | 11 | export function toQualifiedName(pack: Declaration, childName: string): string { 12 | return (isDeclaration(pack.$container) ? 
toQualifiedName(pack.$container, pack.name) : pack.name) + '.' + childName; 13 | } 14 | 15 | export class DatasetDescriptorNameProvider extends DefaultNameProvider { 16 | 17 | /** 18 | * @param qualifier if the qualifier is a `string`, simple string concatenation is done: `qualifier.name`. 19 | * if the qualifier is a `PackageDeclaration` fully qualified name is created: `package1.package2.name`. 20 | * @param name simple name 21 | * @returns qualified name separated by `.` 22 | */ 23 | getQualifiedName(qualifier: Declaration | string, name: string): string { 24 | let prefix = qualifier; 25 | if (isDeclaration(prefix)) { 26 | prefix = (isDeclaration(prefix.$container) 27 | ? this.getQualifiedName(prefix.$container, prefix.name) : prefix.name); 28 | } 29 | return (prefix ? prefix + '.' : '') + name; 30 | } 31 | 32 | } -------------------------------------------------------------------------------- /src/language-server/dataset-descriptor-scope.ts: -------------------------------------------------------------------------------- 1 | /****************************************************************************** 2 | * Copyright 2022 SOM Research 3 | * This program and the accompanying materials are made available under the 4 | * terms of the MIT License, which is available in the project root. 5 | ******************************************************************************/ 6 | 7 | import { AstNodeDescription, DefaultScopeComputation, interruptAndCheck, LangiumDocument, LangiumServices, streamAllContents } from 'langium'; 8 | import { CancellationToken } from 'vscode-jsonrpc'; 9 | import { isAttribute, DataInstance, isLabels, isDataInstance, isSocialIssue} from './generated/ast'; 10 | 11 | export class DatasetDescriptorScopeComputation extends DefaultScopeComputation { 12 | 13 | constructor(services: LangiumServices) { 14 | super(services); 15 | } 16 | 17 | /** 18 | * Exports only types (`DataType or `Entity`) with their qualified names. 
19 | */ 20 | async computeExports(document: LangiumDocument, cancelToken = CancellationToken.None): Promise { 21 | const descr: AstNodeDescription[] = []; 22 | for (const modelNode of streamAllContents(document.parseResult.value)) { 23 | await interruptAndCheck(cancelToken); 24 | 25 | let name = this.nameProvider.getName(modelNode); 26 | let container = modelNode.$container as DataInstance; 27 | if (name) { 28 | if (isAttribute(modelNode) || isDataInstance(modelNode) || isLabels (modelNode) || isSocialIssue(modelNode)) { 29 | descr.push(this.descriptions.createDescription(modelNode, container.name+'.'+ name, document)); 30 | //name = (this.nameProvider as DomainModelNameProvider).getQualifiedName(modelNode.$container as PackageDeclaration, name); 31 | } 32 | descr.push(this.descriptions.createDescription(modelNode, name, document)); 33 | } 34 | 35 | } 36 | return descr; 37 | } 38 | 39 | } -------------------------------------------------------------------------------- /src/language-server/dataset-descriptor-validator.ts: -------------------------------------------------------------------------------- 1 | /****************************************************************************** 2 | * Copyright 2022 SOM Research 3 | * This program and the accompanying materials are made available under the 4 | * terms of the MIT License, which is available in the project root. 
5 | ******************************************************************************/ 6 | 7 | import { ValidationAcceptor, ValidationChecks, ValidationRegistry } from 'langium'; 8 | import { DatasetDescriptorAstType, Author, Funder, Authoring, Tasks, Description, Areas, Tags, Distribution, Categor } from './generated/ast'; 9 | import { DatasetDescriptorServices } from './dataset-descriptor-module'; 10 | 11 | /** 12 | * In this class we implement the custom validation services for the tool 13 | */ 14 | //type DatasetDescriptorChecks = { [type in DatasetDescriptorAstType ]: ValidationCheck | ValidationCheck[] } 15 | 16 | /** 17 | * Registry for validation checks. 18 | */ 19 | export class DatasetDescriptorValidationRegistry extends ValidationRegistry { 20 | constructor(services: DatasetDescriptorServices) { 21 | super(services); 22 | const validator = services.validation.DatasetDescriptorValidator; 23 | const checks: ValidationChecks = { 24 | Description: validator.hintsOfDescription, 25 | Author: validator.authorValidator, 26 | Funder: validator.hintsOfFunder, 27 | Authoring: validator.hintsOfAuthoring, 28 | Categor:validator.statVerification, 29 | Areas: validator.hintsOfAreas, 30 | Tags: validator.hintsofTags, 31 | Distribution:validator.hintsOfDistribution 32 | }; 33 | this.register(checks, validator); 34 | } 35 | } 36 | 37 | /** 38 | * Implementation of custom validations. 39 | */ 40 | export class DatasetDescriptorValidator { 41 | 42 | hintsofTags(type: Tags, accept: ValidationAcceptor): void { 43 | accept('hint', 'Set the tags separated by a whitespace', { node: type, property: 'tags'}); 44 | } 45 | hintsOfAreas(type: Areas, accept: ValidationAcceptor): void { 46 | accept('hint', 'Set the areas separated by a whitespace', { node: type, property: 'areas'}); 47 | } 48 | 49 | hintsOfDistribution(type: Distribution, accept: ValidationAcceptor): void { 50 | accept('hint', 'Set the licence of the dataset. 
Indicate in `others:` if any other policy is applied to the data', { node: type, property: 'name'}); 51 | accept('hint', 'Stand-alone: Choose the level of distribution of the stand-alone data.', { node: type, property: 'rights'}); 52 | accept('hint', 'Rights-model: Choose the level of distribution of the models trained with the data.', { node: type, property: 'rightsModels'}); 53 | } 54 | 55 | hintsOfDescription(type:Description, accept: ValidationAcceptor): void { 56 | //new MultilineCommentHoverProvider(services: DatasetDescriptorServices).getHoverContent(type, params); 57 | accept('hint', 'For what propose was the dataser created? \nPlease provide a description', { node: type, property: 'descriptionpurpose' }); 58 | accept('hint', 'For what tasks this dataset is inteded for', { node: type, property: 'tasks' }); 59 | accept('hint', 'Was there specific gap that needed to be filled?\nPlease provide a description', { node: type, property: 'descriptionGaps'}); 60 | } 61 | 62 | 63 | hintsOfTasks(type: Tasks, accept: ValidationAcceptor): void { 64 | accept('hint', 'Was there a specific task in mind?\nPlease provide a description', { node: type, property: 'name'}); 65 | 66 | } 67 | 68 | hintsOfFunder(type: Funder, accept: ValidationAcceptor): void { 69 | accept('hint', '1 - Who founded the creation of the dataset?\n2 - If is there any associated grant, please provide the number and the name of the grantor and the gran name and number \n Set a `_` or a `-` as a white spaces in the name e.g: "John_Smith"? ', { node: type, property:'name' }); 70 | } 71 | hintsOfAuthoring(type: Authoring, accept: ValidationAcceptor): void { 72 | 73 | accept('hint', 'Who is the author of the dataset?', { node: type, property:'name' }); 74 | accept('hint', 'Who maintan the dataset? How can be contacted?', { node: type, property: 'maintainers' }); 75 | accept('hint', 'Is there an erratum? 
If so, please provide a link or other access point?', { node: type, property: 'erratum' }); 76 | accept('hint', 'If the dataset belongs to people, are there applicable limits on the retention of the data associated with them? If so, please describre how. If not, please describre how its obsolescence will be communicated to the dataset', { node: type, property: 'dataRetention' }); 77 | accept('hint', '1 - Will the dataset by updated (p.e: to correct labels, add or delete new instances)? If so, please describre how \n2 - Will older version of the dataset continue to be supported/hosted/maintained?', { node: type, property: 'support' }); 78 | accept('hint', 'Please describre the mechanism for contribution here', { node: type, property: 'contribGuides' }); 79 | } 80 | 81 | 82 | statVerification(type: Categor, accept: ValidationAcceptor): void { 83 | if (type.complet) { 84 | if (type.complet > 100) { 85 | accept('error', 'Completeness should be between 0 and 100', { node: type, property: 'complet' }); 86 | } 87 | } 88 | // accept('hint', 'What do the instances that comprise the dataset represent(for example, documents, photos, people, countries)', { node: type, property: 'compodesc' }); 89 | // accept('hint', 'How many instances are there in total?', { node: type, property: 'numberInst' }); 90 | } 91 | 92 | 93 | authorValidator(type: Author, accept: ValidationAcceptor): void { 94 | 95 | accept('hint', 'Please, set a `_` or a `-` as a white spaces in the name e.g: "John_Smith"?', { node: type, property:'name' }); 96 | if (type.name) { 97 | const firstChar = type.name.substring(0, 1); 98 | if (firstChar.toUpperCase() !== firstChar) { 99 | accept('warning', 'Type name should start with a capital.', { node: type, property: 'name' }); 100 | } 101 | } 102 | } 103 | 104 | } 105 | -------------------------------------------------------------------------------- /src/language-server/generated/module.ts: 
-------------------------------------------------------------------------------- 1 | /****************************************************************************** 2 | * This file was generated by langium-cli 1.1.0. 3 | * DO NOT EDIT MANUALLY! 4 | ******************************************************************************/ 5 | 6 | import { LangiumGeneratedServices, LangiumGeneratedSharedServices, LangiumSharedServices, LangiumServices, LanguageMetaData, Module } from 'langium'; 7 | import { DatasetDescriptorAstReflection } from './ast'; 8 | import { DatasetDescriptorGrammar } from './grammar'; 9 | 10 | export const DatasetDescriptorLanguageMetaData: LanguageMetaData = { 11 | languageId: 'dataset-descriptor', 12 | fileExtensions: ['.descml'], 13 | caseInsensitive: false 14 | }; 15 | 16 | export const DatasetDescriptorGeneratedSharedModule: Module = { 17 | AstReflection: () => new DatasetDescriptorAstReflection() 18 | }; 19 | 20 | export const DatasetDescriptorGeneratedModule: Module = { 21 | Grammar: () => DatasetDescriptorGrammar(), 22 | LanguageMetaData: () => DatasetDescriptorLanguageMetaData, 23 | parser: {} 24 | }; 25 | -------------------------------------------------------------------------------- /src/language-server/main.ts: -------------------------------------------------------------------------------- 1 | import { startLanguageServer } from 'langium'; 2 | import { NodeFileSystem } from 'langium/node'; 3 | import { createConnection, ProposedFeatures } from 'vscode-languageserver/node'; 4 | import { createDatasetDescriptorServices } from './dataset-descriptor-module'; 5 | 6 | // Create a connection to the client 7 | const connection = createConnection(ProposedFeatures.all); 8 | 9 | // Inject the shared services and language-specific services 10 | const { shared } = createDatasetDescriptorServices({ connection, ...NodeFileSystem }); 11 | 12 | // Start the language server with the shared services 13 | startLanguageServer(shared); 14 | 
-------------------------------------------------------------------------------- /src/uploader-service/dataset-descriptor-uploader.ts: -------------------------------------------------------------------------------- 1 | /****************************************************************************** 2 | * Copyright 2022 SOM Research 3 | * This program and the accompanying materials are made available under the 4 | * terms of the MIT License, which is available in the project root. 5 | ******************************************************************************/ 6 | 7 | import { parse } from 'csv-parse/sync'; 8 | import fs from 'fs'; 9 | import path from 'path'; 10 | import { DatasetMetrics } from './dataset-metrics'; 11 | 12 | export interface Uploader { 13 | // The main function, recieves a filepath of the selected .csv from the user and loads the content 14 | uploadDataset(filepath: string): Promise; 15 | // Build the snippet and return it to the LSP 16 | buildDescriptionDraft(data: Array, filepath: string): string; 17 | } 18 | 19 | /** 20 | * Data uploader service main class 21 | */ 22 | export class DatasetUploader implements Uploader { 23 | 24 | constructor() { 25 | } 26 | 27 | // Get the dataset file, read it, parse it, and build the description draft 28 | async uploadDataset(filepath: string): Promise { 29 | // Loading the .CSV 30 | const fileContent = fs.readFileSync(filepath, { encoding: 'utf-8' }); 31 | 32 | // Parsing the .CSV 33 | const parsed: Array = parse(fileContent) as Array; 34 | // Building the snnipet 35 | const descriptionDraft = this.buildDescriptionDraft(parsed, filepath); 36 | return descriptionDraft; 37 | } 38 | 39 | // Building the description draft 40 | buildDescriptionDraft(data: Array, filepath: string) { 41 | // Build the metadata snippet 42 | let body: string = this.buildMetadataSnippet(); 43 | // Build Composition snippet 44 | body = body + this.buildCompositionSnippet(data, filepath); 45 | // Build Provenance ans social concerns 
snippet 46 | body = body + this.buildProvenanceSnippet(data, filepath); 47 | // Build Social Concern snippet 48 | body = body + this.buildSocialConcernSnippet(data, filepath); 49 | return body; 50 | } 51 | // Building the metadata snippet 52 | buildMetadataSnippet() { 53 | return `Dataset: datasetName 54 | Metadata: 55 | Title: "" 56 | Unique-identifier: aUniqueId 57 | Version: v0000 58 | Dates: 59 | Release Date: 11-11-1989 60 | Citation: 61 | Raw Citation: "" 62 | Main Description: 63 | Purposes: "" 64 | Tasks: [other] 65 | Gaps: "" 66 | Areas: datasetArea 67 | Tags: datasetTags 68 | Applications: 69 | Past Uses:"" 70 | Recommended:"" 71 | Non-recommended:"" 72 | Benchmarking: [ ] 73 | Distribution: 74 | Licences: CC0: Public Domain 75 | Rights(stand-alone): Access 76 | Rights(with models): Benchmark 77 | Authoring: 78 | Authors: 79 | Name "authorName" email XXXX@mail.com 80 | Funders: 81 | Name "founderName" type mixed 82 | Erratum?: "" 83 | Version lifecycle:"" 84 | Contribution guidelines: "" 85 | \n`; 86 | } 87 | // Building Composition snippet 88 | buildCompositionSnippet(data: Array, filepath: string) { 89 | 90 | const datasetMetrics = new DatasetMetrics; 91 | // Get Headers 92 | const headers: Array = data[0]; 93 | // Get number of rows 94 | const numberofResults = data.length - 1 95 | let body: string = ` 96 | Composition: 97 | Rationale: "" 98 | Total size: ${numberofResults} 99 | Data Instances: 100 | Instance: ${path.basename(filepath).split('.')[0]} 101 | Description: \"Describe the instance\" 102 | Type: Record-Data 103 | Attribute number: ${headers.length} 104 | Attributes:\n`; 105 | // For each attribute 106 | headers.forEach((attr, index) => { 107 | let attrData = data.map(function (value, index2) { return value[index]; }); 108 | const datHeaders = attrData.shift(); 109 | // Calculate completness 110 | 111 | const completness = datasetMetrics.attributeCompletness(attrData); 112 | // Calculate unique values 113 | const unique = 
datasetMetrics.attributeUnique(attrData); 114 | // Check if all the elements are numeric 115 | let isNumber = datasetMetrics.isAttributeNumerical(attrData); 116 | // Check if attributes are categorical or numerical 117 | if ((attrData.length < 50 || attrData.length * 0.80 > unique) || (isNumber == false)) { 118 | // Categorical 119 | // Calculate Mode 120 | const mode = datasetMetrics.attributeMode(attrData); 121 | // Calculate Cateogrical Distribution 122 | const catDist = datasetMetrics.attributeCatDist(attrData);; 123 | if (catDist === false) { 124 | body = body + 125 | `\t\t\t\tAttribute: ${datHeaders.replaceAll(' ', '_')} 126 | Description: \"Describe the attribute\" 127 | Count: ${unique} 128 | OfType: Categorical 129 | Statistics: 130 | Mode: "${mode}" 131 | Quality Metrics: 132 | Completeness: ${completness} \n`; 133 | } else { 134 | body = body + 135 | `\t\t\t\tAttribute: ${datHeaders.replaceAll(' ', '_')} 136 | Description: \"Describe the attribute\" 137 | Count: ${unique} 138 | OfType: Categorical 139 | Statistics: 140 | Mode: "${mode}" 141 | Categoric Distribution: ${catDist} 142 | Quality Metrics: 143 | Completeness: ${completness} \n`; 144 | } 145 | 146 | } else { 147 | // Numerical 148 | // Parse to numbers 149 | const attrNumerical: number[] = attrData.map(Number) 150 | // Calculate mean 151 | const mean = Math.round((attrNumerical.reduce((a, b) => a + b) / attrNumerical.length) * 10) / 10; 152 | // Calculate variance 153 | const variance = attrData.map(ele => Math.pow(ele - mean, 2)).reduce((a, b) => a + b); 154 | // Calculate covariance 155 | const std = Math.round((Math.sqrt(variance)) * 10) / 10; 156 | // Calculate maximmum 157 | const max = Math.max(...attrNumerical.map(o => o)) 158 | // Calculate minimmum 159 | const min = Math.min(...attrNumerical.map(o => o)); 160 | body = body + `\t\t\t\tAttribute: ${datHeaders.replaceAll(' ', '_')} 161 | Description: \"Describe the attribute\" 162 | Count: ${unique} 163 | OfType: Numerical 164 | 
Statistics: 165 | Mean: ${mean} 166 | Standard Desviation: ${std} 167 | Minimmum: ${min} 168 | Maximmum: ${max}\n`; 169 | } 170 | 171 | 172 | 173 | 174 | }); 175 | 176 | body = body + 177 | ` Statistics: 178 | Quality Metrics: 179 | Sparsity: 00 // Not calculated, to be filled 180 | Dependencies: 181 | Description: "" 182 | Data Splits: ""\n` 183 | 184 | return body; 185 | } 186 | // Building the provenance part 187 | buildProvenanceSnippet(data: Array, filepath: string) { 188 | return ` 189 | Data Provenance: 190 | Curation Rationale: "" 191 | Gathering Processes: 192 | Process: gatherProcesId 193 | Description: "" 194 | Source: SourceID 195 | Description: "" 196 | Noise: "" 197 | How data is collected: Others // Choose an option from the list 198 | Gather Requirements: 199 | Requirement: "" 200 | LabelingProcesses: 201 | Labeling process: labelProcesIDReference 202 | Description: "" 203 | Type: 3D cuboids // Choose an option from the list 204 | Labels: 205 | Label: labelIdReference 206 | Description: "" 207 | Mapping: DECLARED_ATTRIBUTE_ID // Set the ID of the label attribute 208 | Label Requirements: 209 | Requirement: "" 210 | Preprocesses: 211 | Preprocess: preprocessId 212 | Description: "" 213 | Type: Others 214 | ` 215 | 216 | } 217 | 218 | buildSocialConcernSnippet(data: Array, filepath: string) { 219 | return ` 220 | Social Concerns: 221 | Social Issue: issueId 222 | IssueType: Privacy // Choose one from the list 223 | Description: "" 224 | Related-Attributes: 225 | attribute: DECLARED_ATTRIBUTE_ID // Set the affected attribute 226 | 227 | 228 | `; 229 | } 230 | 231 | } -------------------------------------------------------------------------------- /src/uploader-service/dataset-metrics.ts: -------------------------------------------------------------------------------- 1 | /****************************************************************************** 2 | * Copyright 2022 SOM Research 3 | * This program and the accompanying materials are made available 
under the 4 | * terms of the MIT License, which is available in the project root. 5 | ******************************************************************************/ 6 | 7 | export interface Metrics { 8 | // Calculate attribute unique values 9 | attributeUnique(attrData: Array): any; 10 | // Calculate attribute completeness 11 | attributeCompletness(attrData: Array): any; 12 | // Calculate attribute categorical distribution 13 | attributeCatDist(attrData: Array): any; 14 | // Calculate attribute mode 15 | attributeMode(arr: Array): any; 16 | // Calculate if the attribute is composed of numbers 17 | isAttributeNumerical(attrData: Array): any; 18 | } 19 | 20 | /** 21 | * Data uploader service main class 22 | */ 23 | export class DatasetMetrics implements Metrics { 24 | 25 | attributeUnique(attrData: Array): Number { 26 | let uniques = attrData.filter((v, i, a) => a.indexOf(v) === i); 27 | return uniques.length; 28 | } 29 | 30 | attributeCompletness(attrData: Array): any { 31 | let validValues = 0; 32 | attrData.forEach(element => { 33 | if (element === undefined || element === null || element == '' || element == 'NaN') { 34 | } else { 35 | validValues = validValues + 1; 36 | } 37 | }); 38 | if (validValues != 0) return ((validValues / attrData.length) * 100).toFixed(0); 39 | else return 0 40 | } 41 | 42 | attributeCatDist(attrData: Array) { 43 | let percent = 0; 44 | let body = "["; 45 | let uniques = attrData.filter((v, i, a) => a.indexOf(v) === i); 46 | uniques.forEach((value, idx, array) => { 47 | percent = (attrData.filter((v) => (v === value)).length / attrData.length) * 100; 48 | if (percent >= 0.1) { 49 | body = body + '"' + value + '"' + ":" + (Math.round(percent * 10) / 10) + '%, '; 50 | } 51 | }); 52 | body = body.slice(0, -2) + "]"; 53 | if (body.length < 5) return false 54 | else return body 55 | } 56 | 57 | isAttributeNumerical(attrData: Array) { 58 | let isNumber = true; 59 | attrData.forEach(value => { 60 | if (isNaN(Number(value))) isNumber = false; 
61 | }); 62 | return isNumber; 63 | } 64 | 65 | attributeMode(arr: Array) { 66 | const mode = []; 67 | let max = 0, count = 0; 68 | for (let i = 0; i < arr.length; i++) { 69 | const item = arr[i]; 70 | if (mode[item]) { 71 | mode[item]++; 72 | } else { 73 | mode[item] = 1; 74 | } 75 | if (count < mode[item]) { 76 | max = item; 77 | count = mode[item]; 78 | } 79 | } 80 | return max; 81 | }; 82 | } 83 | -------------------------------------------------------------------------------- /syntaxes/dataset-descriptor.tmLanguage.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dataset-descriptor", 3 | "scopeName": "source.dataset-descriptor", 4 | "fileTypes": [ 5 | ".descml" 6 | ], 7 | "patterns": [ 8 | { 9 | "include": "#comments" 10 | }, 11 | { 12 | "name": "keyword.control.dataset-descriptor", 13 | "match": "\\b(3D cuboids|abstractive-qa|Access|and|API|Asymmetric|asynchronous intercation|audio-classification|automatic-speech-recognition|Automatic-Speech-Recognition|Benchmark|between|Bias|bigger|Bounding boxes|caption-retrieval|Categorical|CC BY-NC-SA 4\\.0|CC BY-SA 3\\.0|CC BY-SA 4\\.0|CC0: Public Domain|classification|closed-domain-abstrative-qa|Cluster Sampling|Code Generation|code-generation|Comments|Community Data License|Community Data License Agreement - Permissive - Version 1\\.0|Community Data License Agreement - Sharing - Version 1\\.0|computer-vision|conditional-text-generation|contact|Content and textual categorization|Contractors|conversational|Creative Commons|cross-language-transcription|crowdsourced|Crowdsourcing|Data Augmentation|Data Normalization|Data reduction|data-integration|data-to-text|Database: Open Database, Contents: © Original Authors|Database: Open Database, Contents: Database Contents|deduplication|dialog-response-generation|diferent|Distribute|email|email subject|Entity annotation|entity-extraction|equal|equal\\+bigger|EU ODP Legal Notice|Evaluation of language models|External|external 
source|feature-extraction|fill-mask|generative-modelling|GPL|GPL 2|gpt-3|grammatical error correction|grantor|Graph|Haphazard Sampling|image|Image and video annotations|image-captioning|image-classification|image-enhancement|image-retrieval|image-segmentation|image-to-text|Inclusive Language|influence-attribution|Information Retrieval|information-retrieval|Internal|Internal Use|Landmark and key-point|Language|Language-model|language-modeling|language-modelling|linear-regression|Lines and splines|litle|long-texts|machine translation|machine-translation|Manual Human Curator|masked-language-modeling|meeting title|Might be offensive|minus|Missing Values|mixed|Model Commercialization|more|multi-class classification|Multi-stage Sampling|multiple-choice|Name|named-entity-disambiguation|named-entity-recognition|natural-language-inference|neural-machine-translation|news-classification|No|NO|Non-declared|Numerical|object-detection|one liner summary|Open Data Commons|Ordered|other|Other|other-test|other-text-search|Others|Outlier Filtering|Output Commercialization|paraphrase|paraphrase detection|paraphrasing|patents|Platform|Polygonal segmentation|Privacy|private|public|Publish|query-paraphrasing|question_answering|question-answering|question-answering-retrieval|question-generation|question-pairing|Re-Represent|reasoning|Record-Data|Reddit API Terms|Remove Duplicates|Research|Sampling|Sampling Distribution|Sampling Mean|Sampling Rate|Sampling Standard Deviation|Sampling Variation|Scrapping|Scripted/edited|Semantic Search|Semantinc Segmentation|Sensitive Data|Sensors|sentence-similarity|sentiment-analysis|sequence-modeling|sequence-modelling|sequence2sequence|simplification|Social Impact|Software|Special|speech-processing|speech-synthesis|spoken/signed|spontaneous|Stratified Sampling|structure-prediction|summarization|Summarization|super-resolution|symbolic-regression|Symmetric|synchronous interaction|syntactic-evaluation|Systematic 
Sampling|table-question-answering|table-to-text|tabular-classification|tabular-to-text|Tagging|text|Text Neutralization|text_classification|text-classification|text-generation|text-generation-other-code-modeling|text-generation-other-common-sense-inference|text-generation-other-discourse-analysis|text-mining|text-regression|text-retrieval|text-scoring|text-to-slide|text-to-speech|text-to-structured|text-to-tabular|Text2Text generation|text2text-generation|textual-entailment|The Montreal data licence|Time-Series|time-series-forecasting|Token Classification|token-classification|Tool|topic modeling|topic-classification|Total Data Sampled|transkation|translation|Translation|tts|type|Type|U\\.S\\. Government Works|unpaired-image-to-image-translation|Version|video-captionning|visual-question-answering|Weighted Sampling|World Bank Dataset Terms of Use|written|Yes|YES|zero-shot-classification|zero-shot-information-retrieval|zero-shot-retrieval)\\b|\\b(Acurracy:|Additional Conditions:|Age:|AGPL 3\\.0 \\(GNU Affero General Public License 3\\.0\\)|and attribute:|Are there protected groups\\?:|Areas:|Attribute number:|Attribute Sparsity:|attribute:|attribute:\\(|Binary attribute:|Categoric Distribution: \\[|CC BY 3\\.0 \\(Attribution 3\\.0 Unported\\)|CC BY 3\\.0 IGO \\(Attribution 3\\.0 IGO\\)|CC BY 4\\.0 \\(Attribution 4\\.0 International\\)|CC BY-NC 4\\.0 \\(Attribution-NonCommercial 4\\.0 International\\)|CC BY-NC-ND 4\\.0 \\(Attribution-NonCommercial-NoDerivatives 4\\.0 International\\)|CC BY-NC-SA 3\\.0 IGO \\(Attribution-NonCommercial-ShareAlike 3\\.0 IGO\\)|CC BY-ND 4\\.0 \\(Attribution-NoDerivatives 4\\.0 International\\)|Chi-Squared:|Cite authors:|Class Balance:|Complete archival version:|Contribution guidelines:|Correlations:|Count:|Country/Region:|Cramers: \\[|Credit/Attribution Notice:|Curation Rationale:|Data Instances:|Data Retention:|Data Splits:|Dependencies:|Description:|Designated Third Partes:|DOI:|Erratum\\?:|F1:|FDL 1\\.3 \\(GNU Free Documentation License 
1\\.3\\)|First Rows:|Gaps:|Gathering dates range:|Gathering Processes:|Gender:|Golden Questions:|grantId:|Have sensitive attributes\\?|How data is collected:|Infrastructure:|Instance belongs to people:|Instances relation:|Inteded Audience:|Inter-annotation agreement:|inv:|IQR:|Is language data\\?:|Is public\\?:|is related to:|Is sample:|IssueType:|Journal:|Kendall: \\[|Keywords:|Label:|Labels:|Lenght-histogram: \\[|LGPL 3\\.0 \\(GNU Lesser General Public License 3\\.0\\)|Licences:|Links:|Maintainer:|Mapping:|Max-lenght:|Maximmum:|Mean:|Median-lenght:|Median:|Metrics: \\[|Min-lenght:|Minimmum:|Missing Values:|Modality:|Mode:|Model:|Native Language:|Noise:|Noisy labels:|Non-recommended:|Number of speakers represented:|ODC-BY 1\\.0 \\(ODC Attribution License\\)|OfType:|p-value:|Past Uses:|PDDL \\(ODC Public Domain Dedication and Licence\\)|Pearson: \\[|Phi-k: \\[|Precision:|Presence of disordered speech:|Process:|Published Date:|Purposes:|Q1:|Q2:|Q3:|Q4:|Quartiles:|Question:|Race/Ethnicity:|Rate Limit Usage:|Rationale:|Raw Citation:|Recall:|Recommended:|Reference:|Related-Attributes:|Related-Instances:|Relation:|Release Date:|Requirement:|Restriction:|Rights\\(stand-alone\\):|Rights\\(with models\\):|Sampling Characteristics:|Sampling Criteria:|Sampling Method:|Size:|Social Issues:|Socioeconomic status|Sparsity:|Spearman: \\[|Standard Desviation:|statistic:|Symmetry:|Syncrony:|Tags:|Task:|Tasks:|Time and place:|Title:|Total size:|Training in linguistics/other relevan disciplines:|Type:|Unique percentage:|Unique values:|Unique-identifier:|Unique:|Updated Date:|Url:|Uses repository:|Validation Dates:|Validation Method:|Validation Requirements:|Validation:|Version lifecycle:|Version:|When data was collected:|Who collects the data:|Year:|Completeness|From:)\\B" 14 | }, 15 | { 16 | "name": "entity.name.type.class", 17 | "match": "\\b(Dates:|Distribution:|Main Description:|Applications:|Social Issue:|Preprocess:|Preprocesses:|Label Requirements:|Labeling Team:|Labeling 
process:|LabelingProcesses:|Gather Requirements:|Gathering Team:|Process Demographics:|Gathering Processes:|Consistency rules:|Pair Correlation:|Quality Metrics:|Statistics:|Attributes:|Dataset:|Metadata:|Authoring:|Authors:|Funders:|Composition:|Instance:|Social Concerns:|Data Provenance:|Source:|Benchmarking:|Infraestructure:|Attribute:|Citation:)\\B" 18 | }, 19 | { 20 | "name": "string.quoted.double.dataset-descriptor", 21 | "begin": "\"", 22 | "end": "\"" 23 | }, 24 | { 25 | "name": "string.quoted.single.dataset-descriptor", 26 | "begin": "'", 27 | "end": "'" 28 | } 29 | ], 30 | "repository": { 31 | "comments": { 32 | "patterns": [ 33 | { 34 | "name": "comment.block.dataset-descriptor", 35 | "begin": "/\\*", 36 | "beginCaptures": { 37 | "0": { 38 | "name": "punctuation.definition.comment.dataset-descriptor" 39 | } 40 | }, 41 | "end": "\\*/", 42 | "endCaptures": { 43 | "0": { 44 | "name": "punctuation.definition.comment.dataset-descriptor" 45 | } 46 | } 47 | }, 48 | { 49 | "begin": "//", 50 | "beginCaptures": { 51 | "1": { 52 | "name": "punctuation.whitespace.comment.leading.dataset-descriptor" 53 | } 54 | }, 55 | "end": "(?=$)", 56 | "name": "comment.line.dataset-descriptor" 57 | } 58 | ] 59 | } 60 | } 61 | } -------------------------------------------------------------------------------- /syntaxes/static-tmLang.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dataset-descriptor", 3 | "scopeName": "source.dataset-descriptor", 4 | "fileTypes": [ 5 | ".descml" 6 | ], 7 | "patterns": [ 8 | { 9 | "include": "#comments" 10 | }, 11 | { 12 | "name": "keyword.control.dataset-descriptor", 13 | "match": "\\b(3D cuboids|abstractive-qa|Access|and|API|Asymmetric|asynchronous intercation|audio-classification|automatic-speech-recognition|Automatic-Speech-Recognition|Benchmark|between|Bias|bigger|Bounding boxes|caption-retrieval|Categorical|CC BY-NC-SA 4\\.0|CC BY-SA 3\\.0|CC BY-SA 4\\.0|CC0: Public 
Domain|classification|closed-domain-abstrative-qa|Cluster Sampling|Code Generation|code-generation|Comments|Community Data License|Community Data License Agreement - Permissive - Version 1\\.0|Community Data License Agreement - Sharing - Version 1\\.0|computer-vision|conditional-text-generation|contact|Content and textual categorization|Contractors|conversational|Creative Commons|cross-language-transcription|crowdsourced|Crowdsourcing|Data Augmentation|Data Normalization|Data reduction|data-integration|data-to-text|Database: Open Database, Contents: © Original Authors|Database: Open Database, Contents: Database Contents|deduplication|dialog-response-generation|diferent|Distribute|email|email subject|Entity annotation|entity-extraction|equal|equal\\+bigger|EU ODP Legal Notice|Evaluation of language models|External|external source|feature-extraction|fill-mask|generative-modelling|GPL|GPL 2|gpt-3|grammatical error correction|grantor|Graph|Haphazard Sampling|image|Image and video annotations|image-captioning|image-classification|image-enhancement|image-retrieval|image-segmentation|image-to-text|Inclusive Language|influence-attribution|Information Retrieval|information-retrieval|Internal|Internal Use|Landmark and key-point|Language|Language-model|language-modeling|language-modelling|linear-regression|Lines and splines|litle|long-texts|machine translation|machine-translation|Manual Human Curator|masked-language-modeling|meeting title|Might be offensive|minus|Missing Values|mixed|Model Commercialization|more|multi-class classification|Multi-stage Sampling|multiple-choice|Name|named-entity-disambiguation|named-entity-recognition|natural-language-inference|neural-machine-translation|news-classification|No|NO|Non-declared|Numerical|object-detection|one liner summary|Open Data Commons|Ordered|other|Other|other-test|other-text-search|Others|Outlier Filtering|Output Commercialization|paraphrase|paraphrase detection|paraphrasing|patents|Platform|Polygonal 
segmentation|Privacy|private|public|Publish|query-paraphrasing|question_answering|question-answering|question-answering-retrieval|question-generation|question-pairing|Re-Represent|reasoning|Record-Data|Reddit API Terms|Remove Duplicates|Research|Sampling|Sampling Distribution|Sampling Mean|Sampling Rate|Sampling Standard Deviation|Sampling Variation|Scrapping|Scripted/edited|Semantic Search|Semantinc Segmentation|Sensitive Data|Sensors|sentence-similarity|sentiment-analysis|sequence-modeling|sequence-modelling|sequence2sequence|simplification|Social Impact|Software|Special|speech-processing|speech-synthesis|spoken/signed|spontaneous|Stratified Sampling|structure-prediction|summarization|Summarization|super-resolution|symbolic-regression|Symmetric|synchronous interaction|syntactic-evaluation|Systematic Sampling|table-question-answering|table-to-text|tabular-classification|tabular-to-text|Tagging|text|Text Neutralization|text_classification|text-classification|text-generation|text-generation-other-code-modeling|text-generation-other-common-sense-inference|text-generation-other-discourse-analysis|text-mining|text-regression|text-retrieval|text-scoring|text-to-slide|text-to-speech|text-to-structured|text-to-tabular|Text2Text generation|text2text-generation|textual-entailment|The Montreal data licence|Time-Series|time-series-forecasting|Token Classification|token-classification|Tool|topic modeling|topic-classification|Total Data Sampled|transkation|translation|Translation|tts|type|Type|U\\.S\\. 
Government Works|unpaired-image-to-image-translation|Version|video-captionning|visual-question-answering|Weighted Sampling|World Bank Dataset Terms of Use|written|Yes|YES|zero-shot-classification|zero-shot-information-retrieval|zero-shot-retrieval)\\b|\\b(Acurracy:|Additional Conditions:|Age:|AGPL 3\\.0 \\(GNU Affero General Public License 3\\.0\\)|and attribute:|Are there protected groups\\?:|Areas:|Attribute number:|Attribute Sparsity:|attribute:|attribute:\\(|Binary attribute:|Categoric Distribution: \\[|CC BY 3\\.0 \\(Attribution 3\\.0 Unported\\)|CC BY 3\\.0 IGO \\(Attribution 3\\.0 IGO\\)|CC BY 4\\.0 \\(Attribution 4\\.0 International\\)|CC BY-NC 4\\.0 \\(Attribution-NonCommercial 4\\.0 International\\)|CC BY-NC-ND 4\\.0 \\(Attribution-NonCommercial-NoDerivatives 4\\.0 International\\)|CC BY-NC-SA 3\\.0 IGO \\(Attribution-NonCommercial-ShareAlike 3\\.0 IGO\\)|CC BY-ND 4\\.0 \\(Attribution-NoDerivatives 4\\.0 International\\)|Chi-Squared:|Cite authors:|Class Balance:|Complete archival version:|Contribution guidelines:|Correlations:|Count:|Country/Region:|Cramers: \\[|Credit/Attribution Notice:|Curation Rationale:|Data Instances:|Data Retention:|Data Splits:|Dependencies:|Description:|Designated Third Partes:|DOI:|Erratum\\?:|F1:|FDL 1\\.3 \\(GNU Free Documentation License 1\\.3\\)|First Rows:|Gaps:|Gathering dates range:|Gathering Processes:|Gender:|Golden Questions:|grantId:|Have sensitive attributes\\?|How data is collected:|Infrastructure:|Instance belongs to people:|Instances relation:|Inteded Audience:|Inter-annotation agreement:|inv:|IQR:|Is language data\\?:|Is public\\?:|is related to:|Is sample:|IssueType:|Journal:|Kendall: \\[|Keywords:|Label:|Labels:|Lenght-histogram: \\[|LGPL 3\\.0 \\(GNU Lesser General Public License 3\\.0\\)|Licences:|Links:|Maintainer:|Mapping:|Max-lenght:|Maximmum:|Mean:|Median-lenght:|Median:|Metrics: \\[|Min-lenght:|Minimmum:|Missing Values:|Modality:|Mode:|Model:|Native Language:|Noise:|Noisy labels:|Non-recommended:|Number 
of speakers represented:|ODC-BY 1\\.0 \\(ODC Attribution License\\)|OfType:|p-value:|Past Uses:|PDDL \\(ODC Public Domain Dedication and Licence\\)|Pearson: \\[|Phi-k: \\[|Precision:|Presence of disordered speech:|Process:|Published Date:|Purposes:|Q1:|Q2:|Q3:|Q4:|Quartiles:|Question:|Race/Ethnicity:|Rate Limit Usage:|Rationale:|Raw Citation:|Recall:|Recommended:|Reference:|Related-Attributes:|Related-Instances:|Relation:|Release Date:|Requirement:|Restriction:|Rights\\(stand-alone\\):|Rights\\(with models\\):|Sampling Characteristics:|Sampling Criteria:|Sampling Method:|Size:|Social Issues:|Socioeconomic status|Sparsity:|Spearman: \\[|Standard Desviation:|statistic:|Symmetry:|Syncrony:|Tags:|Task:|Tasks:|Time and place:|Title:|Total size:|Training in linguistics/other relevan disciplines:|Type:|Unique percentage:|Unique values:|Unique-identifier:|Unique:|Updated Date:|Url:|Uses repository:|Validation Dates:|Validation Method:|Validation Requirements:|Validation:|Version lifecycle:|Version:|When data was collected:|Who collects the data:|Year:|Completeness|From:)\\B" 14 | }, 15 | { 16 | "name": "entity.name.type.class", 17 | "match": "\\b(Dates:|Distribution:|Applications:|Social Issue:|Preprocess:|Preprocesses:|Label Requirements:|Labeling Team:|Labeling process:|LabelingProcesses:|Gather Requirements:|Gathering Team:|Process Demographics:|Gathering Processes:|Consistency rules:|Pair Correlation:|Quality Metrics:|Statistics:|Attributes:|Dataset:|Metadata:|Authoring:|Authors:|Funders:|Composition:|Instance:|Social Concerns:|Data Provenance:|Source:|Benchmarking:|Infraestructure:|Attribute:|Citation:)\\B" 18 | }, 19 | { 20 | "name": "string.quoted.double.dataset-descriptor", 21 | "begin": "\"", 22 | "end": "\"" 23 | }, 24 | { 25 | "name": "string.quoted.single.dataset-descriptor", 26 | "begin": "'", 27 | "end": "'" 28 | } 29 | ], 30 | "repository": { 31 | "comments": { 32 | "patterns": [ 33 | { 34 | "name": "comment.block.dataset-descriptor", 35 | "begin": "/\\*", 
36 | "beginCaptures": { 37 | "0": { 38 | "name": "punctuation.definition.comment.dataset-descriptor" 39 | } 40 | }, 41 | "end": "\\*/", 42 | "endCaptures": { 43 | "0": { 44 | "name": "punctuation.definition.comment.dataset-descriptor" 45 | } 46 | } 47 | }, 48 | { 49 | "begin": "//", 50 | "beginCaptures": { 51 | "1": { 52 | "name": "punctuation.whitespace.comment.leading.dataset-descriptor" 53 | } 54 | }, 55 | "end": "(?=$)", 56 | "name": "comment.line.dataset-descriptor" 57 | } 58 | ] 59 | } 60 | } 61 | } -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES6", 4 | "module": "commonjs", 5 | "lib": ["ESNext"], 6 | "sourceMap": true, 7 | "outDir": "out", 8 | "strict": true, 9 | "noUnusedLocals": true, 10 | "noImplicitReturns": true, 11 | "moduleResolution": "node", 12 | "esModuleInterop": true, 13 | "skipLibCheck": true, 14 | "forceConsistentCasingInFileNames": true 15 | }, 16 | "include": [ 17 | "src/**/*.ts" 18 | ], 19 | "exclude": [ 20 | "out", 21 | "node_modules" 22 | ] 23 | } 24 | --------------------------------------------------------------------------------