├── .github
│   └── workflows
│       ├── ci-adapter.yml
│       ├── db-tests.yml
│       └── publish-adapter.yml
├── .gitignore
├── .vscode
│   └── extensions.json
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── docs
│   ├── design
│   │   ├── HLD
│   │   │   └── HLD.jpg
│   │   └── specs
│   │       ├── dbMigration
│   │       │   └── .gitkeep
│   │       ├── dbSchema
│   │       │   └── .gitkeep
│   │       └── sqlParser
│   │           ├── delete.json
│   │           ├── insert.json
│   │           ├── select.json
│   │           └── update.json
│   ├── readme.md
│   └── tutorials
│       ├── 01.md
│       ├── 02.md
│       ├── 03.md
│       ├── 04.md
│       ├── 05.md
│       ├── 06.md
│       ├── 07.md
│       ├── 08.md
│       ├── 09.md
│       ├── 10.md
│       ├── 11.md
│       ├── 12.md
│       ├── 13.md
│       ├── 14.md
│       ├── 15.md
│       ├── 16.md
│       ├── 17.md
│       ├── 18.md
│       ├── 19.md
│       ├── 20.md
│       ├── 21.md
│       └── assets
│           └── tutorial-19.gif
├── enrollment.csv
├── example
│   ├── prisma
│   │   ├── .gitignore
│   │   ├── client.js
│   │   ├── index.js
│   │   ├── package-lock.json
│   │   ├── package.json
│   │   ├── schema.prisma
│   │   └── tsconfig.json
│   └── socket
│       ├── .gitignore
│       ├── client.js
│       ├── index.js
│       └── package.json
├── package-lock.json
├── package.json
├── prisma
│   └── prisma-adapter
│       ├── .gitignore
│       ├── README.md
│       ├── dev_guide.md
│       ├── package-lock.json
│       ├── package.json
│       ├── src
│       │   ├── index.ts
│       │   └── queryUtils.ts
│       ├── tsconfig.build.json
│       └── tsconfig.json
├── src
│   ├── cli.js
│   ├── csvStorage.js
│   ├── index.js
│   ├── queryExecuter.js
│   ├── queryParser.js
│   └── server.js
├── student.csv
├── tests
│   ├── appoximateLargeFile.test.js
│   ├── cli.test.js
│   ├── csvReader.test.js
│   ├── deleteExecuter.test.js
│   ├── insertExecuter.test.js
│   ├── queryExecuter.test.js
│   └── queryParser.test.js
├── util
│   └── generateLargeFile.js
└── web
    ├── server
    │   ├── enrollment.csv
    │   ├── index.js
    │   ├── package-lock.json
    │   ├── package.json
    │   └── student.csv
    └── www
        ├── frontend.js
        ├── index.html
        ├── xterm.css
        └── xterm.js
/.github/workflows/ci-adapter.yml:
--------------------------------------------------------------------------------
 1 | name: CI Adapter
 2 |
 3 | on:
 4 |   pull_request:
 5 |   workflow_dispatch:
 6 |
 7 | jobs:
 8 |   ci:
 9 |     runs-on: ubuntu-latest
10 |     steps:
11 |       - uses: actions/checkout@v4
12 |       - uses: actions/setup-node@v3
13 |         with:
14 |           node-version: '18'
15 |           cache: npm
16 |
17 |       - name: install dependencies
18 |         run: |
19 |           cd prisma/prisma-adapter
20 |           npm ci
21 |
22 |       - name: lint
23 |         run: |
24 |           cd prisma/prisma-adapter
25 |           npm run lint
26 |
27 |       - name: build
28 |         run: |
29 |           cd prisma/prisma-adapter
30 |           npm run build
--------------------------------------------------------------------------------
/.github/workflows/db-tests.yml:
--------------------------------------------------------------------------------
 1 | name: Run Tests on DB
 2 |
 3 | on:
 4 |   push:
 5 |     branches: [ main ]
 6 |   pull_request:
 7 |     branches: [ main ]
 8 |
 9 | jobs:
10 |   build:
11 |
12 |     runs-on: ubuntu-latest
13 |
14 |     strategy:
15 |       matrix:
16 |         node-version: [16.x, 18.x]
17 |
18 |     steps:
19 |       - uses: actions/checkout@v3
20 |       - name: Use Node.js ${{ matrix.node-version }}
21 |         uses: actions/setup-node@v3
22 |         with:
23 |           node-version: ${{ matrix.node-version }}
24 |       - run: npm i
25 |       - run: npm run generate
26 |       - run: npm test
27 |
--------------------------------------------------------------------------------
/.github/workflows/publish-adapter.yml:
--------------------------------------------------------------------------------
 1 | name: Publish Adapter to NPM
 2 |
 3 | on:
 4 |   release:
 5 |     types: [published]
 6 |   workflow_dispatch:
 7 |
 8 | jobs:
 9 |   publish-npm:
10 |     runs-on: ubuntu-latest
11 |     steps:
12 |       - uses: actions/checkout@v3
13 |       - uses: actions/setup-node@v3
14 |         with:
15 |           node-version: '18'
16 |           registry-url: 'https://registry.npmjs.org'
17 |           cache: npm
18 |
19 |       # Publish Adapter
20 |       - run: |
21 |           cd prisma/prisma-adapter
22 |           npm ci
23 |
24 |       - run: |
25 |           cd prisma/prisma-adapter
26 |           npm run build
27 |       - run: |
28 |           cd prisma/prisma-adapter
29 |           npm publish --access public
30 |         env:
31 |           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
32 |
33 |       # Publish Database
34 |       - run: npm publish --access public
35 |         env:
36 |           NODE_AUTH_TOKEN: ${{ secrets.NPM_TOKEN }}
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 | .pnpm-debug.log*
9 |
10 | # Diagnostic reports (https://nodejs.org/api/report.html)
11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
12 |
13 | # Runtime data
14 | pids
15 | *.pid
16 | *.seed
17 | *.pid.lock
18 |
19 | # Directory for instrumented libs generated by jscoverage/JSCover
20 | lib-cov
21 |
22 | # Coverage directory used by tools like istanbul
23 | coverage
24 | *.lcov
25 |
26 | # nyc test coverage
27 | .nyc_output
28 |
29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
30 | .grunt
31 |
32 | # Bower dependency directory (https://bower.io/)
33 | bower_components
34 |
35 | # node-waf configuration
36 | .lock-wscript
37 |
38 | # Compiled binary addons (https://nodejs.org/api/addons.html)
39 | build/Release
40 |
41 | # Dependency directories
42 | node_modules/
43 | jspm_packages/
44 |
45 | # Snowpack dependency directory (https://snowpack.dev/)
46 | web_modules/
47 |
48 | # TypeScript cache
49 | *.tsbuildinfo
50 |
51 | # Optional npm cache directory
52 | .npm
53 |
54 | # Optional eslint cache
55 | .eslintcache
56 |
57 | # Optional stylelint cache
58 | .stylelintcache
59 |
60 | # Microbundle cache
61 | .rpt2_cache/
62 | .rts2_cache_cjs/
63 | .rts2_cache_es/
64 | .rts2_cache_umd/
65 |
66 | # Optional REPL history
67 | .node_repl_history
68 |
69 | # Output of 'npm pack'
70 | *.tgz
71 |
72 | # Yarn Integrity file
73 | .yarn-integrity
74 |
75 | # dotenv environment variable files
76 | .env
77 | .env.development.local
78 | .env.test.local
79 | .env.production.local
80 | .env.local
81 |
82 | # parcel-bundler cache (https://parceljs.org/)
83 | .cache
84 | .parcel-cache
85 |
86 | # Next.js build output
87 | .next
88 | out
89 |
90 | # Nuxt.js build / generate output
91 | .nuxt
92 | dist
93 |
94 | # Gatsby files
95 | .cache/
96 | # Comment in the public line if your project uses Gatsby and not Next.js
97 | # https://nextjs.org/blog/next-9-1#public-directory-support
98 | # public
99 |
100 | # vuepress build output
101 | .vuepress/dist
102 |
103 | # vuepress v2.x temp and cache directory
104 | .temp
105 | .cache
106 |
107 | # Docusaurus cache and generated files
108 | .docusaurus
109 |
110 | # Serverless directories
111 | .serverless/
112 |
113 | # FuseBox cache
114 | .fusebox/
115 |
116 | # DynamoDB Local files
117 | .dynamodb/
118 |
119 | # TernJS port file
120 | .tern-port
121 |
122 | # Stores VSCode versions used for testing VSCode extensions
123 | .vscode-test
124 |
125 | # yarn v2
126 | .yarn/cache
127 | .yarn/unplugged
128 | .yarn/build-state.yml
129 | .yarn/install-state.gz
130 | .pnp.*
131 |
132 |
133 | **/.DS_Store
134 |
135 | student_large.csv
--------------------------------------------------------------------------------
/.vscode/extensions.json:
--------------------------------------------------------------------------------
1 | {
2 | "recommendations": [
3 | "Orta.vscode-jest"
4 | ]
5 | }
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | By contributing, you agree to release your modifications under the LGPL-2.1 license.
2 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # StylusDB SQL
2 | A SQL database engine written in JavaScript
3 |
4 | This database is for educational purposes only. It is not intended for production use. It is written from the ground up in JavaScript and is a great way to learn how databases work. You can find all the code in the [src](./src) directory and the tutorial in the [docs](./docs) directory.
5 |
6 | ### Features
7 | - [x] Support for [Prisma](https://www.prisma.io/)
8 | - [x] `INSERT`, `DELETE`, `SELECT`
9 | - [x] CLI
10 | - [x] Server/Client Basic Protocol
11 | - [x] NPM Package for StylusDB-SQL
12 | - [ ] `UPDATE`, `CREATE TABLE`, `DROP TABLE`
13 | - [ ] SQL Spec Tracker
14 | - [ ] Minimal PostgreSQL Protocol for Server/Client Communication
15 |
16 | ### Installation
17 |
18 | ```bash
19 | npm i stylusdb-sql
20 | ```
21 |
22 | ### Usage
23 |
24 | #### Client
25 | For usage with the client, see documentation [here](./examples/client/README.md).
26 |
27 | #### With Prisma Client
28 | For usage with Prisma Client, see documentation [here](./prisma/prisma-adapter/README.md).
29 |
30 | #### SELECT
31 | Assuming you have a CSV file called `student.csv` with the following data:
32 | ```
33 | id,name,age
34 | 1,John,25
35 | 2,Jane,25
36 | ```
37 | ```javascript
38 | const { executeSELECTQuery } = require('stylusdb-sql');
39 | const query = 'SELECT id, name FROM student WHERE age = 25';
40 | const result = await executeSELECTQuery(query);
41 |
42 | // result = [{ id: '1', name: 'John' }, { id: '2', name: 'Jane' }]
43 | ```
44 |
45 | #### INSERT
46 | ```javascript
47 | const { executeINSERTQuery, executeDELETEQuery } = require('../src/queryExecuter');
48 | const { readCSV, writeCSV } = require('../src/csvStorage');
49 | async function createGradesCSV() {
50 | const initialData = [
51 | { student_id: '1', course: 'Mathematics', grade: 'A' },
52 | { student_id: '2', course: 'Chemistry', grade: 'B' },
53 | { student_id: '3', course: 'Mathematics', grade: 'C' }
54 | ];
55 | await writeCSV('grades.csv', initialData);
56 | }
57 | await createGradesCSV();
58 | const insertQuery = "INSERT INTO grades (student_id, course, grade) VALUES ('4', 'Physics', 'A')";
59 | await executeINSERTQuery(insertQuery);
60 | ```
61 |
62 | Verify the new entry in `grades.csv`:
63 | ```csv
64 | student_id,course,grade
65 | 1,Mathematics,A
66 | 2,Chemistry,B
67 | 3,Mathematics,C
68 | 4,Physics,A
69 | ```
70 |
71 | #### DELETE
72 | ```javascript
73 | async function createCoursesCSV() {
74 | const initialData = [
75 | { course_id: '1', course_name: 'Mathematics', instructor: 'Dr. Smith' },
76 | { course_id: '2', course_name: 'Chemistry', instructor: 'Dr. Jones' },
77 | { course_id: '3', course_name: 'Physics', instructor: 'Dr. Taylor' }
78 | ];
79 | await writeCSV('courses.csv', initialData);
80 | }
81 | await createCoursesCSV();
82 |
83 | // Execute DELETE statement
84 | const deleteQuery = "DELETE FROM courses WHERE course_id = '2'";
85 | await executeDELETEQuery(deleteQuery);
86 |
87 | // Verify the course was removed
88 | const updatedData = await readCSV('courses.csv');
89 | const deletedCourse = updatedData.find(course => course.course_id === '2');
90 | ```
91 |
92 | ### CLI Usage
93 |
94 | Open a terminal and run the following command to start the CLI:
95 | ```bash
96 | stylusdb-sql
97 | ```
98 | Execute a query as shown below.
99 |
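For illustration, a session might look like this (the exact prompt and output format come from `src/cli.js` and may differ):

```bash
$ stylusdb-sql
SQL> SELECT id, name FROM student WHERE age = 25
[ { id: '1', name: 'John' }, { id: '2', name: 'Jane' } ]
```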
100 |
101 | ### Contributing
102 |
103 | See [CONTRIBUTING.md](./CONTRIBUTING.md)
--------------------------------------------------------------------------------
/docs/design/HLD/HLD.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChakshuGautam/stylusdb-sql/c52af47d8e8f0ba35a2bf7f063c7e0da78816f80/docs/design/HLD/HLD.jpg
--------------------------------------------------------------------------------
/docs/design/specs/dbMigration/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChakshuGautam/stylusdb-sql/c52af47d8e8f0ba35a2bf7f063c7e0da78816f80/docs/design/specs/dbMigration/.gitkeep
--------------------------------------------------------------------------------
/docs/design/specs/dbSchema/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChakshuGautam/stylusdb-sql/c52af47d8e8f0ba35a2bf7f063c7e0da78816f80/docs/design/specs/dbSchema/.gitkeep
--------------------------------------------------------------------------------
/docs/design/specs/sqlParser/delete.json:
--------------------------------------------------------------------------------
1 | {
2 | "dialect": "PostgreSQL",
3 | "queryType": "DELETE",
4 | "delete": {
5 | "from": "student",
6 | "where": [
7 | {
8 | "field": "age",
9 | "operator": ">",
10 | "placeholder": ":age",
11 | "valueType": "integer"
12 | }
13 | ]
14 | },
15 | "returning": [
16 | "id",
17 | "name"
18 | ],
19 | "parameters": {
20 | ":age": 25
21 | }
22 | }
--------------------------------------------------------------------------------
/docs/design/specs/sqlParser/insert.json:
--------------------------------------------------------------------------------
1 | {
2 | "dialect": "PostgreSQL",
3 | "queryType": "INSERT",
4 | "insert": {
5 | "into": "student",
6 | "columns": [
7 | "name",
8 | "age",
9 | "department"
10 | ],
11 | "values": [
12 | [
13 | "John Doe",
14 | 22,
15 | "Science"
16 | ],
17 | [
18 | "Jane Smith",
19 | 20,
20 | "Mathematics"
21 | ]
22 | ]
23 | },
24 | "returning": [
25 | "id",
26 | "name"
27 | ]
28 | }
--------------------------------------------------------------------------------
/docs/design/specs/sqlParser/select.json:
--------------------------------------------------------------------------------
1 | {
2 | "dialect": "PostgreSQL",
3 | "with": [
4 | {
5 | "name": "recursiveCTE",
6 | "as": {
7 | "baseQuery": {
8 | "select": [
9 | "id",
10 | "parent_id",
11 | "name"
12 | ],
13 | "from": [
14 | {
15 | "table": "categories",
16 | "alias": "c"
17 | }
18 | ],
19 | "where": [
20 | {
21 | "field": "c.parent_id",
22 | "operator": "IS",
23 | "value": "NULL",
24 | "valueType": "raw"
25 | }
26 | ]
27 | },
28 | "recursiveQuery": {
29 | "select": [
30 | "c.id",
31 | "c.parent_id",
32 | "c.name"
33 | ],
34 | "from": [
35 | {
36 | "table": "categories",
37 | "alias": "c"
38 | }
39 | ],
40 | "join": {
41 | "table": "recursiveCTE",
42 | "alias": "r",
43 | "on": [
44 | {
45 | "left": "c.parent_id",
46 | "right": "r.id",
47 | "operator": "="
48 | }
49 | ]
50 | }
51 | }
52 | }
53 | }
54 | ],
55 | "select": [
56 | {
57 | "field": "student.name",
58 | "alias": "studentName"
59 | },
60 | {
61 | "field": "enrollment.course",
62 | "alias": "course"
63 | },
64 | {
65 | "subquery": {
66 | "select": [
67 | "COUNT(*)"
68 | ],
69 | "from": [
70 | {
71 | "table": "exam_results",
72 | "alias": "er"
73 | }
74 | ],
75 | "where": [
76 | {
77 | "field": "er.student_id",
78 | "operator": "=",
79 | "fieldFromOuterQuery": "s.id"
80 | }
81 | ]
82 | },
83 | "alias": "examCount"
84 | }
85 | ],
86 | "from": [
87 | {
88 | "table": "student",
89 | "alias": "s",
90 | "indexHint": "USE INDEX (idx_student_name)"
91 | },
92 | {
93 | "table": "recursiveCTE",
94 | "alias": "r"
95 | }
96 | ],
97 | "joins": [
98 | {
99 | "type": "INNER",
100 | "table": "enrollment",
101 | "alias": "e",
102 | "on": [
103 | {
104 | "left": "s.id",
105 | "right": "e.student_id",
106 | "operator": "="
107 | },
108 | {
109 | "left": "s.department",
110 | "right": "e.department",
111 | "operator": "=",
112 | "logic": "AND"
113 | }
114 | ],
115 | "indexHint": "FORCE INDEX (idx_enrollment_date)"
116 | }
117 | ],
118 | "where": [
119 | {
120 | "field": "s.age",
121 | "operator": ">",
122 | "placeholder": ":age",
123 | "valueType": "integer"
124 | }
125 | ],
126 | "groupBy": [
127 | "e.department"
128 | ],
129 | "having": [
130 | {
131 | "field": "COUNT(*)",
132 | "operator": ">",
133 | "placeholder": ":count",
134 | "valueType": "integer"
135 | }
136 | ],
137 | "orderBy": [
138 | {
139 | "field": "studentName",
140 | "direction": "ASC"
141 | }
142 | ],
143 | "limit": ":limit",
144 | "offset": ":offset",
145 | "parameters": {
146 | ":age": 20,
147 | ":count": 5,
148 | ":limit": 10,
149 | ":offset": 0
150 | },
151 | "performanceHints": {
152 | "useIndex": [
153 | "student (idx_student_age)"
154 | ],
155 | "optimizeFor": "speed",
156 | "avoidFullTableScans": true
157 | }
158 | }
--------------------------------------------------------------------------------
/docs/design/specs/sqlParser/update.json:
--------------------------------------------------------------------------------
1 | {
2 | "dialect": "PostgreSQL",
3 | "queryType": "UPDATE",
4 | "update": {
5 | "table": "student",
6 | "set": [
7 | {
8 | "field": "name",
9 | "value": ":newName",
10 | "valueType": "string"
11 | },
12 | {
13 | "field": "age",
14 | "value": ":newAge",
15 | "valueType": "integer"
16 | }
17 | ],
18 | "where": [
19 | {
20 | "field": "id",
21 | "operator": "=",
22 | "placeholder": ":id",
23 | "valueType": "integer"
24 | }
25 | ]
26 | },
27 | "returning": [
28 | "id",
29 | "name",
30 | "age"
31 | ],
32 | "parameters": {
33 | ":newName": "Jane Doe",
34 | ":newAge": 23,
35 | ":id": 1
36 | }
37 | }
--------------------------------------------------------------------------------
/docs/readme.md:
--------------------------------------------------------------------------------
1 | ## SQL Query Engine over CSV in JavaScript
2 |
3 | Let's start building a SQL query engine over CSV in JavaScript. We'll use Node.js for this project.
4 |
5 | This project is built in 20 steps and will take about 2-20 hours to complete, depending on your level of expertise.
6 |
7 | - [x] Step 1: Setting up the Project
8 | - [x] Step 2: Create a CSV Reading Function
9 | - [x] Step 3: Building a Basic SQL Query Parser
10 | - [x] Step 4: Integrating CSV Reading with Query Parsing
11 | - [x] Step 5: Adding Support for WHERE Clauses
12 | - [x] Step 6: Handling multiple conditions in WHERE clause
13 | - [x] Step 7: Handling Different Comparison Operators
14 | - [x] Step 8: Adding INNER JOIN support
15 | - [x] Step 9: Adding LEFT and RIGHT JOIN support
16 | - [x] Step 10: Group By and Aggregate Functions
17 | - [x] Step 11: Implementing ORDER BY Clause
18 | - [x] Step 12: Adding LIMIT Clause
19 | - [x] Step 13: Error Handling and Validation
20 | - [x] Step 14: Implementing DISTINCT Keyword
21 | - [x] Step 15: Adding Support for LIKE Operator
22 | - [x] Step 16: Adding CI Support
23 | - [x] Step 17: Basic INSERT Statement Support
24 | - [x] Step 18: Basic DELETE Statement Support
25 | - [x] Step 19: CLI Integration
26 | - [x] Step 20: Packaging and Publishing
27 |
28 |
29 | ## Refactoring and Code Cleanup
30 | This will be done post Step 20 to ensure that the code is clean and readable. This will also be a good time to add tests and documentation.
31 |
32 |
33 | ## Next Steps
34 | There is a laundry list of features and improvements that can be added to this project. Here are some ideas for further development. The objective is to always add more features in the same format: a tutorial-style guide with step-by-step instructions. A lot of these are challenging and should be broken down into smaller steps.
35 |
36 | - [ ] Implementing SET Operations (UNION, INTERSECT, EXCEPT)
37 | - [ ] INSERT Statement Support: Implement the capability to insert new data into existing CSV files. This includes parsing INSERT SQL statements and updating the CSV file while ensuring data integrity.
38 | - [ ] Enhancing the Parser for Subqueries
39 | - [ ] Data Update and Delete Operations: Along with INSERT, support UPDATE and DELETE operations. This allows for full data manipulation capabilities, similar to a traditional database.
40 | - [ ] Schema Definition and Evolution: Provide functionality for defining a schema for CSV files (column names, data types, etc.) and mechanisms to evolve the schema over time (adding/removing columns, changing data types).
41 | - [ ] Schema Validation: Include features to validate data against the defined schema during insertions and updates, ensuring data quality and consistency.
42 | - [ ] Data Integrity Constraints: Implement constraints like primary keys, unique constraints, and foreign keys. This would require additional logic for constraint enforcement during data modifications.
43 | - [ ] ACID Properties Compliance: Aim to bring the system closer to compliance with ACID (Atomicity, Consistency, Isolation, Durability) properties, enhancing its reliability and robustness.
44 | - [ ] Data Compression and Storage Optimization: Introduce data compression techniques to reduce the storage footprint of CSV files, especially important for large datasets.
45 | - [ ] Bulk Data Insertion and Modification: Develop functionality for handling bulk data operations efficiently, which is crucial for large-scale data processing.
46 | - [ ] Data Partitioning and Sharding: Implement data partitioning and sharding for handling very large CSV files, improving performance and manageability.
47 | - [ ] Row-level Security: Add features for row-level security to restrict data access at a granular level, based on user roles or other criteria.
48 | - [ ] High Availability and Fault Tolerance: Build mechanisms for ensuring high availability and fault tolerance, such as replicating data across multiple locations.
49 | - [ ] Data Auditing Features: Introduce data auditing capabilities to track who made what changes and when, which is important for compliance and security.
50 | - [ ] Disaster Recovery Mechanisms: Develop a system for backing up data and schemas, along with recovery procedures in case of data loss or corruption. This could involve regular snapshots of the CSV files and schema definitions.
51 | - [ ] Transaction Log for Data Recovery: Maintain a transaction log to record all data manipulation operations. This can be used for point-in-time recovery and auditing purposes.
52 | - [ ] Support for Indexing: Develop advanced indexing mechanisms like B-trees or hash indexes for faster query processing. This can significantly improve the performance of SELECT queries, especially on large CSV files.
53 | - [ ] Query Optimization Engine: Implement a query optimizer that rewrites queries for optimal execution. This could include optimizing join orders, using indexes effectively, or simplifying complex queries.
54 | - [ ] Custom Function Support: Allow users to define custom functions in JavaScript that can be used within SQL queries. This would add a layer of flexibility and power to the query engine.
55 | - [ ] Data Type Casting and Conversion: Implement features for automatic or manual data type casting and conversion. This is crucial for handling different data types present in CSV files.
56 | - [ ] Parallel Query Processing: Introduce multi-threading or parallel processing capabilities to handle large datasets more efficiently. This would enable the engine to execute multiple parts of a query in parallel, reducing overall query time.
58 | - [ ] Regular Expression Support in Queries: Add support for using regular expressions in WHERE clauses, providing more powerful data filtering capabilities.
59 | - [ ] Full-text Search Capability: Incorporate a full-text search feature, which is essential for efficiently searching through large text data.
60 | - [ ] Data Import/Export Features: Allow importing data from and exporting data to different formats like JSON, XML, or even other databases.
61 | - [ ] Performance Monitoring: Develop a system for monitoring query performance and logging slow queries. This could help in identifying bottlenecks and areas for improvement.
62 | - [ ] Automatic Query Caching: Implement a caching mechanism that automatically caches frequent queries or query results for faster retrieval.
63 | - [ ] Support for Transactions: Add basic transaction support with features like commit, rollback, and transaction logs. This would be particularly challenging but also a unique feature for a CSV-based query engine.
64 | - [ ] Advanced Analytics: Incorporate more complex statistical functions and operations, making the engine useful for data analysis tasks.
65 | - [ ] Security Features: Implement security features like query validation, SQL injection prevention, and access control mechanisms.
66 | - [ ] Optimized Storage Formats: Add support for optimized storage formats for CSV files, like columnar storage, to enhance read performance.
67 |
68 | ### Process to add a new step
69 | This project is built to be understood by someone with basic knowledge of JavaScript and SQL who follows the steps in order. Ensure that the documentation is updated with each step and uses the same style, format, and structure as the previous steps. Ideally it should also reuse some of the _gyan_ (wisdom) from earlier steps.
70 |
71 | Checklist
72 | - Find a feature that you would want to implement.
73 | - Break it down into steps such that each step can be completed in at most 20 mins.
74 | - Create a new GitHub issue for the feature.
75 | - Get the issue reviewed and approved by a maintainer and get it assigned.
76 | - Create a new branch for the step.
77 | - Implement the step.
78 | - Create a PR for the step.
79 | - Get the implementation, tests and documentation approved.
80 | - Get the PR merged.
81 |
82 |
--------------------------------------------------------------------------------
/docs/tutorials/01.md:
--------------------------------------------------------------------------------
1 | ## Step 1: Project Setup and Basic Test
2 |
3 | In this step, we set up the basic structure and testing for our project.
4 |
5 | ### 1.1 Create a New Node.js Project
6 | - Open your terminal.
7 | - Navigate to the directory where you want to create your project.
8 | - Run the following command to create a `package.json` file with default values:
9 | ```bash
10 | npm init -y
11 | ```
12 |
13 | ### 1.2 Install Necessary Packages
14 | - We will use `csv-parser` for CSV parsing and `jest` for testing. Install both packages using:
15 | ```bash
16 | npm install csv-parser jest --save-dev
17 | ```
18 |
19 | ### 1.3 Create a Basic Project Structure
20 | - Create a directory named `src`.
21 | - Inside `src`, create a file named `index.js`. This will be your main file.
22 |
23 | ### 1.4 Configure Jest
24 | - Wire up Jest in your `package.json` file by adding the following under the `scripts` section:
25 | ```json
26 | "scripts": {
27 | "test": "jest"
28 | }
29 | ```
30 |
31 | - Create a directory named `tests` at the root of your project.
32 | - Inside `tests`, create a file named `index.test.js` for your Jest tests.
33 |
34 | ### 1.5 Write a Basic Jest Test
35 | - In `index.test.js`, write a simple test to check your Jest setup. Here's a basic test example:
36 |
37 | ```javascript
38 | // tests/index.test.js
39 |
40 | test('Basic Jest Test', () => {
41 | expect(1).toBe(1);
42 | });
43 | ```
44 |
45 | ### 1.6 Run Your Jest Test
46 | - To run the test, navigate to your project root in the terminal.
47 | - Execute the test using the following command:
48 |
49 | ```bash
50 | npm test
51 | ```
52 |
53 | You should see the test passing in the console.
54 | This completes the initial setup with Jest testing. Make sure everything works up to this point before moving to the next step.
--------------------------------------------------------------------------------
/docs/tutorials/02.md:
--------------------------------------------------------------------------------
1 | ## Step 2: Reading a CSV File
2 |
3 | In this step we add the functionality to read CSV files, since our DB will read from and write to CSV files, treating each file as a table.
4 |
5 | ### 2.1 Create a Sample CSV File
6 | Create a new CSV file in your project directory. Let's call it `sample.csv` and add some sample data to it:
7 |
8 | ```csv
9 | id,name,age
10 | 1,John,30
11 | 2,Jane,25
12 | 3,Bob,22
13 | ```
14 |
15 | ### 2.2 Create a CSV Reading Function
16 | - In the `src` directory, create a new file named `csvReader.js`.
17 | - Implement a function to read the CSV file using `csv-parser`.
18 |
19 | ```javascript
20 | // src/csvReader.js
21 |
22 | const fs = require('fs');
23 | const csv = require('csv-parser');
24 |
25 | function readCSV(filePath) {
26 | const results = [];
27 |
28 | return new Promise((resolve, reject) => {
29 | fs.createReadStream(filePath)
30 | .pipe(csv())
31 | .on('data', (data) => results.push(data))
32 | .on('end', () => {
33 | resolve(results);
34 | })
35 | .on('error', (error) => {
36 | reject(error);
37 | });
38 | });
39 | }
40 |
41 | module.exports = readCSV;
42 | ```
43 |
44 | ### 2.3 Update Test to Check CSV Reading
45 | Modify the test in `tests/index.test.js` to include a test for reading the CSV file.
46 |
47 | Example test:
48 |
49 | ```javascript
50 | // tests/index.test.js
51 |
52 | const readCSV = require('../src/csvReader');
53 |
54 | test('Read CSV File', async () => {
55 | const data = await readCSV('./sample.csv');
56 | expect(data.length).toBeGreaterThan(0);
57 | expect(data.length).toBe(3);
58 | expect(data[0].name).toBe('John');
59 | expect(data[0].age).toBe('30'); //ignore the string type here, we will fix this later
60 | });
61 | ```
62 |
63 | ### 2.4 Run the Updated Test
64 | Run the test using `npm test`. Ensure the test passes and correctly reads the CSV file.
65 |
66 |
67 |
68 |
--------------------------------------------------------------------------------
/docs/tutorials/03.md:
--------------------------------------------------------------------------------
1 | ## Step 3: Building a Basic SQL Query Parser
2 |
3 | In this step we implement a basic SQL parser so that our DB can understand SQL and we can query data from it.
4 |
5 | ### 3.1 Create the Parser Function
6 | In the `src` directory, create a new file named `queryParser.js`
7 |
8 | Write a function that takes a SQL query string and parses it using regex to identify the `SELECT` fields and the `FROM` table name.
9 |
10 | Example implementation:
11 | ```javascript
12 | // src/queryParser.js
13 |
14 | function parseQuery(query) {
15 | const selectRegex = /SELECT (.+) FROM (.+)/i;
16 | const match = query.match(selectRegex);
17 |
18 | if (match) {
19 | const [, fields, table] = match;
20 | return {
21 | fields: fields.split(',').map(field => field.trim()),
22 | table: table.trim()
23 | };
24 | } else {
25 | throw new Error('Invalid query format');
26 | }
27 | }
28 |
29 | module.exports = parseQuery;
30 | ```
31 |
32 | ### 3.2 Update Test to Check Query Parsing
33 | Modify the test in `tests/index.test.js` to include a test for the SQL query parsing.
34 | Example test:
35 |
36 | ```javascript
37 | // tests/index.test.js
38 |
39 | const parseQuery = require('../src/queryParser');
40 |
41 | test('Parse SQL Query', () => {
42 | const query = 'SELECT id, name FROM sample';
43 | const parsed = parseQuery(query);
44 | expect(parsed).toEqual({
45 | fields: ['id', 'name'],
46 | table: 'sample'
47 | });
48 | });
49 | ```
50 | > **💡Do it yourself:** Try writing a test for the case where the above function would throw an error.
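One possible answer (a sketch; the assertion must match the `Error` message thrown above):

```javascript
test('Throws an error for an invalid query', () => {
    expect(() => parseQuery('UPDATE sample SET id = 1')).toThrow('Invalid query format');
});
```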
51 |
52 |
53 | ### 3.3 Run the Updated Test
54 | Run the test using `npm test`. Ensure the test passes and correctly parses the SQL query.
55 |
--------------------------------------------------------------------------------
/docs/tutorials/04.md:
--------------------------------------------------------------------------------
1 | ## Step 4: Integrating CSV Reading with Query Parsing
2 |
3 | In this step we integrate the functionalities implemented in the previous two steps together to get a basic `SELECT` query working in our DB.
4 |
5 | ### 4.1 Create the Query Execution Function
6 | - In `src/index.js`, create the query execution function (or rename your existing one) as `executeSELECTQuery` to indicate its specific purpose.
7 | - Ensure the function handles only SELECT queries.
8 | Here's a basic implementation:
9 |
10 | ```javascript
11 | // src/index.js
12 |
13 | const parseQuery = require('./queryParser');
14 | const readCSV = require('./csvReader');
15 |
16 | async function executeSELECTQuery(query) {
17 | const { fields, table } = parseQuery(query);
18 | const data = await readCSV(`${table}.csv`);
19 |
20 | // Filter the fields based on the query
21 | return data.map(row => {
22 | const filteredRow = {};
23 | fields.forEach(field => {
24 | filteredRow[field] = row[field];
25 | });
26 | return filteredRow;
27 | });
28 | }
29 |
30 | module.exports = executeSELECTQuery;
31 | ```
32 |
33 | > **💡Do it yourself:** Find and list out conditions where this function can error out and handle them.
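For example, a missing CSV file already rejects inside `readCSV`, but an empty table or a typo in a field name currently passes silently. A sketch of possible guards (illustrative error messages):

```javascript
// Inside executeSELECTQuery, after reading the CSV
if (data.length === 0) {
    throw new Error(`Table '${table}' has no rows`);
}
const unknownFields = fields.filter(field => !(field in data[0]));
if (unknownFields.length > 0) {
    throw new Error(`Unknown field(s): ${unknownFields.join(', ')}`);
}
```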
34 |
35 | ### 4.2 Update Test to Check Query Execution
36 | Modify the test in `tests/index.test.js` to include a test for executing the SQL query.
37 | Example test:
38 |
39 | ```javascript
40 | // tests/index.test.js
41 |
42 | const executeSELECTQuery = require('../src/index');
43 |
44 | test('Execute SQL Query', async () => {
45 | const query = 'SELECT id, name FROM sample';
46 | const result = await executeSELECTQuery(query);
47 | expect(result.length).toBeGreaterThan(0);
48 | expect(result[0]).toHaveProperty('id');
49 | expect(result[0]).toHaveProperty('name');
50 | expect(result[0]).not.toHaveProperty('age');
51 | expect(result[0]).toEqual({ id: '1', name: 'John' });
52 | });
53 | ```
54 |
55 | ### 4.3 Run the Updated Test
56 | Run the test using `npm test`.
57 | Ensure the test passes and correctly executes the SQL query, returning the filtered data.
58 |
--------------------------------------------------------------------------------
/docs/tutorials/05.md:
--------------------------------------------------------------------------------
1 | ## Step 5: Adding WHERE Clause Handling to the Parser
2 |
3 | In this step we add filtering capabilities by adding `WHERE` clause parsing to our DB.
4 |
5 | ### 5.1 Update the Parser to Handle WHERE Clauses
6 | - In the `src` directory, update `queryParser.js` to parse the `WHERE` clause from the SQL query.
7 | The parser should be able to extract the condition in a simple format, like `field = value`.
8 |
9 | Here's an updated implementation:
10 |
11 | ```javascript
12 | // src/queryParser.js
13 |
14 | function parseQuery(query) {
15 | const selectRegex = /SELECT (.+?) FROM (.+?)(?: WHERE (.*))?$/i;
16 | const match = query.match(selectRegex);
17 |
18 | if (match) {
19 | const [, fields, table, whereClause] = match;
20 | return {
21 | fields: fields.split(',').map(field => field.trim()),
22 | table: table.trim(),
23 | whereClause: whereClause ? whereClause.trim() : null
24 | };
25 | } else {
26 | throw new Error('Invalid query format');
27 | }
28 | }
29 |
30 | module.exports = parseQuery;
31 | ```
32 |
33 | ### 5.2 Run the Updated Tests
34 | - Run the tests using `npm test`. Ensure all tests pass, particularly the new test for WHERE clause parsing.
35 | - If you see your older tests fail, update them to include `"whereClause": null` in the expected output.
36 |
37 | ### 5.3 Update the Execute Function to apply `WHERE` clauses
38 | Modify `executeSELECTQuery` in `src/index.js` to filter results based on the `WHERE` clause.
39 |
40 | Example implementation:
41 |
42 | ```javascript
43 | // src/index.js
44 |
45 | const parseQuery = require('./queryParser');
46 | const readCSV = require('./csvReader');
47 |
48 | async function executeSELECTQuery(query) {
49 | const { fields, table, whereClause } = parseQuery(query);
50 | const data = await readCSV(`${table}.csv`);
51 |
52 | // Filtering based on WHERE clause
53 | const filteredData = whereClause
54 | ? data.filter(row => {
55 | const [field, value] = whereClause.split('=').map(s => s.trim());
56 | return row[field] === value;
57 | })
58 | : data;
59 |
60 | // Selecting the specified fields
61 | return filteredData.map(row => {
62 | const selectedRow = {};
63 | fields.forEach(field => {
64 | selectedRow[field] = row[field];
65 | });
66 | return selectedRow;
67 | });
68 | }
69 |
70 | module.exports = executeSELECTQuery;
71 | ```
72 |
73 | > **💡Ask yourself:** Is the above implementation case-insensitive?
74 |
75 | ### 5.4 Update Tests for `WHERE` Clause
76 | Modify the tests in `tests/index.test.js` to include tests for queries with `WHERE` clauses.
77 |
78 | Example test:
79 | ```javascript
80 | // tests/index.test.js
81 |
82 | const executeSELECTQuery = require('../src/index');
83 |
84 | test('Execute SQL Query with WHERE Clause', async () => {
85 | const query = 'SELECT id, name FROM sample WHERE age = 25';
86 | const result = await executeSELECTQuery(query);
87 | expect(result.length).toBe(1);
88 | expect(result[0]).toHaveProperty('id');
89 | expect(result[0]).toHaveProperty('name');
90 | expect(result[0].id).toBe('2');
91 | });
92 | ```
93 |
94 | > **💡Do it yourself:** Add tests where the above
95 | > implementation would fail and check its case sensitivity/insensitivity.
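One possible probe (a sketch): keywords are matched case-insensitively thanks to the `/i` flag, but values are compared exactly.

```javascript
test('WHERE value comparison is case-sensitive', async () => {
    const query = 'SELECT id FROM sample WHERE name = john';
    const result = await executeSELECTQuery(query);
    expect(result.length).toBe(0); // the row stores 'John', not 'john'
});
```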
96 |
97 | ### 5.5 Run the Updated Tests
98 | - Run the tests using `npm test`
99 |
100 |
101 |
--------------------------------------------------------------------------------
/docs/tutorials/06.md:
--------------------------------------------------------------------------------
1 | ## Step 6: Handling multiple conditions in WHERE clause
2 |
3 | In this step we add support for multiple filters in our SQL queries.
4 |
5 | ### 6.1 Update the Parser for Complex WHERE Clauses
6 | - Modify `queryParser.js` to handle multiple conditions in the `WHERE` clause.
7 | - The parser should split the conditions and identify the logical operators.
8 |
9 | Here's an example implementation:
10 |
11 | ```javascript
12 | // src/queryParser.js
13 |
14 | function parseQuery(query) {
15 | const selectRegex = /SELECT (.+?) FROM (.+?)(?: WHERE (.*))?$/i;
16 | const match = query.match(selectRegex);
17 |
18 | if (match) {
19 | const [, fields, table, whereString] = match;
20 | const whereClauses = whereString ? parseWhereClause(whereString) : [];
21 | return {
22 | fields: fields.split(',').map(field => field.trim()),
23 | table: table.trim(),
24 | whereClauses
25 | };
26 | } else {
27 | throw new Error('Invalid query format');
28 | }
29 | }
30 |
31 | function parseWhereClause(whereString) {
32 | const conditions = whereString.split(/ AND | OR /i);
33 | return conditions.map(condition => {
34 | const [field, operator, value] = condition.split(/\s+/);
35 | return { field, operator, value };
36 | });
37 | }
38 |
39 | module.exports = parseQuery;
40 | ```
41 |
42 | > **💡Do it yourself:** Add error handling to the newly added function.
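One possible guard (a sketch): reject any condition that does not split into exactly a field, an operator, and a value.

```javascript
function parseWhereClause(whereString) {
    const conditions = whereString.split(/ AND | OR /i);
    return conditions.map(condition => {
        const parts = condition.trim().split(/\s+/);
        if (parts.length !== 3) {
            throw new Error(`Malformed WHERE condition: '${condition.trim()}'`);
        }
        const [field, operator, value] = parts;
        return { field, operator, value };
    });
}
```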
43 |
44 | ### 6.2 Update the Execute Function for Complex WHERE Clauses
45 | - Modify `executeSELECTQuery` in `src/index.js` to evaluate multiple conditions in the `WHERE` clause.
46 |
47 | Here's an example implementation:
48 |
49 | ```javascript
50 | // src/index.js
51 |
52 | const parseQuery = require('./queryParser');
53 | const readCSV = require('./csvReader');
54 |
55 | async function executeSELECTQuery(query) {
56 | const { fields, table, whereClauses } = parseQuery(query);
57 | const data = await readCSV(`${table}.csv`);
58 |
59 | // Apply WHERE clause filtering
60 | const filteredData = whereClauses.length > 0
61 | ? data.filter(row => whereClauses.every(clause => {
62 | // You can expand this to handle different operators
63 | return row[clause.field] === clause.value;
64 | }))
65 | : data;
66 |
67 | // Select the specified fields
68 | return filteredData.map(row => {
69 | const selectedRow = {};
70 | fields.forEach(field => {
71 | selectedRow[field] = row[field];
72 | });
73 | return selectedRow;
74 | });
75 | }
76 |
77 | module.exports = executeSELECTQuery;
78 | ```
79 |
80 | ### 6.3 Update Tests for Complex WHERE Clauses
81 | - Modify the tests in `tests/index.test.js` to include queries with complex WHERE clauses.
82 |
83 | Example test:
84 |
85 | ```javascript
86 | test('Parse SQL Query with Multiple WHERE Clauses', () => {
87 | const query = 'SELECT id, name FROM sample WHERE age = 30 AND name = John';
88 | const parsed = parseQuery(query);
89 | expect(parsed).toEqual({
90 | fields: ['id', 'name'],
91 | table: 'sample',
92 | whereClauses: [{
93 | "field": "age",
94 | "operator": "=",
95 | "value": "30",
96 | }, {
97 | "field": "name",
98 | "operator": "=",
99 | "value": "John",
100 | }]
101 | });
102 | });
103 |
104 | test('Execute SQL Query with Multiple WHERE Clause', async () => {
105 | const query = 'SELECT id, name FROM sample WHERE age = 30 AND name = John';
106 | const result = await executeSELECTQuery(query);
107 | expect(result.length).toBe(1);
108 | expect(result[0]).toEqual({ id: '1', name: 'John' });
109 | });
110 | ```
111 |
112 | > **💡Do it yourself:** Add test cases where the implementation might throw errors and make sure they are handled.
113 |
114 | - Verify that all 7 of the tests pass by running `npm test`. If not, update the older tests to work with the new parser.
115 |
--------------------------------------------------------------------------------
/docs/tutorials/07.md:
--------------------------------------------------------------------------------
1 | ## Step 7: Handling Different Comparison Operators
2 |
3 | In this step we move further ahead from the realm of equalities to the realm of inequalities by implementing support for different comparison operators in our query parser.
4 |
5 | ### 7.1 Update the Parser to Recognize Comparison Operators
6 | - Modify `parseWhereClause` function to handle a variety of comparison operators (`=`, `!=`, `>`, `<`, `>=`, `<=`).
7 | - The parser should identify the operator and split the condition accordingly.
8 |
9 | Example implementation:
10 |
11 | ```javascript
12 | // src/queryParser.js
13 | function parseWhereClause(whereString) {
14 | const conditionRegex = /(.*?)(!=|>=|<=|=|>|<)(.*)/; // multi-character operators first, so '>=' is not consumed as '>'
15 | return whereString.split(/ AND | OR /i).map(conditionString => {
16 | const match = conditionString.match(conditionRegex);
17 | if (match) {
18 | const [, field, operator, value] = match;
19 | return { field: field.trim(), operator, value: value.trim() };
20 | }
21 | throw new Error('Invalid WHERE clause format');
22 | });
23 | }
24 | ```
25 |
26 | ### 7.2 Update the Execute Function to Apply Different Operators
27 | - Modify `executeSELECTQuery` in `src/index.js` to evaluate conditions using the identified operators.
28 |
29 | Example implementation:
30 |
31 | ```javascript
32 | // src/index.js
33 | function evaluateCondition(row, clause) {
34 | const { field, operator, value } = clause;
35 | switch (operator) {
36 | case '=': return row[field] === value;
37 | case '!=': return row[field] !== value;
38 | case '>': return row[field] > value;
39 | case '<': return row[field] < value;
40 | case '>=': return row[field] >= value;
41 | case '<=': return row[field] <= value;
42 | default: throw new Error(`Unsupported operator: ${operator}`);
43 | }
44 | }
45 | ```
46 | - Update the filteredData assignment to use the new `evaluateCondition` function.
47 | ```javascript
48 | const filteredData = whereClauses.length > 0
49 | ? data.filter(row => whereClauses.every(clause => evaluateCondition(row, clause)))
50 | : data;
51 | ```
52 |
53 |
54 | ### 7.3 Update Tests for Different Comparison Operators
55 | Modify the tests in `tests/index.test.js` to include queries with different comparison operators and run the tests using `npm test`.
56 |
57 | Example test:
58 |
59 | ```javascript
60 | // tests/index.test.js
61 | test('Execute SQL Query with Greater Than', async () => {
62 | const queryWithGT = 'SELECT id FROM sample WHERE age > 22';
63 | const result = await executeSELECTQuery(queryWithGT);
64 | expect(result.length).toEqual(2);
65 | expect(result[0]).toHaveProperty('id');
66 | });
67 |
68 | test('Execute SQL Query with Not Equal to', async () => {
69 | const queryWithGT = 'SELECT name FROM sample WHERE age != 25';
70 | const result = await executeSELECTQuery(queryWithGT);
71 | expect(result.length).toEqual(2);
72 | expect(result[0]).toHaveProperty('name');
73 | });
74 | ```
75 |
76 | > **💡Do it yourself:** Add negative test cases for the above implementation.
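For instance (a sketch), a condition without any recognizable operator should surface the parser's error:

```javascript
// tests/index.test.js
test('Rejects a WHERE clause without a valid operator', async () => {
    const query = 'SELECT id FROM sample WHERE age IS NULL';
    await expect(executeSELECTQuery(query)).rejects.toThrow('Invalid WHERE clause format');
});
```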
--------------------------------------------------------------------------------
/docs/tutorials/08.md:
--------------------------------------------------------------------------------
1 | ## Step 8: Inner Join
2 |
3 | In this step we implement the `JOIN` functionality from SQL.
4 |
5 | ### 8.1 Preparing CSV Files for INNER JOIN
6 | - Rename `sample.csv` to `student.csv`
7 | - Update the tests in `tests/index.test.js` to use `student.csv` instead of `sample.csv` and update the expected results accordingly.
8 |
9 | ### 8.2 Create a new Enrollment CSV File
10 | This setup gives us two CSV files: one for student information and another for their course enrollments. We can now use these files to demonstrate the INNER JOIN functionality in the next step.
11 | Create a new file named `enrollment.csv` in the root of your project and add the following contents to it.
12 |
13 | ```csv
14 | student_id,course
15 | 1,Mathematics
16 | 1,Physics
17 | 2,Chemistry
18 | 3,Mathematics
19 | ```
20 |
21 | ### 8.3 Update the Parser to Handle JOIN Clauses
22 | - Define what the parser should output for a `JOIN` query.
23 | Assuming the following `JOIN` query:
24 | ```sql
25 | SELECT student.name, enrollment.course FROM student INNER JOIN enrollment ON student.id=enrollment.student_id
26 | ```
27 | ```json
28 | {
29 | "fields": ["student.name", "enrollment.course"],
30 | "table": "student",
31 | "whereClauses": [],
32 | "joinTable": "enrollment",
33 | "joinCondition": {
34 | "left": "student.id",
35 | "right": "enrollment.student_id"
36 | }
37 | }
38 | ```
39 |
40 | - When there is a `WHERE` clause it would be like this:
41 | ```sql
42 | SELECT student.name, enrollment.course FROM student INNER JOIN enrollment ON student.id=enrollment.student_id WHERE student.name = John
43 | ```
44 |
45 | ```json
46 | {
47 | "fields": ["student.name", "enrollment.course"],
48 | "table": "student",
49 | "whereClauses": [{
50 | "field": "student.name",
51 | "operator": "=",
52 | "value": "John"
53 | }],
54 | "joinTable": "enrollment",
55 | "joinCondition": {
56 | "left": "student.id",
57 | "right": "enrollment.student_id"
58 | }
59 | }
60 | ```
61 |
62 | Now, modify `queryParser.js` to parse `JOIN` clauses in the SQL query.
63 | The parser should identify the tables and the join condition.
64 |
65 | Example implementation:
66 |
67 | ```javascript
68 | // src/queryParser.js
69 |
70 | function parseQuery(query) {
71 | // First, let's trim the query to remove any leading/trailing whitespaces
72 | query = query.trim();
73 |
74 | // Initialize variables for different parts of the query
75 | let selectPart, fromPart;
76 |
77 | // Split the query at the WHERE clause if it exists
78 | const whereSplit = query.split(/\sWHERE\s/i);
79 | query = whereSplit[0]; // Everything before WHERE clause
80 |
81 | // WHERE clause is the second part after splitting, if it exists
82 | const whereClause = whereSplit.length > 1 ? whereSplit[1].trim() : null;
83 |
84 | // Split the remaining query at the JOIN clause if it exists
85 | const joinSplit = query.split(/\sINNER JOIN\s/i);
86 | selectPart = joinSplit[0].trim(); // Everything before JOIN clause
87 |
88 | // JOIN clause is the second part after splitting, if it exists
89 | const joinPart = joinSplit.length > 1 ? joinSplit[1].trim() : null;
90 |
91 | // Parse the SELECT part
92 | const selectRegex = /^SELECT\s(.+?)\sFROM\s(.+)/i;
93 | const selectMatch = selectPart.match(selectRegex);
94 | if (!selectMatch) {
95 | throw new Error('Invalid SELECT format');
96 | }
97 |
98 | const [, fields, table] = selectMatch;
99 |
100 | // Parse the JOIN part if it exists
101 | let joinTable = null, joinCondition = null;
102 | if (joinPart) {
103 | const joinRegex = /^(.+?)\sON\s([\w.]+)\s*=\s*([\w.]+)/i;
104 | const joinMatch = joinPart.match(joinRegex);
105 | if (!joinMatch) {
106 | throw new Error('Invalid JOIN format');
107 | }
108 |
109 | joinTable = joinMatch[1].trim();
110 | joinCondition = {
111 | left: joinMatch[2].trim(),
112 | right: joinMatch[3].trim()
113 | };
114 | }
115 |
116 | // Parse the WHERE part if it exists
117 | let whereClauses = [];
118 | if (whereClause) {
119 | whereClauses = parseWhereClause(whereClause);
120 | }
121 |
122 | return {
123 | fields: fields.split(',').map(field => field.trim()),
124 | table: table.trim(),
125 | whereClauses,
126 | joinTable,
127 | joinCondition
128 | };
129 | }
130 | ```
131 |
132 | ### 8.4 Update the Execute Function for INNER JOIN
133 | Modify `executeSELECTQuery` in `src/index.js` to perform an `INNER JOIN` operation. Given that JOINs are generally performed before WHERE filtering, we can apply the WHERE clause after the JOIN operation.
134 |
135 | Example implementation:
136 | ```javascript
137 | // src/index.js at executeSELECTQuery
138 |
139 | // Now we will have joinTable, joinCondition in the parsed query
140 | const { fields, table, whereClauses, joinTable, joinCondition } = parseQuery(query);
141 | let data = await readCSV(`${table}.csv`);
142 |
143 | // Perform INNER JOIN if specified
144 | if (joinTable && joinCondition) {
145 | const joinData = await readCSV(`${joinTable}.csv`);
146 | data = data.flatMap(mainRow => {
147 | return joinData
148 | .filter(joinRow => {
149 | const mainValue = mainRow[joinCondition.left.split('.')[1]];
150 | const joinValue = joinRow[joinCondition.right.split('.')[1]];
151 | return mainValue === joinValue;
152 | })
153 | .map(joinRow => {
154 | return fields.reduce((acc, field) => {
155 | const [tableName, fieldName] = field.split('.');
156 | acc[field] = tableName === table ? mainRow[fieldName] : joinRow[fieldName];
157 | return acc;
158 | }, {});
159 | });
160 | });
161 | }
162 |
163 | // Apply WHERE clause filtering after JOIN (or on the original data if no join)
164 | const filteredData = whereClauses.length > 0
165 | ? data.filter(row => whereClauses.every(clause => evaluateCondition(row, clause)))
166 | : data;
167 | ```
168 |
169 | This also requires us to update the `SELECT` logic to handle the new `fields` format, which should now work for both plain `SELECT` queries (`id`) and `JOIN` queries (`student.id`).
170 |
171 | Sample implementation:
172 | ```javascript
173 | // src/index.js at executeSELECTQuery
174 | return filteredData.map(row => {
175 |     const selectedRow = {};
176 |     fields.forEach(field => {
177 |         // Row keys are bare names ('id') or, after a JOIN, qualified names ('student.name')
178 |         selectedRow[field] = row[field];
179 |     });
180 |     return selectedRow;
181 | });
182 | ```
183 |
184 | Verify if the results are as expected manually.
185 |
186 | ```javascript
187 | /*
188 | result = [
189 | { 'student.name': 'John', 'enrollment.course': 'Mathematics' },
190 | { 'student.name': 'John', 'enrollment.course': 'Physics' },
191 | { 'student.name': 'Jane', 'enrollment.course': 'Chemistry' },
192 | { 'student.name': 'Bob', 'enrollment.course': 'Mathematics' }
193 | ]
194 | */
195 | ```
196 |
197 | ### 8.5 Update Tests for INNER JOIN
198 |
199 | Modify the tests in `tests/index.test.js` to include tests for `INNER JOIN` queries. Add these 4 tests:
200 |
201 | ```javascript
202 | test('Parse SQL Query with INNER JOIN', async () => {/*implement*/});
203 | test('Parse SQL Query with INNER JOIN and WHERE Clause', async () => {/*implement*/});
204 | test('Execute SQL Query with INNER JOIN', async () => {/*implement*/});
205 | test('Execute SQL Query with INNER JOIN and a WHERE Clause', async () => {/*implement*/});
206 | ```
207 |
208 | Make sure the tests pass. If you are unsure take a peek at the tests in the `08` commit.
209 |
210 | > **💡Do it yourself:** Implement the tests mentioned in the code block above and also figure out negative test cases.
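As a starting point, the first one could look like this (a sketch matching the parser output shown in 8.3):

```javascript
test('Parse SQL Query with INNER JOIN', () => {
    const query = 'SELECT student.name, enrollment.course FROM student INNER JOIN enrollment ON student.id=enrollment.student_id';
    const parsed = parseQuery(query);
    expect(parsed.joinTable).toBe('enrollment');
    expect(parsed.joinCondition).toEqual({ left: 'student.id', right: 'enrollment.student_id' });
});
```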
211 |
--------------------------------------------------------------------------------
/docs/tutorials/09.md:
--------------------------------------------------------------------------------
1 | ## Step 9: Implementing LEFT and RIGHT JOINs
2 |
3 | In this step we extend the `JOIN` functionality of our database to add support for `LEFT` and `RIGHT` joins as well.
4 |
5 | ### 9.0 Update the CSV Files
6 | - Update the CSV Files to include the following data:
7 |
8 | > `enrollment.csv` would become
9 | ```csv
10 | student_id,course
11 | 1,Mathematics
12 | 1,Physics
13 | 2,Chemistry
14 | 3,Mathematics
15 | 5,Biology
16 | ```
17 |
18 | and
19 | > `student.csv` would become
20 | ```csv
21 | id,name,age
22 | 1,John,30
23 | 2,Jane,25
24 | 3,Bob,22
25 | 4,Alice,24
26 | ```
27 |
28 | This ensures that the `JOIN` queries now involve data that is not present in both tables.
29 |
30 | > 💡**Ask yourself**: Why was this update necessary?
31 |
32 |
33 | ### 9.1 Update the Parser to Recognize Different JOIN Types
34 | - Modify `queryParser.js` to differentiate between `INNER JOIN`, `LEFT JOIN`, and `RIGHT JOIN`. Adjust the regex and logic to capture the type of JOIN.
35 |
36 | Updated `queryParser.js`:
37 |
38 | ```javascript
39 | // src/queryParser.js
40 |
41 | // ...existing code...
42 |
43 | function parseJoinClause(query) {
44 | const joinRegex = /\s(INNER|LEFT|RIGHT) JOIN\s(.+?)\sON\s([\w.]+)\s*=\s*([\w.]+)/i;
45 | const joinMatch = query.match(joinRegex);
46 |
47 | if (joinMatch) {
48 | return {
49 | joinType: joinMatch[1].trim(),
50 | joinTable: joinMatch[2].trim(),
51 | joinCondition: {
52 | left: joinMatch[3].trim(),
53 | right: joinMatch[4].trim()
54 | }
55 | };
56 | }
57 |
58 | return {
59 | joinType: null,
60 | joinTable: null,
61 | joinCondition: null
62 | };
63 | }
64 |
65 | // Update the parseQuery function to use parseJoinClause
66 | // ...existing code...
67 |
68 | module.exports = { parseQuery, parseJoinClause };
69 | ```
70 |
71 | > 💡**Do it yourself**: Remember to update the `parseQuery` function to use the `parseJoinClause` function implemented above
72 |
73 | ### 9.2 Updating the Execute Function for Different JOIN Types
74 | - Modify `executeSELECTQuery` in `src/index.js` to handle `LEFT JOIN` and `RIGHT JOIN` alongside `INNER JOIN`. Implement `performLeftJoin` and `performRightJoin` functions to encapsulate the specific logic for these JOIN types.
75 |
76 | ```javascript
77 | // src/index.js
78 |
79 | // ...existing imports...
80 |
81 | // Helper functions for different JOIN types
82 | function performInnerJoin(/* parameters */) {
83 | // Logic for INNER JOIN
84 | // ...
85 | }
86 |
87 | function performLeftJoin(/* parameters */) {
88 | // Logic for LEFT JOIN
89 | // ...
90 | }
91 |
92 | function performRightJoin(/* parameters */) {
93 | // Logic for RIGHT JOIN
94 | // ...
95 | }
96 |
97 | async function executeSELECTQuery(query) {
98 | const { fields, table, whereClauses, joinType, joinTable, joinCondition } = parseQuery(query);
99 | let data = await readCSV(`${table}.csv`);
100 |
101 | // Logic for applying JOINs
102 | if (joinTable && joinCondition) {
103 | const joinData = await readCSV(`${joinTable}.csv`);
104 | switch (joinType.toUpperCase()) {
105 | case 'INNER':
106 | data = performInnerJoin(data, joinData, joinCondition, fields, table);
107 | break;
108 | case 'LEFT':
109 | data = performLeftJoin(data, joinData, joinCondition, fields, table);
110 | break;
111 | case 'RIGHT':
112 | data = performRightJoin(data, joinData, joinCondition, fields, table);
113 | break;
114 | // Handle default case or unsupported JOIN types
115 | }
116 | }
117 |
118 | // ...existing code for WHERE clause and field selection...
119 | }
120 |
121 | module.exports = executeSELECTQuery;
122 | ```
123 |
124 | > 💡**Do it yourself**: Implement the `performInnerJoin`, `performLeftJoin`, `performRightJoin` and update the `executeSELECTQuery` function.
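If you get stuck, here is one possible sketch of `performLeftJoin`, mirroring the INNER JOIN logic from Step 8 (rows are plain objects keyed by column name); `performRightJoin` is the same idea with the roles of the two tables swapped:

```javascript
// LEFT JOIN keeps every row of the main table, padding the join-table
// fields with null when no matching row exists.
function performLeftJoin(data, joinData, joinCondition, fields, table) {
    const leftKey = joinCondition.left.split('.')[1];
    const rightKey = joinCondition.right.split('.')[1];

    return data.flatMap(mainRow => {
        const matches = joinData.filter(joinRow => mainRow[leftKey] === joinRow[rightKey]);
        const rowsToUse = matches.length > 0 ? matches : [null]; // null marks "no match"
        return rowsToUse.map(joinRow =>
            fields.reduce((acc, field) => {
                const [tableName, fieldName] = field.split('.');
                acc[field] = tableName === table
                    ? mainRow[fieldName]
                    : (joinRow ? joinRow[fieldName] : null);
                return acc;
            }, {})
        );
    });
}
```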
125 |
126 | ### 9.3 Adding Tests for LEFT and RIGHT JOINs
127 | - Given the number of tests we will have, it would be good to refactor them into separate files. Consider creating `csvReader.test.js`, `queryExecuter.test.js`, and `queryParser.test.js` files in the `tests` folder and segregate the tests accordingly.
128 |
129 | > 💡**Do it yourself**: Refactor tests
130 |
131 | The following tests should pass now - [Link to the Commit](https://github.com/ChakshuGautam/stylusdb-sql/commit/7d4877d09055da7ef63ee6f2321db2e3fa87ad24)
--------------------------------------------------------------------------------
/docs/tutorials/10.md:
--------------------------------------------------------------------------------
1 | ## Step 10: Implementing GROUP BY and Aggregate Functions
2 |
3 | In this step we provide the much-needed aggregation capabilities to our SQL engine by implementing support for `GROUP BY` and aggregate functions.
4 |
5 | ### 10.1 Update the Parser to Handle GROUP BY Clauses
6 | - Modify `queryParser.js` to parse `GROUP BY` clauses in the SQL query. The parser should identify the fields to group by.
7 |
8 | Example snippet for updating `queryParser.js`:
9 |
10 | ```javascript
11 | // src/queryParser.js
12 |
13 | // ...existing code...
14 |
15 | function parseQuery(query) {
16 | // ...existing parsing logic...
17 |
18 | // Updated regex to capture GROUP BY clause
19 | const groupByRegex = /\sGROUP BY\s(.+)/i;
20 | const groupByMatch = query.match(groupByRegex);
21 |
22 | let groupByFields = null;
23 | if (groupByMatch) {
24 | groupByFields = groupByMatch[1].split(',').map(field => field.trim());
25 | }
26 |
27 | return {
28 | // ...existing parsed parts,
29 | groupByFields
30 | };
31 | }
32 |
33 | // ...remaining code...
34 |
35 | module.exports = { parseQuery, parseJoinClause };
36 | ```
37 |
38 | > 💡**Do it yourself**: Check if an aggregate function is being used with or without a `GROUP BY` clause and update the `parseQuery` function based on the hints in the above code snippet. One possible detection is sketched below.
39 |
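40 | A small sketch, assuming aggregates only ever appear as `FN(...)` calls in the query text:
41 | 
42 | ```javascript
43 | // Place after the GROUP BY parsing above.
44 | // True when the query uses COUNT/SUM/AVG/MIN/MAX but has no GROUP BY clause.
45 | const aggregateRegex = /(COUNT|SUM|AVG|MIN|MAX)\s*\(/i;
46 | const hasAggregateWithoutGroupBy = aggregateRegex.test(query) && !groupByFields;
47 | ```
48 | 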
40 | ### 10.2 Update the Execute Function to Apply GROUP BY and Aggregate Functions
41 | - Modify `executeSELECTQuery` in `src/index.js` to perform grouping based on the `GROUP BY` clause and apply aggregate functions like `COUNT`, `SUM`, `AVG`, `MIN`, and `MAX`.
42 |
43 | Example snippet for updating `executeSELECTQuery`:
44 | ```javascript
45 | // src/index.js
46 |
47 | // ...existing imports...
48 |
49 | // Helper function to apply GROUP BY and aggregate functions
50 | function applyGroupBy(data, groupByFields, aggregateFunctions) {
51 | // Implement logic to group data and calculate aggregates
52 | // ...
53 | }
54 |
55 | async function executeSELECTQuery(query) {
56 | const { fields, table, whereClauses, joinType, joinTable, joinCondition, groupByFields } = parseQuery(query);
57 | let data = await readCSV(`${table}.csv`);
58 |
59 | // ...existing logic for JOINs and WHERE clause...
60 |
61 | if (groupByFields) {
62 | data = applyGroupBy(data, groupByFields, fields);
63 | }
64 |
65 | // ...existing logic for field selection...
66 | }
67 |
68 | module.exports = executeSELECTQuery;
69 | ```
70 |
71 | > 💡**Do it yourself**: Implement the `applyGroupBy` function and update the `executeSELECTQuery` function based on the hints in the above code snippet. A minimal sketch of `applyGroupBy` follows.
72 |
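73 | A minimal sketch of `applyGroupBy`, assuming aggregate calls arrive in `fields` as strings like `COUNT(*)` or `AVG(age)`:
74 | 
75 | ```javascript
76 | function applyGroupBy(data, groupByFields, fields) {
77 |     // Bucket rows by the combined value of the GROUP BY fields.
78 |     const groups = new Map();
79 |     for (const row of data) {
80 |         const key = groupByFields.map(f => row[f]).join('|');
81 |         if (!groups.has(key)) groups.set(key, []);
82 |         groups.get(key).push(row);
83 |     }
84 | 
85 |     // Emit one row per group, computing any requested aggregates.
86 |     return [...groups.values()].map(rows => {
87 |         const result = {};
88 |         groupByFields.forEach(f => { result[f] = rows[0][f]; });
89 |         for (const field of fields) {
90 |             const match = field.match(/^(COUNT|SUM|AVG|MIN|MAX)\((.+)\)$/i);
91 |             if (!match) continue;
92 |             const [, fn, col] = match;
93 |             const values = rows.map(r => parseFloat(r[col])).filter(v => !isNaN(v));
94 |             switch (fn.toUpperCase()) {
95 |                 case 'COUNT': result[field] = rows.length; break;
96 |                 case 'SUM': result[field] = values.reduce((a, b) => a + b, 0); break;
97 |                 case 'AVG': result[field] = values.reduce((a, b) => a + b, 0) / values.length; break;
98 |                 case 'MIN': result[field] = Math.min(...values); break;
99 |                 case 'MAX': result[field] = Math.max(...values); break;
100 |             }
101 |         }
102 |         return result;
103 |     });
104 | }
105 | ```
106 | 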
73 | ### 10.3 Add Tests for GROUP BY and Aggregate Functions
74 | Update the test suite to include tests for queries using the `GROUP BY` clause and aggregate functions. [Commit for reference](https://github.com/ChakshuGautam/stylusdb-sql/commit/2df5a81650ce1f3846ec8e0b605aa2e7371dcf79)
75 |
76 | > 💡 **Do it yourself**: Think of both negative and positive scenarios and make sure to cover all cases.
77 |
--------------------------------------------------------------------------------
/docs/tutorials/11.md:
--------------------------------------------------------------------------------
1 | ## Step 11: Implementing ORDER BY Clause
2 |
3 | In this step we implement the functionality of ordering our records in a particular sequence by adding support for the `ORDER BY` SQL clause.
4 |
5 | ### 11.1 Update the Parser to Handle ORDER BY Clauses
6 | - Modify `queryParser.js` to parse `ORDER BY` clauses in the SQL query. The parser should identify the columns to order by and the sort direction (`ASC` or `DESC`).
7 |
8 | Example snippet for updating `queryParser.js`:
9 | ```javascript
10 | // src/queryParser.js
11 |
12 | // ...existing code...
13 |
14 | function parseQuery(query) {
15 | // ...existing parsing logic...
16 |
17 | // Updated regex to capture ORDER BY clause
18 | const orderByRegex = /\sORDER BY\s(.+)/i;
19 | const orderByMatch = query.match(orderByRegex);
20 |
21 | let orderByFields = null;
22 | if (orderByMatch) {
23 | orderByFields = orderByMatch[1].split(',').map(field => {
24 | const [fieldName, order] = field.trim().split(/\s+/);
25 | return { fieldName, order: order ? order.toUpperCase() : 'ASC' };
26 | });
27 | }
28 |
29 | return {
30 | // ...existing parsed parts,
31 | orderByFields
32 | };
33 | }
34 |
35 | // ...remaining code...
36 |
37 | module.exports = { parseQuery, parseJoinClause };
38 | ```
39 |
40 | > 💡 **Do it yourself**: Update the `parseQuery` function to add support for `ORDER BY` clause with the help of hints in the above code snippet.
41 |
42 | > 💡 **Ask yourself**: Is there some scope of refactoring in the above code snippet?
43 |
44 |
45 | ### 11.2 Update the Execute Function to Apply ORDER BY Clause
46 | - Modify `executeSELECTQuery` in `src/index.js` to sort the results based on the `ORDER BY` clause.
47 |
48 | ```javascript
49 | // src/index.js
50 |
51 | // ...existing imports and functions...
52 |
53 | async function executeSELECTQuery(query) {
54 | const { fields, table, whereClauses, joinType, joinTable, joinCondition, groupByFields, orderByFields } = parseQuery(query);
55 | let data = await readCSV(`${table}.csv`);
56 |
57 | // ...existing logic for JOINs, WHERE clause, and GROUP BY...
58 |
59 | if (orderByFields) {
60 | data.sort((a, b) => {
61 | for (let { fieldName, order } of orderByFields) {
62 | if (a[fieldName] < b[fieldName]) return order === 'ASC' ? -1 : 1;
63 | if (a[fieldName] > b[fieldName]) return order === 'ASC' ? 1 : -1;
64 | }
65 | return 0;
66 | });
67 | }
68 |
69 | // ...existing logic for field selection...
70 | }
71 |
72 | module.exports = executeSELECTQuery;
73 | ```
74 |
75 | > 💡 **Do it yourself**: Update the `executeSELECTQuery` function to add support for `ORDER BY` clause with the help of hints in the above code snippet.
76 |
77 | ### 11.3 Add Tests for ORDER BY Clause
78 |
79 | - Add tests for the `ORDER BY` clause. You can see the existing tests [here](https://github.com/ChakshuGautam/stylusdb-sql/commit/39efbc7d7a81296c58a31e5fe84224938f64bcf7) to do TDD.
80 |
81 | > 💡 **Ask yourself**: What is Test-Driven Development (TDD)?
--------------------------------------------------------------------------------
/docs/tutorials/12.md:
--------------------------------------------------------------------------------
1 | ## Step 12: Implementing LIMIT Clause
2 |
3 | In this step we add support to fetch only a specific number of records as a result of the SQL Query by implementing the `LIMIT` clause.
4 |
5 | ### 12.1 Update the Parser to Handle LIMIT Clauses
6 | - Modify `queryParser.js` to parse `LIMIT` clauses in the SQL query.
7 | The parser should identify the limit value.
8 |
9 | Example snippet for updating `queryParser.js`:
10 |
11 | ```javascript
12 | // src/queryParser.js
13 |
14 | // ...existing code...
15 |
16 | function parseQuery(query) {
17 | // ...existing parsing logic...
18 |
19 | // Updated regex to capture LIMIT clause
20 | const limitRegex = /\sLIMIT\s(\d+)/i;
21 | const limitMatch = query.match(limitRegex);
22 |
23 | let limit = null;
24 | if (limitMatch) {
25 | limit = parseInt(limitMatch[1]);
26 | }
27 |
28 | return {
29 | // ...existing parsed parts,
30 | limit
31 | };
32 | }
33 |
34 | // ...remaining code...
35 |
36 | module.exports = { parseQuery, parseJoinClause };
37 | ```
38 |
39 | > 💡 **Do it yourself**: Update the `parseQuery` function to add support for `LIMIT` clause with the help of hints in the above code snippet.
40 |
41 | > 💡 **Ask yourself**: Is there some scope of refactoring in the above code snippet?
42 |
43 | ### 12.2 Update the Execute Function to Apply LIMIT
44 | Modify `executeSELECTQuery` in `src/index.js` to apply the `LIMIT` clause on the result set.
45 | Example snippet for updating `executeSELECTQuery`:
46 |
47 | ```javascript
48 | // src/index.js
49 |
50 | // ...existing imports and functions...
51 |
52 | async function executeSELECTQuery(query) {
53 | const { fields, table, whereClauses, joinType, joinTable, joinCondition, groupByFields, orderByFields, limit } = parseQuery(query);
54 | let data = await readCSV(`${table}.csv`);
55 |
56 | // ...existing logic for JOINs, WHERE clause, GROUP BY, and ORDER BY...
57 |
58 | if (limit !== null) {
59 | data = data.slice(0, limit);
60 | }
61 |
62 | // ...existing logic for field selection...
63 | }
64 |
65 | module.exports = executeSELECTQuery;
66 | ```
67 |
68 | > 💡 **Do it yourself**: Update the `executeSELECTQuery` function to add support for `LIMIT` clause with the help of hints in the above code snippet.
69 |
70 | ### 12.3 Add Tests for LIMIT Clause
71 | - Update the test suite to include tests for queries using the `LIMIT` clause. [Commit for reference](https://github.com/ChakshuGautam/stylusdb-sql/commit/fd5ce77fd91e80655072fc6348d19d426fd12673)
72 |
--------------------------------------------------------------------------------
/docs/tutorials/13.md:
--------------------------------------------------------------------------------
1 | ## Step 13: Error Handling and Validation
2 |
3 | Every piece of software can error out, which makes it extremely crucial to handle errors and validate data. We'll add support for error handling and validation in this step.
4 |
5 | ### 13.1 Enhance Error Handling in Parser
6 | - Update `parseQuery` function in `queryParser.js` to include more comprehensive error messages and checks. Ensure that it validates the structure of the SQL query and catches common syntax errors.
7 |
8 | Example updates for `queryParser.js`:
9 |
10 | ```javascript
11 | // src/queryParser.js
12 |
13 | // ...existing code...
14 |
15 | function parseQuery(query) {
16 | try {
17 | // ...existing parsing logic...
18 |
19 | // Include validation checks and throw errors with descriptive messages
20 | if (!selectMatch) {
21 | throw new Error("Invalid SELECT clause. Ensure it follows 'SELECT field1, field2 FROM table' format.");
22 | }
23 | // Add more checks as needed for JOIN, WHERE, GROUP BY, ORDER BY, and LIMIT
24 |
25 | return {
26 | // ...existing parsed parts
27 | };
28 | } catch (error) {
29 | // Customize error message or log details if needed
30 | throw new Error(`Query parsing error: ${error.message}`);
31 | }
32 | }
33 |
34 | // ...remaining code...
35 |
36 | module.exports = { parseQuery, parseJoinClause };
37 | ```
38 |
39 | > 💡 **Do it yourself**: Update the `parseQuery` function to add some level of error handling.
40 |
41 | ### 13.2 Enhance Error Handling in Execute Function
42 | - Update `executeSELECTQuery` function in `src/index.js` to handle errors gracefully. Catch errors during the execution process and provide informative messages.
43 |
44 | Example updates for `executeSELECTQuery`:
45 |
46 | ```javascript
47 | // src/index.js
48 |
49 | // ...existing imports and functions...
50 |
51 | async function executeSELECTQuery(query) {
52 | try {
53 | const { fields, table, whereClauses, joinType, joinTable, joinCondition, groupByFields, orderByFields, limit } = parseQuery(query);
54 |
55 | // ...existing logic for reading data and applying JOINs, WHERE, GROUP BY, ORDER BY, and LIMIT...
56 |
57 | // Return the final data or handle specific error cases
58 | return data;
59 | } catch (error) {
60 | // Log error and provide user-friendly message
61 | console.error("Error executing query:", error);
62 | throw new Error(`Failed to execute query: ${error.message}`);
63 | }
64 | }
65 |
66 | module.exports = executeSELECTQuery;
67 | ```
68 |
69 | > 💡 **Do it yourself**: Update the `executeSELECTQuery` to add some level of error handling.
70 |
71 | ### 13.3 Update Tests to Cover Error Scenarios
72 | - Add tests to cover error scenarios in the parser and the execute function. Ensure that the error messages are as expected; one illustrative example follows. [Commit for reference](https://github.com/ChakshuGautam/stylusdb-sql/commit/c2214a1a75de155786a54b353964235d8d17864a)
73 |
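74 | An illustrative example (the exact message should match whatever your implementation throws):
75 | 
76 | ```javascript
77 | // tests/queryParser.test.js -- assumes parseQuery is required at the top of the file
78 | test('Invalid SELECT clause throws a descriptive error', () => {
79 |     expect(() => parseQuery('SELECTT name FROM student'))
80 |         .toThrow(/Query parsing error/);
81 | });
82 | ```
83 | 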
74 | Note that the above is just an introduction; more detailed exceptions will be added later.
--------------------------------------------------------------------------------
/docs/tutorials/14.md:
--------------------------------------------------------------------------------
1 | ## Step 14: Implementing DISTINCT Keyword
2 | ### 14.1 Update the Parser to Handle DISTINCT
3 | - Modify `parseQuery` in `queryParser.js` to recognize the `DISTINCT` keyword in the SQL query. The parser should identify when `DISTINCT` is used and modify the parsed query accordingly.
4 | 
5 | Example updates for `queryParser.js`:
6 | ```javascript
7 | // src/queryParser.js
8 |
9 | // Find where to add this code
10 | if (query.toUpperCase().includes('SELECT DISTINCT')) {
11 | isDistinct = true;
12 | query = query.replace('SELECT DISTINCT', 'SELECT');
13 | }
14 | ```
15 |
16 | ### 14.2 Update the Execute Function to Apply DISTINCT
17 | Modify `executeSELECTQuery` in `src/index.js` to apply the DISTINCT keyword, removing duplicates from the result set.
18 | Example updates for `executeSELECTQuery`:
19 |
20 | ```javascript
21 | // src/index.js
22 |
23 | // Find where to add this code
24 | if (isDistinct) {
25 | data = [...new Map(data.map(item => [fields.map(field => item[field]).join('|'), item])).values()];
26 | }
27 |
28 | module.exports = executeSELECTQuery;
29 | ```
30 |
31 | ### 14.3 Add Tests for DISTINCT Keyword
32 | Update `enrollment.csv` to include `5,Physics` and `student.csv` to include `5,Jane,22`. This will ensure the following tests pass.
33 |
34 | ```javascript
35 | test('Parse SQL Query with Basic DISTINCT', () => {
36 | const query = 'SELECT DISTINCT age FROM student';
37 | const parsed = parseQuery(query);
38 | expect(parsed).toEqual({
39 | fields: ['age'],
40 | table: 'student',
41 | isDistinct: true,
42 | whereClauses: [],
43 | groupByFields: null,
44 | joinType: null,
45 | joinTable: null,
46 | joinCondition: null,
47 | orderByFields: null,
48 | limit: null,
49 | hasAggregateWithoutGroupBy: false
50 | });
51 | });
52 |
53 | test('Parse SQL Query with DISTINCT and Multiple Columns', () => {
54 | const query = 'SELECT DISTINCT student_id, course FROM enrollment';
55 | const parsed = parseQuery(query);
56 | expect(parsed).toEqual({
57 | fields: ['student_id', 'course'],
58 | table: 'enrollment',
59 | isDistinct: true,
60 | whereClauses: [],
61 | groupByFields: null,
62 | joinType: null,
63 | joinTable: null,
64 | joinCondition: null,
65 | orderByFields: null,
66 | limit: null,
67 | hasAggregateWithoutGroupBy: false
68 | });
69 | });
70 |
71 | test('Parse SQL Query with DISTINCT and WHERE Clause', () => {
72 | const query = 'SELECT DISTINCT course FROM enrollment WHERE student_id = "1"';
73 | const parsed = parseQuery(query);
74 | expect(parsed).toEqual({
75 | fields: ['course'],
76 | table: 'enrollment',
77 | isDistinct: true,
78 | whereClauses: [{ field: 'student_id', operator: '=', value: '"1"' }],
79 | groupByFields: null,
80 | joinType: null,
81 | joinTable: null,
82 | joinCondition: null,
83 | orderByFields: null,
84 | limit: null,
85 | hasAggregateWithoutGroupBy: false
86 | });
87 | });
88 |
89 | test('Parse SQL Query with DISTINCT and JOIN Operations', () => {
90 | const query = 'SELECT DISTINCT student.name FROM student INNER JOIN enrollment ON student.id = enrollment.student_id';
91 | const parsed = parseQuery(query);
92 | expect(parsed).toEqual({
93 | fields: ['student.name'],
94 | table: 'student',
95 | isDistinct: true,
96 | whereClauses: [],
97 | groupByFields: null,
98 | joinType: 'INNER',
99 | joinTable: 'enrollment',
100 | joinCondition: {
101 | left: 'student.id',
102 | right: 'enrollment.student_id'
103 | },
104 | orderByFields: null,
105 | limit: null,
106 | hasAggregateWithoutGroupBy: false
107 | });
108 | });
109 |
110 | test('Parse SQL Query with DISTINCT, ORDER BY, and LIMIT', () => {
111 | const query = 'SELECT DISTINCT age FROM student ORDER BY age DESC LIMIT 2';
112 | const parsed = parseQuery(query);
113 | expect(parsed).toEqual({
114 | fields: ['age'],
115 | table: 'student',
116 | isDistinct: true,
117 | whereClauses: [],
118 | groupByFields: null,
119 | joinType: null,
120 | joinTable: null,
121 | joinCondition: null,
122 | orderByFields: [{ fieldName: 'age', order: 'DESC' }],
123 | limit: 2,
124 | hasAggregateWithoutGroupBy: false
125 | });
126 | });
127 |
128 | test('Parse SQL Query with DISTINCT on All Columns', () => {
129 | const query = 'SELECT DISTINCT * FROM student';
130 | const parsed = parseQuery(query);
131 | expect(parsed).toEqual({
132 | fields: ['*'],
133 | table: 'student',
134 | isDistinct: true,
135 | whereClauses: [],
136 | groupByFields: null,
137 | joinType: null,
138 | joinTable: null,
139 | joinCondition: null,
140 | orderByFields: null,
141 | limit: null,
142 | hasAggregateWithoutGroupBy: false
143 | });
144 | });
145 |
146 | test('Basic DISTINCT Usage', async () => {
147 | const query = 'SELECT DISTINCT age FROM student';
148 | const result = await executeSELECTQuery(query);
149 | expect(result).toEqual([{ age: '30' }, { age: '25' }, { age: '22' }, { age: '24' }]);
150 | });
151 |
152 | test('DISTINCT with Multiple Columns', async () => {
153 | const query = 'SELECT DISTINCT student_id, course FROM enrollment';
154 | const result = await executeSELECTQuery(query);
155 | // Expecting unique combinations of student_id and course
156 | expect(result).toEqual([
157 | { student_id: '1', course: 'Mathematics' },
158 | { student_id: '1', course: 'Physics' },
159 | { student_id: '2', course: 'Chemistry' },
160 | { student_id: '3', course: 'Mathematics' },
161 | { student_id: '5', course: 'Biology' },
162 | { student_id: '5', course: 'Physics' }
163 | ]);
164 | });
165 |
166 | // Not a good test right now
167 | test('DISTINCT with WHERE Clause', async () => {
168 | const query = 'SELECT DISTINCT course FROM enrollment WHERE student_id = "1"';
169 | const result = await executeSELECTQuery(query);
170 | // Expecting courses taken by student with ID 1
171 | expect(result).toEqual([{ course: 'Mathematics' }, { course: 'Physics' }]);
172 | });
173 |
174 | test('DISTINCT with JOIN Operations', async () => {
175 | const query = 'SELECT DISTINCT student.name FROM student INNER JOIN enrollment ON student.id = enrollment.student_id';
176 | const result = await executeSELECTQuery(query);
177 | // Expecting names of students who are enrolled in any course
178 | expect(result).toEqual([{ "student.name": 'John' }, { "student.name": 'Jane' }, { "student.name": 'Bob' }]);
179 | });
180 |
181 | test('DISTINCT with ORDER BY and LIMIT', async () => {
182 | const query = 'SELECT DISTINCT age FROM student ORDER BY age DESC LIMIT 2';
183 | const result = await executeSELECTQuery(query);
184 | // Expecting the two highest unique ages
185 | expect(result).toEqual([{ age: '30' }, { age: '25' }]);
186 | });
187 | ```
188 |
189 | Update all the others to ensure they pass as well.
--------------------------------------------------------------------------------
/docs/tutorials/15.md:
--------------------------------------------------------------------------------
1 | ## Step 15: Adding Support for LIKE Operator
2 | ### 15.1 Update the Parser to Handle LIKE Clauses
3 | Modify `parseQuery` in `queryParser.js` to recognize `LIKE` conditions in the `WHERE` clause.
4 |
5 | The parser should be capable of identifying `LIKE` patterns within the query.
6 | Example updates for `queryParser.js`:
7 |
8 | ```javascript
9 | if (conditionString.includes(' LIKE ')) {
10 |     const [field, pattern] = conditionString.split(/\sLIKE\s/i);
11 |     // Strip the surrounding quotes so the pattern matches the parsed values in the tests below.
12 |     return { field: field.trim(), operator: 'LIKE', value: pattern.trim().replace(/^'(.*)'$/, '$1') };
12 | }
13 | ```
14 |
15 | ### 15.2 Update the Execute Function to Apply LIKE Conditions
16 | Modify `executeSELECTQuery` in `src/index.js` to filter data using the LIKE operator.
17 | Example updates for `executeSELECTQuery`:
18 |
19 | ```javascript
20 | // Inside operator matching logic
21 | if (clause.operator === 'LIKE') {
22 | // Transform SQL LIKE pattern to JavaScript RegExp pattern
23 | const regexPattern = '^' + clause.value.replace(/%/g, '.*') + '$';
24 | return new RegExp(regexPattern, 'i').test(row[clause.field]);
25 | }
26 | ```
27 |
28 | ### 15.3 Add Tests for LIKE Operator
29 | Make sure the following test passes now.
30 |
31 | ```javascript
32 | test('Parse SQL Query with LIKE Clause', () => {
33 | const query = "SELECT name FROM student WHERE name LIKE '%Jane%'";
34 | const parsed = parseQuery(query);
35 | expect(parsed).toEqual({
36 | fields: ['name'],
37 | table: 'student',
38 | whereClauses: [{ field: 'name', operator: 'LIKE', value: '%Jane%' }],
39 | isDistinct: false,
40 | groupByFields: null,
41 | joinType: null,
42 | joinTable: null,
43 | joinCondition: null,
44 | orderByFields: null,
45 | limit: null,
46 | hasAggregateWithoutGroupBy: false
47 | });
48 | });
49 |
50 | test('Parse SQL Query with LIKE Clause and Wildcards', () => {
51 | const query = "SELECT name FROM student WHERE name LIKE 'J%'";
52 | const parsed = parseQuery(query);
53 | expect(parsed).toEqual({
54 | fields: ['name'],
55 | table: 'student',
56 | whereClauses: [{ field: 'name', operator: 'LIKE', value: 'J%' }],
57 | isDistinct: false,
58 | groupByFields: null,
59 | joinType: null,
60 | joinTable: null,
61 | joinCondition: null,
62 | orderByFields: null,
63 | limit: null,
64 | hasAggregateWithoutGroupBy: false
65 | });
66 | });
67 |
68 | test('Parse SQL Query with Multiple LIKE Clauses', () => {
69 | const query = "SELECT name FROM student WHERE name LIKE 'J%' AND age LIKE '2%'";
70 | const parsed = parseQuery(query);
71 | expect(parsed).toEqual({
72 | fields: ['name'],
73 | table: 'student',
74 | whereClauses: [
75 | { field: 'name', operator: 'LIKE', value: 'J%' },
76 | { field: 'age', operator: 'LIKE', value: '2%' }
77 | ],
78 | isDistinct: false,
79 | groupByFields: null,
80 | joinType: null,
81 | joinTable: null,
82 | joinCondition: null,
83 | orderByFields: null,
84 | limit: null,
85 | hasAggregateWithoutGroupBy: false
86 | });
87 | });
88 |
89 | test('Parse SQL Query with LIKE and ORDER BY Clauses', () => {
90 | const query = "SELECT name FROM student WHERE name LIKE '%e%' ORDER BY age DESC";
91 | const parsed = parseQuery(query);
92 | expect(parsed).toEqual({
93 | fields: ['name'],
94 | table: 'student',
95 | whereClauses: [{ field: 'name', operator: 'LIKE', value: '%e%' }],
96 | orderByFields: [{ fieldName: 'age', order: 'DESC' }],
97 | isDistinct: false,
98 | groupByFields: null,
99 | joinType: null,
100 | joinTable: null,
101 | joinCondition: null,
102 | limit: null,
103 | hasAggregateWithoutGroupBy: false
104 | });
105 | });
106 |
107 | test('Execute SQL Query with LIKE Operator for Name', async () => {
108 | const query = "SELECT name FROM student WHERE name LIKE '%Jane%'";
109 | const result = await executeSELECTQuery(query);
110 | // Expecting names containing 'Jane'
111 | expect(result).toEqual([{ name: 'Jane' }, { name: 'Jane' }]);
112 | });
113 |
114 | test('Execute SQL Query with LIKE Operator and Wildcards', async () => {
115 | const query = "SELECT name FROM student WHERE name LIKE 'J%'";
116 | const result = await executeSELECTQuery(query);
117 | // Expecting names starting with 'J'
118 | expect(result).toEqual([{ name: 'John' }, { name: 'Jane' }, { name: 'Jane' }]);
119 | });
120 |
121 | test('Execute SQL Query with LIKE Operator Case Insensitive', async () => {
122 | const query = "SELECT name FROM student WHERE name LIKE '%bob%'";
123 | const result = await executeSELECTQuery(query);
124 | // Expecting names 'Bob' (case insensitive)
125 | expect(result).toEqual([{ name: 'Bob' }]);
126 | });
127 |
128 | test('Execute SQL Query with LIKE Operator and DISTINCT', async () => {
129 | const query = "SELECT DISTINCT name FROM student WHERE name LIKE '%e%'";
130 | const result = await executeSELECTQuery(query);
131 | // Expecting unique names containing 'e'
132 | expect(result).toEqual([{ name: 'Jane' }, { name: 'Alice' }]);
133 | });
134 |
135 | test('LIKE with ORDER BY and LIMIT', async () => {
136 | const query = "SELECT name FROM student WHERE name LIKE '%a%' ORDER BY name ASC LIMIT 2";
137 | const result = await executeSELECTQuery(query);
138 | // Expecting the first two names alphabetically that contain 'a'
139 | expect(result).toEqual([{ name: 'Alice' }, { name: 'Jane' }]);
140 | });
141 | ```
142 |
--------------------------------------------------------------------------------
/docs/tutorials/16.md:
--------------------------------------------------------------------------------
1 | ## Step 16: Adding CI Support
2 |
3 | Let's add continuous integration (CI) support to our application using `GitHub Actions` so that it automatically runs the tests on each commit to GitHub and lets us know if something is off or failing.
4 | > **💡Do it yourself**: Read more about CI/CD.
5 |
6 | ### 16.1 Setup directory
7 | The code for GitHub Actions CI/CD resides inside the `.github/workflows` folder at the root of your project (GitHub repository).
8 |
9 | Create the folder either using the GUI of your editor/file manager or you can follow the steps below to create it using the command line.
10 |
11 | ```bash
12 | cd /path/to/your/project/root
13 | mkdir -p .github/workflows
14 | ```
15 |
16 | ### 16.2 Create a `tests.yml` file
17 |
18 | Create a file named `tests.yml` in the `.github/workflows` directory and paste the below configuration.
19 |
20 | ```yaml
21 | name: Run Tests
22 |
23 | on:
24 | push:
25 | branches: [ main ]
26 | pull_request:
27 | branches: [ main ]
28 |
29 | jobs:
30 | build:
31 |
32 | runs-on: ubuntu-latest
33 |
34 | strategy:
35 | matrix:
36 | node-version: [14.x, 16.x, 18.x]
37 |
38 | steps:
39 | - uses: actions/checkout@v3
40 | - name: Use Node.js ${{ matrix.node-version }}
41 | uses: actions/setup-node@v3
42 | with:
43 | node-version: ${{ matrix.node-version }}
44 | - run: npm i
45 | - run: npm test
46 | ```
47 |
48 | ### 16.3 Commit to github and see the tests passing
49 |
50 | Commit this folder to the GitHub repository you must have created to submit your assignment, and you will see the tests passing.
51 |
52 | > **💡Do it yourself**: Look into toolings like `husky`.
--------------------------------------------------------------------------------
/docs/tutorials/17.md:
--------------------------------------------------------------------------------
1 | ## Step 17: Basic INSERT Statement Support
2 | In this exercise we will support only statements of the following format: `INSERT INTO grades (student_id, course, grade) VALUES ('4', 'Physics', 'A')`.
3 |
4 | ### 17.1 Update the Query Parser
5 | Add a function called `parseINSERTQuery` to the `queryParser.js` file. This function should take a query string as input and return an object with the following structure:
6 |
7 | ```javascript
8 | {
9 | type: 'INSERT',
10 | table: 'grades',
11 | columns: [ 'student_id', 'course', 'grade' ],
12 | values: [ "'4'", "'Physics'", "'A'" ]
13 | }
14 | ```
15 |
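16 | A minimal sketch of such a parser, assuming the simple single-row format above (no escaping, no nested parentheses):
17 | 
18 | ```javascript
19 | // src/queryParser.js
20 | 
21 | function parseINSERTQuery(query) {
22 |     const insertRegex = /INSERT INTO (\w+)\s\(([^)]+)\)\sVALUES\s\(([^)]+)\)/i;
23 |     const match = query.match(insertRegex);
24 |     if (!match) throw new Error('Invalid INSERT INTO syntax.');
25 | 
26 |     const [, table, columns, values] = match;
27 |     return {
28 |         type: 'INSERT',
29 |         table,
30 |         columns: columns.split(',').map(col => col.trim()),
31 |         // Values keep their quotes here, matching the structure above.
32 |         values: values.split(',').map(val => val.trim())
33 |     };
34 | }
35 | ```
36 | 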
16 | ### 17.2 Update the Query Executer
17 | Add a function called `executeINSERTQuery` to the `queryExecuter.js` file. This function should take a query string as input and insert the data into the CSV file. The function should return a promise that resolves to the number of rows inserted.
18 |
19 |
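20 | A minimal sketch, reusing `readCSV`/`writeCSV` from `csvStorage.js` and the `parseINSERTQuery` function from 17.1 (stripping the quotes before storage is an assumption):
21 | 
22 | ```javascript
23 | // src/queryExecuter.js
24 | 
25 | async function executeINSERTQuery(query) {
26 |     const { table, columns, values } = parseINSERTQuery(query);
27 |     const data = await readCSV(`${table}.csv`);
28 | 
29 |     const newRow = {};
30 |     columns.forEach((column, index) => {
31 |         // Strip the surrounding single quotes before storing the value.
32 |         newRow[column] = values[index].replace(/^'(.*)'$/, '$1');
33 |     });
34 |     data.push(newRow);
35 | 
36 |     await writeCSV(`${table}.csv`, data);
37 |     return 1; // number of rows inserted (single-row format only)
38 | }
39 | ```
40 | 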
20 | ### 17.3 Make sure the following test passes
21 |
22 | Create this test in a file called `tests/insertExecuter.test.js`
23 |
24 | ```javascript
25 | const { executeINSERTQuery } = require('../src/queryExecuter');
26 | const { readCSV, writeCSV } = require('../src/csvStorage');
27 | const fs = require('fs');
28 |
29 | // Helper function to create grades.csv with initial data
30 | async function createGradesCSV() {
31 | const initialData = [
32 | { student_id: '1', course: 'Mathematics', grade: 'A' },
33 | { student_id: '2', course: 'Chemistry', grade: 'B' },
34 | { student_id: '3', course: 'Mathematics', grade: 'C' }
35 | ];
36 | await writeCSV('grades.csv', initialData);
37 | }
38 |
39 | // Test to INSERT a new grade and verify
40 | test('Execute INSERT INTO Query for grades.csv', async () => {
41 | // Create grades.csv with initial data
42 | await createGradesCSV();
43 |
44 | // Execute INSERT statement
45 | const insertQuery = "INSERT INTO grades (student_id, course, grade) VALUES ('4', 'Physics', 'A')";
46 | await executeINSERTQuery(insertQuery);
47 |
48 | // Verify the new entry
49 | const updatedData = await readCSV('grades.csv');
50 | const newEntry = updatedData.find(row => row.student_id === '4' && row.course === 'Physics');
51 | console.log(updatedData)
52 | expect(newEntry).toBeDefined();
53 | expect(newEntry.grade).toEqual('A');
54 |
55 | // Cleanup: Delete grades.csv
56 | fs.unlinkSync('grades.csv');
57 | });
58 | ```
59 |
60 | Note that there is some refactoring also done in this step, which is reflected in the above test case. Make sure you make the relevant changes to your code as well.
--------------------------------------------------------------------------------
/docs/tutorials/18.md:
--------------------------------------------------------------------------------
1 | ## Step 18: Implementing DELETE Statement Support
2 |
3 | ### 18.1 Update the Parser to Handle DELETE Statements
4 | Modify `queryParser.js` to recognize `DELETE FROM` statements.
5 | The parser should identify the table name and conditions for deletion.
6 |
7 | It should return an object with the following structure:
8 | ```javascript
9 | {
10 | type: 'DELETE',
11 | table: 'grades',
12 | whereClauses: [
13 | { column: 'student_id', operator: '=', value: "'4'" },
14 | { column: 'course', operator: '=', value: "'Physics'" }
15 | ]
16 | }
17 | ```
18 |
19 | ### 18.2 Update the Executer to Handle DELETE Statements
20 |
21 | ```javascript
22 | // src/queryExecuter.js
23 |
24 | async function executeDELETEQuery(query) {
25 | const { table, whereClauses } = parseDeleteQuery(query);
26 | let data = await readCSV(`${table}.csv`);
27 |
28 | if (whereClauses.length > 0) {
29 | // Filter out the rows that meet the where clause conditions
30 | // Implement this.
31 | } else {
32 | // If no where clause, clear the entire table
33 | data = [];
34 | }
35 |
36 | // Save the updated data back to the CSV file
37 | await writeCSV(`${table}.csv`, data);
38 |
39 | return { message: "Rows deleted successfully." };
40 | }
41 |
42 | module.exports = { executeSELECTQuery, executeINSERTQuery, executeDELETEQuery };
43 | ```
44 |
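45 | One way to fill in that filter: a self-contained sketch that handles only the `=` operator (extend it for the other operators your WHERE parsing supports):
46 | 
47 | ```javascript
48 | // Keep only the rows that do NOT satisfy every where clause.
49 | data = data.filter(row =>
50 |     !whereClauses.every(({ column, operator, value }) => {
51 |         const target = value.replace(/^'(.*)'$/, '$1'); // strip surrounding quotes
52 |         return operator === '=' && row[column] === target;
53 |     })
54 | );
55 | ```
56 | 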
45 | ### 18.3 Make sure the following test passes
46 |
47 | Create this test in a file called `tests/deleteExecuter.test.js`
48 |
49 | ```javascript
50 | const { executeDELETEQuery } = require('../src/queryExecuter');
51 | const { readCSV, writeCSV } = require('../src/csvStorage');
52 | const fs = require('fs');
53 |
54 | // Helper function to create courses.csv with initial data
55 | async function createCoursesCSV() {
56 | const initialData = [
57 | { course_id: '1', course_name: 'Mathematics', instructor: 'Dr. Smith' },
58 | { course_id: '2', course_name: 'Chemistry', instructor: 'Dr. Jones' },
59 | { course_id: '3', course_name: 'Physics', instructor: 'Dr. Taylor' }
60 | ];
61 | await writeCSV('courses.csv', initialData);
62 | }
63 |
64 | // Test to DELETE a course and verify
65 | test('Execute DELETE FROM Query for courses.csv', async () => {
66 | // Create courses.csv with initial data
67 | await createCoursesCSV();
68 |
69 | // Execute DELETE statement
70 | const deleteQuery = "DELETE FROM courses WHERE course_id = '2'";
71 | await executeDELETEQuery(deleteQuery);
72 |
73 | // Verify the course was removed
74 | const updatedData = await readCSV('courses.csv');
75 | const deletedCourse = updatedData.find(course => course.course_id === '2');
76 | expect(deletedCourse).toBeUndefined();
77 |
78 | // Cleanup: Delete courses.csv
79 | fs.unlinkSync('courses.csv');
80 | });
81 | ```
82 |
--------------------------------------------------------------------------------
/docs/tutorials/19.md:
--------------------------------------------------------------------------------
1 | ## Step 19: CLI Integration
2 | ### 19.1 Create a Basic CLI for the SQL Engine
3 | We'll use Node.js to create a simple CLI that takes SQL commands as input and displays the results. Node's `readline` module can be used for input, and console logging can display the results.
4 |
5 | - *Setup a New CLI File*: Create a new file `cli.js` in your project.
6 | - *Implement Basic Command Line Interface*:
7 |
8 | Example implementation in cli.js:
9 |
10 | ```javascript
11 | const readline = require('readline');
12 | const { executeSELECTQuery, executeINSERTQuery, executeDELETEQuery } = require('./queryExecuter');
13 |
14 | const rl = readline.createInterface({
15 | input: process.stdin,
16 | output: process.stdout
17 | });
18 |
19 | rl.setPrompt('SQL> ');
20 | console.log('SQL Query Engine CLI. Enter your SQL commands, or type "exit" to quit.');
21 |
22 | rl.prompt();
23 |
24 | rl.on('line', async (line) => {
25 | if (line.toLowerCase() === 'exit') {
26 | rl.close();
27 | return;
28 | }
29 |
30 | try {
31 | // Execute the query - do your own implementation
32 | }catch (error) {
33 | console.error('Error:', error.message);
34 | }
35 |
36 | rl.prompt();
37 | }).on('close', () => {
38 | console.log('Exiting SQL CLI');
39 | process.exit(0);
40 | });
41 | ```
42 |
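42 | One possible dispatch for the `try` block above, as a sketch; note the `Result:` prefix, which the test in 19.2 extracts with a regex:
43 | 
44 | ```javascript
45 | if (/^SELECT/i.test(line)) {
46 |     const result = await executeSELECTQuery(line);
47 |     console.log('Result:', result);
48 | } else if (/^INSERT/i.test(line)) {
49 |     console.log('Result:', await executeINSERTQuery(line));
50 | } else if (/^DELETE/i.test(line)) {
51 |     console.log('Result:', await executeDELETEQuery(line));
52 | } else {
53 |     console.log('Unsupported command.');
54 | }
55 | ```
56 | 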
43 | - *Running the CLI*: Users can run this CLI by executing `node src/cli.js` in their command line.
44 | It should look something like this:
45 |
46 | ![Demo of the CLI in action](./assets/tutorial-19.gif)
47 |
48 | - *Interacting with the CLI*: Users can type SQL commands directly into the CLI. The CLI will then use the appropriate function (`SELECT`, `INSERT`, or `DELETE`) to execute the command and display the results.
49 |
50 | ### 19.2 Make sure the following test passes
51 |
52 | Create this test in a file called `tests/cli.test.js`
53 |
54 | ```javascript
55 | const child_process = require('child_process');
56 | const path = require('path');
57 |
58 | test('DISTINCT with Multiple Columns via CLI', (done) => {
59 | const cliPath = path.join(__dirname, '..', 'src', 'cli.js');
60 | const cliProcess = child_process.spawn('node', [cliPath]);
61 |
62 | let outputData = "";
63 | cliProcess.stdout.on('data', (data) => {
64 | outputData += data.toString();
65 | });
66 |
67 | cliProcess.on('exit', () => {
68 | // Define a regex pattern to extract the JSON result
69 | const cleanedOutput = outputData.replace(/\s+/g, ' ');
70 |
71 |         const resultRegex = /Result: (\[.+\])/s;
72 |         const match = cleanedOutput.match(resultRegex);
73 | 
74 |         if (match && match[1]) {
75 |             // Fix the JSON output: quote keys and swap single quotes so it parses.
76 |             const jsonText = match[1].replace(/'/g, '"').replace(/(\w+):/g, '"$1":');
77 |             // Parse the captured JSON string
78 |             const results = JSON.parse(jsonText);
79 |
80 | // Validation logic
81 | expect(results).toEqual([
82 | { student_id: '1', course: 'Mathematics' },
83 | { student_id: '1', course: 'Physics' },
84 | { student_id: '2', course: 'Chemistry' },
85 | { student_id: '3', course: 'Mathematics' },
86 | { student_id: '5', course: 'Biology' },
87 | { student_id: '5', course: 'Physics' }
88 | ]);
89 | console.log("Test passed successfully");
90 | } else {
91 | throw new Error('Failed to parse CLI output');
92 | }
93 |
94 | done();
95 | });
96 |
97 | // Introduce a delay before sending the query
98 | setTimeout(() => {
99 | cliProcess.stdin.write("SELECT DISTINCT student_id, course FROM enrollment\n");
100 | setTimeout(() => {
101 | cliProcess.stdin.write("exit\n");
102 | }, 1000); // 1 second delay
103 | }, 1000); // 1 second delay
104 | });
105 | ```
106 |
--------------------------------------------------------------------------------
/docs/tutorials/20.md:
--------------------------------------------------------------------------------
1 | ## Step 20: Publish Your Package to NPM
2 | - Add a Shebang Line: Ensure the first line of your `cli.js` file starts with a shebang line that specifies the path to the Node.js interpreter. This line is crucial for letting the system know that the script should be run with Node.js.
3 | - Make sure everything is perfect in the `package.json`. See the additional commands added for the CLI to work (a sketch follows after this list). Test this locally with `npm link` and by running the command.
4 | - *Update the Version*: Update the version in package.json.
5 | - *Create an npm Account*: If you don't have an npm account, create one at npmjs.com.
6 | - *Update the Readme*: Update the Readme to include the commands that you would want to show up on npmjs.com.
7 | - *Login to npm*: In your terminal, log in to npm using npm login and enter your credentials.
8 | - *Publish the Package*:
9 | - Run `npm publish` in your project directory.
10 | - Ensure there are no errors in the output, and your package should now be live on npm.
11 | - *Verify Publication*:
12 | - Check your package on the npm website - https://www.npmjs.com/package/stylusdb-sql.
13 | - Try installing it in a separate project using `npm install stylusdb-sql`
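14 | 
15 | A minimal sketch of the first two items above. The shebang goes at the very top of `src/cli.js`:
16 | 
17 | ```javascript
18 | #!/usr/bin/env node
19 | // ...rest of cli.js...
20 | ```
21 | 
22 | And `package.json` needs a `bin` entry so npm can expose the command; the command name below is just an example:
23 | 
24 | ```json
25 | {
26 |   "bin": {
27 |     "stylusdb-sql": "./src/cli.js"
28 |   }
29 | }
30 | ```
31 | 
32 | After `npm link`, running `stylusdb-sql` in a terminal should start the CLI.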
--------------------------------------------------------------------------------
/docs/tutorials/21.md:
--------------------------------------------------------------------------------
1 | ## Step 21: Add Approximate Counting using `HyperLogLog`
2 |
3 | ### 21.1 First things first, create a utility to generate large files
4 |
5 | Since the effect of `HyperLogLog` is best seen on large files, we need a utility function which generates large files, let's say with `10_000_000` data points. To do this, create a file named `generateLargeFile.js` in the `util` folder and add the following logic to it.
6 |
7 | ```js
8 | const fs = require('fs');
9 | const { faker } = require('@faker-js/faker');
10 | const { parse } = require('json2csv');
11 |
12 | async function generateLargeCSV(filename) {
13 | let data = [];
14 | for (let i = 1; i <= 10_000_000; i++) {
15 | const record = {
16 | id: i,
17 | name: faker.person.firstName(),
18 | age: faker.number.int({ min: 18, max: 100 }),
19 | };
20 | data.push(record);
21 |
22 | let rows;
23 | if (i % 500_000 === 0) {
24 | console.log(`Generated ${i} records`);
25 | if (!fs.existsSync(filename)) {
26 | rows = parse(data, { header: true });
27 |             } else {
28 |                 // Rows without headers; prepend a newline so chunks don't run together.
29 |                 rows = '\r\n' + parse(data, { header: false });
30 |             }
31 | fs.appendFileSync(filename, rows);
32 | data = [];
33 | }
34 |
35 | }
36 |     // appendFileSync creates the file if it does not already exist.
37 | 
38 |     // Terminate the file with a trailing newline.
39 |     fs.appendFileSync(filename, "\r\n");
40 | }
41 |
42 | generateLargeCSV('student_large.csv')
43 | ```
44 |
45 | ### 21.2 Implement CSV reader for `HyperLogLog`
46 |
47 | Since `HyperLogLog` is a data structure which keeps data stored in a hashed format, we implement a separate CSV reader for it. Create a function named `readCSVForHLL` in your `csvStorage.js`.
48 | Sample logic for it can be found here:
49 | ```js
50 | // Assumed imports: Node's fs, the same csv-parser used by readCSV, and the `hll` npm package.
51 | const fs = require('fs');
52 | const csv = require('csv-parser');
53 | const hll = require('hll');
54 | 
55 | function readCSVForHLL(filePath, bitSampleSize = 12, digestSize = 128) {
56 |     const h = hll({ bitSampleSize: bitSampleSize, digestSize: digestSize });
53 |
54 | return new Promise((resolve, reject) => {
55 | fs.createReadStream(filePath)
56 | .pipe(csv())
57 | .on('data', (data) => h.insert(JSON.stringify(data)))
58 | .on('end', () => {
59 | resolve(h);
60 | })
61 | .on('error', (error) => {
62 | reject(error);
63 | });
64 | });
65 | }
66 | ```
67 |
68 | ### 21.3 Update the `queryParser` implementation to identify `COUNT` and `APPROXIMATE_COUNT` as valid tokens
69 |
70 | Since our SQL queries will now accept `COUNT` and `APPROXIMATE_COUNT` as valid tokens, we need to update the logic of our parser to identify and process them accordingly. Update your `queryParser.js` with the required logic (regex) to identify them; one possible detection is sketched below.
71 |
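72 | A sketch of one possible detection (the regexes are illustrative, not the reference solution):
73 | 
74 | ```js
75 | // src/queryParser.js
76 | 
77 | const isApproximateCount = /APPROXIMATE_COUNT\s*\(/i.test(query);
78 | // Normalize so the rest of the parser can treat both tokens the same way.
79 | if (isApproximateCount) {
80 |     query = query.replace(/APPROXIMATE_COUNT/gi, 'COUNT');
81 | }
82 | 
83 | // Detect COUNT(DISTINCT ...) and capture the column list, e.g. "name, age".
84 | const countDistinctMatch = query.match(/COUNT\s*\(\s*DISTINCT\s+\(?([^)]+)\)?\s*\)/i);
85 | const isCountDistinct = Boolean(countDistinctMatch);
86 | const distinctFields = isCountDistinct
87 |     ? countDistinctMatch[1].split(',').map(field => field.trim())
88 |     : [];
89 | ```
90 | 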
72 |
73 | ### 21.4 Update the `executeSELECTQuery` function to add support for `COUNT` and `APPROXIMATE_COUNT`
74 |
75 | Update the existing logic in the `executeSELECTQuery` function to identify and process the `COUNT` and `APPROXIMATE_COUNT` commands in your SQL query.
76 | Some snippets that might be helpful are:
77 | ```js
78 | // getting approx counts
79 | if (isApproximateCount && fields.length === 1 && fields[0] === 'COUNT(*)' && whereClauses.length === 0) {
80 | let hll = await readCSVForHLL(`${table}.csv`);
81 | return [{ 'APPROXIMATE_COUNT(*)': hll.estimate() }];
82 | }
83 |
84 | // Distinct inside count - example "SELECT COUNT (DISTINCT student.name) FROM student"
85 | if (isCountDistinct) {
86 |
87 | if (isApproximateCount) {
88 | var h = hll({ bitSampleSize: 12, digestSize: 128 });
89 | orderedResults.forEach(row => h.insert(distinctFields.map(field => row[field]).join('|')));
90 | return [{ [`APPROXIMATE_${fields[0]}`]: h.estimate() }];
91 | }
92 | else {
93 | let distinctResults = [...new Map(orderedResults.map(item => [distinctFields.map(field => item[field]).join('|'), item])).values()];
94 | return [{ [fields[0]]: distinctResults.length }];
95 | }
96 | }
97 | ```
98 |
99 |
100 | ### 21.5 Write a test case for approximate count
101 |
102 | Since we are following `TDD` in this tutorial, we will now write a test case for our implementation.
103 | Create a file named `approximateLargeFile.test.js` in your `tests` folder and add the following test cases:
104 |
105 | ```js
106 | const fs = require('fs');
107 | const { executeSELECTQuery } = require('../src/queryExecuter');
108 | const jestConsole = console;
109 |
110 | beforeEach(() => {
111 | global.console = require('console');
112 | });
113 |
114 | afterEach(() => {
115 | global.console = jestConsole;
116 | });
117 |
118 | test('Large File Count(*) - Approximate and Exact', async () => {
119 | // Test Exact Count
120 |
121 | const startMemoryUsageExact = process.memoryUsage().heapUsed;
122 | const startTimeExact = performance.now();
123 |
124 | const queryExact = "SELECT COUNT(*) FROM student_large";
125 | const resultExact = await executeSELECTQuery(queryExact);
126 | const exactResult = resultExact[0]['COUNT(*)'];
127 |
128 | const endTimeExact = performance.now();
129 | const endMemoryUsageExact = process.memoryUsage().heapUsed;
130 |
131 | console.log(`Execution Time for Exact Count: ${(endTimeExact - startTimeExact).toFixed(2)} ms`);
132 | console.log(`Start Memory for Exact Count: ${startMemoryUsageExact / 1024 / 1024} MB`);
133 | console.log(`End Memory for Exact Count: ${endMemoryUsageExact / 1024 / 1024} MB`);
134 | console.log(`Memory Used for Exact Count: ${(endMemoryUsageExact - startMemoryUsageExact) / 1024 / 1024} MB`);
135 |
136 | const startMemoryUsage = process.memoryUsage().heapUsed;
137 | const startTime = performance.now();
138 |
139 | const query = "SELECT APPROXIMATE_COUNT(*) FROM student_large";
140 | const result = await executeSELECTQuery(query);
141 |
142 | // Expect the approximate count to be within 5% of the actual count
143 | expect(result[0]['APPROXIMATE_COUNT(*)']).toBeGreaterThan(exactResult - 0.05 * exactResult);
144 | expect(result[0]['APPROXIMATE_COUNT(*)']).toBeLessThan(exactResult + 0.05 * exactResult);
145 |
146 | const endTime = performance.now();
147 | const endMemoryUsage = process.memoryUsage().heapUsed;
148 |
149 | console.log(`Execution Time for Approximate Count: ${(endTime - startTime).toFixed(2)} ms`);
150 | console.log(`Start Memory: ${startMemoryUsage / 1024 / 1024} MB`);
151 | console.log(`End Memory: ${endMemoryUsage / 1024 / 1024} MB`);
152 | console.log(`Memory Used for Approximate Count: ${(endMemoryUsage - startMemoryUsage) / 1024 / 1024} MB`);
153 |
154 | }, 120000);
155 |
156 | test('Execute SQL Query with COUNT with DISTINCT on a column', async () => {
157 | const queryExact = "SELECT COUNT(DISTINCT (name, age)) FROM student_large";
158 | const resultExact = await executeSELECTQuery(queryExact);
159 | console.log({ resultExact });
160 | const exactResult = resultExact[0]['COUNT(DISTINCT (name, age))'];
161 |
162 | const query = "SELECT APPROXIMATE_COUNT(DISTINCT (name, age)) FROM student_large";
163 | const result = await executeSELECTQuery(query);
164 |
165 |     // Expect the approximate count to be within 5% of the actual count
166 | expect(result[0]['APPROXIMATE_COUNT(DISTINCT (name, age))']).toBeGreaterThan(exactResult - 0.05 * exactResult);
167 | expect(result[0]['APPROXIMATE_COUNT(DISTINCT (name, age))']).toBeLessThan(exactResult + 0.05 * exactResult);
168 | }, 120000);
169 | ```
170 |
171 | ### 21.6 Update the tests for other files to test for the updates you made in other parts of the implementation
172 |
173 | Since we have made changes to other parts of the implementation, such as `csvStorage.js`, `queryParser.js` and `queryExecuter.js`, we need to update the tests for those files to cover the new functionality.
--------------------------------------------------------------------------------
/docs/tutorials/assets/tutorial-19.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ChakshuGautam/stylusdb-sql/c52af47d8e8f0ba35a2bf7f063c7e0da78816f80/docs/tutorials/assets/tutorial-19.gif
--------------------------------------------------------------------------------
/enrollment.csv:
--------------------------------------------------------------------------------
1 | student_id,course
2 | 1,Mathematics
3 | 1,Physics
4 | 2,Chemistry
5 | 3,Mathematics
6 | 5,Biology
7 | 5,Physics
--------------------------------------------------------------------------------
/example/prisma/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 | .pnpm-debug.log*
9 |
10 | # Diagnostic reports (https://nodejs.org/api/report.html)
11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
12 |
13 | # Runtime data
14 | pids
15 | *.pid
16 | *.seed
17 | *.pid.lock
18 |
19 | # Directory for instrumented libs generated by jscoverage/JSCover
20 | lib-cov
21 |
22 | # Coverage directory used by tools like istanbul
23 | coverage
24 | *.lcov
25 |
26 | # nyc test coverage
27 | .nyc_output
28 |
29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
30 | .grunt
31 |
32 | # Bower dependency directory (https://bower.io/)
33 | bower_components
34 |
35 | # node-waf configuration
36 | .lock-wscript
37 |
38 | # Compiled binary addons (https://nodejs.org/api/addons.html)
39 | build/Release
40 |
41 | # Dependency directories
42 | node_modules/
43 | jspm_packages/
44 |
45 | # Snowpack dependency directory (https://snowpack.dev/)
46 | web_modules/
47 |
48 | # TypeScript cache
49 | *.tsbuildinfo
50 |
51 | # Optional npm cache directory
52 | .npm
53 |
54 | # Optional eslint cache
55 | .eslintcache
56 |
57 | # Optional stylelint cache
58 | .stylelintcache
59 |
60 | # Microbundle cache
61 | .rpt2_cache/
62 | .rts2_cache_cjs/
63 | .rts2_cache_es/
64 | .rts2_cache_umd/
65 |
66 | # Optional REPL history
67 | .node_repl_history
68 |
69 | # Output of 'npm pack'
70 | *.tgz
71 |
72 | # Yarn Integrity file
73 | .yarn-integrity
74 |
75 | # dotenv environment variable files
76 | .env
77 | .env.development.local
78 | .env.test.local
79 | .env.production.local
80 | .env.local
81 |
82 | # parcel-bundler cache (https://parceljs.org/)
83 | .cache
84 | .parcel-cache
85 |
86 | # Next.js build output
87 | .next
88 | out
89 |
90 | # Nuxt.js build / generate output
91 | .nuxt
92 | dist
93 |
94 | # Gatsby files
95 | .cache/
96 | # Comment in the public line in if your project uses Gatsby and not Next.js
97 | # https://nextjs.org/blog/next-9-1#public-directory-support
98 | # public
99 |
100 | # vuepress build output
101 | .vuepress/dist
102 |
103 | # vuepress v2.x temp and cache directory
104 | .temp
105 | .cache
106 |
107 | # Docusaurus cache and generated files
108 | .docusaurus
109 |
110 | # Serverless directories
111 | .serverless/
112 |
113 | # FuseBox cache
114 | .fusebox/
115 |
116 | # DynamoDB Local files
117 | .dynamodb/
118 |
119 | # TernJS port file
120 | .tern-port
121 |
122 | # Stores VSCode versions used for testing VSCode extensions
123 | .vscode-test
124 |
125 | # yarn v2
126 | .yarn/cache
127 | .yarn/unplugged
128 | .yarn/build-state.yml
129 | .yarn/install-state.gz
130 | .pnp.*
131 |
132 |
133 | **/.DS_Store
--------------------------------------------------------------------------------
/example/prisma/client.js:
--------------------------------------------------------------------------------
1 | import net from 'net';
2 | export class Client {
3 | socket = new net.Socket();
4 | isConnected = false;
5 | isWaitingForResponse = false;
6 | responseHandlers = {};
7 |
8 | constructor(url) {
9 | this.url = url;
10 | }
11 |
12 | connect() {
13 | return new Promise((resolve, reject) => {
14 | this.socket.connect(this.url.split(":")[1], this.url.split(":")[0], () => {
15 | console.log('Attempting to connect to the server...');
16 | });
17 |
18 | let buffer = '';
19 |
20 | this.socket.on('data', (data) => {
21 | buffer += data.toString();
22 | let boundary = buffer.indexOf('\n');
23 | while (boundary !== -1) {
24 | const message = buffer.substring(0, boundary).trim();
25 | buffer = buffer.substring(boundary + 1);
26 | boundary = buffer.indexOf('\n');
27 |
28 | this.handleResponse(message);
29 | }
30 | });
31 |
32 | this.socket.on('connect', () => {
33 | this.isConnected = true;
34 | resolve();
35 | });
36 |
37 | this.socket.on('error', (error) => {
38 | console.error('Error:', error.message);
39 | this.isConnected = false;
40 | reject(error);
41 | });
42 |
43 | this.socket.on('close', () => {
44 | console.log('Connection closed');
45 | this.isConnected = false;
46 | });
47 | });
48 | }
49 |
50 |
51 | disconnect() {
52 | this.socket.end();
53 | }
54 |
55 | setup() {
56 | this.socket.connect(this.url.split(":")[1], this.url.split(":")[0], () => {
57 | console.log('Attempting to connect to the server...');
58 | });
59 |
60 | let buffer = '';
61 |
62 | this.socket.on('data', (data) => {
63 | buffer += data.toString();
64 | let boundary = buffer.indexOf('\n');
65 | while (boundary !== -1) {
66 | const message = buffer.substring(0, boundary).trim();
67 | buffer = buffer.substring(boundary + 1);
68 | boundary = buffer.indexOf('\n');
69 |
70 | this.handleResponse(message);
71 | }
72 | });
73 |
74 | this.socket.on('close', () => {
75 | console.log('Connection closed');
76 | this.isConnected = false;
77 | });
78 |
79 | this.socket.on('error', (error) => {
80 | console.error('Error:', error.message);
81 | this.isConnected = false;
82 | });
83 | }
84 |
85 | execute = (query) => {
86 | return new Promise((resolve, reject) => {
87 | if (!this.isConnected) {
88 | reject(new Error('Not connected to the server.'));
89 | return;
90 | }
91 | if (this.isWaitingForResponse) {
92 | reject(new Error('Waiting for the previous query to be processed.'));
93 | return;
94 | }
95 |
96 | const queryId = this.generateQueryId();
97 | this.responseHandlers[queryId] = { resolve, reject };
98 | this.socket.write(`${queryId}:${query}`);
99 | this.isWaitingForResponse = true;
100 | });
101 | }
102 |
103 | handleResponse(response) {
104 | if (response === 'Connected') {
105 | this.isConnected = true;
106 | console.log('Successfully connected to the server.');
107 | return;
108 | }
109 |
110 | // Handle query response
111 | const [queryId, result] = response.split('<|>', 2);
112 | console.log('Received from server:', queryId, result);
113 | if (this.responseHandlers[queryId]) {
114 | try {
115 | const parsedResult = JSON.parse(result);
116 | this.responseHandlers[queryId].resolve(parsedResult);
117 | } catch (error) {
118 | this.responseHandlers[queryId].reject(error);
119 | }
120 | delete this.responseHandlers[queryId];
121 | this.isWaitingForResponse = false;
122 | }
123 | }
124 |
125 | generateQueryId() {
126 | return Date.now().toString(36) + Math.random().toString(36).substring(2);
127 | }
128 | }
--------------------------------------------------------------------------------
/example/prisma/index.js:
--------------------------------------------------------------------------------
1 | import { PrismaClient } from '@prisma/client';
2 | import { PrismaStylusDBSQL } from 'stylusdb-sql-prisma-adapter';
3 | import net from 'net';
4 |
5 |
6 | const connectionString = `127.0.0.1:5432`
7 |
8 | class Client {
9 | socket = new net.Socket();
10 | isConnected = false;
11 | isWaitingForResponse = false;
12 | responseHandlers = {};
13 |
14 | constructor(url) {
15 | this.url = url;
16 | }
17 |
18 | connect() {
19 | return new Promise((resolve, reject) => {
20 | this.socket.connect(this.url.split(":")[1], this.url.split(":")[0], () => {
21 | console.log('Attempting to connect to the server...');
22 | });
23 |
24 | let buffer = '';
25 |
26 | this.socket.on('data', (data) => {
27 | buffer += data.toString();
28 | let boundary = buffer.indexOf('\n');
29 | while (boundary !== -1) {
30 | const message = buffer.substring(0, boundary).trim();
31 | buffer = buffer.substring(boundary + 1);
32 | boundary = buffer.indexOf('\n');
33 |
34 | this.handleResponse(message);
35 | }
36 | });
37 |
38 | this.socket.on('connect', () => {
39 | this.isConnected = true;
40 | resolve();
41 | });
42 |
43 | this.socket.on('error', (error) => {
44 | console.error('Error:', error.message);
45 | this.isConnected = false;
46 | reject(error);
47 | });
48 |
49 | this.socket.on('close', () => {
50 | console.log('Connection closed');
51 | this.isConnected = false;
52 | });
53 | });
54 | }
55 |
56 |
57 | disconnect() {
58 | this.socket.end();
59 | }
60 |
61 | setup() {
62 | this.socket.connect(this.url.split(":")[1], this.url.split(":")[0], () => {
63 | console.log('Attempting to connect to the server...');
64 | });
65 |
66 | let buffer = '';
67 |
68 | this.socket.on('data', (data) => {
69 | buffer += data.toString();
70 | let boundary = buffer.indexOf('\n');
71 | while (boundary !== -1) {
72 | const message = buffer.substring(0, boundary).trim();
73 | buffer = buffer.substring(boundary + 1);
74 | boundary = buffer.indexOf('\n');
75 |
76 | this.handleResponse(message);
77 | }
78 | });
79 |
80 | this.socket.on('close', () => {
81 | console.log('Connection closed');
82 | this.isConnected = false;
83 | });
84 |
85 | this.socket.on('error', (error) => {
86 | console.error('Error:', error.message);
87 | this.isConnected = false;
88 | });
89 | }
90 |
91 | execute = (query) => {
92 | return new Promise((resolve, reject) => {
93 | if (!this.isConnected) {
94 | reject(new Error('Not connected to the server.'));
95 | return;
96 | }
97 | if (this.isWaitingForResponse) {
98 | reject(new Error('Waiting for the previous query to be processed.'));
99 | return;
100 | }
101 |
102 | const queryId = this.generateQueryId();
103 | this.responseHandlers[queryId] = { resolve, reject };
104 | this.socket.write(`${queryId}:${query}`);
105 | this.isWaitingForResponse = true;
106 | });
107 | }
108 |
109 | handleResponse(response) {
110 | if (response === 'Connected') {
111 | this.isConnected = true;
112 | console.log('Successfully connected to the server.');
113 | return;
114 | }
115 |
116 | // Handle query response
117 | const [queryId, result] = response.split('<|>', 2);
118 | console.log('Received from server:', queryId, result);
119 | if (this.responseHandlers[queryId]) {
120 | try {
121 | const parsedResult = JSON.parse(result);
122 | this.responseHandlers[queryId].resolve(parsedResult);
123 | } catch (error) {
124 | this.responseHandlers[queryId].reject(error);
125 | }
126 | delete this.responseHandlers[queryId];
127 | this.isWaitingForResponse = false;
128 | }
129 | }
130 |
131 | generateQueryId() {
132 | return Date.now().toString(36) + Math.random().toString(36).substring(2);
133 | }
134 | }
135 |
136 | const client = new Client(connectionString)
137 | const adapter = new PrismaStylusDBSQL(client, {})
138 | const prisma = new PrismaClient({ adapter })
139 |
140 | async function main() {
141 | await client.connect();
142 | const rawQueryData = await prisma.$queryRaw`SELECT id from student`;
143 | console.log({ rawQueryData });
144 | const student = await prisma.student.create({
145 | data: {
146 | name: 'test',
147 | age: '28',
148 | },
149 | }).catch((e) => {
150 | console.log(e)
151 | });
152 | console.log(student);
153 |
154 | const students = await prisma.student.findMany();
155 | console.log(students);
156 | }
157 |
158 | main()
159 | .then(async () => {
160 | await prisma.$disconnect()
161 | })
162 | .catch(async (e) => {
163 | console.error(e)
164 | await prisma.$disconnect()
165 | process.exit(1)
166 | })
167 |
168 |
--------------------------------------------------------------------------------
/example/prisma/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "stylusdb-example",
3 | "version": "0.0.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "stylusdb-example",
9 | "version": "0.0.0",
10 | "license": "ISC",
11 | "dependencies": {
12 | "@prisma/client": "^5.8.0"
13 | },
14 | "devDependencies": {
15 | "@types/node": "^20.11.0",
16 | "prisma": "^5.8.0",
17 | "ts-node": "^10.9.2",
18 | "typescript": "^5.3.3"
19 | }
20 | },
21 | "node_modules/@cspotcode/source-map-support": {
22 | "version": "0.8.1",
23 | "resolved": "https://registry.npmjs.org/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz",
24 | "integrity": "sha512-IchNf6dN4tHoMFIn/7OE8LWZ19Y6q/67Bmf6vnGREv8RSbBVb9LPJxEcnwrcwX6ixSvaiGoomAUvu4YSxXrVgw==",
25 | "dev": true,
26 | "dependencies": {
27 | "@jridgewell/trace-mapping": "0.3.9"
28 | },
29 | "engines": {
30 | "node": ">=12"
31 | }
32 | },
33 | "node_modules/@jridgewell/resolve-uri": {
34 | "version": "3.1.1",
35 | "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.1.tgz",
36 | "integrity": "sha512-dSYZh7HhCDtCKm4QakX0xFpsRDqjjtZf/kjI/v3T3Nwt5r8/qz/M19F9ySyOqU94SXBmeG9ttTul+YnR4LOxFA==",
37 | "dev": true,
38 | "engines": {
39 | "node": ">=6.0.0"
40 | }
41 | },
42 | "node_modules/@jridgewell/sourcemap-codec": {
43 | "version": "1.4.15",
44 | "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz",
45 | "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==",
46 | "dev": true
47 | },
48 | "node_modules/@jridgewell/trace-mapping": {
49 | "version": "0.3.9",
50 | "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz",
51 | "integrity": "sha512-3Belt6tdc8bPgAtbcmdtNJlirVoTmEb5e2gC94PnkwEW9jI6CAHUeoG85tjWP5WquqfavoMtMwiG4P926ZKKuQ==",
52 | "dev": true,
53 | "dependencies": {
54 | "@jridgewell/resolve-uri": "^3.0.3",
55 | "@jridgewell/sourcemap-codec": "^1.4.10"
56 | }
57 | },
58 | "node_modules/@prisma/client": {
59 | "version": "5.8.0",
60 | "resolved": "https://registry.npmjs.org/@prisma/client/-/client-5.8.0.tgz",
61 | "integrity": "sha512-QxO6C4MaA/ysTIbC+EcAH1aX/YkpymhXtO6zPdk+FvA7+59tNibIYpd+7koPdViLg2iKES4ojsxWNUGNJaEcbA==",
62 | "hasInstallScript": true,
63 | "engines": {
64 | "node": ">=16.13"
65 | },
66 | "peerDependencies": {
67 | "prisma": "*"
68 | },
69 | "peerDependenciesMeta": {
70 | "prisma": {
71 | "optional": true
72 | }
73 | }
74 | },
75 | "node_modules/@prisma/debug": {
76 | "version": "5.8.0",
77 | "resolved": "https://registry.npmjs.org/@prisma/debug/-/debug-5.8.0.tgz",
78 | "integrity": "sha512-ZqPpkvbovu/kQJ1bvy57NO4dw97fpQGcbQSCtsqlwSE1UNKJP75R3BKxdznk8ZPMY+GJdMRetWNv4oAvSbWn8Q==",
79 | "devOptional": true
80 | },
81 | "node_modules/@prisma/engines": {
82 | "version": "5.8.0",
83 | "resolved": "https://registry.npmjs.org/@prisma/engines/-/engines-5.8.0.tgz",
84 | "integrity": "sha512-Qhqm9WWLujNEC13AuZlUO14SQ15tNLe5puaz+tOk7UqINqJ3PtqMmuSuzomiw2diGVqZ+HYiSQzlR3+pPucVHA==",
85 | "devOptional": true,
86 | "hasInstallScript": true,
87 | "dependencies": {
88 | "@prisma/debug": "5.8.0",
89 | "@prisma/engines-version": "5.8.0-37.0a83d8541752d7582de2ebc1ece46519ce72a848",
90 | "@prisma/fetch-engine": "5.8.0",
91 | "@prisma/get-platform": "5.8.0"
92 | }
93 | },
94 | "node_modules/@prisma/engines-version": {
95 | "version": "5.8.0-37.0a83d8541752d7582de2ebc1ece46519ce72a848",
96 | "resolved": "https://registry.npmjs.org/@prisma/engines-version/-/engines-version-5.8.0-37.0a83d8541752d7582de2ebc1ece46519ce72a848.tgz",
97 | "integrity": "sha512-cXcoVweYbnv8xRfkWq9oj8BECOdzHUazrSpYCa0ehp5TNz4l5Spa8jbq/VROCTzj3ZncH5D9Q2TmySYTOUeKlw==",
98 | "devOptional": true
99 | },
100 | "node_modules/@prisma/fetch-engine": {
101 | "version": "5.8.0",
102 | "resolved": "https://registry.npmjs.org/@prisma/fetch-engine/-/fetch-engine-5.8.0.tgz",
103 | "integrity": "sha512-1CAuE+JoYsPNggMEn6qk0zos06Uc9bYZBJ0VBPHD6R7REL05614koAbOCmn52IaYz3nobb7f25hqW6AY7rLkIw==",
104 | "devOptional": true,
105 | "dependencies": {
106 | "@prisma/debug": "5.8.0",
107 | "@prisma/engines-version": "5.8.0-37.0a83d8541752d7582de2ebc1ece46519ce72a848",
108 | "@prisma/get-platform": "5.8.0"
109 | }
110 | },
111 | "node_modules/@prisma/get-platform": {
112 | "version": "5.8.0",
113 | "resolved": "https://registry.npmjs.org/@prisma/get-platform/-/get-platform-5.8.0.tgz",
114 | "integrity": "sha512-Nk3rhTFZ1LYkFZJnpSvQcLPCaBWgJQfteHII6UEENOOkYlmP0k3FuswND54tzzEr4qs39wOdV9pbXKX9U2lv7A==",
115 | "devOptional": true,
116 | "dependencies": {
117 | "@prisma/debug": "5.8.0"
118 | }
119 | },
120 | "node_modules/@tsconfig/node10": {
121 | "version": "1.0.9",
122 | "resolved": "https://registry.npmjs.org/@tsconfig/node10/-/node10-1.0.9.tgz",
123 | "integrity": "sha512-jNsYVVxU8v5g43Erja32laIDHXeoNvFEpX33OK4d6hljo3jDhCBDhx5dhCCTMWUojscpAagGiRkBKxpdl9fxqA==",
124 | "dev": true
125 | },
126 | "node_modules/@tsconfig/node12": {
127 | "version": "1.0.11",
128 | "resolved": "https://registry.npmjs.org/@tsconfig/node12/-/node12-1.0.11.tgz",
129 | "integrity": "sha512-cqefuRsh12pWyGsIoBKJA9luFu3mRxCA+ORZvA4ktLSzIuCUtWVxGIuXigEwO5/ywWFMZ2QEGKWvkZG1zDMTag==",
130 | "dev": true
131 | },
132 | "node_modules/@tsconfig/node14": {
133 | "version": "1.0.3",
134 | "resolved": "https://registry.npmjs.org/@tsconfig/node14/-/node14-1.0.3.tgz",
135 | "integrity": "sha512-ysT8mhdixWK6Hw3i1V2AeRqZ5WfXg1G43mqoYlM2nc6388Fq5jcXyr5mRsqViLx/GJYdoL0bfXD8nmF+Zn/Iow==",
136 | "dev": true
137 | },
138 | "node_modules/@tsconfig/node16": {
139 | "version": "1.0.4",
140 | "resolved": "https://registry.npmjs.org/@tsconfig/node16/-/node16-1.0.4.tgz",
141 | "integrity": "sha512-vxhUy4J8lyeyinH7Azl1pdd43GJhZH/tP2weN8TntQblOY+A0XbT8DJk1/oCPuOOyg/Ja757rG0CgHcWC8OfMA==",
142 | "dev": true
143 | },
144 | "node_modules/@types/node": {
145 | "version": "20.11.0",
146 | "resolved": "https://registry.npmjs.org/@types/node/-/node-20.11.0.tgz",
147 | "integrity": "sha512-o9bjXmDNcF7GbM4CNQpmi+TutCgap/K3w1JyKgxAjqx41zp9qlIAVFi0IhCNsJcXolEqLWhbFbEeL0PvYm4pcQ==",
148 | "dev": true,
149 | "dependencies": {
150 | "undici-types": "~5.26.4"
151 | }
152 | },
153 | "node_modules/acorn": {
154 | "version": "8.11.3",
155 | "resolved": "https://registry.npmjs.org/acorn/-/acorn-8.11.3.tgz",
156 | "integrity": "sha512-Y9rRfJG5jcKOE0CLisYbojUjIrIEE7AGMzA/Sm4BslANhbS+cDMpgBdcPT91oJ7OuJ9hYJBx59RjbhxVnrF8Xg==",
157 | "dev": true,
158 | "bin": {
159 | "acorn": "bin/acorn"
160 | },
161 | "engines": {
162 | "node": ">=0.4.0"
163 | }
164 | },
165 | "node_modules/acorn-walk": {
166 | "version": "8.3.2",
167 | "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-8.3.2.tgz",
168 | "integrity": "sha512-cjkyv4OtNCIeqhHrfS81QWXoCBPExR/J62oyEqepVw8WaQeSqpW2uhuLPh1m9eWhDuOo/jUXVTlifvesOWp/4A==",
169 | "dev": true,
170 | "engines": {
171 | "node": ">=0.4.0"
172 | }
173 | },
174 | "node_modules/arg": {
175 | "version": "4.1.3",
176 | "resolved": "https://registry.npmjs.org/arg/-/arg-4.1.3.tgz",
177 | "integrity": "sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==",
178 | "dev": true
179 | },
180 | "node_modules/create-require": {
181 | "version": "1.1.1",
182 | "resolved": "https://registry.npmjs.org/create-require/-/create-require-1.1.1.tgz",
183 | "integrity": "sha512-dcKFX3jn0MpIaXjisoRvexIJVEKzaq7z2rZKxf+MSr9TkdmHmsU4m2lcLojrj/FHl8mk5VxMmYA+ftRkP/3oKQ==",
184 | "dev": true
185 | },
186 | "node_modules/diff": {
187 | "version": "4.0.2",
188 | "resolved": "https://registry.npmjs.org/diff/-/diff-4.0.2.tgz",
189 | "integrity": "sha512-58lmxKSA4BNyLz+HHMUzlOEpg09FV+ev6ZMe3vJihgdxzgcwZ8VoEEPmALCZG9LmqfVoNMMKpttIYTVG6uDY7A==",
190 | "dev": true,
191 | "engines": {
192 | "node": ">=0.3.1"
193 | }
194 | },
195 | "node_modules/make-error": {
196 | "version": "1.3.6",
197 | "resolved": "https://registry.npmjs.org/make-error/-/make-error-1.3.6.tgz",
198 | "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
199 | "dev": true
200 | },
201 | "node_modules/prisma": {
202 | "version": "5.8.0",
203 | "resolved": "https://registry.npmjs.org/prisma/-/prisma-5.8.0.tgz",
204 | "integrity": "sha512-hDKoEqPt2qEUTH5yGO3l27CBnPtwvte0CGMKrpCr9+/A919JghfqJ3qgCGgMbOwdkXUOzdho0RH9tyUF3UhpMw==",
205 | "devOptional": true,
206 | "hasInstallScript": true,
207 | "dependencies": {
208 | "@prisma/engines": "5.8.0"
209 | },
210 | "bin": {
211 | "prisma": "build/index.js"
212 | },
213 | "engines": {
214 | "node": ">=16.13"
215 | }
216 | },
217 | "node_modules/ts-node": {
218 | "version": "10.9.2",
219 | "resolved": "https://registry.npmjs.org/ts-node/-/ts-node-10.9.2.tgz",
220 | "integrity": "sha512-f0FFpIdcHgn8zcPSbf1dRevwt047YMnaiJM3u2w2RewrB+fob/zePZcrOyQoLMMO7aBIddLcQIEK5dYjkLnGrQ==",
221 | "dev": true,
222 | "dependencies": {
223 | "@cspotcode/source-map-support": "^0.8.0",
224 | "@tsconfig/node10": "^1.0.7",
225 | "@tsconfig/node12": "^1.0.7",
226 | "@tsconfig/node14": "^1.0.0",
227 | "@tsconfig/node16": "^1.0.2",
228 | "acorn": "^8.4.1",
229 | "acorn-walk": "^8.1.1",
230 | "arg": "^4.1.0",
231 | "create-require": "^1.1.0",
232 | "diff": "^4.0.1",
233 | "make-error": "^1.1.1",
234 | "v8-compile-cache-lib": "^3.0.1",
235 | "yn": "3.1.1"
236 | },
237 | "bin": {
238 | "ts-node": "dist/bin.js",
239 | "ts-node-cwd": "dist/bin-cwd.js",
240 | "ts-node-esm": "dist/bin-esm.js",
241 | "ts-node-script": "dist/bin-script.js",
242 | "ts-node-transpile-only": "dist/bin-transpile.js",
243 | "ts-script": "dist/bin-script-deprecated.js"
244 | },
245 | "peerDependencies": {
246 | "@swc/core": ">=1.2.50",
247 | "@swc/wasm": ">=1.2.50",
248 | "@types/node": "*",
249 | "typescript": ">=2.7"
250 | },
251 | "peerDependenciesMeta": {
252 | "@swc/core": {
253 | "optional": true
254 | },
255 | "@swc/wasm": {
256 | "optional": true
257 | }
258 | }
259 | },
260 | "node_modules/typescript": {
261 | "version": "5.3.3",
262 | "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.3.3.tgz",
263 | "integrity": "sha512-pXWcraxM0uxAS+tN0AG/BF2TyqmHO014Z070UsJ+pFvYuRSq8KH8DmWpnbXe0pEPDHXZV3FcAbJkijJ5oNEnWw==",
264 | "dev": true,
265 | "bin": {
266 | "tsc": "bin/tsc",
267 | "tsserver": "bin/tsserver"
268 | },
269 | "engines": {
270 | "node": ">=14.17"
271 | }
272 | },
273 | "node_modules/undici-types": {
274 | "version": "5.26.5",
275 | "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz",
276 | "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==",
277 | "dev": true
278 | },
279 | "node_modules/v8-compile-cache-lib": {
280 | "version": "3.0.1",
281 | "resolved": "https://registry.npmjs.org/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz",
282 | "integrity": "sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==",
283 | "dev": true
284 | },
285 | "node_modules/yn": {
286 | "version": "3.1.1",
287 | "resolved": "https://registry.npmjs.org/yn/-/yn-3.1.1.tgz",
288 | "integrity": "sha512-Ux4ygGWsu2c7isFWe8Yu1YluJmqVhxqK2cLXNQA5AcC3QfbGNpM7fu0Y8b/z16pXLnFxZYvWhd3fhBY9DLmC6Q==",
289 | "dev": true,
290 | "engines": {
291 | "node": ">=6"
292 | }
293 | }
294 | }
295 | }
296 |
--------------------------------------------------------------------------------
/example/prisma/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "stylusdb-example",
3 | "version": "0.0.0",
4 | "description": "A Node.js example of using StylusDB with all bells and whistles.",
5 | "main": "index.js",
6 | "type": "module",
7 | "scripts": {
8 | "test": "echo \"Error: no test specified\" && exit 1",
9 | "prisma": "node index.js"
10 | },
11 | "author": "Chakshu Gautam",
12 | "license": "ISC",
13 | "devDependencies": {
14 | "@types/node": "^20.11.0",
15 | "prisma": "^5.8.0",
16 | "ts-node": "^10.9.2",
17 | "typescript": "^5.3.3"
18 | },
19 | "dependencies": {
20 | "@prisma/client": "^5.8.0",
21 | "stylusdb-sql-prisma-adapter": "^0.0.1"
22 | }
23 | }
--------------------------------------------------------------------------------
/example/prisma/schema.prisma:
--------------------------------------------------------------------------------
1 | generator client {
2 | provider = "prisma-client-js"
3 | previewFeatures = ["driverAdapters"]
4 | }
5 |
6 | datasource db {
7 | provider = "postgresql"
8 | url = "postgresql://127.0.0.1:5432/database?sslaccept=strict" // placeholder; the driver adapter manages the actual connection
9 | }
10 |
11 | model Student {
12 | id Int @id @default(autoincrement())
13 | name String
14 | age String
15 | enrollment Enrollment[]
16 | }
17 |
18 | model Enrollment {
19 | id Int @id @default(autoincrement())
20 | studentId Int
21 | course String
22 | student Student @relation(fields: [studentId], references: [id])
23 | }
24 |
--------------------------------------------------------------------------------
/example/prisma/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | /* Visit https://aka.ms/tsconfig to read more about this file */
4 |
5 | /* Projects */
6 | // "incremental": true, /* Save .tsbuildinfo files to allow for incremental compilation of projects. */
7 | // "composite": true, /* Enable constraints that allow a TypeScript project to be used with project references. */
8 | // "tsBuildInfoFile": "./.tsbuildinfo", /* Specify the path to .tsbuildinfo incremental compilation file. */
9 | // "disableSourceOfProjectReferenceRedirect": true, /* Disable preferring source files instead of declaration files when referencing composite projects. */
10 | // "disableSolutionSearching": true, /* Opt a project out of multi-project reference checking when editing. */
11 | // "disableReferencedProjectLoad": true, /* Reduce the number of projects loaded automatically by TypeScript. */
12 |
13 | /* Language and Environment */
14 | "target": "es2016", /* Set the JavaScript language version for emitted JavaScript and include compatible library declarations. */
15 | // "lib": [], /* Specify a set of bundled library declaration files that describe the target runtime environment. */
16 | // "jsx": "preserve", /* Specify what JSX code is generated. */
17 | // "experimentalDecorators": true, /* Enable experimental support for legacy experimental decorators. */
18 | // "emitDecoratorMetadata": true, /* Emit design-type metadata for decorated declarations in source files. */
19 | // "jsxFactory": "", /* Specify the JSX factory function used when targeting React JSX emit, e.g. 'React.createElement' or 'h'. */
20 | // "jsxFragmentFactory": "", /* Specify the JSX Fragment reference used for fragments when targeting React JSX emit e.g. 'React.Fragment' or 'Fragment'. */
21 | // "jsxImportSource": "", /* Specify module specifier used to import the JSX factory functions when using 'jsx: react-jsx*'. */
22 | // "reactNamespace": "", /* Specify the object invoked for 'createElement'. This only applies when targeting 'react' JSX emit. */
23 | // "noLib": true, /* Disable including any library files, including the default lib.d.ts. */
24 | // "useDefineForClassFields": true, /* Emit ECMAScript-standard-compliant class fields. */
25 | // "moduleDetection": "auto", /* Control what method is used to detect module-format JS files. */
26 |
27 | /* Modules */
28 | "module": "commonjs", /* Specify what module code is generated. */
29 | // "rootDir": "./", /* Specify the root folder within your source files. */
30 | // "moduleResolution": "node10", /* Specify how TypeScript looks up a file from a given module specifier. */
31 | // "baseUrl": "./", /* Specify the base directory to resolve non-relative module names. */
32 | // "paths": {}, /* Specify a set of entries that re-map imports to additional lookup locations. */
33 | // "rootDirs": [], /* Allow multiple folders to be treated as one when resolving modules. */
34 | // "typeRoots": [], /* Specify multiple folders that act like './node_modules/@types'. */
35 | // "types": [], /* Specify type package names to be included without being referenced in a source file. */
36 | // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */
37 | // "moduleSuffixes": [], /* List of file name suffixes to search when resolving a module. */
38 | // "allowImportingTsExtensions": true, /* Allow imports to include TypeScript file extensions. Requires '--moduleResolution bundler' and either '--noEmit' or '--emitDeclarationOnly' to be set. */
39 | // "resolvePackageJsonExports": true, /* Use the package.json 'exports' field when resolving package imports. */
40 | // "resolvePackageJsonImports": true, /* Use the package.json 'imports' field when resolving imports. */
41 | // "customConditions": [], /* Conditions to set in addition to the resolver-specific defaults when resolving imports. */
42 | // "resolveJsonModule": true, /* Enable importing .json files. */
43 | // "allowArbitraryExtensions": true, /* Enable importing files with any extension, provided a declaration file is present. */
44 | // "noResolve": true, /* Disallow 'import's, 'require's or ''s from expanding the number of files TypeScript should add to a project. */
45 |
46 | /* JavaScript Support */
47 | // "allowJs": true, /* Allow JavaScript files to be a part of your program. Use the 'checkJS' option to get errors from these files. */
48 | // "checkJs": true, /* Enable error reporting in type-checked JavaScript files. */
49 | // "maxNodeModuleJsDepth": 1, /* Specify the maximum folder depth used for checking JavaScript files from 'node_modules'. Only applicable with 'allowJs'. */
50 |
51 | /* Emit */
52 | // "declaration": true, /* Generate .d.ts files from TypeScript and JavaScript files in your project. */
53 | // "declarationMap": true, /* Create sourcemaps for d.ts files. */
54 | // "emitDeclarationOnly": true, /* Only output d.ts files and not JavaScript files. */
55 | // "sourceMap": true, /* Create source map files for emitted JavaScript files. */
56 | // "inlineSourceMap": true, /* Include sourcemap files inside the emitted JavaScript. */
57 | // "outFile": "./", /* Specify a file that bundles all outputs into one JavaScript file. If 'declaration' is true, also designates a file that bundles all .d.ts output. */
58 | // "outDir": "./", /* Specify an output folder for all emitted files. */
59 | // "removeComments": true, /* Disable emitting comments. */
60 | // "noEmit": true, /* Disable emitting files from a compilation. */
61 | // "importHelpers": true, /* Allow importing helper functions from tslib once per project, instead of including them per-file. */
62 | // "importsNotUsedAsValues": "remove", /* Specify emit/checking behavior for imports that are only used for types. */
63 | // "downlevelIteration": true, /* Emit more compliant, but verbose and less performant JavaScript for iteration. */
64 | // "sourceRoot": "", /* Specify the root path for debuggers to find the reference source code. */
65 | // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
66 | // "inlineSources": true, /* Include source code in the sourcemaps inside the emitted JavaScript. */
67 | // "emitBOM": true, /* Emit a UTF-8 Byte Order Mark (BOM) in the beginning of output files. */
68 | // "newLine": "crlf", /* Set the newline character for emitting files. */
69 | // "stripInternal": true, /* Disable emitting declarations that have '@internal' in their JSDoc comments. */
70 | // "noEmitHelpers": true, /* Disable generating custom helper functions like '__extends' in compiled output. */
71 | // "noEmitOnError": true, /* Disable emitting files if any type checking errors are reported. */
72 | // "preserveConstEnums": true, /* Disable erasing 'const enum' declarations in generated code. */
73 | // "declarationDir": "./", /* Specify the output directory for generated declaration files. */
74 | // "preserveValueImports": true, /* Preserve unused imported values in the JavaScript output that would otherwise be removed. */
75 |
76 | /* Interop Constraints */
77 | // "isolatedModules": true, /* Ensure that each file can be safely transpiled without relying on other imports. */
78 | // "verbatimModuleSyntax": true, /* Do not transform or elide any imports or exports not marked as type-only, ensuring they are written in the output file's format based on the 'module' setting. */
79 | // "allowSyntheticDefaultImports": true, /* Allow 'import x from y' when a module doesn't have a default export. */
80 | "esModuleInterop": true, /* Emit additional JavaScript to ease support for importing CommonJS modules. This enables 'allowSyntheticDefaultImports' for type compatibility. */
81 | // "preserveSymlinks": true, /* Disable resolving symlinks to their realpath. This correlates to the same flag in node. */
82 | "forceConsistentCasingInFileNames": true, /* Ensure that casing is correct in imports. */
83 |
84 | /* Type Checking */
85 | "strict": true, /* Enable all strict type-checking options. */
86 | // "noImplicitAny": true, /* Enable error reporting for expressions and declarations with an implied 'any' type. */
87 | // "strictNullChecks": true, /* When type checking, take into account 'null' and 'undefined'. */
88 | // "strictFunctionTypes": true, /* When assigning functions, check to ensure parameters and the return values are subtype-compatible. */
89 | // "strictBindCallApply": true, /* Check that the arguments for 'bind', 'call', and 'apply' methods match the original function. */
90 | // "strictPropertyInitialization": true, /* Check for class properties that are declared but not set in the constructor. */
91 | // "noImplicitThis": true, /* Enable error reporting when 'this' is given the type 'any'. */
92 | // "useUnknownInCatchVariables": true, /* Default catch clause variables as 'unknown' instead of 'any'. */
93 | // "alwaysStrict": true, /* Ensure 'use strict' is always emitted. */
94 | // "noUnusedLocals": true, /* Enable error reporting when local variables aren't read. */
95 | // "noUnusedParameters": true, /* Raise an error when a function parameter isn't read. */
96 | // "exactOptionalPropertyTypes": true, /* Interpret optional property types as written, rather than adding 'undefined'. */
97 | // "noImplicitReturns": true, /* Enable error reporting for codepaths that do not explicitly return in a function. */
98 | // "noFallthroughCasesInSwitch": true, /* Enable error reporting for fallthrough cases in switch statements. */
99 | // "noUncheckedIndexedAccess": true, /* Add 'undefined' to a type when accessed using an index. */
100 | // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an override modifier. */
101 | // "noPropertyAccessFromIndexSignature": true, /* Enforces using indexed accessors for keys declared using an indexed type. */
102 | // "allowUnusedLabels": true, /* Disable error reporting for unused labels. */
103 | // "allowUnreachableCode": true, /* Disable error reporting for unreachable code. */
104 |
105 | /* Completeness */
106 | // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */
107 | "skipLibCheck": true /* Skip type checking all .d.ts files. */
108 | }
109 | }
110 |
--------------------------------------------------------------------------------
/example/socket/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 | .pnpm-debug.log*
9 |
10 | # Diagnostic reports (https://nodejs.org/api/report.html)
11 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
12 |
13 | # Runtime data
14 | pids
15 | *.pid
16 | *.seed
17 | *.pid.lock
18 |
19 | # Directory for instrumented libs generated by jscoverage/JSCover
20 | lib-cov
21 |
22 | # Coverage directory used by tools like istanbul
23 | coverage
24 | *.lcov
25 |
26 | # nyc test coverage
27 | .nyc_output
28 |
29 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
30 | .grunt
31 |
32 | # Bower dependency directory (https://bower.io/)
33 | bower_components
34 |
35 | # node-waf configuration
36 | .lock-wscript
37 |
38 | # Compiled binary addons (https://nodejs.org/api/addons.html)
39 | build/Release
40 |
41 | # Dependency directories
42 | node_modules/
43 | jspm_packages/
44 |
45 | # Snowpack dependency directory (https://snowpack.dev/)
46 | web_modules/
47 |
48 | # TypeScript cache
49 | *.tsbuildinfo
50 |
51 | # Optional npm cache directory
52 | .npm
53 |
54 | # Optional eslint cache
55 | .eslintcache
56 |
57 | # Optional stylelint cache
58 | .stylelintcache
59 |
60 | # Microbundle cache
61 | .rpt2_cache/
62 | .rts2_cache_cjs/
63 | .rts2_cache_es/
64 | .rts2_cache_umd/
65 |
66 | # Optional REPL history
67 | .node_repl_history
68 |
69 | # Output of 'npm pack'
70 | *.tgz
71 |
72 | # Yarn Integrity file
73 | .yarn-integrity
74 |
75 | # dotenv environment variable files
76 | .env
77 | .env.development.local
78 | .env.test.local
79 | .env.production.local
80 | .env.local
81 |
82 | # parcel-bundler cache (https://parceljs.org/)
83 | .cache
84 | .parcel-cache
85 |
86 | # Next.js build output
87 | .next
88 | out
89 |
90 | # Nuxt.js build / generate output
91 | .nuxt
92 | dist
93 |
94 | # Gatsby files
95 | .cache/
96 | # Comment in the public line in if your project uses Gatsby and not Next.js
97 | # https://nextjs.org/blog/next-9-1#public-directory-support
98 | # public
99 |
100 | # vuepress build output
101 | .vuepress/dist
102 |
103 | # vuepress v2.x temp and cache directory
104 | .temp
105 | .cache
106 |
107 | # Docusaurus cache and generated files
108 | .docusaurus
109 |
110 | # Serverless directories
111 | .serverless/
112 |
113 | # FuseBox cache
114 | .fusebox/
115 |
116 | # DynamoDB Local files
117 | .dynamodb/
118 |
119 | # TernJS port file
120 | .tern-port
121 |
122 | # Stores VSCode versions used for testing VSCode extensions
123 | .vscode-test
124 |
125 | # yarn v2
126 | .yarn/cache
127 | .yarn/unplugged
128 | .yarn/build-state.yml
129 | .yarn/install-state.gz
130 | .pnp.*
131 |
132 |
133 | **/.DS_Store
--------------------------------------------------------------------------------
/example/socket/client.js:
--------------------------------------------------------------------------------
1 | const net = require('net');
2 |
3 | class Client {
4 | socket = new net.Socket();
5 | isConnected = false;
6 | isWaitingForResponse = false;
7 | responseHandlers = {};
8 |
9 | constructor(url) {
10 | this.url = url;
11 | }
12 |
13 | connect() {
14 | this.setup();
15 | }
16 |
17 | disconnect() {
18 | this.socket.end();
19 | }
20 |
21 | setup() {
22 | this.socket.connect(this.url.split(":")[1], this.url.split(":")[0], () => {
23 | console.log('TCP connection established; waiting for the server handshake...');
24 | });
25 |
26 | let buffer = '';
27 |
28 | this.socket.on('data', (data) => {
29 | buffer += data.toString();
30 | let boundary = buffer.indexOf('\n');
31 | while (boundary !== -1) {
32 | const message = buffer.substring(0, boundary).trim();
33 | buffer = buffer.substring(boundary + 1);
34 | boundary = buffer.indexOf('\n');
35 |
36 | this.handleResponse(message);
37 | }
38 | });
39 |
40 | this.socket.on('close', () => {
41 | console.log('Connection closed');
42 | this.isConnected = false;
43 | });
44 |
45 | this.socket.on('error', (error) => {
46 | console.error('Error:', error.message);
47 | this.isConnected = false;
48 | });
49 | }
50 |
51 | execute = (query) => {
52 | return new Promise((resolve, reject) => {
53 | if (!this.isConnected) {
54 | reject(new Error('Not connected to the server.'));
55 | return;
56 | }
57 | if (this.isWaitingForResponse) {
58 | reject(new Error('Waiting for the previous query to be processed.'));
59 | return;
60 | }
61 |
62 | const queryId = this.generateQueryId();
63 | this.responseHandlers[queryId] = { resolve, reject };
64 | this.socket.write(`${queryId}:${query}`);
65 | this.isWaitingForResponse = true;
66 | });
67 | }
68 |
69 | handleResponse(response) {
70 | if (response === 'Connected') {
71 | this.isConnected = true;
72 | console.log('Successfully connected to the server.');
73 | return;
74 | }
75 |
76 | // Handle a query response framed as "<queryId><|><JSON result>"
77 | const [queryId, result] = response.split('<|>', 2);
78 | console.log('Received from server:', queryId, result);
79 | if (this.responseHandlers[queryId]) {
80 | try {
81 | const parsedResult = JSON.parse(result);
82 | this.responseHandlers[queryId].resolve(parsedResult);
83 | } catch (error) {
84 | this.responseHandlers[queryId].reject(error);
85 | }
86 | delete this.responseHandlers[queryId];
87 | this.isWaitingForResponse = false;
88 | }
89 | }
90 |
91 | generateQueryId() {
92 | return Date.now().toString(36) + Math.random().toString(36).substring(2);
93 | }
94 | }
95 |
96 | module.exports = { Client };
--------------------------------------------------------------------------------
/example/socket/index.js:
--------------------------------------------------------------------------------
1 | // Import the Client class
2 | const { Client } = require('./client');
3 |
4 | const client = new Client('127.0.0.1:5432');
5 | client.connect();
6 |
7 | // Give the client a moment to complete the server handshake before querying (simplest approach for a demo).
8 | setTimeout(() => {
9 | client.execute('SELECT id FROM student')
10 | .then(response => {
11 | console.log('Response:', response);
12 | })
13 | .catch(error => {
14 | console.error('Error:', error.message);
15 | })
16 | .finally(() => {
17 | client.disconnect();
18 | });
19 | }, 3000);
--------------------------------------------------------------------------------
/example/socket/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "socket-example",
3 | "version": "1.0.0",
4 | "description": "Socket Connection Example for StylusDB",
5 | "main": "server.js",
6 | "scripts": {
7 | "test": "node index.js"
8 | },
9 | "author": "Chakshu Gautam",
10 | "license": "ISC"
11 | }
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "stylusdb-sql",
3 | "version": "0.1.6",
4 | "description": "A minimal SQL based DB based on CSV files. For educational purposes only.",
5 | "main": "./src/index.js",
6 | "directories": {
7 | "doc": "docs"
8 | },
9 | "scripts": {
10 | "generate": "node ./util/generateLargeFile.js",
11 | "test": "jest",
12 | "test-verbose": "jest --verbose",
13 | "server": "node ./src/server.js"
14 | },
15 | "bin": {
16 | "stylusdb-cli": "node ./src/cli.js"
17 | },
18 | "keywords": [],
19 | "author": "Chakshu Gautam",
20 | "license": "ISC",
21 | "devDependencies": {
22 | "@faker-js/faker": "^8.4.1",
23 | "jest": "^29.7.0"
24 | },
25 | "dependencies": {
26 | "csv-parser": "^3.0.0",
27 | "hll": "^2.0.0",
28 | "json2csv": "^6.0.0-alpha.2",
29 | "xterm": "^5.3.0"
30 | }
31 | }
--------------------------------------------------------------------------------
/prisma/prisma-adapter/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 |
3 | yarn-error.log
4 | dist
5 | build
6 | tmp
7 | pnpm-debug.log
8 | sandbox
9 | .DS_Store
10 | .idea
11 |
12 | query-engine*
13 | migration-engine*
14 | schema-engine*
15 | libquery_engine*
16 | libquery-engine*
17 | query_engine-windows.dll.node
18 |
19 | *tmp.db
20 | dist/
21 | declaration/
22 |
23 | *.tsbuildinfo
24 | .prisma
25 | .pnpm-store
26 |
27 | .vscode
28 | !.vscode/launch.json.default
29 | coverage
30 |
31 | .eslintcache
32 |
33 | .pnpm-debug.log
34 |
35 | .envrc
36 |
37 | esm
38 | reproductions/*
39 | !reproductions/basic-sqlite
40 | !reproductions/tracing
41 | !reproductions/pnpm-workspace.yaml
42 |
43 | dev.db
44 | junit.xml
45 | /output.txt
--------------------------------------------------------------------------------
/prisma/prisma-adapter/README.md:
--------------------------------------------------------------------------------
1 | # stylusdb-sql-prisma-adapter
2 |
3 | Prisma driver adapter for [StylusDB-SQL](https://github.com/ChakshuGautam/stylusdb-sql).
4 |
5 | ## Before you start
6 |
7 | Before you start, make sure you have:
8 |
9 | - Node >= 18
10 | - [Prisma CLI](https://www.prisma.io/docs/concepts/components/prisma-cli) installed
11 |
12 | ## Install
13 |
14 | You will need to install the `stylusdb-sql-prisma-adapter` driver adapter, along with the StylusDB client, which is currently unpackaged and can be found [here](./../../example/prisma/client.js).
15 |
16 | ```
17 | npm install stylusdb-sql-prisma-adapter
18 | ```
19 |
20 | ## DATABASE URL
21 |
22 | Set the `DATABASE_URL` environment variable in a local `.env` file. It should point to the host and port of your running StylusDB-SQL server.
23 |
24 | ```env
25 | # .env
26 | DATABASE_URL="localhost:5432"
27 | ```
28 |
29 | > NOTE
30 | >
31 | > The adapter only supports Prisma Client. How to make it work with the Prisma Migrate CLI is still under investigation.
32 |
33 | ## Define Prisma schema
34 |
35 | First, create a Prisma schema file called `schema.prisma` and define your models. Here we use `Student` and `Enrollment` as an example.
36 |
37 | ```prisma
38 | // schema.prisma
39 | generator client {
40 | provider = "prisma-client-js"
41 | previewFeatures = ["driverAdapters"]
42 | }
43 |
44 | datasource db {
45 | provider = "postgresql"
46 | url = "postgresql://127.0.0.1:5432/database?sslaccept=strict" // placeholder; the driver adapter manages the actual connection
47 | }
48 |
49 | model Student {
50 | id Int @id @default(autoincrement())
51 | name String
52 | age String
53 | enrollment Enrollment[]
54 | }
55 |
56 | model Enrollment {
57 | id Int @id @default(autoincrement())
58 | studentId Int
59 | course String
60 | student Student @relation(fields: [studentId], references: [id])
61 | }
62 | ```
63 |
64 | ## Query
65 |
66 | Here is an example query:
67 |
68 | ```js
69 | // query.js
70 | import { PrismaClient } from '@prisma/client';
71 | import { PrismaStylusDBSQL } from 'stylusdb-sql-prisma-adapter';
72 | import { Client } from './client.js'; // the unpackaged client linked above; place query.js next to it
73 | import dotenv from 'dotenv';
74 |
75 | // setup
76 | dotenv.config();
77 | const connectionString = `${process.env.DATABASE_URL}`;
78 |
79 | const client = new Client(connectionString)
80 | const adapter = new PrismaStylusDBSQL(client, {})
81 | const prisma = new PrismaClient({ adapter })
82 |
83 | async function main() {
84 | await client.connect();
85 | const rawQueryData = await prisma.$queryRaw`SELECT id from student`;
86 | console.log({ rawQueryData });
87 | const student = await prisma.student.create({
88 | data: {
89 | name: 'test',
90 | age: '28',
91 | },
92 | }).catch((e) => {
93 | console.log(e)
94 | });
95 | console.log(student);
96 |
97 | const students = await prisma.student.findMany();
98 | console.log(students);
99 | }
100 |
101 | main()
102 | .then(async () => {
103 | await prisma.$disconnect()
104 | })
105 | .catch(async (e) => {
106 | console.error(e)
107 | await prisma.$disconnect()
108 | process.exit(1)
109 | })
110 |
111 | ```
112 |
113 | ## Transaction
114 | > Coming Soon
115 |
116 | ## Limitations
117 |
118 | - Heavily under development.
119 |
--------------------------------------------------------------------------------
/prisma/prisma-adapter/dev_guide.md:
--------------------------------------------------------------------------------
1 | # Test
2 |
3 | For now, we test the adapter against this [pull request](https://github.com/prisma/prisma-engines/pull/4342) as a temporary solution.
4 |
5 | # Release
6 |
7 | Just publish a new GitHub release, then the GitHub Action will publish the package to npm.
8 |
9 |
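10 | For example, with the GitHub CLI (`gh`); the tag name and notes below are illustrative:
11 |
12 | ```
13 | gh release create v0.0.2 --title "v0.0.2" --notes "describe the changes"
14 | ```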
--------------------------------------------------------------------------------
/prisma/prisma-adapter/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "stylusdb-sql-prisma-adapter",
3 | "version": "0.0.1",
4 | "description": "Prisma's driver adapter for StylusDB SQL",
5 | "main": "dist/index.js",
6 | "module": "dist/index.mjs",
7 | "types": "dist/index.d.ts",
8 | "scripts": {
9 | "build": "tsup ./src/index.ts --format cjs,esm --dts",
10 | "lint": "tsc -p ./tsconfig.build.json"
11 | },
12 | "files": [
13 | "dist",
14 | "README.md"
15 | ],
16 | "keywords": [
17 | "StylusDB",
18 | "SQL",
19 | "Prisma"
20 | ],
21 | "author": "Chakshu Gautam",
22 | "license": "Apache-2.0",
23 | "sideEffects": false,
24 | "dependencies": {
25 | "@prisma/driver-adapter-utils": "5.8.0-dev.15"
26 | },
27 | "devDependencies": {
28 | "@tidbcloud/serverless": "^0.0.9",
29 | "@types/node": "^20.5.1",
30 | "tsup": "^7.2.0",
31 | "tsx": "^3.12.7",
32 | "typescript": "^5.1.6"
33 | },
34 | "peerDependencies": {
35 | "@tidbcloud/serverless": "^0.0.9"
36 | }
37 | }
--------------------------------------------------------------------------------
/prisma/prisma-adapter/src/index.ts:
--------------------------------------------------------------------------------
1 | import { ConnectionInfo, Debug, ok, Queryable, Query, Result, ResultSet, Transaction, TransactionOptions, ColumnType, ColumnTypeEnum, DriverAdapter } from '@prisma/driver-adapter-utils'
2 | import { adaptInsertQuery, convertToPrismaResultSetINSERTQuery, convertToPrismaResultSetSELECTQuery, evaluateQuery } from './queryUtils'
3 | const debug = Debug('prisma:driver-adapter:stylusdb')
4 |
5 | const defaultDatabase = 'test'
6 |
7 | class RollbackError extends Error {
8 | constructor() {
9 | super('ROLLBACK')
10 | this.name = 'RollbackError'
11 |
12 | if (Error.captureStackTrace) {
13 | Error.captureStackTrace(this, RollbackError)
14 | }
15 | }
16 | }
17 |
18 | class StylusDBSQLQueryable implements Queryable {
19 | readonly provider = 'postgres'
20 | constructor(protected client: any) {}
21 |
22 | /**
23 | * Execute a query given as SQL, interpolating the given parameters.
24 | */
25 | async queryRaw(query: Query): Promise<Result<ResultSet>> {
26 | debug('PrismaStylusDBSQL: queryRaw Called');
27 | const tag = '[js::query_raw]'
28 |
29 | const queries = adaptInsertQuery(query)
30 | let result;
31 |
32 | debug('PrismaStylusDBSQL: queryRaw queries: %O', queries);
33 |
34 | for (const query of queries) {
35 | debug(`${tag} %O`, query)
36 | const fixedQuery = adaptInsertQuery({sql: query, args: []})[0] // the array has a single element here, so [0] is safe
37 | result = await this.performIO({sql: fixedQuery, args: []})
38 | }
39 |
40 | debug('PrismaStylusDBSQL: queryRaw result: %O', result);
41 |
42 | let resultSet: ResultSet;
43 | if (query.sql.includes('INSERT INTO')) {
44 | resultSet = convertToPrismaResultSetINSERTQuery(result);
45 | } else {
46 | resultSet = convertToPrismaResultSetSELECTQuery(result);
47 | }
48 |
49 | return ok(resultSet)
50 | }
51 |
52 | /**
53 | * Execute a query given as SQL, interpolating the given parameters and
54 | * returning the number of affected rows.
55 | * Note: Queryable expects a u64, but napi.rs only supports u32.
56 | */
57 | async executeRaw(query: Query): Promise<Result<number>> {
58 | const tag = '[js::execute_raw]'
59 | debug(`${tag} %O`, query)
60 |
61 | const result = await this.performIO(query)
62 | const rowsAffected = result.rowsAffected as number
63 | return ok(rowsAffected)
64 | }
65 |
66 | /**
67 | * Run a query against the database, returning the result set.
68 | * Should the query fail due to a connection error, the connection is
69 | * marked as unhealthy.
70 | */
71 | private async performIO(query: Query) {
72 | const { sql, args: values } = query
73 | try {
74 | const result = await this.client.execute(sql)
75 | return result;
76 | } catch (e) {
77 | const error = e as Error
78 | debug('Error in performIO: %O', error)
79 | throw error
80 | }
81 | }
82 | }
83 |
84 | class StylusDBSQLTransaction extends StylusDBSQLQueryable implements Transaction {
85 | finished = false
86 |
87 | constructor(
88 | tx: any,
89 | readonly options: TransactionOptions,
90 | ) {
91 | super(tx)
92 | }
93 |
94 | async commit(): Promise<Result<void>> {
95 | debug(`[js::commit]`)
96 |
97 | this.finished = true
98 | await this.client.commit()
99 | return Promise.resolve(ok(undefined))
100 | }
101 |
102 | async rollback(): Promise<Result<void>> {
103 | debug(`[js::rollback]`)
104 |
105 | this.finished = true
106 | await this.client.rollback()
107 | return Promise.resolve(ok(undefined))
108 | }
109 |
110 | dispose(): Result<void> {
111 | if (!this.finished) {
112 | this.rollback().catch(console.error)
113 | }
114 | return ok(undefined)
115 | }
116 | }
117 |
118 | export class PrismaStylusDBSQL extends StylusDBSQLTransaction implements DriverAdapter {
119 | constructor(client: any, options: any) {
120 | super(client, options)
121 |
122 | debug('PrismaStylusDBSQL: Client Constructor Called');
123 | }
124 |
125 | getConnectionInfo(): Result<ConnectionInfo> {
126 | debug('PrismaStylusDBSQL: getConnectionInfo Called');
127 | return ok({
128 | schemaName: undefined,
129 | })
130 | }
131 |
132 | async startTransaction() {
133 | const options: TransactionOptions = {
134 | usePhantomQuery: true,
135 | }
136 |
137 | const tag = '[js::startTransaction]'
138 | debug(`${tag} options: %O`, options)
139 |
140 | const tx = await this.client.begin()
141 | return ok(new StylusDBSQLTransaction(tx, options))
142 | }
143 | }
144 |
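145 | // Usage (sketch): `client` is any object exposing execute/begin/commit/rollback,
146 | // e.g. the unpackaged client in example/prisma/client.js:
147 | //
148 | //   const adapter = new PrismaStylusDBSQL(client, {});
149 | //   const prisma = new PrismaClient({ adapter });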
--------------------------------------------------------------------------------
/prisma/prisma-adapter/src/queryUtils.ts:
--------------------------------------------------------------------------------
1 | import { ColumnTypeEnum, Query } from "@prisma/driver-adapter-utils";
2 |
3 | function adaptInsertQuery(query: Query): string[] {
4 | // Check if it's an INSERT query
5 | if (!query.sql.startsWith('INSERT INTO')) {
6 | // Remove schema ("public".) from the query
7 | let simplifiedQuery = query.sql
8 | .replace(/"public"\./g, '')
9 | .replace(/\sWHERE\s1=1/g, '')
10 | .replace(/\sOFFSET\s'0'/g, '')
11 | .replace(/\sOFFSET\s0/g, '')
12 | .replace(/"([^"]+)"/g, '$1')
13 | .replace(/"?\b\w+\b"?\."?(\w+)"?/g, '$1')
14 | .replace(/\bFROM\s+(\w+)/, (match, p1) => `FROM ${p1.toLowerCase()}`);
15 | return [evaluateQuery({sql: simplifiedQuery, args: query.args})];
16 | }
17 |
18 | // Remove schema ("public".) from the query
19 | let simplifiedQuery = query.sql.replace(/"public"\./g, '');
20 |
21 | // No INSERT/SELECT split is needed; interpolate the arguments and
22 | // return the INSERT statement as a single query.
23 |
24 | const insertPart = simplifiedQuery;
25 |
26 | return [evaluateQuery({sql: insertPart, args: query.args})]
28 | }
29 |
30 | function convertToPrismaResultSetSELECTQuery(dbResponse) {
31 | // Ensure the response is in array format even if it's a single record
32 | const records = Array.isArray(dbResponse) ? dbResponse : [dbResponse];
33 |
34 | // Extract column names from the first record (assuming all records have the same structure)
35 | const columnNames = records.length > 0 ? Object.keys(records[0]) : [];
36 |
37 | // Assign 'Number' type to 'id' column, and 'String' to all other columns
38 | const columnTypes = columnNames.map(columnName => columnName === 'id' ? ColumnTypeEnum.Int32 : ColumnTypeEnum.Text);
39 |
40 | // Convert each record into an array of values
41 | const rows = records.map(record =>
42 | columnNames.map(columnName =>
43 | columnName === 'id' ? Number(record[columnName]) : record[columnName]
44 | )
45 | );
46 |
47 | // Construct the ResultSet
48 | const resultSet = {
49 | columnNames,
50 | columnTypes,
51 | rows,
52 | lastInsertId: undefined // Set this appropriately if your DB returns last insert ID
53 | };
54 |
55 | return resultSet;
56 | }
57 |
58 | function convertToPrismaResultSetINSERTQuery(dbResponse) {
59 | const { message, insertedId, returning } = dbResponse;
60 |
61 | // If no data is returned (i.e., no RETURNING clause), create an empty result set
62 | if (!returning || Object.keys(returning).length === 0) {
63 | return {
64 | columnNames: [],
65 | columnTypes: [],
66 | rows: [],
67 | lastInsertId: insertedId ? insertedId.toString() : undefined
68 | };
69 | }
70 |
71 | // Extract column names and values from the returning object
72 | const columnNames = Object.keys(returning);
73 | const values = Object.values(returning);
74 |
75 | // Assign 'Number' type to 'id' column, and 'String' to all other columns
76 | const columnTypes = columnNames.map(columnName => columnName === 'id' ? ColumnTypeEnum.Int32 : ColumnTypeEnum.Text);
77 |
78 | // The rows array will contain only one row (the inserted row)
79 | const rows = [values];
80 |
81 | // Construct the ResultSet
82 | const resultSet = {
83 | columnNames,
84 | columnTypes,
85 | rows,
86 | lastInsertId: insertedId ? insertedId.toString() : undefined
87 | };
88 |
89 | return resultSet;
90 | }
91 |
92 | function evaluateQuery(queryObject: Query) {
93 | let { sql, args } = queryObject;
94 |
95 | // Replace placeholders ($1, $2, ...) with quoted values from args (naive: first occurrence only, no escaping)
96 | args.forEach((arg, index) => {
97 | sql = sql.replace(`$${index + 1}`, `'${arg}'`);
98 | });
99 |
100 | return sql;
101 | }
102 |
103 | export { adaptInsertQuery, convertToPrismaResultSetSELECTQuery, convertToPrismaResultSetINSERTQuery, evaluateQuery }
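104 |
105 | // Example of the interpolation performed by evaluateQuery (a sketch; args are illustrative):
106 | //   evaluateQuery({ sql: "SELECT * FROM student WHERE id = $1", args: [42] })
107 | //   // => "SELECT * FROM student WHERE id = '42'"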
--------------------------------------------------------------------------------
/prisma/prisma-adapter/tsconfig.build.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "./tsconfig.json",
3 | "compilerOptions": {
4 | "outDir": "declaration"
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/prisma/prisma-adapter/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "ES2022",
4 | "module": "ESNext",
5 | "lib": ["ES2022"],
6 | "moduleResolution": "Bundler",
7 | "esModuleInterop": false,
8 | "isolatedModules": true,
9 | "sourceMap": true,
10 | "declaration": true,
11 | "strict": true,
12 | "noImplicitAny": false,
13 | "noUncheckedIndexedAccess": false,
14 | "noImplicitReturns": true,
15 | "noFallthroughCasesInSwitch": true,
16 | "useUnknownInCatchVariables": false,
17 | "skipDefaultLibCheck": true,
18 | "skipLibCheck": true,
19 | "emitDeclarationOnly": true,
20 | "resolveJsonModule": true
21 | },
22 | "exclude": ["**/dist", "**/declaration", "**/node_modules", "**/src/__tests__"]
23 | }
--------------------------------------------------------------------------------
/src/cli.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | const readline = require('readline');
4 | const { executeSELECTQuery, executeINSERTQuery, executeDELETEQuery } = require('./queryExecuter');
5 |
6 | const rl = readline.createInterface({
7 | input: process.stdin,
8 | output: process.stdout
9 | });
10 |
11 | rl.setPrompt('SQL> ');
12 | console.log('SQL Query Engine CLI. Enter your SQL commands, or type "exit" to quit.');
13 |
14 | rl.prompt();
15 |
16 | rl.on('line', async (line) => {
17 | if (line.toLowerCase() === 'exit') {
18 | rl.close();
19 | return;
20 | }
21 |
22 | try {
23 | if (line.toLowerCase().startsWith('select')) {
24 | const result = await executeSELECTQuery(line);
25 | console.log('Result:', result);
26 | } else if (line.toLowerCase().startsWith('insert into')) {
27 | const result = await executeINSERTQuery(line);
28 | console.log(result.message);
29 | } else if (line.toLowerCase().startsWith('delete from')) {
30 | const result = await executeDELETEQuery(line);
31 | console.log(result.message);
32 | } else {
33 | console.log('Unsupported command');
34 | }
35 | } catch (error) {
36 | console.error('Error:', error.message);
37 | }
38 |
39 | rl.prompt();
40 | }).on('close', () => {
41 | console.log('Exiting SQL CLI');
42 | process.exit(0);
43 | });
44 |
--------------------------------------------------------------------------------
/src/csvStorage.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const csv = require('csv-parser');
3 | const { parse } = require('json2csv');
4 | const hll = require('hll');
5 |
6 | function readCSV(filePath) {
7 | const results = [];
8 |
9 |
10 | return new Promise((resolve, reject) => {
11 | fs.createReadStream(filePath)
12 | .pipe(csv())
13 | .on('data', (data) => results.push(data))
14 | .on('end', () => {
15 | resolve(results);
16 | })
17 | .on('error', (error) => {
18 | reject(error);
19 | });
20 | });
21 | }
22 |
23 | function readCSVForHLL(filePath, bitSampleSize = 12, digestSize = 128) {
24 | const results = [];
25 | var h = hll({ bitSampleSize: bitSampleSize, digestSize: digestSize });
26 |
27 | return new Promise((resolve, reject) => {
28 | fs.createReadStream(filePath)
29 | .pipe(csv())
30 | .on('data', (data) => h.insert(JSON.stringify(data)))
31 | .on('end', () => {
32 | resolve(h);
33 | })
34 | .on('error', (error) => {
35 | reject(error);
36 | });
37 | });
38 | }
39 |
40 | async function writeCSV(filename, data) {
41 | const csv = parse(data);
42 | fs.writeFileSync(filename, csv);
43 | }
44 |
45 | module.exports = { readCSV, readCSVForHLL, writeCSV };
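46 |
47 | // Example (sketch): read a CSV into an array of row objects, then write it back out.
48 | //   readCSV('student.csv').then(rows => writeCSV('student-copy.csv', rows));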
--------------------------------------------------------------------------------
/src/index.js:
--------------------------------------------------------------------------------
1 | const { readCSV, writeCSV } = require('./csvStorage.js');
2 | const { parseSelectQuery, parseInsertQuery, parseDeleteQuery } = require('./queryParser.js');
3 | const { executeSELECTQuery, executeINSERTQuery, executeDELETEQuery } = require('./queryExecuter.js');
4 |
5 | module.exports = {
6 | readCSV,
7 | writeCSV,
8 | executeSELECTQuery,
9 | executeINSERTQuery,
10 | executeDELETEQuery,
11 | parseSelectQuery,
12 | parseInsertQuery,
13 | parseDeleteQuery
14 | }
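15 |
16 | // Example (sketch): the executors read from `<table>.csv` in the working directory.
17 | //   const { executeSELECTQuery } = require('stylusdb-sql');
18 | //   executeSELECTQuery('SELECT id, name FROM student').then(console.log);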
--------------------------------------------------------------------------------
/src/queryExecuter.js:
--------------------------------------------------------------------------------
1 | const { parseSelectQuery, parseInsertQuery, parseDeleteQuery } = require('./queryParser.js');
2 | const { readCSV, readCSVForHLL, writeCSV } = require('./csvStorage.js');
3 | const hll = require('hll');
4 |
5 |
6 | function performInnerJoin(data, joinData, joinCondition, fields, table) {
7 | return data.flatMap(mainRow => {
8 | return joinData
9 | .filter(joinRow => {
10 | const mainValue = mainRow[joinCondition.left.split('.')[1]];
11 | const joinValue = joinRow[joinCondition.right.split('.')[1]];
12 | return mainValue === joinValue;
13 | })
14 | .map(joinRow => {
15 | return fields.reduce((acc, field) => {
16 | const [tableName, fieldName] = field.split('.');
17 | acc[field] = tableName === table ? mainRow[fieldName] : joinRow[fieldName];
18 | return acc;
19 | }, {});
20 | });
21 | });
22 | }
23 |
24 | function performLeftJoin(data, joinData, joinCondition, fields, table) {
25 | return data.flatMap(mainRow => {
26 | const matchingJoinRows = joinData.filter(joinRow => {
27 | const mainValue = getValueFromRow(mainRow, joinCondition.left);
28 | const joinValue = getValueFromRow(joinRow, joinCondition.right);
29 | return mainValue === joinValue;
30 | });
31 |
32 | if (matchingJoinRows.length === 0) {
33 | return [createResultRow(mainRow, null, fields, table, true)];
34 | }
35 |
36 | return matchingJoinRows.map(joinRow => createResultRow(mainRow, joinRow, fields, table, true));
37 | });
38 | }
39 |
40 | function getValueFromRow(row, compoundFieldName) {
41 | const [tableName, fieldName] = compoundFieldName.split('.');
42 | return row[`${tableName}.${fieldName}`] || row[fieldName];
43 | }
44 |
45 | function performRightJoin(data, joinData, joinCondition, fields, table) {
46 | // Cache the structure of a main table row (keys only)
47 | const mainTableRowStructure = data.length > 0 ? Object.keys(data[0]).reduce((acc, key) => {
48 | acc[key] = null; // Set all values to null initially
49 | return acc;
50 | }, {}) : {};
51 |
52 | return joinData.map(joinRow => {
53 | const mainRowMatch = data.find(mainRow => {
54 | const mainValue = getValueFromRow(mainRow, joinCondition.left);
55 | const joinValue = getValueFromRow(joinRow, joinCondition.right);
56 | return mainValue === joinValue;
57 | });
58 |
59 | // Use the cached structure if no match is found
60 | const mainRowToUse = mainRowMatch || mainTableRowStructure;
61 |
62 | // Include all necessary fields from the 'student' table
63 | return createResultRow(mainRowToUse, joinRow, fields, table, true);
64 | });
65 | }
66 |
67 | function createResultRow(mainRow, joinRow, fields, table, includeAllMainFields) {
68 | const resultRow = {};
69 |
70 | if (includeAllMainFields) {
71 | // Include all fields from the main table
72 | Object.keys(mainRow || {}).forEach(key => {
73 | const prefixedKey = `${table}.${key}`;
74 | resultRow[prefixedKey] = mainRow ? mainRow[key] : null;
75 | });
76 | }
77 |
78 | // Now, add or overwrite with the fields specified in the query
79 | fields.forEach(field => {
80 | const [tableName, fieldName] = field.includes('.') ? field.split('.') : [table, field];
81 | resultRow[field] = tableName === table && mainRow ? mainRow[fieldName] : joinRow ? joinRow[fieldName] : null;
82 | });
83 |
84 | return resultRow;
85 | }
86 |
87 | function evaluateCondition(row, clause) {
88 | let { field, operator, value } = clause;
89 |
90 | // Check if the field exists in the row
91 | if (row[field] === undefined) {
92 | throw new Error(`Invalid field: ${field}`);
93 | }
94 |
95 | // Parse row value and condition value based on their actual types
96 | const rowValue = parseValue(row[field]);
97 | let conditionValue = parseValue(value);
98 |
99 | if (operator === 'LIKE') {
100 | // Transform SQL LIKE pattern to JavaScript RegExp pattern
101 | const regexPattern = '^' + value.replace(/%/g, '.*').replace(/_/g, '.') + '$';
102 | const regex = new RegExp(regexPattern, 'i'); // 'i' for case-insensitive matching
103 | return regex.test(row[field]);
104 | }
105 |
106 | switch (operator) {
107 | case '=': return rowValue === conditionValue;
108 | case '!=': return rowValue !== conditionValue;
109 | case '>': return rowValue > conditionValue;
110 | case '<': return rowValue < conditionValue;
111 | case '>=': return rowValue >= conditionValue;
112 | case '<=': return rowValue <= conditionValue;
113 | default: throw new Error(`Unsupported operator: ${operator}`);
114 | }
115 | }
116 |
117 | // Helper function to parse value based on its apparent type
118 | function parseValue(value) {
119 |
120 | // Return null or undefined as is
121 | if (value === null || value === undefined) {
122 | return value;
123 | }
124 |
125 | // If the value is a string enclosed in single or double quotes, remove them
126 | if (typeof value === 'string' && ((value.startsWith("'") && value.endsWith("'")) || (value.startsWith('"') && value.endsWith('"')))) {
127 | value = value.substring(1, value.length - 1);
128 | }
129 |
130 | // Check if value is a number
131 | if (!isNaN(value) && value.trim() !== '') {
132 | return Number(value);
133 | }
134 | // Assume value is a string if not a number
135 | return value;
136 | }
137 |
138 | function applyGroupBy(data, groupByFields, aggregateFunctions) {
139 | const groupResults = {};
140 |
141 | data.forEach(row => {
142 | // Generate a key for the group
143 | const groupKey = groupByFields.map(field => row[field]).join('-');
144 |
145 | // Initialize group in results if it doesn't exist
146 | if (!groupResults[groupKey]) {
147 | groupResults[groupKey] = { count: 0, sums: {}, mins: {}, maxes: {} };
148 | groupByFields.forEach(field => groupResults[groupKey][field] = row[field]);
149 | }
150 |
151 | // Aggregate calculations
152 | groupResults[groupKey].count += 1;
153 | aggregateFunctions.forEach(func => {
154 | const match = /(\w+)\((\w+)\)/.exec(func);
155 | if (match) {
156 | const [, aggFunc, aggField] = match;
157 | const value = parseFloat(row[aggField]);
158 |
159 | switch (aggFunc.toUpperCase()) {
160 | case 'SUM':
161 | groupResults[groupKey].sums[aggField] = (groupResults[groupKey].sums[aggField] || 0) + value;
162 | break;
163 | case 'MIN':
164 | groupResults[groupKey].mins[aggField] = Math.min(groupResults[groupKey].mins[aggField] || value, value);
165 | break;
166 | case 'MAX':
167 | groupResults[groupKey].maxes[aggField] = Math.max(groupResults[groupKey].maxes[aggField] || value, value);
168 | break;
169 | // Additional aggregate functions can be added here
170 | }
171 | }
172 | });
173 | });
174 |
175 | // Convert grouped results into an array format
176 | return Object.values(groupResults).map(group => {
177 | // Construct the final grouped object based on required fields
178 | const finalGroup = {};
179 | groupByFields.forEach(field => finalGroup[field] = group[field]);
180 | aggregateFunctions.forEach(func => {
181 | const match = /(\w+)\((\*|\w+)\)/.exec(func);
182 | if (match) {
183 | const [, aggFunc, aggField] = match;
184 | switch (aggFunc.toUpperCase()) {
185 | case 'SUM':
186 | finalGroup[func] = group.sums[aggField];
187 | break;
188 | case 'MIN':
189 | finalGroup[func] = group.mins[aggField];
190 | break;
191 | case 'MAX':
192 | finalGroup[func] = group.maxes[aggField];
193 | break;
194 | case 'COUNT':
195 | finalGroup[func] = group.count;
196 | break;
197 | // Additional aggregate functions can be handled here
198 | }
199 | }
200 | });
201 |
202 | return finalGroup;
203 | });
204 | }
205 |
206 | async function executeSELECTQuery(query) {
207 | try {
208 | const { fields, table, whereClauses, joinType, joinTable, joinCondition, groupByFields, hasAggregateWithoutGroupBy, isApproximateCount, orderByFields, limit, isDistinct, distinctFields, isCountDistinct } = parseSelectQuery(query);
209 |
210 |
211 | if (isApproximateCount && fields.length === 1 && fields[0] === 'COUNT(*)' && whereClauses.length === 0) {
212 | let hll = await readCSVForHLL(`${table}.csv`);
213 | return [{ 'APPROXIMATE_COUNT(*)': hll.estimate() }];
214 | }
215 |
216 | let data = await readCSV(`${table}.csv`);
217 |
218 | // Perform JOIN if specified (INNER, LEFT, or RIGHT)
219 | if (joinTable && joinCondition) {
220 | const joinData = await readCSV(`${joinTable}.csv`);
221 | switch (joinType.toUpperCase()) {
222 | case 'INNER':
223 | data = performInnerJoin(data, joinData, joinCondition, fields, table);
224 | break;
225 | case 'LEFT':
226 | data = performLeftJoin(data, joinData, joinCondition, fields, table);
227 | break;
228 | case 'RIGHT':
229 | data = performRightJoin(data, joinData, joinCondition, fields, table);
230 | break;
231 | default:
232 | throw new Error(`Unsupported JOIN type: ${joinType}`);
233 | }
234 | }
235 | // Apply WHERE clause filtering after JOIN (or on the original data if no join)
236 | let filteredData = whereClauses.length > 0
237 | ? data.filter(row => whereClauses.every(clause => evaluateCondition(row, clause)))
238 | : data;
239 |
240 |
241 | let groupResults = filteredData;
242 | if (hasAggregateWithoutGroupBy) {
243 | // Special handling for queries like 'SELECT COUNT(*) FROM table'
244 | const result = {};
245 |
246 | fields.forEach(field => {
247 | const match = /(\w+)\((\*|\w+)\)/.exec(field);
248 | if (match) {
249 | const [, aggFunc, aggField] = match;
250 | switch (aggFunc.toUpperCase()) {
251 | case 'COUNT':
252 | result[field] = filteredData.length;
253 | break;
254 | case 'SUM':
255 | result[field] = filteredData.reduce((acc, row) => acc + parseFloat(row[aggField]), 0);
256 | break;
257 | case 'AVG':
258 | result[field] = filteredData.reduce((acc, row) => acc + parseFloat(row[aggField]), 0) / filteredData.length;
259 | break;
260 | case 'MIN':
261 | result[field] = Math.min(...filteredData.map(row => parseFloat(row[aggField])));
262 | break;
263 | case 'MAX':
264 | result[field] = Math.max(...filteredData.map(row => parseFloat(row[aggField])));
265 | break;
266 | // Additional aggregate functions can be handled here
267 | }
268 | }
269 | });
270 | 
271 |             // Single result row for aggregate queries without GROUP BY
272 |             return [result];
273 | } else if (groupByFields) {
274 | groupResults = applyGroupBy(filteredData, groupByFields, fields);
275 |
276 | // Order them by the specified fields
277 | let orderedResults = groupResults;
278 | if (orderByFields) {
279 | orderedResults = groupResults.sort((a, b) => {
280 | for (let { fieldName, order } of orderByFields) {
281 | if (a[fieldName] < b[fieldName]) return order === 'ASC' ? -1 : 1;
282 | if (a[fieldName] > b[fieldName]) return order === 'ASC' ? 1 : -1;
283 | }
284 | return 0;
285 | });
286 | }
287 |             if (limit !== null) {
288 |                 orderedResults = orderedResults.slice(0, limit);
289 |             }
290 |             return orderedResults;
291 | } else {
292 |
293 | // Order them by the specified fields
294 | let orderedResults = groupResults;
295 | if (orderByFields) {
296 | orderedResults = groupResults.sort((a, b) => {
297 | for (let { fieldName, order } of orderByFields) {
298 | if (a[fieldName] < b[fieldName]) return order === 'ASC' ? -1 : 1;
299 | if (a[fieldName] > b[fieldName]) return order === 'ASC' ? 1 : -1;
300 | }
301 | return 0;
302 | });
303 | }
304 |
305 | // Distinct inside count - example "SELECT COUNT (DISTINCT student.name) FROM student"
306 | if (isCountDistinct) {
307 |
308 | if (isApproximateCount) {
309 |                 const h = hll({ bitSampleSize: 12, digestSize: 128 });
310 | orderedResults.forEach(row => h.insert(distinctFields.map(field => row[field]).join('|')));
311 | return [{ [`APPROXIMATE_${fields[0]}`]: h.estimate() }];
312 | }
313 | else {
314 | let distinctResults = [...new Map(orderedResults.map(item => [distinctFields.map(field => item[field]).join('|'), item])).values()];
315 | return [{ [fields[0]]: distinctResults.length }];
316 | }
317 | }
318 |
319 | // Select the specified fields
320 | let finalResults = orderedResults.map(row => {
321 | const selectedRow = {};
322 | fields.forEach(field => {
323 | // Assuming 'field' is just the column name without table prefix
324 | selectedRow[field] = row[field];
325 | });
326 | return selectedRow;
327 | });
328 |
329 | // console.log("CP-2", orderedResults)
330 |
331 | // Remove duplicates if specified
332 | let distinctResults = finalResults;
333 | if (isDistinct) {
334 | distinctResults = [...new Map(finalResults.map(item => [fields.map(field => item[field]).join('|'), item])).values()];
335 | }
336 |
337 | let limitResults = distinctResults;
338 | if (limit !== null) {
339 | limitResults = distinctResults.slice(0, limit);
340 | }
341 |
342 | return limitResults;
343 |
344 |
345 | }
346 | } catch (error) {
347 | throw new Error(`Error executing query: ${error.message}`);
348 | }
349 | }
350 |
351 | async function executeINSERTQuery(query) {
352 | const { table, columns, values, returningColumns } = parseInsertQuery(query);
353 | const data = await readCSV(`${table}.csv`);
354 |
355 | // Check if 'id' column is included in the query and in CSV headers
356 | let newId = null;
357 | if (!columns.includes('id') && data.length > 0 && 'id' in data[0]) {
358 | // 'id' column not included in the query, so we auto-generate an ID
359 | const existingIds = data.map(row => parseInt(row.id)).filter(id => !isNaN(id));
360 | const maxId = existingIds.length > 0 ? Math.max(...existingIds) : 0;
361 | newId = maxId + 1;
362 | columns.push('id');
363 | values.push(newId.toString()); // Add as a string
364 | }
365 |
366 | // Create a new row object matching the CSV structure
367 | const headers = data.length > 0 ? Object.keys(data[0]) : columns;
368 | const newRow = {};
369 | headers.forEach(header => {
370 | const columnIndex = columns.indexOf(header);
371 | if (columnIndex !== -1) {
372 | let value = values[columnIndex];
373 | if (value.startsWith("'") && value.endsWith("'")) {
374 | value = value.substring(1, value.length - 1);
375 | }
376 | newRow[header] = value;
377 | } else {
378 | newRow[header] = header === 'id' ? newId.toString() : '';
379 | }
380 | });
381 |
382 | // Add the new row to the data
383 | data.push(newRow);
384 |
385 | // Save the updated data back to the CSV file
386 | await writeCSV(`${table}.csv`, data);
387 |
388 | // Prepare the returning result if returningColumns are specified
389 | let returningResult = {};
390 | if (returningColumns.length > 0) {
391 | returningColumns.forEach(column => {
392 | returningResult[column] = newRow[column];
393 | });
394 | }
395 |
396 | return {
397 | message: "Row inserted successfully.",
398 | insertedId: newId,
399 | returning: returningResult
400 | };
401 | }
402 |
403 |
404 | async function executeDELETEQuery(query) {
405 | const { table, whereClauses } = parseDeleteQuery(query);
406 | let data = await readCSV(`${table}.csv`);
407 |
408 | if (whereClauses.length > 0) {
409 | // Filter out the rows that meet the where clause conditions
410 | data = data.filter(row => !whereClauses.every(clause => evaluateCondition(row, clause)));
411 | } else {
412 | // If no where clause, clear the entire table
413 | data = [];
414 | }
415 |
416 | // Save the updated data back to the CSV file
417 | await writeCSV(`${table}.csv`, data);
418 |
419 | return { message: "Rows deleted successfully." };
420 | }
421 |
422 |
423 | module.exports = { executeSELECTQuery, executeINSERTQuery, executeDELETEQuery };
--------------------------------------------------------------------------------
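
A minimal usage sketch for the executor above (illustrative, not part of the package): it assumes a `student.csv` with `id,name,age` columns in the working directory, like the one at the repository root, and is meant to be run from there.

```js
// Hypothetical demo script, run from the repository root.
const { executeSELECTQuery, executeINSERTQuery, executeDELETEQuery } = require('./src/queryExecuter');

async function demo() {
    // Aggregate with GROUP BY
    console.log(await executeSELECTQuery('SELECT age, COUNT(*) FROM student GROUP BY age'));

    // INSERT auto-generates the id when omitted; DELETE removes the row again
    const { insertedId } = await executeINSERTQuery("INSERT INTO student (name, age) VALUES ('Eve', '28')");
    await executeDELETEQuery(`DELETE FROM student WHERE id = ${insertedId}`);
}

demo().catch(console.error);
```
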
/src/queryParser.js:
--------------------------------------------------------------------------------
1 | /*
2 |   Query parser for SQL SELECT, INSERT and DELETE statements.
3 | */
4 | function parseSelectQuery(query) {
5 | try {
6 |
7 | // Trim the query to remove any leading/trailing whitespaces
8 | query = query.trim();
9 |
10 | // Initialize distinct flag
11 | let isDistinct = false; // Global DISTINCT, not within COUNT
12 | let isCountDistinct = false; // New flag for DISTINCT within COUNT
13 | let distinctFields = []; // Array to hold fields after DISTINCT within COUNT or APPROXIMATE_COUNT
14 |
15 |
16 | // Detect APPROXIMATE_COUNT
17 | let isApproximateCount = false;
18 | const approximateCountRegex = /APPROXIMATE_COUNT\((DISTINCT\s)?(.+?)\)/i;
19 | const approximateCountMatch = query.match(approximateCountRegex);
20 | if (approximateCountMatch) {
21 | isApproximateCount = true;
22 | // If DISTINCT is used within APPROXIMATE_COUNT, capture the fields
23 | if (approximateCountMatch[1]) {
24 | isCountDistinct = true;
25 | // distinctFields.push(approximateCountMatch[2].trim());
26 | }
27 | // Simplify further processing by normalizing to COUNT (adjust as necessary for your logic)
28 | query = query.replace(approximateCountRegex, `COUNT(${approximateCountMatch[1] || ''}${approximateCountMatch[2]})`);
29 | }
30 |
31 | // Check for DISTINCT keyword and update the query
32 |         if (/SELECT\s+DISTINCT/i.test(query)) {
33 |             isDistinct = true;
34 |             query = query.replace(/SELECT\s+DISTINCT/i, 'SELECT'); // case-insensitive: the old exact-case replace missed lowercase queries
35 |         }
36 |
37 | // Updated regex to capture LIMIT clause and remove it for further processing
38 | const limitRegex = /\sLIMIT\s(\d+)/i;
39 | const limitMatch = query.match(limitRegex);
40 |
41 | let limit = null;
42 | if (limitMatch) {
43 | limit = parseInt(limitMatch[1], 10);
44 | query = query.replace(limitRegex, ''); // Remove LIMIT clause
45 | }
46 |
47 | // Process ORDER BY clause and remove it for further processing
48 | const orderByRegex = /\sORDER BY\s(.+)/i;
49 | const orderByMatch = query.match(orderByRegex);
50 | let orderByFields = null;
51 | if (orderByMatch) {
52 | orderByFields = orderByMatch[1].split(',').map(field => {
53 | const [fieldName, order] = field.trim().split(/\s+/);
54 | return { fieldName, order: order ? order.toUpperCase() : 'ASC' };
55 | });
56 | query = query.replace(orderByRegex, '');
57 | }
58 |
59 | // Process GROUP BY clause and remove it for further processing
60 | const groupByRegex = /\sGROUP BY\s(.+)/i;
61 | const groupByMatch = query.match(groupByRegex);
62 | let groupByFields = null;
63 | if (groupByMatch) {
64 | groupByFields = groupByMatch[1].split(',').map(field => field.trim());
65 | query = query.replace(groupByRegex, '');
66 | }
67 |
68 | // Process WHERE clause
69 | const whereSplit = query.split(/\sWHERE\s/i);
70 | const queryWithoutWhere = whereSplit[0]; // Everything before WHERE clause
71 | const whereClause = whereSplit.length > 1 ? whereSplit[1].trim() : null;
72 |
73 | // Process JOIN clause
74 | const joinSplit = queryWithoutWhere.split(/\s(INNER|LEFT|RIGHT) JOIN\s/i);
75 | const selectPart = joinSplit[0].trim(); // Everything before JOIN clause
76 |
77 | // Extract JOIN information
78 | const { joinType, joinTable, joinCondition } = parseJoinClause(queryWithoutWhere);
79 |
80 |         // Detect COUNT(DISTINCT (...)) and capture the distinct field list.
81 |         // Exact and approximate counts are handled identically here, because
82 |         // APPROXIMATE_COUNT has already been normalized to COUNT above; the
83 |         // isApproximateCount flag alone decides how the query is executed later.
84 |         const countDistinctRegex = /COUNT\((DISTINCT\s\((.*?)\))\)/gi;
85 |         let countDistinctMatch;
86 |         while ((countDistinctMatch = countDistinctRegex.exec(query)) !== null) {
87 |             isCountDistinct = true;
88 |             distinctFields.push(...countDistinctMatch[2].trim().split(',').map(field => field.trim()));
89 |         }
90 |
91 | // Parse SELECT part
92 | const selectRegex = /^SELECT\s(.+?)\sFROM\s(.+)/i;
93 | const selectMatch = selectPart.match(selectRegex);
94 | if (!selectMatch) {
95 | throw new Error('Invalid SELECT format');
96 | }
97 | let [, fields, table] = selectMatch;
98 |
99 | // Parse WHERE part if it exists
100 | let whereClauses = [];
101 | if (whereClause) {
102 | whereClauses = parseWhereClause(whereClause);
103 | }
104 |
105 | // Check for aggregate functions without GROUP BY
106 | const hasAggregateWithoutGroupBy = checkAggregateWithoutGroupBy(query, groupByFields);
107 |
108 | // Temporarily replace commas within parentheses to avoid incorrect splitting
109 | const tempPlaceholder = '__TEMP_COMMA__'; // Ensure this placeholder doesn't appear in your actual queries
110 | fields = fields.replace(/\(([^)]+)\)/g, (match) => match.replace(/,/g, tempPlaceholder));
111 |
112 | // Now split fields and restore any temporary placeholders
113 | const parsedFields = fields.split(',').map(field =>
114 | field.trim().replace(new RegExp(tempPlaceholder, 'g'), ','));
115 |
116 |
117 | return {
118 | fields: parsedFields,
119 | table: table.trim(),
120 | whereClauses,
121 | joinType,
122 | joinTable,
123 | joinCondition,
124 | groupByFields,
125 | orderByFields,
126 | hasAggregateWithoutGroupBy,
127 | isApproximateCount,
128 | isCountDistinct,
129 | limit,
130 | distinctFields,
131 | isDistinct
132 | };
133 | } catch (error) {
134 | throw new Error(`Query parsing error: ${error.message}`);
135 | }
136 | }
137 |
138 | function checkAggregateWithoutGroupBy(query, groupByFields) {
139 | const aggregateFunctionRegex = /(\bCOUNT\b|\bAVG\b|\bSUM\b|\bMIN\b|\bMAX\b)\s*\(\s*(\*|\w+)\s*\)/i;
140 | return aggregateFunctionRegex.test(query) && !groupByFields;
141 | }
142 |
143 | function parseWhereClause(whereString) {
144 | const conditionRegex = /(.*?)(=|!=|>=|<=|>|<)(.*)/;
145 |     return whereString.split(/ AND | OR /i).map(conditionString => { // NOTE: OR is split like AND; the executor requires every clause to match, so true OR semantics are not implemented
146 | if (conditionString.includes(' LIKE ')) {
147 | const [field, pattern] = conditionString.split(/\sLIKE\s/i);
148 | return { field: field.trim(), operator: 'LIKE', value: pattern.trim().replace(/^'(.*)'$/, '$1') };
149 | } else {
150 | const match = conditionString.match(conditionRegex);
151 | if (match) {
152 | const [, field, operator, value] = match;
153 | return { field: field.trim(), operator, value: value.trim() };
154 | }
155 | throw new Error('Invalid WHERE clause format');
156 | }
157 | });
158 | }
159 |
160 | function parseJoinClause(query) {
161 | const joinRegex = /\s(INNER|LEFT|RIGHT) JOIN\s(.+?)\sON\s([\w.]+)\s*=\s*([\w.]+)/i;
162 | const joinMatch = query.match(joinRegex);
163 |
164 | if (joinMatch) {
165 | return {
166 | joinType: joinMatch[1].trim(),
167 | joinTable: joinMatch[2].trim(),
168 | joinCondition: {
169 | left: joinMatch[3].trim(),
170 | right: joinMatch[4].trim()
171 | }
172 | };
173 | }
174 |
175 | return {
176 | joinType: null,
177 | joinTable: null,
178 | joinCondition: null
179 | };
180 | }
181 |
182 | function parseInsertQuery(query) {
183 | // Simplify the query by removing schema names and table references from column names
184 | let simplifiedQuery = query.replace(/"?\w+"?\."(\w+)"?/g, '$1');
185 |
186 | // Parse the INSERT INTO part
187 | const insertRegex = /INSERT INTO "?(\w+)"?\s\(([^)]+)\)\sVALUES\s\(([^)]+)\)/i;
188 | const match = simplifiedQuery.match(insertRegex);
189 |
190 | if (!match) {
191 | throw new Error("Invalid INSERT INTO syntax.");
192 | }
193 |
194 | const [, table, columns, values] = match;
195 |
196 | // Function to clean and remove surrounding quotes from column names
197 | const cleanColumnName = (name) => {
198 | return name.trim().replace(/^"?(.+?)"?$/g, '$1');
199 | };
200 |
201 | // Function to clean and remove surrounding single quotes from values
202 | const cleanValue = (value) => {
203 | return value.trim().replace(/^'(.*)'$/g, '$1').replace(/^"(.*)"$/g, '$1');
204 | };
205 |
206 | // Function to clean returning column names by removing table prefixes and quotes
207 | const cleanReturningColumn = (name) => {
208 | return name.trim().replace(/\w+\./g, '').replace(/^"?(.+?)"?$/g, '$1');
209 | };
210 |
211 | // Parse and clean columns and values
212 | const parsedColumns = columns.split(',').map(cleanColumnName);
213 | const parsedValues = values.split(',').map(cleanValue);
214 |
215 | // Parse the RETURNING part if present
216 | const returningMatch = simplifiedQuery.match(/RETURNING\s(.+)$/i);
217 | const returningColumns = returningMatch
218 | ? returningMatch[1].split(',').map(cleanReturningColumn)
219 | : [];
220 | return {
221 | type: 'INSERT',
222 | table: cleanColumnName(table),
223 | columns: parsedColumns,
224 | values: parsedValues,
225 | returningColumns
226 | };
227 | }
228 |
229 | function parseDeleteQuery(query) {
230 | const deleteRegex = /DELETE FROM (\w+)( WHERE (.*))?/i;
231 | const match = query.match(deleteRegex);
232 |
233 | if (!match) {
234 | throw new Error("Invalid DELETE syntax.");
235 | }
236 |
237 | const [, table, , whereString] = match;
238 | let whereClauses = [];
239 | if (whereString) {
240 | whereClauses = parseWhereClause(whereString);
241 | }
242 |
243 | return {
244 | type: 'DELETE',
245 | table: table.trim(),
246 | whereClauses
247 | };
248 | }
249 |
250 |
251 | module.exports = { parseSelectQuery, parseJoinClause, parseInsertQuery, parseDeleteQuery };
--------------------------------------------------------------------------------
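
For orientation, a sketch of the object `parseSelectQuery` returns for a representative query; the commented shape mirrors what the code above produces.

```js
const { parseSelectQuery } = require('./src/queryParser');

const parsed = parseSelectQuery(
    'SELECT student.name, enrollment.course FROM student ' +
    'INNER JOIN enrollment ON student.id = enrollment.student_id ' +
    'WHERE student.age > 22 LIMIT 2'
);
// parsed:
// {
//   fields: ['student.name', 'enrollment.course'],
//   table: 'student',
//   whereClauses: [{ field: 'student.age', operator: '>', value: '22' }],
//   joinType: 'INNER',
//   joinTable: 'enrollment',
//   joinCondition: { left: 'student.id', right: 'enrollment.student_id' },
//   groupByFields: null,
//   orderByFields: null,
//   hasAggregateWithoutGroupBy: false,
//   isApproximateCount: false,
//   isCountDistinct: false,
//   limit: 2,
//   distinctFields: [],
//   isDistinct: false
// }
```
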
/src/server.js:
--------------------------------------------------------------------------------
1 | const net = require('net');
2 | const { EventEmitter } = require('events');
3 | const { executeSELECTQuery, executeINSERTQuery, executeDELETEQuery } = require('./queryExecuter');
4 |
5 | class QueryQueue extends EventEmitter {
6 | constructor() {
7 | super();
8 | this.queue = [];
9 | this.isProcessing = false;
10 | }
11 |
12 | addQuery(queryId, query, callback) {
13 | this.queue.push({ query, callback, queryId });
14 | this.emit('newQuery');
15 | }
16 |
17 | processQueue() {
18 | if (this.isProcessing || this.queue.length === 0) {
19 | return;
20 | }
21 | this.isProcessing = true;
22 | const { query, callback, queryId } = this.queue.shift();
23 | this.execute(query)
24 | .then(result => callback(null, queryId, result))
25 | .catch(error => callback(error, queryId))
26 | .finally(() => {
27 | this.isProcessing = false;
28 | this.processQueue();
29 | });
30 | }
31 |
32 | async execute(query) {
33 | if (query.toLowerCase().startsWith('select')) {
34 | return await executeSELECTQuery(query);
35 | } else if (query.toLowerCase().startsWith('insert into')) {
36 | return await executeINSERTQuery(query);
37 | } else if (query.toLowerCase().startsWith('delete from')) {
38 | return await executeDELETEQuery(query);
39 | } else {
40 | throw new Error('Unsupported command');
41 | }
42 | }
43 | }
44 |
45 | const queryQueue = new QueryQueue();
46 | queryQueue.on('newQuery', () => queryQueue.processQueue());
47 |
48 | const server = net.createServer();
49 | let activeConnection = false;
50 |
51 | server.on('connection', (socket) => {
52 | if (activeConnection) {
53 | socket.end('Another connection is already active.');
54 | return;
55 | }
56 | activeConnection = true;
57 |
58 | socket.write('Connected\n');
59 |
60 | socket.on('data', (data) => {
61 |         const [queryId, ...rest] = data.toString().trim().split(':'); const query = rest.join(':'); // rejoin so queries containing ':' are not truncated (split(':', 2) would drop everything after the second segment)
62 | queryQueue.addQuery(queryId, query, (error, queryId, result) => {
63 | let response;
64 | if (error) {
65 | response = `${queryId}<|>Error: ${error.message}`;
66 | } else {
67 | response = `${queryId}<|>${JSON.stringify(result)}`;
68 | }
69 | socket.write(response + '\n');
70 | });
71 | });
72 |
73 | socket.on('close', () => {
74 | activeConnection = false;
75 | });
76 | });
77 |
78 | server.listen(5432, () => {
79 | console.log('Server listening on port 5432');
80 | });
81 |
--------------------------------------------------------------------------------
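
A minimal TCP client sketch for the server above (the repository ships its own client in `example/socket/client.js`; this one is only illustrative). The wire protocol is `queryId:query` in, `queryId<|>result` out, one line per response.

```js
const net = require('net');

const client = net.createConnection({ port: 5432 }, () => {
    client.write('q1:SELECT id, name FROM student');
});

client.on('data', (data) => {
    for (const line of data.toString().split('\n').filter(Boolean)) {
        if (line === 'Connected') continue; // greeting sent when the connection opens
        const [queryId, payload] = line.split('<|>');
        console.log(queryId, payload); // payload is JSON, or "Error: ..." on failure
        client.end();
    }
});
```
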
/student.csv:
--------------------------------------------------------------------------------
1 | "id","name","age"
2 | "1","John","30"
3 | "2","Jane","25"
4 | "3","Bob","22"
5 | "4","Alice","24"
6 | "5","Jane","22"
--------------------------------------------------------------------------------
/tests/appoximateLargeFile.test.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const { executeSELECTQuery } = require('../src/queryExecuter');
3 | const jestConsole = console;
4 |
5 | beforeEach(() => {
6 | global.console = require('console');
7 | });
8 |
9 | afterEach(() => {
10 | global.console = jestConsole;
11 | });
12 |
13 | test('Large File Count(*) - Approximate and Exact', async () => {
14 | // Test Exact Count
15 |
16 | const startMemoryUsageExact = process.memoryUsage().heapUsed;
17 | const startTimeExact = performance.now();
18 |
19 | const queryExact = "SELECT COUNT(*) FROM student_large";
20 | const resultExact = await executeSELECTQuery(queryExact);
21 | const exactResult = resultExact[0]['COUNT(*)'];
22 |
23 | const endTimeExact = performance.now();
24 | const endMemoryUsageExact = process.memoryUsage().heapUsed;
25 |
26 | console.log(`Execution Time for Exact Count: ${(endTimeExact - startTimeExact).toFixed(2)} ms`);
27 | console.log(`Start Memory for Exact Count: ${startMemoryUsageExact / 1024 / 1024} MB`);
28 | console.log(`End Memory for Exact Count: ${endMemoryUsageExact / 1024 / 1024} MB`);
29 | console.log(`Memory Used for Exact Count: ${(endMemoryUsageExact - startMemoryUsageExact) / 1024 / 1024} MB`);
30 |
31 | const startMemoryUsage = process.memoryUsage().heapUsed;
32 | const startTime = performance.now();
33 |
34 | const query = "SELECT APPROXIMATE_COUNT(*) FROM student_large";
35 | const result = await executeSELECTQuery(query);
36 |
37 | // Expect the approximate count to be within 5% of the actual count
38 | expect(result[0]['APPROXIMATE_COUNT(*)']).toBeGreaterThan(exactResult - 0.05 * exactResult);
39 | expect(result[0]['APPROXIMATE_COUNT(*)']).toBeLessThan(exactResult + 0.05 * exactResult);
40 |
41 | const endTime = performance.now();
42 | const endMemoryUsage = process.memoryUsage().heapUsed;
43 |
44 | console.log(`Execution Time for Approximate Count: ${(endTime - startTime).toFixed(2)} ms`);
45 | console.log(`Start Memory: ${startMemoryUsage / 1024 / 1024} MB`);
46 | console.log(`End Memory: ${endMemoryUsage / 1024 / 1024} MB`);
47 | console.log(`Memory Used for Approximate Count: ${(endMemoryUsage - startMemoryUsage) / 1024 / 1024} MB`);
48 |
49 | }, 120000);
50 |
51 | test('Execute SQL Query with COUNT with DISTINCT on a column', async () => {
52 | const queryExact = "SELECT COUNT(DISTINCT (name, age)) FROM student_large";
53 | const resultExact = await executeSELECTQuery(queryExact);
54 | console.log({ resultExact });
55 | const exactResult = resultExact[0]['COUNT(DISTINCT (name, age))'];
56 |
57 | const query = "SELECT APPROXIMATE_COUNT(DISTINCT (name, age)) FROM student_large";
58 | const result = await executeSELECTQuery(query);
59 |
60 |     // Expect the approximate count to be within 5% of the actual count
61 | expect(result[0]['APPROXIMATE_COUNT(DISTINCT (name, age))']).toBeGreaterThan(exactResult - 0.05 * exactResult);
62 | expect(result[0]['APPROXIMATE_COUNT(DISTINCT (name, age))']).toBeLessThan(exactResult + 0.05 * exactResult);
63 | }, 120000);
--------------------------------------------------------------------------------
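
The approximate counts in these tests come from the HyperLogLog estimator in `src/queryExecuter.js`. A standalone sketch of the same calls, assuming the `hll` package the executor imports, with the same parameters it passes:

```js
const hll = require('hll');

const h = hll({ bitSampleSize: 12, digestSize: 128 });
for (let i = 0; i < 1_000_000; i++) {
    h.insert(`value-${i % 250_000}`); // 250k distinct values, each inserted 4 times
}
console.log(h.estimate()); // ≈ 250000; HLL error is typically a few percent at this register count
```
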
/tests/cli.test.js:
--------------------------------------------------------------------------------
1 | const child_process = require('child_process');
2 | const path = require('path');
3 |
4 | function convertToJson(str) {
5 | // Regular expression to find unquoted keys
6 | const regexKeys = /(\b\w+\b)(?=:)/g;
7 | // Regular expression to strip ANSI color codes
8 | const regexAnsi = /\x1B\[\d+m/g;
9 |
10 |     str = str.replace(/'/g, '"').replace(/(\w+):/g, '"$1":');
11 | // Replace unquoted keys with quoted keys and remove ANSI color codes
12 | const jsonString = str.replace(regexKeys, '"$1"').replace(regexAnsi, '');
13 |
14 | try {
15 | // Parse the corrected string into JSON
16 | const json = JSON.parse(jsonString);
17 | return json;
18 | } catch (e) {
19 | // If an error occurs, log it and return null
20 | console.error("Error parsing JSON:", e);
21 | return null;
22 | }
23 | }
24 |
25 |
26 |
27 | test('DISTINCT with Multiple Columns via CLI', (done) => {
28 | const cliPath = path.join(__dirname, '..', 'src', 'cli.js');
29 | const cliProcess = child_process.spawn('node', [cliPath]);
30 |
31 | let outputData = "";
32 | cliProcess.stdout.on('data', (data) => {
33 | outputData += data.toString();
34 | });
35 |
36 | cliProcess.on('exit', () => {
37 | // Define a regex pattern to extract the JSON result
38 | const cleanedOutput = outputData.replace(/\s+/g, ' ');
39 |
40 | const resultRegex = /Result: (\[.+\])/s;
41 | const match = cleanedOutput.match(resultRegex);
42 |
43 | if (match && match[1]) {
44 | // Parse the captured JSON string
45 | const results = convertToJson(match[1]);
46 |
47 | // Validation logic
48 | expect(results).toEqual([
49 | { student_id: '1', course: 'Mathematics' },
50 | { student_id: '1', course: 'Physics' },
51 | { student_id: '2', course: 'Chemistry' },
52 | { student_id: '3', course: 'Mathematics' },
53 | { student_id: '5', course: 'Biology' },
54 | { student_id: '5', course: 'Physics' }
55 | ]);
56 |         } else {
57 |             done(new Error('Failed to parse CLI output'));
58 |             return;
59 |         }
60 | 
61 |         done();
62 | });
63 |
64 | // Introduce a delay before sending the query
65 | setTimeout(() => {
66 | cliProcess.stdin.write("SELECT DISTINCT student_id, course FROM enrollment\n");
67 | setTimeout(() => {
68 | cliProcess.stdin.write("exit\n");
69 | }, 1000); // 1 second delay
70 | }, 1000); // 1 second delay
71 | }, 10000);
--------------------------------------------------------------------------------
/tests/csvReader.test.js:
--------------------------------------------------------------------------------
1 | const { readCSV, writeCSV } = require('../src/csvStorage');
2 | const fs = require('fs');
3 |
4 | test('Read CSV File', async () => {
5 | const data = await readCSV('./student.csv');
6 | expect(data.length).toBeGreaterThan(0);
7 | expect(data.length).toBe(5);
8 | expect(data[0].name).toBe('John');
9 | expect(data[0].age).toBe('30'); //ignore the string type here, we will fix this later
10 | });
11 |
12 | describe('writeCSV Function', () => {
13 | const testFilename = 'test_output.csv';
14 |
15 | afterAll(() => {
16 | // Cleanup: Delete the test file after the test
17 | if (fs.existsSync(testFilename)) {
18 | fs.unlinkSync(testFilename);
19 | }
20 | });
21 |
22 | test('Should create a CSV file with correct contents', async () => {
23 | const testData = [
24 | { column1: 'data1', column2: 'data2' },
25 | { column1: 'data3', column2: 'data4' }
26 | ];
27 |
28 | await writeCSV(testFilename, testData);
29 |
30 | // Check if file exists
31 | expect(fs.existsSync(testFilename)).toBe(true);
32 |
33 | // Read the file and verify its contents
34 | const fileContents = fs.readFileSync(testFilename, 'utf8');
35 | const expectedContents = `"column1","column2"\n"data1","data2"\n"data3","data4"`;
36 | expect(fileContents).toBe(expectedContents);
37 | });
38 | });
39 |
40 |
41 |
42 |
43 |
--------------------------------------------------------------------------------
/tests/deleteExecuter.test.js:
--------------------------------------------------------------------------------
1 | const { executeDELETEQuery } = require('../src/queryExecuter');
2 | const { readCSV, writeCSV } = require('../src/csvStorage');
3 | const fs = require('fs');
4 |
5 | // Helper function to create courses.csv with initial data
6 | async function createCoursesCSV() {
7 | const initialData = [
8 | { course_id: '1', course_name: 'Mathematics', instructor: 'Dr. Smith' },
9 | { course_id: '2', course_name: 'Chemistry', instructor: 'Dr. Jones' },
10 | { course_id: '3', course_name: 'Physics', instructor: 'Dr. Taylor' }
11 | ];
12 | await writeCSV('courses.csv', initialData);
13 | }
14 |
15 | // Test to DELETE a course and verify
16 | test('Execute DELETE FROM Query for courses.csv', async () => {
17 | // Create courses.csv with initial data
18 | await createCoursesCSV();
19 |
20 | // Execute DELETE statement
21 | const deleteQuery = "DELETE FROM courses WHERE course_id = '2'";
22 | await executeDELETEQuery(deleteQuery);
23 |
24 | // Verify the course was removed
25 | const updatedData = await readCSV('courses.csv');
26 | const deletedCourse = updatedData.find(course => course.course_id === '2');
27 | expect(deletedCourse).toBeUndefined();
28 |
29 | // Cleanup: Delete courses.csv
30 | fs.unlinkSync('courses.csv');
31 | });
32 |
--------------------------------------------------------------------------------
/tests/insertExecuter.test.js:
--------------------------------------------------------------------------------
1 | const { executeINSERTQuery } = require('../src/queryExecuter');
2 | const { readCSV, writeCSV } = require('../src/csvStorage');
3 | const fs = require('fs');
4 | 
5 |
6 | // Helper function to create grades.csv with initial data
7 | async function createGradesCSV() {
8 | const initialData = [
9 | { student_id: '1', course: 'Mathematics', grade: 'A' },
10 | { student_id: '2', course: 'Chemistry', grade: 'B' },
11 | { student_id: '3', course: 'Mathematics', grade: 'C' }
12 | ];
13 | await writeCSV('grades.csv', initialData);
14 | }
15 |
16 | // Test to INSERT a new grade and verify
17 | test('Execute INSERT INTO Query for grades.csv', async () => {
18 | // Create grades.csv with initial data
19 | await createGradesCSV();
20 |
21 | // Execute INSERT statement
22 | const insertQuery = `INSERT INTO grades (student_id, course, grade) VALUES ("4", "Physics", "A")`;
23 | await executeINSERTQuery(insertQuery);
24 |
25 | // Verify the new entry
26 | const updatedData = await readCSV('grades.csv');
27 | const newEntry = updatedData.find(row => row.student_id === '4' && row.course === 'Physics');
28 | expect(newEntry).toBeDefined();
29 | expect(newEntry.grade).toEqual('A');
30 |
31 | // Cleanup: Delete grades.csv
32 | fs.unlinkSync('grades.csv');
33 | });
34 |
35 | // Helper function to create teachers.csv with initial data
36 | async function createTeachersCSV() {
37 | const initialData = [
38 | { id: '1', name: 'Mr. Smith', subject: 'Mathematics' },
39 | { id: '2', name: 'Ms. Johnson', subject: 'Chemistry' }
40 | ];
41 | await writeCSV('teachers.csv', initialData);
42 | }
43 |
44 | // Test to INSERT a new teacher and verify
45 | test('Execute INSERT INTO Query for teachers with autogenerated id', async () => {
46 | // Create teachers.csv with initial data
47 | await createTeachersCSV();
48 |
49 | // Execute INSERT statement
50 | const insertQuery = `INSERT INTO teachers (name, subject) VALUES ("Dr. Brown", "Physics")`;
51 | await executeINSERTQuery(insertQuery);
52 |
53 | // Verify the new entry
54 | const updatedData = await readCSV('teachers.csv');
55 | const newEntry = updatedData.find(row => row.id === '3' && row.name === 'Dr. Brown');
56 | expect(newEntry).toBeDefined();
57 | expect(newEntry.subject).toEqual('Physics');
58 |
59 | const query = `INSERT INTO "public"."teachers" ("name","subject") VALUES ("test", "maths") RETURNING "public"."teachers"."id", "public"."teachers"."name", "public"."teachers"."subject"`;
60 | const returningData = await executeINSERTQuery(query);
61 | const updatedData2 = await readCSV('teachers.csv');
62 | const newEntry2 = updatedData2.find(row => row.id === '4' && row.name === 'test');
63 | expect(newEntry2).toBeDefined();
64 | expect(newEntry2.subject).toEqual('maths');
65 |
66 | expect(returningData).toEqual({
67 | message: 'Row inserted successfully.',
68 | insertedId: 4,
69 | returning: { id: '4', name: 'test', subject: 'maths' }
70 | });
71 |
72 | // Cleanup: Delete teachers.csv
73 | fs.unlinkSync('teachers.csv');
74 | });
75 |
76 |
77 | // Helper function to create Student.csv with initial data
78 | async function createUserCSV() {
79 | const initialData = [
80 | { id: '1', name: 'John', age: '30' },
81 | { id: '2', name: 'Jane', age: '25' }
82 | ];
83 | await writeCSV('user.csv', initialData);
84 | }
85 |
86 | test('Execute INSERT INTO Query for users with partial columns', async () => {
87 | // Create Student.csv with initial data
88 | await createUserCSV();
89 |
90 | // Execute INSERT statement
91 | await executeINSERTQuery(`INSERT INTO "user" ("name") VALUES ("test")`);
92 |
93 | // Read the updated CSV content
94 | const updatedData = await readCSV('user.csv');
95 |
96 | // Check if the new entry is added correctly
97 | const isEntryAdded = updatedData.some(row => row.name === 'test');
98 | expect(isEntryAdded).toBe(true);
99 |
100 | // Clean up: delete Student.csv
101 | fs.unlinkSync('user.csv');
102 | });
103 |
--------------------------------------------------------------------------------
/util/generateLargeFile.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs');
2 | const { faker } = require('@faker-js/faker');
3 | const { parse } = require('json2csv');
4 |
5 | async function generateLargeCSV(filename) {
6 | let data = [];
7 | for (let i = 1; i <= 10_000_000; i++) {
8 | const record = {
9 | id: i,
10 | name: faker.person.firstName(),
11 | age: faker.number.int({ min: 18, max: 100 }),
12 | };
13 | data.push(record);
14 |
15 | let rows;
16 | if (i % 500_000 === 0) {
17 | console.log(`Generated ${i} records`);
18 | if (!fs.existsSync(filename)) {
19 | rows = parse(data, { header: true });
20 | } else {
21 |                 // Rows without headers, with a leading newline so this chunk doesn't run onto the previous chunk's last row (json2csv output has no trailing newline).
22 |                 rows = '\r\n' + parse(data, { header: false });
23 | }
24 | fs.appendFileSync(filename, rows);
25 | data = [];
26 | }
27 |
28 | }
29 |     // appendFileSync creates the file if it doesn't exist yet.
30 | 
31 |     // Terminate the file with a final newline.
32 |     fs.appendFileSync(filename, "\r\n");
33 | }
34 |
35 | generateLargeCSV('student_large.csv')
--------------------------------------------------------------------------------
/web/server/enrollment.csv:
--------------------------------------------------------------------------------
1 | student_id,course
2 | 1,Mathematics
3 | 1,Physics
4 | 2,Chemistry
5 | 3,Mathematics
6 | 5,Biology
7 | 5,Physics
--------------------------------------------------------------------------------
/web/server/index.js:
--------------------------------------------------------------------------------
1 | const WebSocket = require('ws')
2 | var os = require('os');
3 | var pty = require('node-pty');
4 |
5 | const wss = new WebSocket.Server({ port: 6060 })
6 |
7 | console.log("Socket is up and running...")
8 |
9 | var shell = os.platform() === 'win32' ? 'powershell.exe' : 'bash';
10 | var ptyProcess = pty.spawn(shell, [], {
11 | name: 'xterm-color',
12 | env: process.env,
13 | });
14 |
15 | wss.on('connection', ws => {
16 | console.log("new session")
17 | ws.on('message', command => {
18 |         ptyProcess.write(command.toString()); // ws delivers a Buffer; node-pty expects a string
19 | })
20 |
21 | ptyProcess.on('data', function (data) {
22 | ws.send(data)
23 | console.log(data);
24 | });
25 | });
--------------------------------------------------------------------------------
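
One caveat in the bridge above: every WebSocket connection registers another `ptyProcess.on('data')` listener that is never removed, so pty output keeps being sent to every socket that ever connected. A sketch of per-connection cleanup, assuming node-pty's `onData` API (which returns a disposable):

```js
wss.on('connection', ws => {
    // Subscribe this socket to pty output and keep the disposable for cleanup
    const sub = ptyProcess.onData(data => ws.send(data));
    ws.on('message', command => ptyProcess.write(command.toString()));
    ws.on('close', () => sub.dispose()); // stop forwarding once the socket closes
});
```
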
/web/server/package-lock.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "stylusdb-sql-demo",
3 | "version": "0.0.0",
4 | "lockfileVersion": 3,
5 | "requires": true,
6 | "packages": {
7 | "": {
8 | "name": "stylusdb-sql-demo",
9 | "version": "0.0.0",
10 | "license": "ISC",
11 | "dependencies": {
12 | "node-pty": "^1.0.0",
13 | "stylusdb-sql": "^0.1.5",
14 | "ws": "^8.16.0"
15 | }
16 | },
17 | "node_modules/@streamparser/json": {
18 | "version": "0.0.6",
19 | "resolved": "https://registry.npmjs.org/@streamparser/json/-/json-0.0.6.tgz",
20 | "integrity": "sha512-vL9EVn/v+OhZ+Wcs6O4iKE9EUpwHUqHmCtNUMWjqp+6dr85+XPOSGTEsqYNq1Vn04uk9SWlOVmx9J48ggJVT2Q=="
21 | },
22 | "node_modules/commander": {
23 | "version": "6.2.1",
24 | "resolved": "https://registry.npmjs.org/commander/-/commander-6.2.1.tgz",
25 | "integrity": "sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA==",
26 | "engines": {
27 | "node": ">= 6"
28 | }
29 | },
30 | "node_modules/csv-parser": {
31 | "version": "3.0.0",
32 | "resolved": "https://registry.npmjs.org/csv-parser/-/csv-parser-3.0.0.tgz",
33 | "integrity": "sha512-s6OYSXAK3IdKqYO33y09jhypG/bSDHPuyCme/IdEHfWpLf/jKcpitVFyOC6UemgGk8v7Q5u2XE0vvwmanxhGlQ==",
34 | "dependencies": {
35 | "minimist": "^1.2.0"
36 | },
37 | "bin": {
38 | "csv-parser": "bin/csv-parser"
39 | },
40 | "engines": {
41 | "node": ">= 10"
42 | }
43 | },
44 | "node_modules/json2csv": {
45 | "version": "6.0.0-alpha.2",
46 | "resolved": "https://registry.npmjs.org/json2csv/-/json2csv-6.0.0-alpha.2.tgz",
47 | "integrity": "sha512-nJ3oP6QxN8z69IT1HmrJdfVxhU1kLTBVgMfRnNZc37YEY+jZ4nU27rBGxT4vaqM/KUCavLRhntmTuBFqZLBUcA==",
48 | "dependencies": {
49 | "@streamparser/json": "^0.0.6",
50 | "commander": "^6.2.0",
51 | "lodash.get": "^4.4.2"
52 | },
53 | "bin": {
54 | "json2csv": "bin/json2csv.js"
55 | },
56 | "engines": {
57 | "node": ">= 12",
58 | "npm": ">= 6.13.0"
59 | }
60 | },
61 | "node_modules/lodash.get": {
62 | "version": "4.4.2",
63 | "resolved": "https://registry.npmjs.org/lodash.get/-/lodash.get-4.4.2.tgz",
64 | "integrity": "sha512-z+Uw/vLuy6gQe8cfaFWD7p0wVv8fJl3mbzXh33RS+0oW2wvUqiRXiQ69gLWSLpgB5/6sU+r6BlQR0MBILadqTQ=="
65 | },
66 | "node_modules/minimist": {
67 | "version": "1.2.8",
68 | "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz",
69 | "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==",
70 | "funding": {
71 | "url": "https://github.com/sponsors/ljharb"
72 | }
73 | },
74 | "node_modules/nan": {
75 | "version": "2.18.0",
76 | "resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz",
77 | "integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w=="
78 | },
79 | "node_modules/node-pty": {
80 | "version": "1.0.0",
81 | "resolved": "https://registry.npmjs.org/node-pty/-/node-pty-1.0.0.tgz",
82 | "integrity": "sha512-wtBMWWS7dFZm/VgqElrTvtfMq4GzJ6+edFI0Y0zyzygUSZMgZdraDUMUhCIvkjhJjme15qWmbyJbtAx4ot4uZA==",
83 | "hasInstallScript": true,
84 | "dependencies": {
85 | "nan": "^2.17.0"
86 | }
87 | },
88 | "node_modules/stylusdb-sql": {
89 | "version": "0.1.5",
90 | "resolved": "https://registry.npmjs.org/stylusdb-sql/-/stylusdb-sql-0.1.5.tgz",
91 | "integrity": "sha512-zHd5OFozS5szbLOhx9nIJxY3HKAhjQ3mW9WXJ9PiHQACfD9nPms2pw2eCFll36dLqUQc7jPAK3j3oUDTX0Hm1Q==",
92 | "dependencies": {
93 | "csv-parser": "^3.0.0",
94 | "json2csv": "^6.0.0-alpha.2"
95 | },
96 | "bin": {
97 | "stylusdb-cli": "node ./src/cli.js"
98 | }
99 | },
100 | "node_modules/ws": {
101 | "version": "8.16.0",
102 | "resolved": "https://registry.npmjs.org/ws/-/ws-8.16.0.tgz",
103 | "integrity": "sha512-HS0c//TP7Ina87TfiPUz1rQzMhHrl/SG2guqRcTOIUYD2q8uhUdNHZYJUaQ8aTGPzCh+c6oawMKW35nFl1dxyQ==",
104 | "engines": {
105 | "node": ">=10.0.0"
106 | },
107 | "peerDependencies": {
108 | "bufferutil": "^4.0.1",
109 | "utf-8-validate": ">=5.0.2"
110 | },
111 | "peerDependenciesMeta": {
112 | "bufferutil": {
113 | "optional": true
114 | },
115 | "utf-8-validate": {
116 | "optional": true
117 | }
118 | }
119 | }
120 | }
121 | }
122 |
--------------------------------------------------------------------------------
/web/server/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "stylusdb-sql-demo",
3 | "version": "0.0.0",
4 | "description": "Demo for Stylusdb SQL",
5 | "main": "index.js",
6 | "scripts": {
7 | "start": "node index.js"
8 | },
9 | "keywords": [],
10 | "author": "Chakshu Gautam",
11 | "license": "ISC",
12 | "dependencies": {
13 | "node-pty": "^1.0.0",
14 | "stylusdb-sql": "^0.1.5",
15 | "ws": "^8.16.0"
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/web/server/student.csv:
--------------------------------------------------------------------------------
1 | id,name,age
2 | 1,John,30
3 | 2,Jane,25
4 | 3,Bob,22
5 | 4,Alice,24
6 | 5,Jane,22
--------------------------------------------------------------------------------
/web/www/frontend.js:
--------------------------------------------------------------------------------
1 | const socket = new WebSocket("ws://127.0.0.1:6060");
2 | var command = ''; // current input line; reset by prompt()
3 | var term = new window.Terminal({
4 | cursorBlink: true
5 | });
6 | term.open(document.getElementById('terminal'));
7 |
8 | function init() {
9 | if (term._initialized) {
10 | return;
11 | }
12 |
13 | term._initialized = true;
14 |
15 | term.prompt = () => {
16 | term.write('\r\n$ ');
17 | };
18 | prompt(term);
19 |
20 | term.onData(e => {
21 | switch (e) {
22 | case '\u0003': // Ctrl+C
23 | term.write('^C');
24 | prompt(term);
25 | break;
26 | case '\r': // Enter
27 | runCommand(term, command);
28 | command = '';
29 | break;
30 | case '\u007F': // Backspace (DEL)
31 | // Do not delete the prompt
32 | if (term._core.buffer.x > 2) {
33 | term.write('\b \b');
34 | if (command.length > 0) {
35 | command = command.substr(0, command.length - 1);
36 | }
37 | }
38 | break;
39 | case '\u0009':
40 |             console.log('tabbed', command, ["dd", "ls"]); // 'output' was undefined here and threw on Tab
41 | break;
42 | default:
43 | if (e >= String.fromCharCode(0x20) && e <= String.fromCharCode(0x7E) || e >= '\u00a0') {
44 | command += e;
45 | term.write(e);
46 | }
47 | }
48 | });
49 | }
50 |
51 | function clearInput(command) {
52 |     var inputLength = command.length;
53 |     for (var i = 0; i < inputLength; i++) {
54 | term.write('\b \b');
55 | }
56 | }
57 | function prompt(term) {
58 | command = '';
59 | term.write('\r\n$ ');
60 | }
61 | socket.onmessage = (event) => {
62 | term.write(event.data);
63 | }
64 |
65 | function runCommand(term, command) {
66 | if (command.length > 0) {
67 | clearInput(command);
68 | socket.send(command + '\n');
69 | return;
70 | }
71 | }
72 |
73 | init();
--------------------------------------------------------------------------------
/web/www/index.html:
--------------------------------------------------------------------------------
1 | <!DOCTYPE html>
2 | <html>
3 | 
4 | <head>
5 |     <meta charset="utf-8" />
6 |     <link rel="stylesheet" href="xterm.css" />
7 |     <script src="xterm.js"></script>
8 | </head>
9 | 
10 | <body>
11 |     <div id="terminal"></div>
12 |     <script src="frontend.js"></script>
13 | </body>
14 | 
15 | </html>
--------------------------------------------------------------------------------
/web/www/xterm.css:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (c) 2014 The xterm.js authors. All rights reserved.
3 | * Copyright (c) 2012-2013, Christopher Jeffrey (MIT License)
4 | * https://github.com/chjj/term.js
5 | * @license MIT
6 | *
7 | * Permission is hereby granted, free of charge, to any person obtaining a copy
8 | * of this software and associated documentation files (the "Software"), to deal
9 | * in the Software without restriction, including without limitation the rights
10 | * to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | * copies of the Software, and to permit persons to whom the Software is
12 | * furnished to do so, subject to the following conditions:
13 | *
14 | * The above copyright notice and this permission notice shall be included in
15 | * all copies or substantial portions of the Software.
16 | *
17 | * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
23 | * THE SOFTWARE.
24 | *
25 | * Originally forked from (with the author's permission):
26 | * Fabrice Bellard's javascript vt100 for jslinux:
27 | * http://bellard.org/jslinux/
28 | * Copyright (c) 2011 Fabrice Bellard
29 | * The original design remains. The terminal itself
30 | * has been extended to include xterm CSI codes, among
31 | * other features.
32 | */
33 |
34 | /**
35 | * Default styles for xterm.js
36 | */
37 |
38 | .xterm {
39 | cursor: text;
40 | position: relative;
41 | user-select: none;
42 | -ms-user-select: none;
43 | -webkit-user-select: none;
44 | }
45 |
46 | .xterm.focus,
47 | .xterm:focus {
48 | outline: none;
49 | }
50 |
51 | .xterm .xterm-helpers {
52 | position: absolute;
53 | top: 0;
54 | /**
55 | * The z-index of the helpers must be higher than the canvases in order for
56 | * IMEs to appear on top.
57 | */
58 | z-index: 5;
59 | }
60 |
61 | .xterm .xterm-helper-textarea {
62 | padding: 0;
63 | border: 0;
64 | margin: 0;
65 | /* Move textarea out of the screen to the far left, so that the cursor is not visible */
66 | position: absolute;
67 | opacity: 0;
68 | left: -9999em;
69 | top: 0;
70 | width: 0;
71 | height: 0;
72 | z-index: -5;
73 | /** Prevent wrapping so the IME appears against the textarea at the correct position */
74 | white-space: nowrap;
75 | overflow: hidden;
76 | resize: none;
77 | }
78 |
79 | .xterm .composition-view {
80 | /* TODO: Composition position got messed up somewhere */
81 | background: #000;
82 | color: #FFF;
83 | display: none;
84 | position: absolute;
85 | white-space: nowrap;
86 | z-index: 1;
87 | }
88 |
89 | .xterm .composition-view.active {
90 | display: block;
91 | }
92 |
93 | .xterm .xterm-viewport {
94 | /* On OS X this is required in order for the scroll bar to appear fully opaque */
95 | background-color: #000;
96 | overflow-y: scroll;
97 | cursor: default;
98 | position: absolute;
99 | right: 0;
100 | left: 0;
101 | top: 0;
102 | bottom: 0;
103 | }
104 |
105 | .xterm .xterm-screen {
106 | position: relative;
107 | }
108 |
109 | .xterm .xterm-screen canvas {
110 | position: absolute;
111 | left: 0;
112 | top: 0;
113 | }
114 |
115 | .xterm .xterm-scroll-area {
116 | visibility: hidden;
117 | }
118 |
119 | .xterm-char-measure-element {
120 | display: inline-block;
121 | visibility: hidden;
122 | position: absolute;
123 | top: 0;
124 | left: -9999em;
125 | line-height: normal;
126 | }
127 |
128 | .xterm.enable-mouse-events {
129 | /* When mouse events are enabled (eg. tmux), revert to the standard pointer cursor */
130 | cursor: default;
131 | }
132 |
133 | .xterm.xterm-cursor-pointer,
134 | .xterm .xterm-cursor-pointer {
135 | cursor: pointer;
136 | }
137 |
138 | .xterm.column-select.focus {
139 | /* Column selection mode */
140 | cursor: crosshair;
141 | }
142 |
143 | .xterm .xterm-accessibility,
144 | .xterm .xterm-message {
145 | position: absolute;
146 | left: 0;
147 | top: 0;
148 | bottom: 0;
149 | right: 0;
150 | z-index: 10;
151 | color: transparent;
152 | pointer-events: none;
153 | }
154 |
155 | .xterm .live-region {
156 | position: absolute;
157 | left: -9999px;
158 | width: 1px;
159 | height: 1px;
160 | overflow: hidden;
161 | }
162 |
163 | .xterm-dim {
164 | /* Dim should not apply to background, so the opacity of the foreground color is applied
165 | * explicitly in the generated class and reset to 1 here */
166 | opacity: 1 !important;
167 | }
168 |
169 | .xterm-underline-1 { text-decoration: underline; }
170 | .xterm-underline-2 { text-decoration: double underline; }
171 | .xterm-underline-3 { text-decoration: wavy underline; }
172 | .xterm-underline-4 { text-decoration: dotted underline; }
173 | .xterm-underline-5 { text-decoration: dashed underline; }
174 |
175 | .xterm-overline {
176 | text-decoration: overline;
177 | }
178 |
179 | .xterm-overline.xterm-underline-1 { text-decoration: overline underline; }
180 | .xterm-overline.xterm-underline-2 { text-decoration: overline double underline; }
181 | .xterm-overline.xterm-underline-3 { text-decoration: overline wavy underline; }
182 | .xterm-overline.xterm-underline-4 { text-decoration: overline dotted underline; }
183 | .xterm-overline.xterm-underline-5 { text-decoration: overline dashed underline; }
184 |
185 | .xterm-strikethrough {
186 | text-decoration: line-through;
187 | }
188 |
189 | .xterm-screen .xterm-decoration-container .xterm-decoration {
190 | z-index: 6;
191 | position: absolute;
192 | }
193 |
194 | .xterm-screen .xterm-decoration-container .xterm-decoration.xterm-decoration-top-layer {
195 | z-index: 7;
196 | }
197 |
198 | .xterm-decoration-overview-ruler {
199 | z-index: 8;
200 | position: absolute;
201 | top: 0;
202 | right: 0;
203 | pointer-events: none;
204 | }
205 |
206 | .xterm-decoration-top {
207 | z-index: 2;
208 | position: relative;
209 | }
210 |
--------------------------------------------------------------------------------