├── .changeset
├── README.md
└── config.json
├── .github
└── workflows
│ ├── release.yml
│ └── tests.yml
├── .gitignore
├── .prettierrc
├── LICENSE
├── README.md
├── apps
├── docs
│ ├── .gitignore
│ ├── .nvmrc
│ ├── README.md
│ ├── api-reference
│ │ ├── events-create.mdx
│ │ ├── events-get.mdx
│ │ ├── overview.mdx
│ │ ├── queries-execute.mdx
│ │ ├── webhooks-create.mdx
│ │ ├── webhooks-delete.mdx
│ │ └── webhooks-get.mdx
│ ├── client.mdx
│ ├── cloud-quickstart.mdx
│ ├── contributing.mdx
│ ├── favicon.png
│ ├── favicon.svg
│ ├── guides
│ │ └── grafana.mdx
│ ├── images
│ │ ├── grafana-config-datasource.png
│ │ ├── grafana-dashboard.png
│ │ ├── grafana-login.jpeg
│ │ ├── grafana-plugin-install.png
│ │ ├── trench-cover.png
│ │ └── trench-dashboard-dark.png
│ ├── introduction.mdx
│ ├── json
│ │ └── grafana-dashboard.json
│ ├── logo
│ │ └── dark.svg
│ ├── mint.json
│ ├── quickstart.mdx
│ ├── scripts
│ │ └── openapi.sh
│ ├── snippets
│ │ └── trench-script.mdx
│ ├── style.css
│ └── swagger-spec.json
└── trench
│ ├── .dockerignore
│ ├── .env.example
│ ├── .eslintrc.js
│ ├── .gitignore
│ ├── .nvmrc
│ ├── .prettierrc
│ ├── Dockerfile
│ ├── docker-compose.dev.yml
│ ├── docker-compose.stateless.yml
│ ├── docker-compose.yml
│ ├── nest-cli.json
│ ├── package.json
│ ├── scripts
│ └── create-sample-data.js
│ ├── src
│ ├── api-keys
│ │ ├── api-keys.interface.ts
│ │ ├── api-keys.module.ts
│ │ └── api-keys.service.ts
│ ├── app.controller.ts
│ ├── app.module.ts
│ ├── app.service.ts
│ ├── appCluster.service.ts
│ ├── common
│ │ ├── constants.ts
│ │ ├── crypto.ts
│ │ ├── models.ts
│ │ ├── request.ts
│ │ └── utils.ts
│ ├── events
│ │ ├── events.controller.ts
│ │ ├── events.dao.ts
│ │ ├── events.interface.ts
│ │ ├── events.module.ts
│ │ ├── events.service.ts
│ │ └── events.util.ts
│ ├── main.ts
│ ├── middlewares
│ │ ├── admin-api.guard.ts
│ │ ├── api.guard.ts
│ │ ├── private-api.guard.ts
│ │ └── public-api.guard.ts
│ ├── queries
│ │ ├── queries.controller.ts
│ │ ├── queries.interface.ts
│ │ ├── queries.module.ts
│ │ ├── queries.service.ts
│ │ └── queries.util.ts
│ ├── resources
│ │ └── migrations
│ │ │ ├── v001_initial.sql
│ │ │ ├── v002_webhooks.sql
│ │ │ ├── v003_workspaces.sql
│ │ │ ├── v004_workspaces_properties.sql
│ │ │ └── v005_webhooks_flatten.sql
│ ├── services
│ │ └── data
│ │ │ ├── bootstrap
│ │ │ ├── bootstrap.module.ts
│ │ │ └── bootstrap.service.ts
│ │ │ ├── click-house
│ │ │ ├── click-house.interface.ts
│ │ │ ├── click-house.module.ts
│ │ │ ├── click-house.service.ts
│ │ │ └── click-house.util.ts
│ │ │ └── kafka
│ │ │ ├── kafka.interface.ts
│ │ │ ├── kafka.module.ts
│ │ │ ├── kafka.service.ts
│ │ │ └── kafka.util.ts
│ ├── webhooks
│ │ ├── webhooks.controller.ts
│ │ ├── webhooks.dao.ts
│ │ ├── webhooks.interface.ts
│ │ ├── webhooks.module.ts
│ │ ├── webhooks.service.ts
│ │ └── webhooks.util.ts
│ └── workspaces
│ │ ├── workspaces.controller.ts
│ │ ├── workspaces.interface.ts
│ │ ├── workspaces.module.ts
│ │ ├── workspaces.service.ts
│ │ └── workspaces.util.ts
│ ├── swagger-spec.json
│ ├── test
│ ├── e2e
│ │ ├── app.e2e.test.ts
│ │ ├── events.e2e.test.ts
│ │ ├── queries.e2e.test.ts
│ │ ├── utils.ts
│ │ └── workspaces.e2e.test.ts
│ └── unit
│ │ ├── queries.util.test.ts
│ │ └── webhooks.util.test.ts
│ ├── tsconfig.build.json
│ └── tsconfig.json
├── img
├── trench-cover.png
├── trench-dark.png
├── trench-dashboard-dark.png
├── trench-dashboard.png
└── trench-light.png
├── package.json
├── packages
├── analytics-plugin-trench
│ ├── .gitignore
│ ├── .prettierrc
│ ├── CHANGELOG.md
│ ├── README.md
│ ├── package.json
│ ├── src
│ │ └── index.ts
│ ├── test
│ │ └── analytics-plugin-trench.test.ts
│ ├── tsconfig.json
│ ├── tslint.json
│ └── tsup.config.ts
└── trench-js
│ ├── .gitignore
│ ├── .prettierrc
│ ├── CHANGELOG.md
│ ├── README.md
│ ├── package.json
│ ├── src
│ ├── index.ts
│ └── types.ts
│ ├── test
│ └── trench-js.test.ts
│ ├── tsconfig.json
│ ├── tslint.json
│ └── tsup.config.ts
├── pnpm-lock.yaml
├── pnpm-workspace.yaml
├── tsconfig.json
└── turbo.json
/.changeset/README.md:
--------------------------------------------------------------------------------
1 | # Changesets
2 |
3 | Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works
4 | with multi-package repos, or single-package repos to help you version and publish your code. You can
5 | find the full documentation for it [in our repository](https://github.com/changesets/changesets)
6 |
7 | We have a quick list of common questions to get you started engaging with this project in
8 | [our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md)
9 |
--------------------------------------------------------------------------------
/.changeset/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://unpkg.com/@changesets/config@2.3.1/schema.json",
3 | "changelog": "@changesets/cli/changelog",
4 | "commit": false,
5 | "fixed": [],
6 | "linked": [],
7 | "access": "public",
8 | "baseBranch": "main",
9 | "updateInternalDependencies": "patch",
10 | "ignore": ["trench"]
11 | }
12 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release
2 | on:
3 | workflow_run:
4 | workflows: ['Tests']
5 | branches: [main]
6 | types:
7 | - completed
8 | jobs:
9 | release:
10 | name: Release
11 | runs-on: ubuntu-latest
12 | steps:
13 | - name: Checkout
14 | uses: actions/checkout@v2
15 | with:
16 | fetch-depth: 0
17 | - name: Setup Node.js
18 | uses: actions/setup-node@v2
19 | with:
20 | node-version: 'lts/*'
21 | - uses: pnpm/action-setup@v2
22 | - name: Install dependencies
23 | run: pnpm install
24 | - name: Build
25 | run: pnpm build
26 | - name: Create Release Pull Request or Publish to npm
27 | id: changesets
28 | uses: changesets/action@v1
29 | with:
30 | publish: pnpm release
31 | env:
32 | GITHUB_TOKEN: ${{ secrets.GH_TOKEN }}
33 | NPM_TOKEN: ${{ secrets.NPM_TOKEN }}
34 |
--------------------------------------------------------------------------------
/.github/workflows/tests.yml:
--------------------------------------------------------------------------------
1 | name: Tests
2 |
3 | on:
4 | push:
5 | branches: [main]
6 | pull_request:
7 | branches: [main]
8 |
9 | jobs:
10 | build:
11 | runs-on: ubuntu-latest
12 |
13 | strategy:
14 | matrix:
15 | node-version: [19.x]
16 |
17 | steps:
18 | - uses: actions/checkout@v3
19 | - name: Use Node.js ${{ matrix.node-version }}
20 | uses: actions/setup-node@v3
21 | with:
22 | node-version: ${{ matrix.node-version }}
23 | - uses: pnpm/action-setup@v2
24 | - run: pnpm install
25 | - run: cd packages && pnpm test
26 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Logs
2 | logs
3 | *.log
4 | npm-debug.log*
5 | yarn-debug.log*
6 | yarn-error.log*
7 | lerna-debug.log*
8 |
9 | # Misc
10 | out
11 |
12 | # Diagnostic reports (https://nodejs.org/api/report.html)
13 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
14 |
15 | # Runtime data
16 | pids
17 | *.pid
18 | *.seed
19 | *.pid.lock
20 |
21 | # Directory for instrumented libs generated by jscoverage/JSCover
22 | lib-cov
23 |
24 | # Coverage directory used by tools like istanbul
25 | coverage
26 | *.lcov
27 |
28 | # nyc test coverage
29 | .nyc_output
30 |
31 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
32 | .grunt
33 |
34 | # Bower dependency directory (https://bower.io/)
35 | bower_components
36 |
37 | # node-waf configuration
38 | .lock-wscript
39 |
40 | # Compiled binary addons (https://nodejs.org/api/addons.html)
41 | build/Release
42 |
43 | # Dependency directories
44 | node_modules/
45 | jspm_packages/
46 |
47 | # TypeScript v1 declaration files
48 | typings/
49 |
50 | # TypeScript cache
51 | *.tsbuildinfo
52 |
53 | # Optional npm cache directory
54 | .npm
55 |
56 | # Optional eslint cache
57 | .eslintcache
58 |
59 | # Microbundle cache
60 | .rpt2_cache/
61 | .rts2_cache_cjs/
62 | .rts2_cache_es/
63 | .rts2_cache_umd/
64 |
65 | # Optional REPL history
66 | .node_repl_history
67 |
68 | # Output of 'npm pack'
69 | *.tgz
70 |
71 | # Yarn Integrity file
72 | .yarn-integrity
73 |
74 | # dotenv environment variables file
75 | .env
76 | .env.test
77 |
78 | # parcel-bundler cache (https://parceljs.org/)
79 | .cache
80 |
81 | # Next.js build output
82 | .next
83 |
84 | # Nuxt.js build / generate output
85 | .nuxt
86 | dist
87 |
88 | # Gatsby files
89 | .cache/
90 | # Comment in the public line in if your project uses Gatsby and *not* Next.js
91 | # https://nextjs.org/blog/next-9-1#public-directory-support
92 | # public
93 |
94 | # vuepress build output
95 | .vuepress/dist
96 |
97 | # Serverless directories
98 | .serverless/
99 |
100 | # FuseBox cache
101 | .fusebox/
102 |
103 | # DynamoDB Local files
104 | .dynamodb/
105 |
106 | # TernJS port file
107 | .tern-port
108 |
109 | .dccache
110 | .DS_Store
111 | dist
112 | *.tgz
113 | .idea
114 | .watchmanconfig
115 | ios/Pods
116 |
117 | .yarn/*
118 | !.yarn/releases
119 | !.yarn/plugins
120 |
121 | .turbo
122 |
--------------------------------------------------------------------------------
/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "printWidth": 100,
3 | "semi": false,
4 | "singleQuote": true,
5 | "tabWidth": 2,
6 | "trailingComma": "es5"
7 | }
8 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Frigade Inc.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
10 |
11 |
12 |
13 | Open-Source Analytics Infrastructure
14 |
15 |
16 |
17 | Documentation ·
18 | Website ·
19 | Slack Community ·
20 | Demo
21 |
22 |
23 |
24 |
25 | ## 🌊 What is Trench?
26 |
27 | Trench is an event tracking system built on top of Apache Kafka and ClickHouse. It can handle large event volumes and provides real-time analytics. Trench is no-cookie, GDPR, and PECR compliant. Users have full control to access, rectify, or delete their data.
28 |
29 | Our team built Trench to scale up the real-time event tracking pipeline at Frigade.
30 |
31 |
32 |
33 |
34 |
35 |
36 | ## ⭐ Features
37 |
38 | - 🤝 Compliant with the Segment API (Track, Group, Identify)
39 | - 🐳 Deploy quickly with a single production-ready Docker image
40 | - 💻 Process thousands of events per second on a single node
41 | - ⚡ Query data in real-time
42 | - 🔗 Connect data to other destinations with webhooks
43 | - 👥 Open-source and MIT Licensed
44 |
45 | ## 🖥️ Demo
46 |
47 | **Live demo:**
48 | [https://demo.trench.dev](https://demo.trench.dev)
49 |
50 | **Video demo:**
51 |
52 | Watch the following demo to see how you can build a basic version of Google Analytics using Trench and Grafana.
53 |
54 | https://github.com/user-attachments/assets/e3f64590-6e7e-41b9-b425-7adb5a1e19b1
55 |
56 | ## 🚀 Quickstart
57 |
58 | Trench has two methods of deployment:
59 |
60 | 1. **Trench Self-Hosted**: An open-source version to deploy and manage Trench on your own infrastructure.
61 | 2. **Trench Cloud**: A fully-managed serverless solution with zero ops, autoscaling, and 99.99% SLAs.
62 |
63 | ### 1. Trench Self-Hosted 💻
64 |
65 | Follow our self-hosting instructions below and in our [quickstart guide](https://docs.trench.dev/quickstart) to begin using Trench Self-Hosted.
66 |
67 | If you have questions or need assistance, you can join our [Slack group](https://join.slack.com/t/trench-community/shared_invite/zt-2sjet5kh2-v31As3yC_zRIadk_AGn~3A) for support.
68 |
69 | #### Quickstart
70 |
71 | 1. **Deploy Trench Dev Server**:
72 | The only prerequisite for Trench is a system that has Docker and Docker Compose installed [see installation guide](https://docs.docker.com/compose/install/). We recommend having at least 4GB of RAM and 4 CPU cores for optimal performance if you're running a production environment.
73 |
74 | After installing Docker, you can start the local development server by running the following commands:
75 |
76 | ```sh
77 | git clone https://github.com/frigadehq/trench.git
78 | cd trench/apps/trench
79 | cp .env.example .env
80 | docker-compose -f docker-compose.yml -f docker-compose.dev.yml up --build --force-recreate --renew-anon-volumes
81 | ```
82 |
 83 | The above command will start the Trench server that includes a local ClickHouse and Kafka instance on `http://localhost:4000`. You can open this URL in your browser and you should see the message `Trench server is running`. You should update the `.env` file to change any of the configuration options.
84 |
85 | 2. **Send a sample event**:
86 | You can find and update the default public and private API key in the `.env` file. Using your public API key, you can send a sample event to Trench as such:
87 |
88 | ```sh
89 | curl -i -X POST \
90 | -H "Authorization:Bearer public-d613be4e-di03-4b02-9058-70aa4j04ff28" \
91 | -H "Content-Type:application/json" \
92 | -d \
93 | '{
94 | "events": [
95 | {
96 | "userId": "550e8400-e29b-41d4-a716-446655440000",
97 | "type": "track",
98 | "event": "ConnectedAccount",
99 | "properties": {
100 | "totalAccounts": 4,
101 | "country": "Denmark"
102 | }
103 | }]
104 | }' \
105 | 'http://localhost:4000/events'
106 | ```
107 |
108 | 3. **Querying events**:
109 | You can query events using the `/events` endpoint (see [API reference](https://docs.trench.dev/api-reference/events-get) for more details).
110 |
111 | You can also query events directly from your local Trench server. For example, to query events of type `ConnectedAccount`, you can use the following URL:
112 |
113 | ```sh
114 | curl -i -X GET \
115 | -H "Authorization: Bearer private-d613be4e-di03-4b02-9058-70aa4j04ff28" \
116 | 'http://localhost:4000/events?event=ConnectedAccount'
117 | ```
118 |
119 | This will return a JSON response with the event that was just sent:
120 |
121 | ```json
122 | {
123 | "results": [
124 | {
125 | "uuid": "25f7c712-dd86-4db0-89a8-d07d11b73e57",
126 | "type": "track",
127 | "event": "ConnectedAccount",
128 | "userId": "550e8400-e29b-41d4-a716-446655440000",
129 | "properties": {
130 | "totalAccounts": 4,
131 | "country": "Denmark"
132 | },
133 | "timestamp": "2024-10-22T19:34:56.000Z",
134 | "parsedAt": "2024-10-22T19:34:59.530Z"
135 | }
136 | ],
137 | "limit": 1000,
138 | "offset": 0,
139 | "total": 1
140 | }
141 | ```
142 |
143 | 4. **Execute raw SQL queries**:
144 | Use the queries endpoint to analyze your data. Example:
145 |
146 | ```sh
147 | curl -i -X POST \
148 | -H "Authorization:Bearer private-d613be4e-di03-4b02-9058-70aa4j04ff28" \
149 | -H "Content-Type:application/json" \
150 | -d \
151 | '{
152 | "queries": [
153 | "SELECT COUNT(*) FROM events WHERE userId = '\''550e8400-e29b-41d4-a716-446655440000'\''"
154 | ]
155 | }' \
156 | 'http://localhost:4000/queries'
157 | ```
158 |
159 | Sample query result:
160 |
161 | ```json
162 | {
163 | "results": [
164 | {
165 | "count": 5
166 | }
167 | ],
168 | "limit": 0,
169 | "offset": 0,
170 | "total": 1
171 | }
172 | ```
173 |
174 | ### 2. Trench Cloud ☁️
175 |
176 | If you don't want to self-host, you can get started with Trench in a few minutes via:
177 | * our [Cloud web interface](https://app.trench.dev/sign-up)
178 | * our [Cloud quickstart guide](https://docs.trench.dev/cloud-quickstart)
179 |
180 |
181 |
182 |
183 |
184 |
185 | ## 🔗 Links
186 |
187 | - [Website](https://trench.dev?utm_campaign=github-readme)
188 | - [Documentation](https://docs.trench.dev/)
189 | - [Slack community](https://join.slack.com/t/trench-community/shared_invite/zt-2sjet5kh2-v31As3yC_zRIadk_AGn~3A)
190 |
191 | ## 📚 Authors
192 |
193 | Trench is a project built by [Frigade](https://frigade.com).
194 |
195 | ## 📄 License
196 |
197 | MIT License
198 |
--------------------------------------------------------------------------------
/apps/docs/.gitignore:
--------------------------------------------------------------------------------
1 | .idea
2 | .DS_Store
3 | *.iml
4 |
--------------------------------------------------------------------------------
/apps/docs/.nvmrc:
--------------------------------------------------------------------------------
1 | v21.6.1
2 |
--------------------------------------------------------------------------------
/apps/docs/README.md:
--------------------------------------------------------------------------------
1 | # Trench Documentation
2 |
3 | The content and configuration powering the Trench documentation available at [docs.trench.dev](https://docs.trench.dev)
4 |
5 | ### 👩‍💻 Development
6 |
7 | Install the [Mintlify CLI](https://www.npmjs.com/package/mintlify) to preview the documentation changes locally. To install, use the following command
8 |
9 | ```
10 | npm i mintlify -g
11 | ```
12 |
13 | Run the following command at the root of your documentation (where mint.json is)
14 |
15 | ```
16 | mintlify dev
17 | ```
18 |
19 | #### Troubleshooting
20 |
21 | - Mintlify dev isn't running - Run `mintlify install` to re-install dependencies.
22 | - Mintlify dev is updating really slowly - Run `mintlify clear` to clear the cache.
23 |
--------------------------------------------------------------------------------
/apps/docs/api-reference/events-create.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Create Events
3 | description: Use `track`, `identify`, and `group` to send events to Trench.
4 | openapi: post /events
5 | ---
6 |
7 | This endpoint requires your [public API key](/api-reference/overview#authentication).
8 | Trench is fully compatible with the [Segment Spec](https://segment.com/docs/spec/) schema.
9 |
10 | You can use the `track`, `identify`, and `group` methods to send events to Trench via the `/events` endpoint.
11 |
--------------------------------------------------------------------------------
/apps/docs/api-reference/events-get.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Get Events
3 | description: Get events based on a query
4 | openapi: get /events
5 | ---
6 |
7 | This endpoint requires your [private API key](/api-reference/overview#authentication).
8 |
--------------------------------------------------------------------------------
/apps/docs/api-reference/overview.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: API Overview
3 | description: Overview of the Trench API
4 | ---
5 |
6 | ## Trench API Overview
7 |
8 | The Trench API provides a comprehensive suite of endpoints to interact with the Trench platform. It is designed to be fully compatible with the [Segment Spec](https://segment.com/docs/spec/) schema, allowing you to seamlessly integrate and send events using familiar methods such as `track`, `identify`, and `group`.
9 |
10 | ### Events
11 |
12 | The `/events` endpoint allows you to create and retrieve events using the `track`, `identify`, and `group` methods. These methods enable you to send detailed event data to Trench, including user interactions, identity information, and group associations. For more information, refer to the [Create Events](./events-create) and the [Get Events](./events-get) documentation.
13 |
14 | ### Queries
15 |
16 | The `/queries` endpoint enables you to execute queries against the Trench platform. This allows you to retrieve and analyze data based on the events and interactions recorded. For more details, refer to the [Execute Queries](./queries-execute) documentation.
17 |
18 | ### Webhooks
19 |
20 | The Trench API also supports webhooks, which allow you to receive real-time notifications about specific events or changes within the platform. You can create, retrieve, and delete webhooks using the respective endpoints. For more information, refer to the following documentation:
21 |
22 | - [Get Webhooks](./webhooks-get)
23 | - [Create Webhooks](./webhooks-create)
24 | - [Delete Webhooks](./webhooks-delete)
25 |
26 | ### Authentication
27 |
28 | The Trench API uses bearer token authentication to secure access to its endpoints. You will be issued a **public api key** and a **private api key** to use when making requests. The public key is used only in the [Events](./events-create) endpoint to send data to Trench. It is safe to expose this key to the public internet.
29 |
30 | The private key is used on all other endpoints and should be kept secret.
31 |
32 |
33 | **Note:** Do not use your private API key on public endpoints. Always use your public API key to
34 | avoid unauthorized access and misuse.
35 |
36 |
--------------------------------------------------------------------------------
/apps/docs/api-reference/queries-execute.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Execute Queries
3 | description: Overview of the Trench API
4 | openapi: post /queries
5 | ---
6 |
7 | This endpoint requires your [private API key](/api-reference/overview#authentication).
8 |
9 | This endpoint allows you to execute read-only queries against your Trench data.
10 | The endpoint will proxy the request to the underlying ClickHouse database.
11 | For more information about ClickHouse's query language, see the [official documentation](https://clickhouse.com/docs/en/sql-reference).
12 |
13 | ## Examples
14 |
15 | ### Querying event properties, context, and traits
16 |
17 | To query a specific nested property, you can use the `JSONExtract` function. For example, to query all events where the `totalAccounts` property is greater than 3, you can use the following query:
18 |
19 | ```sql
20 | SELECT * FROM events WHERE JSONExtract(properties, 'totalAccounts', 'UInt64') > 3
21 | ```
22 |
23 | Similarly, you can query the context and traits:
24 |
25 | ```sql
26 | SELECT * FROM events WHERE JSONExtract(context, 'country', 'String') = 'Denmark'
27 | ```
28 |
29 | ### Joining identified users with their events
30 |
31 | All `identify` calls are sent to the same underlying `events` ClickHouse table, so you can join events with identified users using the `userId` column. For example, to query all events for a user with the ID `user-123`, you can use the following query:
32 |
33 | ```sql
34 | SELECT * FROM events WHERE userId = 'user-123'
35 | ```
36 |
37 | To get the tracking events and the user's most recently provided email, you can join the `track` and `identify` event types:
38 |
39 | ```sql
40 | SELECT
41 | i.email,
42 | e.*
43 | FROM
44 | events e
45 | LEFT JOIN
46 | (SELECT userId, type, JSONExtract(traits, 'email', 'String') AS email FROM events) i
47 | ON
48 | e.userId = i.userId
49 | WHERE
50 | e.type = 'track'
51 | AND i.type = 'identify';
52 | ```
53 |
--------------------------------------------------------------------------------
/apps/docs/api-reference/webhooks-create.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Create Webhook
3 | description: Create a new webhook.
4 | openapi: post /webhooks
5 | ---
6 |
7 | This endpoint requires your [private API key](/api-reference/overview#authentication).
8 |
9 | Once a webhook is created, events will be sent to it based on the criteria specified in the webhook.
10 | The data sent follows the [Event schema](/api-reference/events-get) and is sent as a JSON array in the POST request in the `data` field. Below is an example request body:
11 |
12 | ```json
13 | {
14 | "data": [
15 | {
16 | "uuid": "b99b40c5-e306-4351-9f1d-9a13bb9e8bd1",
17 | "type": "track",
18 | "event": "ConnectedAccount",
19 | "userId": "123e4567-e89b-12d3-a456-426614174000",
20 | "properties": {
21 | "totalAccounts": 4
22 | },
23 | "timestamp": "2024-10-21T21:32:17.000Z",
24 | "parsedAt": "2024-10-21T21:32:23.194Z"
25 | }
26 | ]
27 | }
28 | ```
29 |
--------------------------------------------------------------------------------
/apps/docs/api-reference/webhooks-delete.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Delete Webhook
3 | description: Delete a webhook.
4 | openapi: delete /webhooks/{uuid}
5 | ---
6 |
7 | This endpoint requires your [private API key](/api-reference/overview#authentication).
8 |
--------------------------------------------------------------------------------
/apps/docs/api-reference/webhooks-get.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Get Webhooks
3 | description: Get all webhooks under your account.
4 | openapi: get /webhooks
5 | ---
6 |
7 | This endpoint requires your [private API key](/api-reference/overview#authentication).
8 |
--------------------------------------------------------------------------------
/apps/docs/client.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: JavaScript Client
3 | description: Documentation for the Trench JavaScript client
4 | ---
5 |
6 | import TrenchScript from '/snippets/trench-script.mdx';
7 |
8 | ## Overview
9 |
10 | `trench-js` is a client library for both web and Node.js environments that allows you to send events to any Trench instance. This library is designed to be easy to use and integrate into your existing projects. The library is built on top of [Analytics](https://github.com/DavidWells/analytics) and supports all of its [methods](https://github.com/DavidWells/analytics#usage). It can be installed via a package manager or via a CDN script tag.
11 |
12 | ## Installation via package manager
13 |
14 | Install `trench-js` using your preferred package manager:
15 |
16 | ```bash
17 | npm install trench-js
18 | ```
19 |
20 | You can now initialize the client with your Trench instance URL and public API key:
21 |
22 | ```ts
23 | import Trench from 'trench-js'
24 |
25 | const trench = new Trench({
26 | serverUrl: 'your-server-url',
27 | publicApiKey: 'your-public-api-key',
28 | })
29 | ```
30 |
31 | For example, you can identify a user like this:
32 |
33 | ```ts
34 | trench.identify('user-id', {
35 | email: 'test@example.com',
36 | })
37 | ```
38 |
39 | And track an event like this:
40 |
41 | ```ts
42 | trench.track('test-event', {
43 | test: 'test-value',
44 | })
45 | ```
46 |
47 | Or to record a page view:
48 |
49 | ```ts
50 | trench.page()
51 | ```
52 |
53 | ## Installation via CDN script tag
54 |
55 | Alternatively, you can use the CDN version:
56 |
57 |
58 |
59 | This will automatically add the `trench` object to the global scope and you can start tracking events automatically as `autoCaptureEvents` is set to `true` by default.
60 |
--------------------------------------------------------------------------------
/apps/docs/cloud-quickstart.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Cloud Quickstart
3 | sidebarTitle: Cloud Quickstart
4 | description: Send your first event to Trench Cloud
5 | ---
6 |
7 | import TrenchScript from '/snippets/trench-script.mdx';
8 |
9 |
10 | In this guide, we'll walk you through the process of sending your first event and reading it back using the Trench Cloud API.
11 | The example uses the [Trench JavaScript client](https://www.npmjs.com/package/trench-js), but the same can be achieved by calling the [Events API](https://docs.trench.dev/api-reference/events-create) directly.
12 |
13 | ## Getting Started
14 |
15 |
16 |
17 | To get started with Trench Cloud, you need to sign up at [app.trench.dev](https://app.trench.dev?utm_campaign=cloud-quickstart). Then, locate the necessary credentials in the `API Keys` tab.
18 |
19 |
20 |
21 |
22 |
23 |
24 | Next, install the Trench JavaScript client using your favorite package manager:
25 |
26 | ```bash
27 | npm install trench-js
28 | ```
29 |
30 | Alternatively, you can use the hosted version via the script tag below and skip the **Initialize the Client** step:
31 |
32 |
33 |
34 |
35 |
36 |
37 | After installing the client, you need to initialize it with your API key. Replace `YOUR_API_KEY` and `YOUR_SERVER_URL` with the actual API key and server URL you received:
38 |
39 | ```javascript
40 | import Trench from 'trench-js'
41 |
42 | const trench = new Trench({
43 | publicApiKey: 'YOUR_PUBLIC_API_KEY',
44 | serverUrl: 'YOUR_SERVER_URL'
45 | });
46 | ```
47 | Optionally, you can identify a user with the `identify` method:
48 |
49 | ```javascript
50 | trench.identify('user-id', {
51 | email: 'test@example.com',
52 | // Add any other traits you want to associate with the user
53 | })
54 | ```
55 |
56 |
57 |
58 |
59 | Now you can send a sample event to Trench Cloud. Here is an example of how to send an event:
60 |
61 | ```javascript
62 | trench.track("Test Event");
63 | ```
64 |
65 | This will send an event with the name `Test Event`.
66 |
67 |
68 |
69 | You can verify that the event was received by opening the `Events` tab in the Trench Cloud dashboard.
70 |
71 |
72 |
73 | ## Going Further
74 |
75 | Now that you've sent your first event, you can learn more about the many things you can do with Trench.
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
--------------------------------------------------------------------------------
/apps/docs/contributing.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Contributing to Trench
3 | sidebarTitle: Contributing
4 | description: Trench is an open source project and we welcome contributions from the community.
5 | ---
6 |
7 |
8 |
9 | ## Developing and deploying the backend locally
10 |
11 | We welcome contributions to improve the backend. Before starting, we recommend opening an issue for discussion to ensure your idea aligns with the project's goals.
12 |
13 | ### Backend Setup Guide
14 |
15 | 1. Fork the repository and clone it to your local machine.
16 | 2. Navigate to the `apps/trench` directory and copy the `.env.example` file to `.env`:
17 |
18 | ```bash
19 | cp .env.example .env
20 | ```
21 |
22 | 3. Make sure you have the following prerequisites installed:
23 | - [Docker](https://www.docker.com/products/docker-desktop/)
24 | - [Docker Compose](https://docs.docker.com/compose/install/)
25 | - [Node.js](https://nodejs.org/en/download/)
26 | - [pnpm](https://pnpm.io/installation)
27 |
28 | 4. Start the local development environment:
29 |
30 | ```bash
31 | pnpm install && pnpm dev
32 | ```
33 |
34 | 5. The backend should now be running locally on `http://localhost:4000`.
35 | 6. Create a new branch for your changes.
7. Develop your feature or fix and write tests for it. Specifically, writing an end-to-end test for your feature is highly recommended.
37 | 8. Ensure all tests pass by running `pnpm test:e2e` (you will need to be running the dev server locally on port 4000 with HTTP for this to work)
38 | 9. Commit your changes and push them to your forked repository.
39 | 10. Submit a pull request to the main repository.
40 |
41 | ## Improving the Documentation
42 |
43 | Good documentation is crucial and requires significant time and effort to create and maintain. All our documentation is written in Markdown to make it easy for contributors.
44 |
45 | ### Documentation Setup Guide
46 |
47 | 1. Fork the repository and clone it to your local machine.
48 | 2. Go to the `apps/docs` directory.
49 | 3. Make your edits in the relevant `.mdx` files. Optionally, install and start [Mintlify](https://mintlify.com/) to build the documentation site locally.
50 | 4. Commit your changes and push them to your forked repository.
51 | 5. Submit a pull request to the main repository.
52 |
53 |
54 | ## Reporting Issues
55 |
56 | You can open an issue to report bugs or suggest new features.
57 |
58 | ### Reporting Bugs
59 |
60 | Clearly describe the bug, including steps to reproduce it, what happened, and what you expected to happen. Also, include the browser version, OS, and other relevant software versions (npm, Node.js, etc.) when applicable.
61 |
62 | ### Suggesting Features
63 |
64 | Describe the proposed feature, its functionality, why it is useful, and how users should use it. Provide as much information as possible to facilitate discussion, assessment, and implementation. If you are unsure about any aspect of the feature, feel free to leave it open for discussion.
65 |
66 | ## Submitting Pull Requests
67 |
68 | Pull requests are highly appreciated. If you plan to propose significant changes, please open an issue for discussion first to ensure your PR will be accepted before you invest time in coding it.
69 |
70 | ### Forking the Repository
71 |
72 | 1. Fork the repository and clone it to your local machine.
73 | 2. Create a branch for your proposed bug fix or new feature. Avoid working directly on the main branch.
74 |
75 | ### Making Changes
76 |
77 | 1. Implement your bug fix or feature, write tests for it, and ensure all tests pass.
78 | 2. Commit your changes and push your bug fix/feature branch to your forked repository.
79 | 3. Submit a pull request to the main branch of the upstream repository (the repository you originally forked).
80 |
81 |
--------------------------------------------------------------------------------
/apps/docs/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrigadeHQ/trench/61f60a34c7aaad9011e9b6cffac1f2593ec7166b/apps/docs/favicon.png
--------------------------------------------------------------------------------
/apps/docs/favicon.svg:
--------------------------------------------------------------------------------
1 |
32 |
--------------------------------------------------------------------------------
/apps/docs/guides/grafana.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Grafana
3 | sidebarTitle: Grafana
4 | description: Step-by-step guide to setup Trench with Grafana dashboard
5 | ---
6 |
7 | # Setting up Grafana with Trench
8 |
9 | This guide will walk you through setting up Grafana with Trench to visualize your analytics data.
10 |
11 | ## Prerequisites
12 | - Docker installed
13 | - Trench running locally — follow the [Self-host Quickstart](/quickstart) if you haven't set it up yet
14 |
15 | ## Setup Steps
16 |
17 |
18 |
19 |
20 |
21 | Run the following command to start Grafana. This will make Grafana available at http://localhost:3001 and connect it to your Trench installation.
22 |
23 |
24 |
25 | ```bash
26 | docker run -d \
27 | --name grafana-trench \
28 | --network trench_app-network \
29 | -p 3001:3000 \
30 | grafana/grafana:latest
31 | ```
32 |
33 |
34 |
35 |
36 |
37 | - Username: `admin`
38 | - Password: `admin`
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 | Go to the plugins section and install the ClickHouse plugin.
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 | 1. Go to the Data Sources section and click on `Add data source`.
55 | 2. Select `ClickHouse` as the data source.
56 | 3. Configure the data source with these settings:
57 | - Host: `clickhouse`
58 | - Port: `8123`
59 | - Protocol: `http`
60 | - Username: `user`
61 | - Password: `password`
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 | You can import the pre-built dashboard using the JSON configuration provided. Download the [JSON configuration](https://raw.githubusercontent.com/FrigadeHQ/trench/refs/heads/main/apps/docs/json/grafana-dashboard.json) and import it into Grafana.
71 |
72 | The Dashboard includes:
73 | - Unique pageviews over time
74 | - Time series visualization
75 | - 6-hour time window by default
76 |
77 |
78 |
79 |
80 |
81 | Here's an example of sending pageview data:
82 |
83 | ```bash
84 | curl --location 'http://localhost:4000/events' \
85 | --header 'Authorization: Bearer public-d613be4e-di03-4b02-9058-70aa4j04ff28' \
86 | --header 'Content-Type: application/json' \
87 | --data '{
88 | "events": [
89 | {
90 | "userId": "550e8400-e29b-41d4-a716-446655440000",
91 | "type": "track",
92 | "event": "$pageview",
93 | "properties": {
94 | "totalAccounts": 4,
95 | "country": "Denmark"
96 | }
97 | }
98 | ]
99 | }'
100 | ```
101 |
102 |
103 |
104 |
105 |
106 | New pageviews should be visible in the dashboard.
107 |
108 |
109 |
110 | #### Next Steps
111 | - Customize the dashboard based on your needs
112 | - Add more panels for different metrics
113 | - Set up alerts
114 |
115 |
116 |
117 |
118 |
119 |
120 |
--------------------------------------------------------------------------------
/apps/docs/images/grafana-config-datasource.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrigadeHQ/trench/61f60a34c7aaad9011e9b6cffac1f2593ec7166b/apps/docs/images/grafana-config-datasource.png
--------------------------------------------------------------------------------
/apps/docs/images/grafana-dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrigadeHQ/trench/61f60a34c7aaad9011e9b6cffac1f2593ec7166b/apps/docs/images/grafana-dashboard.png
--------------------------------------------------------------------------------
/apps/docs/images/grafana-login.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrigadeHQ/trench/61f60a34c7aaad9011e9b6cffac1f2593ec7166b/apps/docs/images/grafana-login.jpeg
--------------------------------------------------------------------------------
/apps/docs/images/grafana-plugin-install.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrigadeHQ/trench/61f60a34c7aaad9011e9b6cffac1f2593ec7166b/apps/docs/images/grafana-plugin-install.png
--------------------------------------------------------------------------------
/apps/docs/images/trench-cover.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrigadeHQ/trench/61f60a34c7aaad9011e9b6cffac1f2593ec7166b/apps/docs/images/trench-cover.png
--------------------------------------------------------------------------------
/apps/docs/images/trench-dashboard-dark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrigadeHQ/trench/61f60a34c7aaad9011e9b6cffac1f2593ec7166b/apps/docs/images/trench-dashboard-dark.png
--------------------------------------------------------------------------------
/apps/docs/introduction.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Introduction
3 | description: Open source infrastructure for tracking events
4 | ---
5 |
6 |
7 |
8 | Trench is an event tracking system built on top of Apache Kafka and ClickHouse. It can handle large event volumes and provides real-time analytics. Our team built Trench to scale up the real-time event tracking pipeline at Frigade.
9 |
10 | ## Features
11 |
12 | - 🤝 Compliant with the Segment API (Track, Group, Identify)
13 | - 🐳 Deploy quickly with a single production-ready Docker image
14 | - 💻 Process thousands of events per second on a single node
15 | - ⚡ Query data in real-time
16 | - 🔗 Connect data to other destinations with webhooks
17 | - 👥 Open-source and MIT Licensed
18 |
19 | ## Getting Started
20 |
21 | ---
22 |
23 |
24 |
25 |
26 |
27 |
33 |
34 |
--------------------------------------------------------------------------------
/apps/docs/json/grafana-dashboard.json:
--------------------------------------------------------------------------------
1 | {
2 | "__inputs": [
3 | {
4 | "name": "DS_GRAFANA-CLICKHOUSE-DATASOURCE",
5 | "label": "grafana-clickhouse-datasource",
6 | "description": "",
7 | "type": "datasource",
8 | "pluginId": "grafana-clickhouse-datasource",
9 | "pluginName": "ClickHouse"
10 | }
11 | ],
12 | "__elements": {},
13 | "__requires": [
14 | {
15 | "type": "grafana",
16 | "id": "grafana",
17 | "name": "Grafana",
18 | "version": "11.4.0"
19 | },
20 | {
21 | "type": "datasource",
22 | "id": "grafana-clickhouse-datasource",
23 | "name": "ClickHouse",
24 | "version": "4.5.1"
25 | },
26 | {
27 | "type": "panel",
28 | "id": "timeseries",
29 | "name": "Time series",
30 | "version": ""
31 | }
32 | ],
33 | "annotations": {
34 | "list": [
35 | {
36 | "builtIn": 1,
37 | "datasource": {
38 | "type": "grafana",
39 | "uid": "-- Grafana --"
40 | },
41 | "enable": true,
42 | "hide": true,
43 | "iconColor": "rgba(0, 211, 255, 1)",
44 | "name": "Annotations & Alerts",
45 | "type": "dashboard"
46 | }
47 | ]
48 | },
49 | "editable": true,
50 | "fiscalYearStartMonth": 0,
51 | "graphTooltip": 0,
52 | "id": null,
53 | "links": [],
54 | "panels": [
55 | {
56 | "datasource": {
57 | "type": "grafana-clickhouse-datasource",
58 | "uid": "${DS_GRAFANA-CLICKHOUSE-DATASOURCE}"
59 | },
60 | "fieldConfig": {
61 | "defaults": {
62 | "color": {
63 | "mode": "palette-classic"
64 | },
65 | "custom": {
66 | "axisBorderShow": false,
67 | "axisCenteredZero": false,
68 | "axisColorMode": "text",
69 | "axisLabel": "",
70 | "axisPlacement": "auto",
71 | "barAlignment": 0,
72 | "barWidthFactor": 0.6,
73 | "drawStyle": "line",
74 | "fillOpacity": 0,
75 | "gradientMode": "none",
76 | "hideFrom": {
77 | "legend": false,
78 | "tooltip": false,
79 | "viz": false
80 | },
81 | "insertNulls": false,
82 | "lineInterpolation": "linear",
83 | "lineWidth": 1,
84 | "pointSize": 5,
85 | "scaleDistribution": {
86 | "type": "linear"
87 | },
88 | "showPoints": "auto",
89 | "spanNulls": false,
90 | "stacking": {
91 | "group": "A",
92 | "mode": "none"
93 | },
94 | "thresholdsStyle": {
95 | "mode": "off"
96 | }
97 | },
98 | "mappings": [],
99 | "thresholds": {
100 | "mode": "absolute",
101 | "steps": [
102 | {
103 | "color": "green",
104 | "value": null
105 | },
106 | {
107 | "color": "red",
108 | "value": 80
109 | }
110 | ]
111 | }
112 | },
113 | "overrides": []
114 | },
115 | "gridPos": {
116 | "h": 8,
117 | "w": 12,
118 | "x": 0,
119 | "y": 0
120 | },
121 | "id": 1,
122 | "options": {
123 | "legend": {
124 | "calcs": [],
125 | "displayMode": "list",
126 | "placement": "bottom",
127 | "showLegend": true
128 | },
129 | "tooltip": {
130 | "mode": "single",
131 | "sort": "none"
132 | }
133 | },
134 | "pluginVersion": "11.4.0",
135 | "targets": [
136 | {
137 | "builderOptions": {
138 | "aggregates": [],
139 | "columns": [
140 | {
141 | "alias": "user_id",
142 | "custom": false,
143 | "name": "user_id",
144 | "type": "String"
145 | }
146 | ],
147 | "database": "default",
148 | "filters": [],
149 | "groupBy": [
150 | "user_id"
151 | ],
152 | "limit": 1000,
153 | "meta": {},
154 | "mode": "aggregate",
155 | "orderBy": [],
156 | "queryType": "table",
157 | "table": "events"
158 | },
159 | "datasource": {
160 | "type": "grafana-clickhouse-datasource",
161 | "uid": "${DS_GRAFANA-CLICKHOUSE-DATASOURCE}"
162 | },
163 | "editorType": "sql",
164 | "format": 1,
165 | "meta": {
166 | "builderOptions": {
167 | "aggregates": [],
168 | "columns": [
169 | {
170 | "alias": "user_id",
171 | "custom": false,
172 | "name": "user_id",
173 | "type": "String"
174 | }
175 | ],
176 | "database": "default",
177 | "filters": [],
178 | "groupBy": [
179 | "user_id"
180 | ],
181 | "limit": 1000,
182 | "meta": {},
183 | "mode": "aggregate",
184 | "orderBy": [],
185 | "queryType": "table",
186 | "table": "events"
187 | }
188 | },
189 | "pluginVersion": "4.5.1",
190 | "queryType": "table",
191 | "rawSql": "SELECT \ntoStartOfHour(timestamp) as time,\ncount(DISTINCT user_id) as unique_pageviews\nFROM \"default\".\"events\" \nwhere event = '$pageview'\nGROUP BY time\norder by time",
192 | "refId": "A"
193 | }
194 | ],
195 | "title": "Panel Title",
196 | "type": "timeseries"
197 | }
198 | ],
199 | "schemaVersion": 40,
200 | "tags": [],
201 | "templating": {
202 | "list": []
203 | },
204 | "time": {
205 | "from": "now-6h",
206 | "to": "now"
207 | },
208 | "timepicker": {},
209 | "timezone": "browser",
210 | "title": "New dashboard",
211 | "uid": "ee6c3vm4x4dtsb",
212 | "version": 2,
213 | "weekStart": ""
214 | }
215 |
--------------------------------------------------------------------------------
/apps/docs/mint.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Trench",
3 | "logo": {
4 | "dark": "/logo/dark.svg",
5 | "light": "/logo/dark.svg"
6 | },
7 | "modeToggle": {
8 | "default": "dark",
9 | "isHidden": true
10 | },
11 | "favicon": "/favicon.png",
12 | "colors": {
13 | "primary": "#0F1FA1",
14 | "light": "#FFFFFF",
15 | "anchors": {
16 | "from": "#17A9F9",
17 | "to": "#443AFA"
18 | },
19 | "background": {
20 | "dark": "#010101"
21 | }
22 | },
23 | "topbarCtaButton": {
24 | "name": "Get Started",
25 | "url": "https://github.com/frigadehq/trench"
26 | },
27 | "topbarLinks": [
28 | {
29 | "name": "Website",
30 | "url": "https://trench.dev"
31 | }
32 | ],
33 | "anchors": [
34 | {
35 | "name": "GitHub",
36 | "icon": "github",
37 | "url": "https://github.com/frigadehq/trench"
38 | },
39 | {
40 | "name": "Slack Community",
41 | "icon": "slack",
42 | "url": "https://join.slack.com/t/trench-community/shared_invite/zt-2sjet5kh2-v31As3yC_zRIadk_AGn~3A"
43 | }
44 | ],
45 | "navigation": [
46 | {
47 | "group": "Overview",
48 | "pages": ["introduction", "quickstart", "cloud-quickstart", "contributing"]
49 | },
50 | {
51 | "group": "Guides",
52 | "pages": ["guides/grafana"]
53 | },
54 | {
55 | "group": "SDK and API",
56 | "pages": ["client", "api-reference/overview"]
57 | },
58 | {
59 | "group": "Events API",
60 | "pages": ["api-reference/events-create", "api-reference/events-get"]
61 | },
62 | {
63 | "group": "Webhooks API",
64 | "pages": [
65 | "api-reference/webhooks-get",
66 | "api-reference/webhooks-create",
67 | "api-reference/webhooks-delete"
68 | ]
69 | },
70 | {
71 | "group": "Query API",
72 | "pages": ["api-reference/queries-execute"]
73 | }
74 | ],
75 | "footerSocials": {
76 | "twitter": "https://twitter.com/FrigadeHQ",
77 | "github": "https://github.com/FrigadeHQ/trench",
78 | "linkedin": "https://www.linkedin.com/company/frigade/",
79 | "website": "https://trench.dev"
80 | },
81 | "api": {
82 | "baseUrl": "https://api.trench.dev",
83 | "auth": {
84 | "method": "bearer"
85 | }
86 | },
87 | "integrations": {},
88 | "feedback": {
89 | "thumbsRating": true,
90 | "suggestEdit": true
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/apps/docs/quickstart.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Self-host Quickstart
3 | sidebarTitle: Self-host Quickstart
4 | description: Get set up with Trench in less than 5 minutes
5 | ---
6 |
7 |
8 |
9 | The only prerequisite for Trench is a system that has Docker and Docker Compose installed (see the [installation guide](https://docs.docker.com/compose/install/)). We recommend having at least 4GB of RAM and 4 CPU cores for optimal performance if you're running a production environment.
10 |
11 | After installing Docker, you can start the local development server by running the following commands:
12 |
13 | ```sh
14 | git clone https://github.com/frigadehq/trench.git
15 | cd trench/apps/trench
16 | cp .env.example .env
17 | docker-compose -f docker-compose.yml -f docker-compose.dev.yml up --build --force-recreate --renew-anon-volumes
18 | ```
19 |
20 | The above command will start the Trench server that includes a local ClickHouse and Kafka instance on `http://localhost:4000`. You can update the `.env` to change any of the configuration options.
21 |
22 |
23 |
24 | You can find and update the default public and private API key in the `.env` file. Using your public API key, you can send a sample event to Trench as such:
25 |
26 | ```bash
27 | curl -i -X POST \
28 | -H "Authorization: Bearer public-d613be4e-di03-4b02-9058-70aa4j04ff28" \
29 | -H "Content-Type: application/json" \
30 | -d \
31 | '{
32 | "events": [
33 | {
34 | "userId": "550e8400-e29b-41d4-a716-446655440000",
35 | "type": "track",
36 | "event": "ConnectedAccount",
37 | "properties": {
38 | "totalAccounts": 4,
39 | "country": "Denmark"
40 | }
41 | }]
42 | }' \
43 | 'http://localhost:4000/events'
44 | ```
45 |
46 |
47 |
48 |
49 | You can query events using the `/events` endpoint (see [API reference](/api-reference/events-get) for more details).
50 |
51 | You can also query events directly from your local Trench server. For example, to query events of type `ConnectedAccount`, you can use the following URL:
52 |
53 | ```bash
54 | curl -i -X GET \
55 | -H "Authorization: Bearer private-d613be4e-di03-4b02-9058-70aa4j04ff28" \
56 | 'http://localhost:4000/events?event=ConnectedAccount'
57 | ```
58 |
59 | This will return a JSON response with the event that was just sent:
60 |
61 | ```json
62 | {
63 | "results": [
64 | {
65 | "uuid": "25f7c712-dd86-4db0-89a8-d07d11b73e57",
66 | "type": "track",
67 | "event": "ConnectedAccount",
68 | "userId": "550e8400-e29b-41d4-a716-446655440000",
69 | "properties": {
70 | "totalAccounts": 4,
71 | "country": "Denmark"
72 | },
73 | "timestamp": "2024-10-22T19:34:56.000Z",
74 | "parsedAt": "2024-10-22T19:34:59.530Z"
75 | }
76 | ],
77 | "limit": 1000,
78 | "offset": 0,
79 | "total": 1
80 | }
81 | ```
82 |
83 |
84 |
85 |
86 | Use the queries endpoint to analyze your data. Example:
87 |
88 | ```bash
89 | curl -i -X POST \
90 | -H "Authorization: Bearer private-d613be4e-di03-4b02-9058-70aa4j04ff28" \
91 | -H "Content-Type: application/json" \
92 | -d \
93 | '{
94 | "queries": [
95 | "SELECT COUNT(*) FROM events WHERE userId = '\''550e8400-e29b-41d4-a716-446655440000'\''"
96 | ]
97 | }' \
98 | 'http://localhost:4000/queries'
99 | ```
100 |
101 | Sample query result:
102 |
103 | ```json
104 | {
105 | "results": [
106 | {
107 | "count": 5
108 | }
109 | ],
110 | "limit": 0,
111 | "offset": 0,
112 | "total": 1
113 | }
114 | ```
115 |
116 |
117 |
118 |
119 | ## Going Further
120 |
121 | While the above steps are a great starting point, the following video tutorial demonstrates many of the things you can do with Trench. In this video, we build a mini version of Google Analytics using Trench and Grafana:
122 |
123 |
129 |
130 | ## Related Resources
131 |
132 |
133 |
134 |
135 |
136 |
137 |
138 |
--------------------------------------------------------------------------------
/apps/docs/scripts/openapi.sh:
--------------------------------------------------------------------------------
1 | yes | cp -rf ../trench/swagger-spec.json ./swagger-spec.json
2 | # Generate mintlify docs
3 | #npx @mintlify/scraping@latest openapi-file swagger-spec.json -o api-reference
4 | echo 'openapi.sh: Done!'
5 |
--------------------------------------------------------------------------------
/apps/docs/snippets/trench-script.mdx:
--------------------------------------------------------------------------------
1 | ```html
2 |
13 | ```
--------------------------------------------------------------------------------
/apps/docs/style.css:
--------------------------------------------------------------------------------
1 | hr {
2 | margin-bottom: 1.5rem !important;
3 | }
4 |
5 | #topbar-cta-button {
6 | border-radius: 100px;
7 | border: 1px solid #627ee910;
8 | background: radial-gradient(
9 | 77.34% 100% at 50% 0%,
10 | rgba(255, 255, 255, 0.17) 0%,
11 | rgba(255, 255, 255, 0) 100%
12 | ),
13 | linear-gradient(0deg, #12228f 0%, #12228f 100%), #090f1b;
14 | background-blend-mode: normal, overlay, normal;
15 | box-shadow: 0px 1px 0px 0px rgba(255, 255, 255, 0.47) inset,
16 | 0px 0px 140px 28px rgba(53, 79, 255, 0.5),
17 | 0px 0px 0px 5px rgba(255, 255, 255, 0.08) inset,
18 | 0px 0px 0px 4px rgba(0, 0, 0, 0.2), 0px 0px 36px 0px #242b5a,
19 | 0px 1.864px 0.466px -1.553px rgba(255, 255, 255, 0.7) inset,
20 | 0px 0.621px 0.311px -0.155px rgba(255, 255, 255, 0.3),
21 | 0px -0.311px 0.621px 0.621px rgba(0, 0, 0, 0.9) inset;
22 | }
23 |
--------------------------------------------------------------------------------
/apps/trench/.dockerignore:
--------------------------------------------------------------------------------
1 | Dockerfile
2 | .dockerignore
3 | node_modules
4 | npm-debug.log
5 | dist
6 | certs
--------------------------------------------------------------------------------
/apps/trench/.env.example:
--------------------------------------------------------------------------------
1 | NODE_ENV=development
2 | API_PORT=4000
3 | API_HTTPS=false
4 | CLICKHOUSE_USER=user
5 | CLICKHOUSE_PASSWORD=password
6 | CLICKHOUSE_DATABASE=default
7 | CLICKHOUSE_HOST=clickhouse
8 | CLICKHOUSE_PORT=8123
9 | #KAFKA_CLIENT_ID=trench
10 | # Comma separated list of brokers
11 | KAFKA_BROKERS=kafka:9092
12 | KAFKA_TOPIC=trench_events
13 | KAFKA_PARTITIONS=1
14 | # Comma separated list of public API keys. Make sure to update this.
15 | PUBLIC_API_KEYS=public-d613be4e-di03-4b02-9058-70aa4j04ff28
16 | # Comma separated list of private API keys. Make sure to update this.
17 | PRIVATE_API_KEYS=private-d613be4e-di03-4b02-9058-70aa4j04ff28
18 | # API_DOMAIN=local.trench.dev
19 |
--------------------------------------------------------------------------------
/apps/trench/.eslintrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | parser: '@typescript-eslint/parser',
3 | parserOptions: {
4 | project: 'tsconfig.json',
5 | tsconfigRootDir: __dirname,
6 | sourceType: 'module',
7 | },
8 | plugins: ['@typescript-eslint/eslint-plugin'],
9 | extends: ['plugin:@typescript-eslint/recommended', 'plugin:prettier/recommended'],
10 | root: true,
11 | env: {
12 | node: true,
13 | jest: true,
14 | },
15 | ignorePatterns: ['.eslintrc.js'],
16 | rules: {
17 | '@typescript-eslint/interface-name-prefix': 'off',
18 | '@typescript-eslint/explicit-function-return-type': 'off',
19 | '@typescript-eslint/explicit-module-boundary-types': 'off',
20 | '@typescript-eslint/no-explicit-any': 'off',
21 | '@typescript-eslint/no-empty-function': 'off',
22 | '@typescript-eslint/ban-types': 'off',
23 | },
24 | }
25 |
--------------------------------------------------------------------------------
/apps/trench/.gitignore:
--------------------------------------------------------------------------------
1 | # compiled output
2 | /dist
3 | /node_modules
4 |
5 | # Logs
6 | logs
7 | *.log
8 | npm-debug.log*
9 | pnpm-debug.log*
10 | yarn-debug.log*
11 | yarn-error.log*
12 | lerna-debug.log*
13 |
14 | # OS
15 | .DS_Store
16 |
17 | # Tests
18 | /coverage
19 | /.nyc_output
20 |
21 | # IDEs and editors
22 | /.idea
23 | .project
24 | .classpath
25 | .c9/
26 | *.launch
27 | .settings/
28 | *.sublime-workspace
29 |
30 | # IDE - VSCode
31 | .vscode/*
32 | !.vscode/settings.json
33 | !.vscode/tasks.json
34 | !.vscode/launch.json
35 | !.vscode/extensions.json
36 |
37 | # environment variables
38 | .env
39 |
40 | # sqlite
41 | *.sqlite
42 |
--------------------------------------------------------------------------------
/apps/trench/.nvmrc:
--------------------------------------------------------------------------------
1 | 18.13.0
2 |
--------------------------------------------------------------------------------
/apps/trench/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "printWidth": 100,
3 | "semi": false,
4 | "singleQuote": true,
5 | "tabWidth": 2,
6 | "trailingComma": "es5"
7 | }
8 |
--------------------------------------------------------------------------------
/apps/trench/Dockerfile:
--------------------------------------------------------------------------------
1 | ###################
2 | # BUILD FOR LOCAL DEVELOPMENT
3 | ###################
4 |
5 | FROM node:22.9-bullseye AS build
6 |
7 | RUN apt-get update && apt-get install -y openssl certbot
8 | RUN npm install -g pnpm
9 |
10 | # Create app directory
11 | WORKDIR /app
12 |
13 | # Create certs directory
14 | RUN mkdir -p /app/certs
15 |
16 | # Generate a self-signed certificate for HTTPS. You can replace this with a real certificate in production
17 | RUN openssl genrsa -des3 -passout pass:x -out /app/certs/server.pass.key 2048 && \
18 | openssl rsa -passin pass:x -in /app/certs/server.pass.key -out /app/certs/server.key && \
19 | rm /app/certs/server.pass.key && \
20 | openssl req -new -key /app/certs/server.key -out /app/certs/server.csr \
21 | -subj "/C=US/ST=CA/L=SF/O=Trench/OU=IT Department/CN=trench.dev" && \
22 | openssl x509 -req -days 3650 -in /app/certs/server.csr -signkey /app/certs/server.key -out /app/certs/server.crt && \
23 | chown -R node:node /app/certs
24 |
25 | COPY --chown=node:node ./package.json ./
26 |
27 | RUN pnpm install
28 |
29 | # Bundle app source
30 | COPY --chown=node:node . .
31 | RUN chown -R node:node /app/certs
32 | RUN mkdir -p /app/dist
33 | RUN chown -R node:node /app/dist
34 |
35 | # Use the node user from the image (instead of the root user)
36 | USER node
37 |
38 | ###################
39 | # BUILD FOR PRODUCTION
40 | ###################
41 |
42 | FROM node:22.9-bullseye AS production-build
43 |
44 | RUN apt-get update && apt-get install -y openssl certbot
45 | RUN npm install -g pnpm
46 |
47 | WORKDIR /app
48 |
49 | # Create certs directory
50 | RUN mkdir -p /app/certs
51 |
52 | COPY --chown=node:node ./package*.json /app/
53 |
54 | # In order to run `pnpm run build` we need access to the Nest CLI which is a dev dependency.
55 | # In the previous build stage we ran `pnpm install`, which installed all dependencies, so we can copy over the node_modules directory from that stage
56 | COPY --chown=node:node --from=build /app/node_modules /app/node_modules
57 | COPY --chown=node:node --from=build /app/certs /app/certs
58 | COPY --chown=node:node ./ ./
59 |
60 | RUN pnpm build
61 |
62 | # Set NODE_ENV environment variable
63 |
64 | ENV NODE_ENV production
65 |
66 |
67 |
68 | ###################
69 | # PRODUCTION RUN
70 | ###################
71 |
72 | FROM node:22.9-bullseye AS production
73 |
74 | # RUN apt-get update && apt-get install -y openssl
75 |
76 | WORKDIR /app
77 |
78 | # Copy the bundled code from the build stage to the production image
79 | COPY --chown=node:node --from=production-build /app/node_modules /app/node_modules
80 | COPY --chown=node:node --from=production-build /app/dist /app/dist
81 | COPY --chown=node:node --from=production-build /app/certs /app/certs
82 |
83 | USER node
84 |
85 |
--------------------------------------------------------------------------------
/apps/trench/docker-compose.dev.yml:
--------------------------------------------------------------------------------
1 | services:
2 | api:
3 | build:
4 | dockerfile: Dockerfile
5 | context: .
6 | target: build
7 | command: pnpm run start:dev
8 | volumes:
9 | - ./:/app
10 | - /app/node_modules
11 | - /app/certs
12 | - /app/schemas
13 |
--------------------------------------------------------------------------------
/apps/trench/docker-compose.stateless.yml:
--------------------------------------------------------------------------------
1 | # This docker configuration is used to run the trench api with an external clickhouse and kafka instance
2 | services:
3 | api:
4 | build:
5 | dockerfile: Dockerfile
6 | context: .
7 | target: production-build
8 | args:
9 | API_DOMAIN: ${API_DOMAIN}
10 | env_file:
11 | - .env
12 | volumes:
13 | - ./:/app
14 | - /app/node_modules
15 | - /app/certs
16 | - /app/schemas
17 | - /app/dist
18 | command: node /app/dist/main.js -i -1
19 | ports:
20 | - '${API_PORT}:${API_PORT}'
21 | networks:
22 | - app-network
23 | restart: unless-stopped
24 |
--------------------------------------------------------------------------------
/apps/trench/docker-compose.yml:
--------------------------------------------------------------------------------
1 | services:
2 | api:
3 | build:
4 | dockerfile: Dockerfile
5 | context: .
6 | target: production-build
7 | args:
8 | API_DOMAIN: ${API_DOMAIN}
9 | env_file:
10 | - .env
11 | volumes:
12 | - ./:/app
13 | - /app/node_modules
14 | - /app/certs
15 | - /app/schemas
16 | - /app/dist
17 | command: node /app/dist/main.js -i -1
18 | ports:
19 | - '${API_PORT}:${API_PORT}'
20 | depends_on:
21 | - clickhouse
22 | - kafka
23 | networks:
24 | - app-network
25 | restart: unless-stopped
26 | clickhouse:
27 | image: clickhouse/clickhouse-server
28 | restart: always
29 | ports:
30 | - '8123:8123'
31 | - '9000:9000'
32 | volumes:
33 | - clickhouse-data:/var/lib/clickhouse # Data storage
34 | # - ./config:/etc/clickhouse-server # Configuration files (optional)
35 | environment:
36 | - CLICKHOUSE_USER=${CLICKHOUSE_USER}
37 | - CLICKHOUSE_PASSWORD=${CLICKHOUSE_PASSWORD}
38 | ulimits:
39 | nofile:
40 | soft: 262144
41 | hard: 262144
42 | networks:
43 | - app-network
44 | kafka:
45 | image: bitnami/kafka:latest
46 | ports:
47 | - 9092:9092
48 | - 9093:9093
49 | - 9094:9094
50 | deploy:
51 | resources:
52 | limits:
53 | cpus: '4'
54 | memory: 4096M
55 | environment:
56 | - KAFKA_CFG_NODE_ID=0
57 | - KAFKA_CFG_PROCESS_ROLES=controller,broker
58 | - KAFKA_CFG_LISTENERS=PLAINTEXT://:9092,CONTROLLER://:9093,EXTERNAL://:9094
59 | - KAFKA_CFG_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092,EXTERNAL://localhost:9094
60 | - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,EXTERNAL:PLAINTEXT
61 | - KAFKA_CFG_CONTROLLER_QUORUM_VOTERS=0@kafka:9093
62 | - KAFKA_CFG_CONTROLLER_LISTENER_NAMES=CONTROLLER
63 | volumes:
64 | - kafka-data:/bitnami/kafka
65 | networks:
66 | - app-network
67 | volumes:
68 | clickhouse-data:
69 | kafka-data:
70 | networks:
71 | app-network:
72 |
--------------------------------------------------------------------------------
/apps/trench/nest-cli.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://json.schemastore.org/nest-cli",
3 | "collection": "@nestjs/schematics",
4 | "sourceRoot": "src",
5 | "compilerOptions": {
6 | "assets": [{ "include": "**/*.sql", "watchAssets": true }]
7 | }
8 | }
9 |
--------------------------------------------------------------------------------
/apps/trench/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "trench",
3 | "version": "0.0.1",
4 | "description": "",
5 | "author": "",
6 | "license": "MIT",
7 | "private": true,
8 | "scripts": {
9 | "prebuild": "rimraf dist",
10 | "build": "nest build",
11 | "format": "prettier --write \"src/**/*.ts\" \"test/**/*.ts\"",
12 | "start": "nest start",
13 | "dev": "docker-compose -f docker-compose.yml -f docker-compose.dev.yml up --build --force-recreate --renew-anon-volumes",
14 | "dev:build": "docker-compose -f docker-compose.yml -f docker-compose.dev.yml build",
15 | "start:prod": "docker-compose -f docker-compose.yml up --build --force-recreate --renew-anon-volumes",
16 | "start:dev": "nest start --watch",
17 | "start:debug": "nest start --debug 0.0.0.0:9229 --watch",
18 | "lint": "eslint \"{src,apps,libs,test}/**/*.ts\" --fix",
19 | "test": "jest \"test/(?!e2e/).*\\.test\\.ts$\"",
20 | "test:e2e": "jest",
21 | "test:watch": "jest --watch",
22 | "test:cov": "jest --coverage",
23 | "test:debug": "node --inspect-brk -r tsconfig-paths/register -r ts-node/register node_modules/.bin/jest --runInBand"
24 | },
25 | "dependencies": {
26 | "@clickhouse/client": "^1.6.0",
27 | "@fastify/static": "^7.0.1",
28 | "@nestjs/axios": "3.0.3",
29 | "@nestjs/cache-manager": "^2.2.2",
30 | "@nestjs/common": "^10.4.4",
31 | "@nestjs/config": "^3.2.3",
32 | "@nestjs/core": "^10.4.4",
33 | "@nestjs/mercurius": "^12.2.0",
34 | "@nestjs/platform-express": "^10.4.4",
35 | "@nestjs/platform-fastify": "^10.4.4",
36 | "@nestjs/schedule": "^4.1.1",
37 | "@nestjs/swagger": "^7.4.2",
38 | "cache-manager": "^5.7.6",
39 | "kafkajs": "^2.2.4",
40 | "redis": "^4.7.0",
41 | "reflect-metadata": "^0.2.2",
42 | "rimraf": "^6.0.1",
43 | "rxjs": "^7.8.1",
44 | "semver": "^7.6.3",
45 | "typeorm": "^0.3.20",
46 | "uuid": "^10.0.0"
47 | },
48 | "devDependencies": {
49 | "@nestjs/cli": "^10.4.5",
50 | "@nestjs/schematics": "^10.1.4",
51 | "@nestjs/testing": "^10.4.4",
52 | "@types/express": "^5.0.0",
53 | "@types/jest": "29.5.13",
54 | "@types/lodash": "^4.17.9",
55 | "@types/node": "^22.7.4",
56 | "@types/supertest": "^6.0.2",
57 | "@typescript-eslint/eslint-plugin": "^8.8.0",
58 | "@typescript-eslint/parser": "^8.8.0",
59 | "eslint": "^9.11.1",
60 | "eslint-config-prettier": "^9.1.0",
61 | "eslint-plugin-prettier": "^5.2.1",
62 | "jest": "29.7.0",
63 | "node-fetch": "^3.3.2",
64 | "prettier": "^3.3.3",
65 | "source-map-support": "^0.5.21",
66 | "supertest": "^7.0.0",
67 | "ts-jest": "29.2.5",
68 | "ts-loader": "^9.5.1",
69 | "ts-node": "^10.9.2",
70 | "tsconfig-paths": "4.2.0",
71 | "typescript": "^5.6.2"
72 | },
73 | "jest": {
74 | "rootDir": ".",
75 | "moduleFileExtensions": [
76 | "js",
77 | "json",
78 | "ts",
79 | "tsx"
80 | ],
81 | "transform": {
82 | "^.+\\.(t|j)sx*$": "ts-jest"
83 | },
84 | "collectCoverageFrom": [
85 | "**/*.(t|j)s"
86 | ],
87 | "coverageDirectory": "../coverage",
88 | "testEnvironment": "node",
89 | "testTimeout": 30000,
90 | "maxWorkers": 1
91 | }
92 | }
93 |
--------------------------------------------------------------------------------
/apps/trench/scripts/create-sample-data.js:
--------------------------------------------------------------------------------
1 | const { Worker, isMainThread, parentPort, workerData } = require('worker_threads')
2 | const crypto = require('crypto')
3 |
// Configuration for the load-generation script.
// Requires Node 18+ for the global fetch() used below.
const TOTAL_CALLS = 5000000 // Total number of calls to make
const PARALLEL_THREADS = 10 // Number of parallel threads
// Each worker performs an equal share; ceil means the workers may
// collectively perform slightly MORE than TOTAL_CALLS.
const CALLS_PER_THREAD = Math.ceil(TOTAL_CALLS / PARALLEL_THREADS)
const API_URL = 'http://localhost:4001/events'
// Sample pool for the randomized `country` property.
const COUNTRIES = [
  'Denmark',
  'USA',
  'Canada',
  'Germany',
  'France',
  'India',
  'Australia',
  'Spain',
  'Brazil',
  'Mexico',
  'Japan',
  'South Korea',
  'Italy',
  'South Africa',
  'Sweden',
  'Norway',
  'Finland',
  'Netherlands',
  'New Zealand',
]
// Sample pool for the randomized event name.
const EVENTS = [
  'ConnectedAccount',
  'LoggedIn',
  'ViewedPage',
  'PurchasedItem',
  'ClickedButton',
  'SignedUp',
  'AddedToCart',
  'Searched',
  'UpdatedProfile',
  'LoggedOut',
]

// Function to generate random data.
// getRandomString(length) returns 2*length hex characters
// (crypto.randomBytes yields `length` bytes, hex-encoded).
const getRandomItem = (array) => array[Math.floor(Math.random() * array.length)]
const getRandomString = (length) => crypto.randomBytes(length).toString('hex')
46 |
// Makes a single POST /events call with randomized user/event/country data.
// fetch() only rejects on network-level errors, so HTTP-level failures
// (4xx/5xx) are detected explicitly via response.ok and logged.
async function makeApiCall() {
  try {
    const userId = getRandomString(5)
    const country = getRandomItem(COUNTRIES)
    const event = getRandomItem(EVENTS)

    const data = {
      events: [
        {
          userId,
          event,
          properties: {
            totalAccounts: Math.floor(Math.random() * 10) + 1,
            country,
          },
          type: 'track',
        },
      ],
    }

    const response = await fetch(API_URL, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(data),
    })
    if (!response.ok) {
      // Previously HTTP errors were silently ignored; surface them so a
      // misconfigured server doesn't look like a successful load test.
      console.error(`API call failed with status ${response.status}`)
    }
  } catch (error) {
    console.error('Error making API call:', error.message)
  }
}
79 |
// Worker entry point: performs `calls` sequential API calls, then notifies
// the main thread that this worker is finished.
async function workerFunction(calls) {
  let remaining = calls
  while (remaining > 0) {
    await makeApiCall()
    remaining -= 1
  }
  parentPort.postMessage(`Worker completed ${calls} calls`)
}
87 |
// Main thread: spawn PARALLEL_THREADS workers, each doing CALLS_PER_THREAD
// calls, and print aggregate throughput once all work is accounted for.
if (isMainThread) {
  const startTime = Date.now()
  console.log(`Start time: ${new Date(startTime).toISOString()}`)

  let completedCalls = 0

  for (let i = 0; i < PARALLEL_THREADS; i++) {
    const worker = new Worker(__filename, {
      workerData: CALLS_PER_THREAD,
    })

    worker.on('message', (msg) => {
      console.log(msg)
      completedCalls += CALLS_PER_THREAD
      // Fix: use >= rather than ===. CALLS_PER_THREAD is Math.ceil'd, so
      // when TOTAL_CALLS is not divisible by PARALLEL_THREADS the sum
      // overshoots TOTAL_CALLS and a strict-equality check never fires.
      if (completedCalls >= TOTAL_CALLS) {
        const endTime = Date.now()
        console.log(`End time: ${new Date(endTime).toISOString()}`)
        const durationInSeconds = (endTime - startTime) / 1000
        const averageQPS = TOTAL_CALLS / durationInSeconds
        console.log(`Average QPS: ${averageQPS.toFixed(2)}`)
        console.log(`Total time: ${durationInSeconds} seconds`)
        console.log(`Total records inserted: ${TOTAL_CALLS}`)
      }
    })

    worker.on('error', (err) => {
      console.error(`Worker error: ${err}`)
    })
  }
} else {
  workerFunction(workerData)
}
121 |
--------------------------------------------------------------------------------
/apps/trench/src/api-keys/api-keys.interface.ts:
--------------------------------------------------------------------------------
// Access tier of an API key. Each tier is enforced by a matching guard in
// src/middlewares (e.g. PublicApiGuard guards event ingestion and
// PrivateApiGuard guards query endpoints).
export type ApiKeyType = 'public' | 'private' | 'admin'
2 |
--------------------------------------------------------------------------------
/apps/trench/src/api-keys/api-keys.module.ts:
--------------------------------------------------------------------------------
1 | import { Module } from '@nestjs/common'
2 | import { ApiKeysService } from 'src/api-keys/api-keys.service'
3 | import { ClickHouseModule } from 'src/services/data/click-house/click-house.module'
4 | import { CacheModule } from '@nestjs/cache-manager'
5 |
// Wires up ApiKeysService with its dependencies: ClickHouse for key storage
// and an in-memory cache so auth lookups avoid hitting the database on
// every request.
@Module({
  imports: [
    ClickHouseModule,
    CacheModule.register({
      ttl: 1000 * 60 * 2, // 2 minutes
      max: 100000,
    }),
  ],
  controllers: [],
  providers: [ApiKeysService],
  exports: [ApiKeysService],
})
export class ApiKeysModule {}
19 |
--------------------------------------------------------------------------------
/apps/trench/src/api-keys/api-keys.service.ts:
--------------------------------------------------------------------------------
1 | import { Injectable } from '@nestjs/common'
2 | import { ClickHouseService } from 'src/services/data/click-house/click-house.service'
3 | import { escapeString } from 'src/services/data/click-house/click-house.util'
4 | import { Cache } from '@nestjs/cache-manager'
5 | import { Inject } from '@nestjs/common'
6 | import { v4 as uuidv4 } from 'uuid'
7 | import { ApiKeyType } from 'src/api-keys/api-keys.interface'
8 | import { Workspace } from 'src/workspaces/workspaces.interface'
9 | import { mapRowToWorkspace } from 'src/workspaces/workspaces.util'
10 | const IS_VALID_API_KEY_STRING = 'is-valid'
11 | const IS_INVALID_API_KEY_STRING = 'is-invalid'
12 | @Injectable()
13 | export class ApiKeysService {
14 | constructor(
15 | private readonly clickhouseService: ClickHouseService,
16 | @Inject(Cache) private cacheManager: Cache
17 | ) {}
18 |
19 | async validateApiKey(apiKey: string, type: ApiKeyType): Promise {
20 | const cacheKey = `validate_api_key:${apiKey}:${type}`
21 | const cached = await this.cacheManager.get(cacheKey)
22 |
23 | if (cached !== undefined) {
24 | return cached
25 | }
26 |
27 | const result = await this.clickhouseService.queryResults(`
28 | SELECT COUNT(*) as count
29 | FROM api_keys
30 | WHERE key = '${escapeString(apiKey)}' AND type = '${escapeString(type)}'
31 | `)
32 | const isValid = result[0].count > 0 ? IS_VALID_API_KEY_STRING : IS_INVALID_API_KEY_STRING
33 |
34 | await this.cacheManager.set(cacheKey, isValid, 120000) // Cache for 2 minutes
35 | return isValid === IS_VALID_API_KEY_STRING
36 | }
37 |
38 | async createApiKey(workspaceId: string, type: ApiKeyType): Promise {
39 | const apiKey = `${type}-${uuidv4()}`
40 |
41 | await this.clickhouseService.insert('api_keys', [
42 | {
43 | workspace_id: workspaceId,
44 | key: apiKey,
45 | type,
46 | },
47 | ])
48 |
49 | return apiKey
50 | }
51 |
52 | async getWorkspaceFromApiKey(apiKey: string, type: ApiKeyType): Promise {
53 | const cacheKey = `workspace_id:${apiKey}:${type}`
54 | const cached = await this.cacheManager.get(cacheKey)
55 |
56 | if (cached !== undefined) {
57 | return cached
58 | }
59 |
60 | const result = await this.clickhouseService.queryResults(`
61 | SELECT workspace_id
62 | FROM api_keys
63 | WHERE key = '${escapeString(apiKey)}' AND type = '${escapeString(type)}'
64 | LIMIT 1
65 | `)
66 | const workspaceId = result.length > 0 ? result[0].workspace_id : null
67 | if (!workspaceId) {
68 | return null
69 | }
70 | const workspaceResult = await this.clickhouseService.queryResults(`
71 | SELECT * FROM workspaces WHERE workspace_id = '${workspaceId}'
72 | `)
73 | const workspace = mapRowToWorkspace(workspaceResult[0])
74 |
75 | await this.cacheManager.set(cacheKey, workspace, 120000) // Cache for 2 minutes
76 | return workspace
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/apps/trench/src/app.controller.ts:
--------------------------------------------------------------------------------
1 | import { Controller, Get } from '@nestjs/common'
2 | import { AppService } from './app.service'
3 |
// Root controller: exposes an unauthenticated liveness endpoint at "/".
@Controller()
export class AppController {
  constructor(private readonly appService: AppService) {}

  // Plain-text health/liveness response.
  @Get()
  root() {
    return 'Trench server is running'
  }
}
13 |
--------------------------------------------------------------------------------
/apps/trench/src/app.module.ts:
--------------------------------------------------------------------------------
1 | import { Module } from '@nestjs/common'
2 | import { AppController } from './app.controller'
3 | import { AppService } from './app.service'
4 | import { EventsModule } from './events/events.module'
5 | import { ConfigModule } from '@nestjs/config'
6 | import { ClickHouseModule } from './services/data/click-house/click-house.module'
7 | import { ApiKeysModule } from './api-keys/api-keys.module'
8 | import { ApiKeysService } from './api-keys/api-keys.service'
9 | import { QueriesModule } from './queries/queries.module'
10 | import { WebhooksModule } from './webhooks/webhooks.module'
11 | import { CacheModule } from '@nestjs/cache-manager'
12 | import { WorkspacesModule } from './workspaces/workspaces.module'
13 | import { BootstrapModule } from './services/data/bootstrap/bootstrap.module'
14 |
// Application root module: loads .env configuration, registers a shared
// in-memory cache, and composes every feature module.
@Module({
  imports: [
    ConfigModule.forRoot(),
    CacheModule.register({
      ttl: 1000 * 60 * 10, // 10 minutes (in milliseconds)
      max: 100000, // maximum number of items in cache
    }),
    EventsModule,
    ClickHouseModule,
    QueriesModule,
    WebhooksModule,
    WorkspacesModule,
    ApiKeysModule,
    BootstrapModule,
  ],
  controllers: [AppController],
  providers: [AppService],
})
export class AppModule {}
34 |
--------------------------------------------------------------------------------
/apps/trench/src/app.service.ts:
--------------------------------------------------------------------------------
1 | import { Injectable } from '@nestjs/common'
2 |
3 | @Injectable()
4 | export class AppService {
5 | constructor() {}
6 | }
7 |
--------------------------------------------------------------------------------
/apps/trench/src/appCluster.service.ts:
--------------------------------------------------------------------------------
1 | import * as _cluster from 'cluster'
2 | import * as os from 'os'
3 | import { Injectable, Logger } from '@nestjs/common'
4 |
5 | const cluster = _cluster as unknown as _cluster.Cluster // typings fix
6 |
7 | const numCPUs = os.cpus().length
8 |
9 | @Injectable()
10 | export class AppClusterService {
11 | private static readonly logger = new Logger(AppClusterService.name)
12 | static clusterize(callback: Function): void {
13 | if (cluster.isPrimary) {
14 | this.logger.log(`Primary server started on ${process.pid} (using ${numCPUs} processes).`)
15 | for (let i = 0; i < numCPUs; i++) {
16 | cluster.fork()
17 | }
18 | cluster.on('exit', (worker, code, signal) => {
19 | this.logger.log(`Worker ${worker.process.pid} died. Restarting`)
20 | cluster.fork()
21 | })
22 | } else {
23 | const nodeNumber = cluster.worker.id
24 | this.logger.log(`Cluster server started on ${process.pid}`)
25 | callback(nodeNumber)
26 | }
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/apps/trench/src/common/constants.ts:
--------------------------------------------------------------------------------
// Default Kafka settings — presumably fallbacks when the corresponding
// environment variables are unset; confirm against the Kafka service config.
export const DEFAULT_KAFKA_TOPIC = 'trench_events'
export const DEFAULT_KAFKA_BROKERS = 'kafka:9092'
export const DEFAULT_KAFKA_PARTITIONS = 1
export const DEFAULT_KAFKA_CLIENT_ID = 'trench'
// Default workspace identity. The fixed all-zero UUID keeps the default
// workspace id stable across restarts/installs.
export const DEFAULT_WORKSPACE_NAME = 'Default Workspace'
export const DEFAULT_WORKSPACE_ID = '00000000-0000-0000-0000-000000000000'
7 |
--------------------------------------------------------------------------------
/apps/trench/src/common/crypto.ts:
--------------------------------------------------------------------------------
1 | const crypto = require('crypto')
2 |
3 | export function md5(str: string) {
4 | return crypto.createHash('md5').update(str).digest('hex')
5 | }
6 |
--------------------------------------------------------------------------------
/apps/trench/src/common/models.ts:
--------------------------------------------------------------------------------
1 | import { ApiProperty } from '@nestjs/swagger'
2 |
3 | export class PaginatedResponse {
4 | results: T[]
5 | @ApiProperty({
6 | type: Number,
7 | description: 'The limit of the pagination.',
8 | nullable: true,
9 | })
10 | limit: number | null
11 | @ApiProperty({
12 | type: Number,
13 | description: 'The offset of the pagination.',
14 | nullable: true,
15 | })
16 | offset: number | null
17 | @ApiProperty({
18 | type: Number,
19 | description: 'The total number of results. If `null`, the total is unknown.',
20 | nullable: true,
21 | })
22 | total: number | null
23 | }
24 |
--------------------------------------------------------------------------------
/apps/trench/src/common/request.ts:
--------------------------------------------------------------------------------
1 | import { Request } from '@nestjs/common'
2 | import { Workspace } from 'src/workspaces/workspaces.interface'
3 |
4 | export function getWorkspace(req: Request): Workspace {
5 | const workspace = (req as any).workspace
6 | if (!workspace) {
7 | throw new Error('Workspace not found in request. Ensure request is authenticated.')
8 | }
9 | return workspace
10 | }
11 |
--------------------------------------------------------------------------------
/apps/trench/src/common/utils.ts:
--------------------------------------------------------------------------------
1 | export function flatten(data: any): Record {
2 | const result: Record = {}
3 |
4 | function recurse(cur: any, prop: string) {
5 | if (Object(cur) !== cur) {
6 | result[prop] = cur
7 | } else if (cur instanceof Date) {
8 | result[prop] = cur.toISOString()
9 | } else if (Array.isArray(cur)) {
10 | for (let i = 0; i < cur.length; i++) {
11 | recurse(cur[i], prop + '_' + i)
12 | }
13 | if (cur.length === 0) {
14 | result[prop] = []
15 | }
16 | } else {
17 | let isEmpty = true
18 | for (const p in cur) {
19 | isEmpty = false
20 | recurse(cur[p], prop ? prop + '_' + p : p)
21 | }
22 | if (isEmpty && prop) {
23 | result[prop] = {}
24 | }
25 | }
26 | }
27 |
28 | recurse(data, '')
29 | return result
30 | }
31 |
--------------------------------------------------------------------------------
/apps/trench/src/events/events.controller.ts:
--------------------------------------------------------------------------------
1 | import { Body, Controller, Get, Post, Query, Request, UseGuards } from '@nestjs/common'
2 | import { ApiBearerAuth, ApiOperation, ApiResponse, ApiTags } from '@nestjs/swagger'
3 | import { EventsService } from 'src/events/events.service'
4 |
5 | import { EventsDTO, EventsQuery, PaginatedEventResponse } from 'src/events/events.interface'
6 | import { PublicApiGuard } from 'src/middlewares/public-api.guard'
7 | import { PrivateApiGuard } from 'src/middlewares/private-api.guard'
8 | import { getWorkspace } from 'src/common/request'
9 |
// REST endpoints for event ingestion (public key) and querying (private key).
// Guards attach the authenticated workspace to the request; handlers read it
// back via getWorkspace().
@ApiBearerAuth()
@ApiTags('events')
@Controller()
export class EventsController {
  constructor(private readonly eventsService: EventsService) {}

  // Ingestion: accepts a batch of events and returns them with the UUIDs
  // assigned during creation.
  @ApiOperation({ summary: 'Create one or more events. Requires public API key in Bearer token.' })
  @ApiResponse({
    status: 201,
    description: 'The events have been successfully created.',
  })
  @Post('/events')
  @UseGuards(PublicApiGuard)
  async createEvents(
    @Request() request: Request,
    @Body() eventDTOs: EventsDTO
  ): Promise {
    const workspace = getWorkspace(request)
    const events = await this.eventsService.createEvents(workspace, eventDTOs.events)
    return {
      results: events,
      limit: eventDTOs.events.length,
      offset: 0,
      total: events.length,
    }
  }

  // Query: filtered, paginated read of the workspace's events.
  @ApiOperation({
    summary: 'Get events based on a query. Requires private API key in Bearer token.',
  })
  @ApiResponse({
    status: 200,
    description: 'The events have been successfully retrieved.',
    type: PaginatedEventResponse,
  })
  @Get('/events')
  @UseGuards(PrivateApiGuard)
  async getEvents(
    @Request() request: Request,
    @Query() query: EventsQuery
  ): Promise {
    const workspace = getWorkspace(request)
    return this.eventsService.getEventsByQuery(workspace, query)
  }
}
55 |
--------------------------------------------------------------------------------
/apps/trench/src/events/events.dao.ts:
--------------------------------------------------------------------------------
1 | import { BadRequestException, Injectable } from '@nestjs/common'
2 | import { ClickHouseService } from 'src/services/data/click-house/click-house.service'
3 | import { escapeString, formatToClickhouseDate } from 'src/services/data/click-house/click-house.util'
4 | import { Event, EventDTO, EventsQuery, PaginatedEventResponse } from 'src/events/events.interface'
5 | import { KafkaService } from 'src/services/data/kafka/kafka.service'
6 | import { KafkaEventWithUUID } from 'src/services/data/kafka/kafka.interface'
7 | import { v4 as uuidv4 } from 'uuid'
8 | import { mapRowToEvent } from 'src/events/events.util'
9 | import { Workspace } from 'src/workspaces/workspaces.interface'
10 | import { getKafkaTopicFromWorkspace } from 'src/services/data/kafka/kafka.util'
11 | import { isReadOnlyQuery } from 'src/queries/queries.util'
12 |
13 | @Injectable()
14 | export class EventsDao {
15 | constructor(
16 | private readonly clickhouse: ClickHouseService,
17 | private kafkaService: KafkaService
18 | ) {}
19 |
20 | async getEventsByUUIDs(workspace: Workspace, uuids: string[]): Promise {
21 | const escapedUUIDs = uuids.map((uuid) => `'${escapeString(uuid)}'`).join(', ')
22 | const query = `SELECT * FROM events WHERE uuid IN (${escapedUUIDs})`
23 | const result = await this.clickhouse.queryResults(query, workspace.databaseName)
24 | return result.map((row: any) => mapRowToEvent(row))
25 | }
26 |
27 | async getEventsByQuery(
28 | workspace: Workspace,
29 | query: EventsQuery
30 | ): Promise {
31 | const {
32 | uuid,
33 | event,
34 | userId,
35 | groupId,
36 | anonymousId,
37 | instanceId,
38 | properties,
39 | traits,
40 | context,
41 | startDate,
42 | endDate,
43 | limit,
44 | offset,
45 | orderByField,
46 | orderByDirection,
47 | } = query
48 |
49 | const maxRecords = Math.min(limit ?? 1000, 1000)
50 |
51 | let conditions = []
52 |
53 | if (event) {
54 | conditions.push(`event = '${escapeString(event)}'`)
55 | }
56 | if (userId) {
57 | conditions.push(`user_id = '${escapeString(userId)}'`)
58 | }
59 | if (groupId) {
60 | conditions.push(`group_id = '${escapeString(groupId)}'`)
61 | }
62 | if (anonymousId) {
63 | conditions.push(`anonymous_id = '${escapeString(anonymousId)}'`)
64 | }
65 | if (instanceId) {
66 | conditions.push(`instance_id = '${escapeString(instanceId)}'`)
67 | }
68 | if (properties) {
69 | for (const [key, value] of Object.entries(properties)) {
70 | conditions.push(`JSONExtract(properties, '${key}', 'String') = '${escapeString(value)}'`)
71 | }
72 | }
73 | if (traits) {
74 | for (const [key, value] of Object.entries(traits)) {
75 | conditions.push(`JSONExtract(traits, '${key}', 'String') = '${escapeString(value)}'`)
76 | }
77 | }
78 | if (context) {
79 | for (const [key, value] of Object.entries(context)) {
80 | conditions.push(`JSONExtract(context, '${key}', 'String') = '${escapeString(value)}'`)
81 | }
82 | }
83 | if (startDate) {
84 | conditions.push(`timestamp >= '${escapeString(formatToClickhouseDate(new Date(startDate)))}'`)
85 | }
86 | if (endDate) {
87 | conditions.push(`timestamp <= '${escapeString(formatToClickhouseDate(new Date(endDate)))}'`)
88 | }
89 | if (uuid) {
90 | conditions.push(`uuid = '${escapeString(uuid)}'`)
91 | }
92 |
93 | const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : ''
94 |
95 | const orderByClause =
96 | orderByField && orderByDirection
97 | ? `ORDER BY ${escapeString(orderByField)} ${escapeString(orderByDirection)}`
98 | : 'ORDER BY timestamp DESC'
99 | const limitClause = `LIMIT ${maxRecords}`
100 | const offsetClause = offset ? `OFFSET ${offset}` : ''
101 |
102 | const clickhouseQuery = `SELECT * FROM events ${whereClause} ${orderByClause} ${limitClause} ${offsetClause}`
103 | if (!isReadOnlyQuery(clickhouseQuery)) {
104 | throw new BadRequestException('The provided query is not read-only')
105 | }
106 | const totalQuery = `SELECT COUNT(*) AS count FROM events ${whereClause}`
107 |
108 | try {
109 | const [result, total] = await Promise.all([
110 | this.clickhouse.queryResults(clickhouseQuery, workspace.databaseName),
111 | this.clickhouse.queryResults(totalQuery, workspace.databaseName),
112 | ])
113 | const results = result.map((row: any) => mapRowToEvent(row))
114 | const totalCount = +total[0].count
115 |
116 | return {
117 | results: results,
118 | limit: maxRecords,
119 | offset: +offset || 0,
120 | total: totalCount,
121 | }
122 | } catch (error) {
123 | throw new BadRequestException(`Error querying events: ${error.message}`)
124 | }
125 | }
126 |
127 | async createEvents(workspace: Workspace, eventDTOs: EventDTO[]): Promise {
128 | const records: KafkaEventWithUUID[] = eventDTOs.map((eventDTO) => {
129 | const uuid = uuidv4()
130 | const row = {
131 | instance_id: eventDTO.instanceId,
132 | uuid,
133 | event: eventDTO.event,
134 | type: eventDTO.type,
135 | user_id: eventDTO.userId,
136 | group_id: eventDTO.groupId,
137 | anonymous_id: eventDTO.anonymousId,
138 | properties: eventDTO.properties,
139 | traits: eventDTO.traits,
140 | context: eventDTO.context,
141 | timestamp: eventDTO.timestamp ? new Date(eventDTO.timestamp) : new Date(),
142 | }
143 | return {
144 | uuid,
145 | value: row,
146 | }
147 | })
148 |
149 | this.kafkaService.produceEvents(getKafkaTopicFromWorkspace(workspace), records)
150 |
151 | return records.map((record) => mapRowToEvent(record.value))
152 | }
153 | }
154 |
--------------------------------------------------------------------------------
/apps/trench/src/events/events.module.ts:
--------------------------------------------------------------------------------
1 | import { Module } from '@nestjs/common'
2 | import { EventsService } from 'src/events/events.service'
3 | import { EventsController } from 'src/events/events.controller'
4 | import { KafkaModule } from 'src/services/data/kafka/kafka.module'
5 | import { ApiKeysModule } from 'src/api-keys/api-keys.module'
6 | import { EventsDao } from 'src/events/events.dao'
7 | import { ClickHouseModule } from 'src/services/data/click-house/click-house.module'
8 |
// Events feature module: controller + service + DAO, wired to Kafka for
// writes, ClickHouse for reads, and ApiKeys for the route guards.
@Module({
  imports: [KafkaModule, ApiKeysModule, ClickHouseModule],
  controllers: [EventsController],
  providers: [EventsService, EventsDao],
  exports: [EventsService],
})
export class EventsModule {}
16 |
--------------------------------------------------------------------------------
/apps/trench/src/events/events.service.ts:
--------------------------------------------------------------------------------
1 | import { Injectable } from '@nestjs/common'
2 | import { EventDTO, EventsQuery, PaginatedEventResponse } from 'src/events/events.interface'
3 | import { EventsDao } from 'src/events/events.dao'
4 | import { Event } from 'src/events/events.interface'
5 | import { Workspace } from 'src/workspaces/workspaces.interface'
6 |
7 | @Injectable()
8 | export class EventsService {
9 | constructor(private eventsDao: EventsDao) {}
10 |
11 | async createEvents(workspace: Workspace, eventDTOs: EventDTO[]): Promise {
12 | // validate event types
13 | const validEventTypes = ['page', 'track', 'identify', 'group']
14 | eventDTOs.forEach((eventDTO) => {
15 | if (!validEventTypes.includes(eventDTO.type)) {
16 | throw new Error(
17 | `Invalid event type: ${eventDTO.type}. Valid types are ${validEventTypes.join(', ')}.`
18 | )
19 | }
20 | })
21 |
22 | return this.eventsDao.createEvents(workspace, eventDTOs)
23 | }
24 |
25 | async getEventsByUUIDs(workspace: Workspace, uuids: string[]): Promise {
26 | return this.eventsDao.getEventsByUUIDs(workspace, uuids)
27 | }
28 |
29 | async getEventsByQuery(
30 | workspace: Workspace,
31 | query: EventsQuery
32 | ): Promise {
33 | return this.eventsDao.getEventsByQuery(workspace, query)
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/apps/trench/src/events/events.util.ts:
--------------------------------------------------------------------------------
1 | import { Event } from './events.interface'
2 |
3 | export function mapRowToEvent(row: any): Event {
4 | return {
5 | uuid: row.uuid,
6 | type: row.type,
7 | event: row.event,
8 | userId: row.user_id,
9 | groupId: row.group_id ? row.group_id : undefined,
10 | anonymousId: row.anonymous_id ? row.anonymous_id : undefined,
11 | instanceId: row.instance_id ? row.instance_id : undefined,
12 | properties: row.properties
13 | ? typeof row.properties === 'string'
14 | ? JSON.parse(row.properties)
15 | : row.properties
16 | : undefined,
17 | traits: row.traits
18 | ? typeof row.traits === 'string'
19 | ? JSON.parse(row.traits)
20 | : row.traits
21 | : undefined,
22 | context: row.context
23 | ? typeof row.context === 'string'
24 | ? JSON.parse(row.context)
25 | : row.context
26 | : undefined,
27 | timestamp: new Date(row.timestamp),
28 | parsedAt: new Date(row.parsed_at),
29 | }
30 | }
31 |
--------------------------------------------------------------------------------
/apps/trench/src/main.ts:
--------------------------------------------------------------------------------
1 | import { NestFactory } from '@nestjs/core'
2 | import { FastifyAdapter, NestFastifyApplication } from '@nestjs/platform-fastify'
3 | import { AppModule } from './app.module'
4 | import * as fs from 'fs'
5 | import * as process from 'process'
6 | import { DocumentBuilder, SwaggerModule } from '@nestjs/swagger'
7 | import { AppClusterService } from './appCluster.service'
8 | import { KafkaService } from './services/data/kafka/kafka.service'
9 | import { ClickHouseService } from './services/data/click-house/click-house.service'
10 | import { BootstrapService } from './services/data/bootstrap/bootstrap.service'
11 | import { Logger } from '@nestjs/common'
12 | import * as os from 'os'
13 |
const logger = new Logger('Main')

// CORS policy applied to every route. Wide-open origin ('*') —
// NOTE(review): combined with credentials:true this is permissive; confirm
// this is intended for an analytics ingestion endpoint.
const CORS_OPTIONS = {
  origin: '*',
  allowedHeaders: [
    'Access-Control-Allow-Origin',
    'Origin',
    'X-Requested-With',
    'Accept',
    'Content-Type',
    'Authorization',
  ],
  exposedHeaders: ['Authorization'],
  credentials: true,
  methods: ['GET', 'PUT', 'OPTIONS', 'POST', 'DELETE'],
}
30 |
// Starts one HTTP(S) server instance. `nodeNumber` is the 1-based cluster
// worker id; node 1 additionally runs the data-store bootstrap.
async function bootstrap(nodeNumber: number) {
  logger.log(`Starting node ${nodeNumber}`)

  let httpsOptions

  // TLS is opt-in via API_HTTPS=true; certificate paths are fixed to the
  // container layout (/app/certs) and a missing file throws here.
  if (process.env.API_HTTPS === 'true') {
    logger.log('Using https')
    httpsOptions = {
      key: fs.readFileSync('/app/certs/server.key'),
      cert: fs.readFileSync('/app/certs/server.crt'),
    }
  } else {
    logger.log('Using http')
  }

  const fastifyAdapter = new FastifyAdapter({ https: httpsOptions, logger: true })
  fastifyAdapter.enableCors(CORS_OPTIONS)
  const app = await NestFactory.create(AppModule, fastifyAdapter)

  // Swagger UI (/api) and the spec file are generated in development only.
  if (process.env.NODE_ENV === 'development') {
    const options = new DocumentBuilder().setTitle('trench API').setVersion('1.0').build()
    const document = SwaggerModule.createDocument(app, options)

    fs.writeFileSync('./swagger-spec.json', JSON.stringify(document))
    SwaggerModule.setup('/api', app, document)
  }

  const port = process.env.API_PORT ?? 4000

  // Run the bootstrap (data-store setup) on worker 1 only — presumably so
  // it executes once per deployment rather than once per worker; confirm.
  if (nodeNumber === 1) {
    const bootstrapService = app.get(BootstrapService)
    await bootstrapService.bootstrap()
  }

  logger.log(`Listening on port ${port}`)
  // Bind on all interfaces so the server is reachable from outside the container.
  await app.listen(port, '0.0.0.0')
}
68 |
69 | if (process.env.NODE_ENV !== 'production' && process.env.FORCE_CLUSTER_MODE !== 'true') {
70 | logger.log('Running in single instance dev mode')
71 | bootstrap(1)
72 | } else {
73 | logger.log('Running in cluster mode with ' + os.cpus().length + ' processes')
74 | AppClusterService.clusterize(bootstrap)
75 | }
76 |
77 |
--------------------------------------------------------------------------------
/apps/trench/src/middlewares/admin-api.guard.ts:
--------------------------------------------------------------------------------
1 | import { Injectable } from '@nestjs/common'
2 | import { ApiKeysService } from 'src/api-keys/api-keys.service'
3 | import { ApiGuard } from 'src/middlewares/api.guard'
4 |
// Guard requiring an 'admin'-tier API key in the Bearer token.
@Injectable()
export class AdminApiGuard extends ApiGuard {
  constructor(apiKeysService: ApiKeysService) {
    super(apiKeysService, 'admin')
  }
}
11 |
--------------------------------------------------------------------------------
/apps/trench/src/middlewares/api.guard.ts:
--------------------------------------------------------------------------------
1 | import { CanActivate, ExecutionContext, Injectable, UnauthorizedException } from '@nestjs/common'
2 | import { Observable } from 'rxjs'
3 | import { ApiKeysService } from 'src/api-keys/api-keys.service'
4 | import { ApiKeyType } from 'src/api-keys/api-keys.interface'
5 |
6 | export class ApiGuard implements CanActivate {
7 | constructor(
8 | protected readonly apiKeysService: ApiKeysService,
9 | protected readonly apiKeyType: ApiKeyType
10 | ) {}
11 |
12 | canActivate(context: ExecutionContext): boolean | Promise | Observable {
13 | return this.validateRequest(context)
14 | }
15 |
16 | async validateRequest(context: ExecutionContext): Promise {
17 | const req = context.switchToHttp().getRequest()
18 | if (!req.headers.authorization) {
19 | throw new UnauthorizedException(
20 | `Missing Authorization header. Add "Authorization: Bearer " with your ${this.apiKeyType} API key to your request.`
21 | )
22 | }
23 |
24 | const apiKey = req.headers.authorization.replace('Bearer ', '')
25 | const workspace = await this.apiKeysService.getWorkspaceFromApiKey(apiKey, this.apiKeyType)
26 |
27 | if (!workspace) {
28 | throw new UnauthorizedException(`Invalid ${this.apiKeyType} API key`)
29 | }
30 |
31 | // Add workspace object to request context
32 | req.workspace = workspace
33 |
34 | return true
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/apps/trench/src/middlewares/private-api.guard.ts:
--------------------------------------------------------------------------------
1 | import { Injectable } from '@nestjs/common'
2 | import { ApiKeysService } from 'src/api-keys/api-keys.service'
3 | import { ApiGuard } from 'src/middlewares/api.guard'
4 |
5 | @Injectable()
6 | export class PrivateApiGuard extends ApiGuard {
7 | constructor(apiKeysService: ApiKeysService) {
8 | super(apiKeysService, 'private')
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/apps/trench/src/middlewares/public-api.guard.ts:
--------------------------------------------------------------------------------
1 | import { Injectable } from '@nestjs/common'
2 | import { ApiKeysService } from 'src/api-keys/api-keys.service'
3 | import { ApiGuard } from 'src/middlewares/api.guard'
4 |
5 | @Injectable()
6 | export class PublicApiGuard extends ApiGuard {
7 | constructor(apiKeysService: ApiKeysService) {
8 | super(apiKeysService, 'public')
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/apps/trench/src/queries/queries.controller.ts:
--------------------------------------------------------------------------------
1 | import { Controller, Post, Body, HttpException, HttpStatus, UseGuards, Req } from '@nestjs/common'
2 | import { QueriesService } from 'src/queries/queries.service'
3 | import { PaginatedQueryResponse, QueriesDTO } from 'src/queries/queries.interface'
4 | import { PrivateApiGuard } from 'src/middlewares/private-api.guard'
5 | import { PaginatedResponse } from 'src/common/models'
6 | import { ApiOperation, ApiResponse } from '@nestjs/swagger'
7 | import { getWorkspace } from 'src/common/request'
8 |
9 | @Controller('queries')
10 | @UseGuards(PrivateApiGuard)
11 | export class QueriesController {
12 | constructor(private readonly queriesService: QueriesService) {}
13 |
14 | @ApiOperation({ summary: 'Execute queries via SQL. Requires private API key in Bearer token.' })
15 | @ApiResponse({
16 | status: 200,
17 | description: 'The queries have been successfully executed.',
18 | type: PaginatedQueryResponse,
19 | })
20 | @Post()
21 | async executeQueries(
22 | @Body() queriesDto: QueriesDTO,
23 | @Req() req: Request
24 | ): Promise> {
25 | try {
26 | const workspace = getWorkspace(req)
27 | const results = await this.queriesService.sendQueries(workspace, queriesDto)
28 | return {
29 | results,
30 | limit: 0,
31 | offset: 0,
32 | total: queriesDto.queries.length,
33 | }
34 | } catch (error) {
35 | throw new HttpException(error.message, HttpStatus.BAD_REQUEST)
36 | }
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/apps/trench/src/queries/queries.interface.ts:
--------------------------------------------------------------------------------
1 | import { ApiProperty } from '@nestjs/swagger'
2 | import { PaginatedResponse } from 'src/common/models'
3 |
// Request body for POST /queries: a batch of SQL statements to execute.
export class QueriesDTO {
  @ApiProperty({
    type: [String],
    description: 'The queries to execute.',
    example: ['SELECT COUNT(*) FROM events WHERE event = "UserSignedUp"'],
  })
  // Raw SQL strings; each is checked server-side to be read-only
  // (see isReadOnlyQuery in queries.util) before execution.
  queries: string[]
}
12 |
// Response body for POST /queries; extends the shared pagination envelope.
export class PaginatedQueryResponse extends PaginatedResponse {
  @ApiProperty({
    type: [Object],
    description: 'The results of the queries, returned in the same order as the queries.',
    example: [
      {
        results: [{ count: 3485241 }],
      },
    ],
  })
  // One result set per submitted query, in submission order.
  results: any[]
}
25 |
--------------------------------------------------------------------------------
/apps/trench/src/queries/queries.module.ts:
--------------------------------------------------------------------------------
1 | import { Module } from '@nestjs/common'
2 | import { QueriesService } from 'src/queries/queries.service'
3 | import { QueriesController } from 'src/queries/queries.controller'
4 | import { ClickHouseService } from 'src/services/data/click-house/click-house.service'
5 | import { ClickHouseModule } from 'src/services/data/click-house/click-house.module'
6 | import { ApiKeysModule } from 'src/api-keys/api-keys.module'
7 | import { WorkspacesModule } from 'src/workspaces/workspaces.module'
8 |
// Wires the /queries endpoint: ClickHouse for execution, ApiKeys and
// Workspaces modules for the PrivateApiGuard used by the controller.
@Module({
  imports: [ClickHouseModule, ApiKeysModule, WorkspacesModule],
  controllers: [QueriesController],
  providers: [QueriesService, ClickHouseService],
})
export class QueriesModule {}
15 |
--------------------------------------------------------------------------------
/apps/trench/src/queries/queries.service.ts:
--------------------------------------------------------------------------------
1 | import { Injectable } from '@nestjs/common'
2 | import { ClickHouseService } from 'src/services/data/click-house/click-house.service'
3 | import {
4 | convertJsonKeysToCamelCase,
5 | convertObjectToArray,
6 | convertToKebabCase,
7 | isReadOnlyQuery,
8 | parseJsonFields,
9 | } from 'src/queries/queries.util'
10 | import { QueriesDTO } from 'src/queries/queries.interface'
11 | import { WorkspacesService } from 'src/workspaces/workspaces.service'
12 | import { Workspace } from 'src/workspaces/workspaces.interface'
13 |
14 | @Injectable()
15 | export class QueriesService {
16 | constructor(private readonly clickhouseService: ClickHouseService) {}
17 |
18 | async sendQueries(workspace: Workspace, queries: QueriesDTO): Promise {
19 | if (!queries.queries) {
20 | throw new Error('Request must contain a `queries` array')
21 | }
22 | // Validate that all queries are read-only
23 | for (const query of queries.queries) {
24 | if (!isReadOnlyQuery(query)) {
25 | throw new Error(
26 | `Query ${query} is not read-only. This endpoint is only for read-only queries to avoid accidentally corrupting data.`
27 | )
28 | }
29 | }
30 |
31 | const queryPromises = queries.queries.map((query) =>
32 | this.clickhouseService.queryResults(convertToKebabCase(query), workspace.databaseName)
33 | )
34 | const results = await Promise.all(queryPromises)
35 | return results.map((result) =>
36 | convertObjectToArray(parseJsonFields(convertJsonKeysToCamelCase(result)))
37 | )
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/apps/trench/src/queries/queries.util.ts:
--------------------------------------------------------------------------------
1 | export function isReadOnlyQuery(query: string): boolean {
2 | // Regular expression to match non-readonly SQL commands
3 | const nonReadOnlyCommands =
4 | /\b(INSERT|UPDATE|DELETE|CREATE|DROP|ALTER|TRUNCATE|REPLACE|MERGE|CALL|GRANT|REVOKE|LOCK|UNLOCK|system.databases|system.tables|SHOW DATABASES|SHOW SCHEMAS)\b/i
5 |
6 | // Remove string literals from the query to avoid false positives
7 | const cleanedQuery = query.replace(/'[^']*'/g, '')
8 |
9 | // Test the cleaned query against the regular expression
10 | return !nonReadOnlyCommands.test(cleanedQuery)
11 | }
12 |
13 | export function convertToKebabCase(query: string): string {
14 | return query
15 | .replaceAll(/userId/g, 'user_id')
16 | .replaceAll(/groupId/g, 'group_id')
17 | .replaceAll(/instanceId/g, 'instance_id')
18 | }
19 |
20 | export function convertJsonKeysToCamelCase(json: Record): Record {
21 | const result: Record = {}
22 |
23 | for (const key in json) {
24 | if (json.hasOwnProperty(key)) {
25 | const camelCaseKey = key.replace(/_([a-z])/g, (g) => g[1].toUpperCase())
26 | result[camelCaseKey] = json[key]
27 | }
28 | }
29 |
30 | return result
31 | }
32 |
33 | function parseJsonField(field: any): any {
34 | if (typeof field === 'string') {
35 | try {
36 | return JSON.parse(field)
37 | } catch (error) {
38 | console.error('Error parsing JSON field:', error)
39 | }
40 | }
41 | return field
42 | }
43 |
44 | export function parseJsonFields(json: Record): Record {
45 | const result: Record = { ...json }
46 |
47 | result.properties = parseJsonField(result.properties)
48 | result.context = parseJsonField(result.context)
49 |
50 | return result
51 | }
52 |
53 | export function convertObjectToArray(object: Record): any[] {
54 | const values = Object.values(object)
55 |
56 | return values.filter((value) => value !== null && value !== undefined)
57 | }
58 |
--------------------------------------------------------------------------------
/apps/trench/src/resources/migrations/v001_initial.sql:
--------------------------------------------------------------------------------
-- Kafka engine table: receives raw event payloads from the configured topic
-- as opaque JSON strings. The {kafka_*} placeholders are substituted at
-- migration time by ClickHouseService.applySubstitutions.
create table if not exists kafka_events_data_{kafka_instance_id} (
json String
) engine = Kafka settings
kafka_broker_list = '{kafka_brokers}',
kafka_topic_list = '{kafka_topic}',
kafka_group_name = 'trench-clickhouse',
kafka_format = 'JSONAsString',
kafka_num_consumers = {kafka_partitions}
;

-- Destination table for all events, partitioned per instance; the JSON
-- payload columns (properties/traits/context) are ZSTD-compressed strings.
create table if not exists events (
uuid UUID,
type String,
event String,
user_id String,
group_id String,
anonymous_id String,
instance_id String,
properties VARCHAR CODEC(ZSTD(3)),
traits VARCHAR CODEC(ZSTD(3)),
context VARCHAR CODEC(ZSTD(3)),
timestamp DateTime64(6, 'UTC'),
parsed_at DateTime64(6, 'UTC')
) engine = MergeTree()
PARTITION BY instance_id
ORDER BY (instance_id, user_id, -toUnixTimestamp(timestamp));

-- Materialized view that extracts typed columns from each raw JSON message
-- and streams them into the events table.
CREATE MATERIALIZED VIEW kafka_events_consumer_{kafka_instance_id} TO events AS
SELECT
toUUID(JSONExtractString(json, 'uuid')) AS uuid,
JSONExtractString(json, 'type') AS type,
JSONExtractString(json, 'event') AS event,
JSONExtractString(json, 'user_id') AS user_id,
JSONExtractString(json, 'group_id') AS group_id,
JSONExtractString(json, 'anonymous_id') AS anonymous_id,
JSONExtractString(json, 'instance_id') AS instance_id,
JSONExtractString(json, 'properties') AS properties,
JSONExtractString(json, 'traits') AS traits,
JSONExtractString(json, 'context') AS context,
parseDateTimeBestEffort(JSONExtractString(json, 'timestamp')) AS timestamp,
now64() AS parsed_at
FROM kafka_events_data_{kafka_instance_id};
43 |
--------------------------------------------------------------------------------
/apps/trench/src/resources/migrations/v002_webhooks.sql:
--------------------------------------------------------------------------------
-- Webhook subscriptions: each row targets a URL and filters events by
-- type/name (webhooks.dao.ts defaults both filters to ['*']).
-- Fix: removed the trailing comma after the last column definition.
CREATE TABLE IF NOT EXISTS webhooks (
uuid UUID DEFAULT generateUUIDv4(),
url String,
enable_batching Bool DEFAULT false,
created_at DateTime DEFAULT now(),
event_types Array(String),
event_names Array(String)
) ENGINE = MergeTree()
ORDER BY created_at;
10 |
--------------------------------------------------------------------------------
/apps/trench/src/resources/migrations/v003_workspaces.sql:
--------------------------------------------------------------------------------
-- Workspaces: one row per tenant. Each workspace maps to its own ClickHouse
-- database (database_name); the default workspace is seeded by
-- BootstrapService.createDefaultRecordsIfNotExist.
CREATE TABLE IF NOT EXISTS workspaces (
workspace_id UUID DEFAULT generateUUIDv4(),
name String,
database_name String,
is_default Boolean DEFAULT false,
created_at DateTime DEFAULT now()
) ENGINE = MergeTree()
ORDER BY workspace_id;

-- API keys scoped to a workspace; `type` is 'public', 'private', or 'admin'
-- (see bootstrap.service.ts and the ApiGuard subclasses).
CREATE TABLE IF NOT EXISTS api_keys (
api_key_id UUID DEFAULT generateUUIDv4(),
workspace_id UUID,
key String,
type String,
created_at DateTime DEFAULT now()
) ENGINE = MergeTree()
ORDER BY (workspace_id, api_key_id);
19 |
--------------------------------------------------------------------------------
/apps/trench/src/resources/migrations/v004_workspaces_properties.sql:
--------------------------------------------------------------------------------
-- Adds a `properties` column (stored as String) to workspaces.
ALTER TABLE workspaces
ADD COLUMN properties String;
3 |
--------------------------------------------------------------------------------
/apps/trench/src/resources/migrations/v005_webhooks_flatten.sql:
--------------------------------------------------------------------------------
-- Adds the webhook `flatten` option (read/written by webhooks.dao.ts).
ALTER TABLE webhooks
ADD COLUMN IF NOT EXISTS flatten Boolean DEFAULT false;
3 |
--------------------------------------------------------------------------------
/apps/trench/src/services/data/bootstrap/bootstrap.module.ts:
--------------------------------------------------------------------------------
1 | import { Module } from '@nestjs/common'
2 | import { BootstrapService } from 'src/services/data/bootstrap/bootstrap.service'
3 | import { KafkaModule } from 'src/services/data/kafka/kafka.module'
4 | import { ClickHouseModule } from 'src/services/data/click-house/click-house.module'
5 | import { WorkspacesModule } from 'src/workspaces/workspaces.module'
6 |
// Exposes BootstrapService, which prepares Kafka topics and ClickHouse
// schemas at startup. NOTE(review): WorkspacesModule is imported at the top
// of this file but not listed in `imports` — confirm whether it is needed.
@Module({
  imports: [ClickHouseModule, KafkaModule],
  providers: [BootstrapService],
  exports: [BootstrapService],
})
export class BootstrapModule {}
13 |
--------------------------------------------------------------------------------
/apps/trench/src/services/data/bootstrap/bootstrap.service.ts:
--------------------------------------------------------------------------------
1 | import { Injectable, Logger } from '@nestjs/common'
2 | import { ClickHouseService } from 'src/services/data/click-house/click-house.service'
3 | import { KafkaService } from 'src/services/data/kafka/kafka.service'
4 | import { DEFAULT_WORKSPACE_ID } from 'src/common/constants'
5 | import { DEFAULT_WORKSPACE_NAME } from 'src/common/constants'
6 | import { getKafkaTopicFromWorkspace } from 'src/services/data/kafka/kafka.util'
7 | import { mapRowToWorkspace } from 'src/workspaces/workspaces.util'
8 | import { Workspace } from 'src/workspaces/workspaces.interface'
9 |
@Injectable()
export class BootstrapService {
  private readonly logger = new Logger(BootstrapService.name)
  constructor(
    private readonly clickhouseService: ClickHouseService,
    private readonly kafkaService: KafkaService
  ) {}

  /**
   * Bootstraps the system: ensures the default workspace's Kafka topic,
   * ClickHouse schema, and seed records exist, then does the same for every
   * additional (non-default) workspace found in the workspaces table.
   */
  async bootstrap() {
    // This creates everything needed for the default workspace
    await this.kafkaService.createTopicIfNotExists()
    await this.clickhouseService.runMigrations()
    await this.createDefaultRecordsIfNotExist()
    // This creates and maintains the Kafka topics and tables of any additional workspaces
    const additionalWorkspacesResult = await this.clickhouseService.queryResults(`
      SELECT * FROM workspaces
    `)
    const additionalWorkspaces = additionalWorkspacesResult.map(mapRowToWorkspace)

    for (const workspace of additionalWorkspaces) {
      // The default workspace was already handled above.
      if (workspace.isDefault) {
        continue
      }
      await this.bootstrapWorkspace(workspace)
    }
  }

  /** Creates the Kafka topic and runs ClickHouse migrations for one workspace. */
  async bootstrapWorkspace(workspace: Workspace) {
    this.logger.log(`Creating topics and running migrations for workspace ${workspace.name}`)
    const kafkaTopicName = await this.kafkaService.createTopicIfNotExists(
      getKafkaTopicFromWorkspace(workspace)
    )
    await this.clickhouseService.runMigrations(workspace.databaseName, kafkaTopicName)
    this.logger.log(
      `Successfully finished creating topics and running migrations for workspace ${workspace.name}`
    )
  }

  /**
   * Seeds the default workspace row plus the API keys configured via the
   * PUBLIC_API_KEYS / PRIVATE_API_KEYS env vars (comma-separated).
   * Each private key is inserted twice: once as 'admin' and once as 'private'.
   */
  private async createDefaultRecordsIfNotExist() {
    // Check if default workspace exists
    let defaultWorkspace = await this.clickhouseService.queryResults(
      `SELECT * FROM workspaces WHERE name = '${DEFAULT_WORKSPACE_NAME}'`
    )
    if (defaultWorkspace.length === 0) {
      await this.clickhouseService.insert('workspaces', [
        {
          workspace_id: DEFAULT_WORKSPACE_ID,
          name: DEFAULT_WORKSPACE_NAME,
          is_default: true,
          database_name: process.env.CLICKHOUSE_DATABASE,
        },
      ])
    }

    // Re-read so we get the row whether it pre-existed or was just inserted.
    // NOTE(review): assumes the insert above is immediately visible to this
    // SELECT — confirm against the ClickHouse client/consistency settings.
    defaultWorkspace = await this.clickhouseService.queryResults(
      `SELECT * FROM workspaces WHERE name = '${DEFAULT_WORKSPACE_NAME}'`
    )

    const defaultWorkspaceId = defaultWorkspace[0].workspace_id

    const publicApiKeys = process.env.PUBLIC_API_KEYS?.split(',') || []
    const privateApiKeys = process.env.PRIVATE_API_KEYS?.split(',') || []

    const existingApiKeys = await this.clickhouseService.queryResults(
      `SELECT * FROM api_keys WHERE workspace_id = '${defaultWorkspaceId}'`
    )

    // Insert any configured public keys that are not already stored.
    for (const publicKey of publicApiKeys) {
      if (!existingApiKeys.find((key) => key.key === publicKey)) {
        await this.clickhouseService.insert('api_keys', [
          {
            workspace_id: defaultWorkspaceId,
            key: publicKey,
            type: 'public',
          },
        ])
      }
    }

    // Each configured private key also doubles as an admin key.
    for (const privateKey of privateApiKeys) {
      if (!existingApiKeys.find((key) => key.key === privateKey)) {
        await this.clickhouseService.insert('api_keys', [
          {
            workspace_id: defaultWorkspaceId, // Use the ID directly instead of a subquery
            key: privateKey,
            type: 'admin',
          },
          {
            workspace_id: defaultWorkspaceId,
            key: privateKey,
            type: 'private',
          },
        ])
      }
    }
  }
}
107 |
--------------------------------------------------------------------------------
/apps/trench/src/services/data/click-house/click-house.interface.ts:
--------------------------------------------------------------------------------
// Row shape of the `_migrations` bookkeeping table maintained by
// ClickHouseService.runMigrations.
export class Migration {
  name: string // migration file name, e.g. 'v001_initial.sql'
  checksum: string // md5 of the substituted SQL file content
  executed_at: string // set by ClickHouse `DEFAULT now()`
}
6 |
--------------------------------------------------------------------------------
/apps/trench/src/services/data/click-house/click-house.module.ts:
--------------------------------------------------------------------------------
1 | import { Module } from '@nestjs/common'
2 | import { ClickHouseService } from './click-house.service'
3 |
// Provides a shared ClickHouseService (per-database client pool and
// migration runner) to other modules.
@Module({
  providers: [ClickHouseService],
  exports: [ClickHouseService],
})
export class ClickHouseModule {}
9 |
--------------------------------------------------------------------------------
/apps/trench/src/services/data/click-house/click-house.service.ts:
--------------------------------------------------------------------------------
1 | import { Injectable, Logger } from '@nestjs/common'
2 | import { createClient, ClickHouseClient } from '@clickhouse/client'
3 | import * as fs from 'fs'
4 | import * as path from 'path'
5 | import { Migration } from 'src/services/data/click-house/click-house.interface'
6 | import { md5 } from 'src/common/crypto'
7 | import {
8 | DEFAULT_KAFKA_BROKERS,
9 | DEFAULT_KAFKA_PARTITIONS,
10 | DEFAULT_KAFKA_TOPIC,
11 | } from 'src/common/constants'
12 |
13 | @Injectable()
14 | export class ClickHouseService {
15 | private readonly logger = new Logger(ClickHouseService.name)
16 | private clientMap: Map = new Map()
17 |
18 | getClient(databaseName?: string): ClickHouseClient {
19 | if (!databaseName) {
20 | databaseName = process.env.CLICKHOUSE_DATABASE
21 | }
22 |
23 | if (!this.clientMap.has(databaseName)) {
24 | this.clientMap.set(
25 | databaseName,
26 | createClient({
27 | host: `${
28 | process.env.CLICKHOUSE_PROTOCOL ?? 'http'
29 | }://${process.env.CLICKHOUSE_USER}:${process.env.CLICKHOUSE_PASSWORD}@${process.env.CLICKHOUSE_HOST}:${process.env.CLICKHOUSE_PORT}`,
30 | database: databaseName,
31 | })
32 | )
33 | }
34 |
35 | return this.clientMap.get(databaseName)
36 | }
37 |
38 | private applySubstitutions(sql: string, kafkaTopicName?: string) {
39 | const kafkaBrokerList = process.env.KAFKA_BROKERS ?? DEFAULT_KAFKA_BROKERS
40 | const kafkaTopicList = kafkaTopicName ?? process.env.KAFKA_TOPIC ?? DEFAULT_KAFKA_TOPIC
41 | const kafkaInstanceId = md5(kafkaBrokerList + kafkaTopicList).slice(0, 6)
42 | const kafkaPartitions = process.env.KAFKA_PARTITIONS ?? DEFAULT_KAFKA_PARTITIONS
43 |
44 | return sql
45 | .replaceAll('{kafka_brokers}', kafkaBrokerList)
46 | .replaceAll('{kafka_topic}', kafkaTopicList)
47 | .replaceAll('{kafka_instance_id}', kafkaInstanceId)
48 | .replaceAll('{kafka_partitions}', kafkaPartitions.toString())
49 | }
50 |
51 | async runMigrations(databaseName?: string, kafkaTopicName?: string) {
52 | if (!databaseName) {
53 | databaseName = process.env.CLICKHOUSE_DATABASE
54 | }
55 |
56 | const migrationsDir = path.join(__dirname, '../../../resources/migrations')
57 | const files = fs
58 | .readdirSync(migrationsDir)
59 | .filter((file) => file.endsWith('.sql'))
60 | .sort()
61 |
62 | // Create the _migrations table if it doesn't exist
63 | await this.getClient(databaseName).query({
64 | query: `
65 | CREATE TABLE IF NOT EXISTS _migrations (
66 | name String,
67 | checksum String,
68 | executed_at DateTime DEFAULT now()
69 | ) ENGINE = MergeTree()
70 | ORDER BY executed_at
71 | `,
72 | })
73 |
74 | // Get the list of already executed migrations
75 | const executedMigrations = (await this.getClient(databaseName)
76 | .query({
77 | query: `
78 | SELECT * FROM _migrations
79 | `,
80 | })
81 | .then((resultSet) => resultSet.json().then((json) => json.data))) as unknown as Migration[]
82 |
83 | const executedFiles = new Set(executedMigrations.map((migration) => migration.name))
84 |
85 | for (const file of files) {
86 | if (executedFiles.has(file)) {
87 | this.logger.log(`Skipping migration ${file}, already executed `)
88 | continue
89 | }
90 |
91 | this.logger.log(`Executing migration ${file}`)
92 |
93 | const filePath = path.join(migrationsDir, file)
94 | const query = this.applySubstitutions(fs.readFileSync(filePath, 'utf8'), kafkaTopicName)
95 | const queries = query.split(';')
96 | for (const query of queries) {
97 | if (query.trim() === '') {
98 | continue
99 | }
100 | try {
101 | await this.getClient(databaseName).query({
102 | query,
103 | })
104 | } catch (error) {
105 | // if the error is a duplicate table or column error, we can ignore it
106 | if (String(error).includes('already exists')) {
107 | continue
108 | }
109 | this.logger.error(`Error executing migration ${file} with query ${query}: ${error}`, error.stack)
110 | throw error
111 | }
112 | }
113 | await this.insert('_migrations', [
114 | {
115 | name: file,
116 | checksum: md5(query),
117 | },
118 | ])
119 |
120 | this.logger.log(`Migration ${file} executed successfully`)
121 | }
122 | }
123 |
124 | async queryResults(query: string, databaseName?: string): Promise {
125 | const result = await this.getClient(databaseName).query({ query })
126 | return result.json().then((json) => json.data)
127 | }
128 |
129 | async query(query: string, databaseName?: string): Promise {
130 | await this.getClient(databaseName).query({ query })
131 | }
132 |
133 | async command(query: string, databaseName?: string): Promise {
134 | await this.getClient(databaseName).command({ query })
135 | }
136 |
137 | async insert(table: string, values: Record[], databaseName?: string): Promise {
138 | await this.getClient(databaseName).insert({
139 | table,
140 | values,
141 | format: 'JSONEachRow',
142 | })
143 | }
144 | }
145 |
--------------------------------------------------------------------------------
/apps/trench/src/services/data/click-house/click-house.util.ts:
--------------------------------------------------------------------------------
1 | export function escapeString(str: string) {
2 | return str
3 | .replace(/\\/g, '\\\\') // Escape backslashes
4 | .replace(/'/g, "\\'") // Escape single quotes
5 | .replace(/"/g, '\\"') // Escape double quotes
6 | }
7 |
8 | /**
9 | * Formats a date to be used in a ClickHouse query.
10 | *
11 | * The date will be formatted as ISO 8601, without the timezone "Z" at the end.
12 | * The date will also be escaped to be safe to use in a ClickHouse query.
13 | */
14 | export function formatToClickhouseDate(date: Date): string {
15 | const isoString = date.toISOString()
16 | const clickhouseDate = isoString.replace('Z', '')
17 | return escapeString(clickhouseDate)
18 | }
19 |
--------------------------------------------------------------------------------
/apps/trench/src/services/data/kafka/kafka.interface.ts:
--------------------------------------------------------------------------------
1 | export class KafkaEvent {
2 | instance_id: string
3 | uuid: string
4 | event?: string
5 | type: string
6 | user_id?: string
7 | group_id?: string
8 | anonymous_id?: string
9 | properties?: Record
10 | traits?: Record
11 | context?: Record
12 | timestamp: Date
13 | }
14 |
// Envelope pairing an event payload with the UUID used as the Kafka
// message key (see KafkaService.produceEvents).
export class KafkaEventWithUUID {
  uuid: string
  value: KafkaEvent
}
19 |
--------------------------------------------------------------------------------
/apps/trench/src/services/data/kafka/kafka.module.ts:
--------------------------------------------------------------------------------
1 | import { Module } from '@nestjs/common'
2 | import { KafkaService } from './kafka.service'
3 |
// Provides a shared KafkaService (producer, consumer, and topic admin).
@Module({
  imports: [],
  controllers: [],
  providers: [KafkaService],
  exports: [KafkaService],
})
export class KafkaModule {}
11 |
--------------------------------------------------------------------------------
/apps/trench/src/services/data/kafka/kafka.service.ts:
--------------------------------------------------------------------------------
1 | import { Injectable, Logger } from '@nestjs/common'
2 | import { Consumer, Kafka, Producer } from 'kafkajs'
3 | import { KafkaEventWithUUID } from 'src/services/data/kafka/kafka.interface'
4 | import { DEFAULT_KAFKA_CLIENT_ID, DEFAULT_KAFKA_PARTITIONS } from 'src/common/constants'
5 |
6 | @Injectable()
7 | export class KafkaService {
8 | private readonly logger = new Logger(KafkaService.name)
9 | private hasConnectedToProducer = false
10 | private kafka: Kafka
11 | private producer: Producer
12 |
13 | constructor() {
14 | this.kafka = new Kafka({
15 | clientId: process.env.KAFKA_CLIENT_ID ?? DEFAULT_KAFKA_CLIENT_ID,
16 | brokers: process.env.KAFKA_BROKERS.split(','),
17 | })
18 | this.producer = this.kafka.producer()
19 | this.connectToProducer()
20 | }
21 |
22 | async createTopicIfNotExists(topic?: string) {
23 | if (!topic) {
24 | topic = process.env.KAFKA_TOPIC
25 | }
26 | try {
27 | const topicPromise = this.createTopic(
28 | topic,
29 | process.env.KAFKA_PARTITIONS
30 | ? Number(process.env.KAFKA_PARTITIONS)
31 | : DEFAULT_KAFKA_PARTITIONS
32 | ).then(() => this.logger.log(`Created topic ${topic}`))
33 |
34 | if (process.env.NODE_ENV !== 'development') {
35 | await topicPromise
36 | }
37 | } catch (e) {
38 | this.logger.log(`Skipping topic creation, topic ${process.env.KAFKA_TOPIC} already exists.`)
39 | }
40 |
41 | return topic
42 | }
43 |
44 | private async connectToProducer() {
45 | if (this.hasConnectedToProducer) {
46 | return
47 | }
48 | await this.producer.connect()
49 | this.hasConnectedToProducer = true
50 | }
51 |
52 | async createTopic(topic: string, partitions: number) {
53 | const admin = this.kafka.admin()
54 | await admin.connect()
55 | await admin.createTopics({
56 | topics: [{ topic, numPartitions: partitions, replicationFactor: 1 }],
57 | })
58 | await admin.disconnect()
59 | }
60 |
61 | async produceEvents(topic: string, events: KafkaEventWithUUID[]) {
62 | await this.connectToProducer()
63 | await this.producer.send({
64 | topic,
65 | messages: events.map((record) => ({
66 | key: record.uuid,
67 | value: JSON.stringify(record.value),
68 | })),
69 | })
70 | }
71 |
72 | async initiateConsumer(
73 | topic: string,
74 | groupId: string,
75 | eachBatch: (payloads: any[], consumer: Consumer) => Promise,
76 | enableBatching: boolean = false
77 | ) {
78 | const consumer = this.kafka.consumer({ groupId })
79 | await consumer.connect()
80 | await consumer.subscribe({ topic, fromBeginning: false })
81 |
82 | try {
83 | await consumer.run({
84 | eachBatch: async ({ batch }) => {
85 | if (enableBatching) {
86 | // Process all messages in batch at once
87 | await eachBatch(
88 | batch.messages.map((message) => JSON.parse(message.value.toString())),
89 | consumer
90 | )
91 | } else {
92 | // Process messages one at a time
93 | for (const message of batch.messages) {
94 | await eachBatch([JSON.parse(message.value.toString())], consumer)
95 | }
96 | }
97 | },
98 | autoCommit: true,
99 | autoCommitInterval: 1000,
100 | partitionsConsumedConcurrently: 4,
101 | })
102 | } catch (e) {
103 | this.logger.log(`Error initiating consumer for groupId ${groupId}.`, e)
104 | }
105 | }
106 | }
107 |
--------------------------------------------------------------------------------
/apps/trench/src/services/data/kafka/kafka.util.ts:
--------------------------------------------------------------------------------
1 | import { Workspace } from 'src/workspaces/workspaces.interface'
2 |
3 | export function getKafkaTopicFromWorkspace(workspace: Workspace): string {
4 | if (workspace.isDefault) {
5 | return process.env.KAFKA_TOPIC
6 | }
7 | return `${workspace.databaseName}_${process.env.KAFKA_TOPIC}`
8 | }
9 |
--------------------------------------------------------------------------------
/apps/trench/src/webhooks/webhooks.controller.ts:
--------------------------------------------------------------------------------
1 | import { Controller, Get, Post, Delete, Put, Body, Param, UseGuards, Request } from '@nestjs/common'
2 | import { WebhooksService } from 'src/webhooks/webhooks.service'
3 | import { PaginatedWebhookResponse, Webhook, WebhookDTO } from 'src/webhooks/webhooks.interface'
4 | import { PrivateApiGuard } from 'src/middlewares/private-api.guard'
5 | import { ApiBearerAuth, ApiOperation, ApiResponse } from '@nestjs/swagger'
6 | import { PaginatedResponse } from 'src/common/models'
7 | import { getWorkspace } from 'src/common/request'
8 |
9 | @ApiBearerAuth()
10 | @Controller('webhooks')
11 | @UseGuards(PrivateApiGuard)
12 | export class WebhooksController {
13 | constructor(private readonly webhooksService: WebhooksService) {}
14 |
15 | @Get()
16 | @ApiOperation({ summary: 'Get all webhooks' })
17 | @ApiResponse({
18 | status: 200,
19 | description:
20 | 'The webhooks have been successfully retrieved. Requires private API key in Bearer token.',
21 | type: PaginatedWebhookResponse,
22 | })
23 | async getWebhooks(@Request() request: Request): Promise {
24 | const workspace = getWorkspace(request)
25 | const result = await this.webhooksService.getWebhooks(workspace)
26 | return {
27 | results: result,
28 | limit: 0,
29 | offset: 0,
30 | total: result.length,
31 | }
32 | }
33 |
34 | @Post()
35 | @ApiOperation({ summary: 'Create a webhook' })
36 | @ApiResponse({
37 | status: 200,
38 | description:
39 | 'The webhook has been successfully created. Requires private API key in Bearer token.',
40 | type: Webhook,
41 | })
42 | async createWebhook(@Request() request: Request, @Body() webhookDTO: WebhookDTO) {
43 | const workspace = getWorkspace(request)
44 | return this.webhooksService.createWebhook(workspace, webhookDTO)
45 | }
46 |
47 | @Put(':uuid')
48 | @ApiOperation({ summary: 'Update a webhook' })
49 | @ApiResponse({
50 | status: 200,
51 | description:
52 | 'The webhook has been successfully updated. Requires private API key in Bearer token.',
53 | type: Webhook,
54 | })
55 | async updateWebhook(
56 | @Request() request: Request,
57 | @Param('uuid') uuid: string,
58 | @Body() webhookDTO: WebhookDTO
59 | ) {
60 | const workspace = getWorkspace(request)
61 | return this.webhooksService.updateWebhook(workspace, uuid, webhookDTO)
62 | }
63 |
64 | @Delete(':uuid')
65 | @ApiOperation({ summary: 'Delete a webhook' })
66 | @ApiResponse({
67 | status: 200,
68 | description:
69 | 'The webhook has been successfully deleted. Requires private API key in Bearer token.',
70 | })
71 | async deleteWebhook(@Request() request: Request, @Param('uuid') uuid: string) {
72 | const workspace = getWorkspace(request)
73 | return this.webhooksService.deleteWebhook(workspace, uuid)
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/apps/trench/src/webhooks/webhooks.dao.ts:
--------------------------------------------------------------------------------
1 | import { BadRequestException, Inject, Injectable } from '@nestjs/common'
2 | import { ClickHouseService } from 'src/services/data/click-house/click-house.service'
3 | import { Webhook, WebhookDTO } from 'src/webhooks/webhooks.interface'
4 | import { CACHE_MANAGER } from '@nestjs/cache-manager'
5 | import { Cache } from 'cache-manager'
6 | import { v4 as uuidv4 } from 'uuid'
7 | import { Workspace } from 'src/workspaces/workspaces.interface'
8 | const CACHE_KEY = 'webhooks'
9 | @Injectable()
10 | export class WebhooksDao {
11 | constructor(
12 | private readonly clickhouse: ClickHouseService,
13 | @Inject(CACHE_MANAGER) private cacheManager: Cache
14 | ) {}
15 |
16 | getCacheKey(workspace: Workspace): string {
17 | return `${CACHE_KEY}_${workspace.workspaceId}`
18 | }
19 |
20 | async getWebhooks(workspace: Workspace): Promise {
21 | const cacheKey = this.getCacheKey(workspace)
22 | const cachedWebhooks = await this.cacheManager.get(cacheKey)
23 | if (cachedWebhooks) {
24 | return cachedWebhooks
25 | }
26 | const query = 'SELECT * FROM webhooks'
27 | const result = await this.clickhouse.queryResults(query, workspace.databaseName)
28 | const resultData = result.map((row: any) => ({
29 | uuid: row.uuid,
30 | url: row.url,
31 | enableBatching: row.enable_batching,
32 | createdAt: new Date(row.created_at),
33 | eventTypes: row.event_types,
34 | eventNames: row.event_names,
35 | flatten: row.flatten,
36 | }))
37 | await this.cacheManager.set(cacheKey, resultData, 60000) // Cache for 1 minute
38 | return resultData
39 | }
40 |
41 | async createWebhook(
42 | workspace: Workspace,
43 | webhookDTO: WebhookDTO,
44 | existingUuid?: string
45 | ): Promise {
46 | if (!webhookDTO.url) {
47 | throw new BadRequestException('URL is required to create a webhook')
48 | }
49 |
50 | const uuid = existingUuid ?? uuidv4()
51 | await this.clickhouse.insert(
52 | 'webhooks',
53 | [
54 | {
55 | uuid,
56 | url: webhookDTO.url,
57 | enable_batching: webhookDTO.enableBatching ?? false,
58 | event_types: webhookDTO.eventTypes ?? ['*'],
59 | event_names: webhookDTO.eventNames ?? ['*'],
60 | flatten: webhookDTO.flatten ?? false,
61 | },
62 | ],
63 | workspace.databaseName
64 | )
65 | await this.cacheManager.del(this.getCacheKey(workspace))
66 |
67 | return {
68 | uuid,
69 | url: webhookDTO.url,
70 | enableBatching: webhookDTO.enableBatching ?? false,
71 | createdAt: new Date(),
72 | eventTypes: webhookDTO.eventTypes ?? ['*'],
73 | eventNames: webhookDTO.eventNames ?? ['*'],
74 | flatten: webhookDTO.flatten ?? false,
75 | }
76 | }
77 |
78 | async deleteWebhook(workspace: Workspace, uuid: string): Promise {
79 | await this.clickhouse.query(
80 | `ALTER TABLE webhooks DELETE WHERE uuid = '${uuid}'`,
81 | workspace.databaseName
82 | )
83 | await this.cacheManager.del(this.getCacheKey(workspace))
84 | }
85 | }
86 |
--------------------------------------------------------------------------------
/apps/trench/src/webhooks/webhooks.interface.ts:
--------------------------------------------------------------------------------
1 | import { ApiProperty } from '@nestjs/swagger'
2 | import { PaginatedResponse } from 'src/common/models'
3 |
// Persisted webhook configuration as stored in ClickHouse and returned by the API.
// The @ApiProperty metadata drives the generated Swagger/OpenAPI documentation.
export class Webhook {
  @ApiProperty({
    description: 'The UUID of the webhook. Automatically generated.',
    example: '123e4567-e89b-12d3-a456-426614174000',
  })
  uuid: string

  @ApiProperty({
    description: 'The URL that the webhook will send events to.',
    example: 'https://your-webhook-url.com',
  })
  url: string

  @ApiProperty({
    description: 'Whether to enable batching for the webhook.',
    example: true,
  })
  enableBatching: boolean

  @ApiProperty({
    description: 'The date and time the webhook was created.',
    example: '2021-01-01T00:00:00.000Z',
  })
  createdAt: Date

  @ApiProperty({
    description: 'The event types that the webhook will send. Use `*` to match all event types.',
    example: ['page', 'track', 'identify', 'group'],
  })
  eventTypes: string[]

  @ApiProperty({
    description: 'The event names that the webhook will send. Use `*` to match all event names.',
    example: ['UserSignedUp', 'UserLoggedIn'],
  })
  eventNames: string[]

  @ApiProperty({
    description:
      "Whether to flatten the event data. This is useful for downstream systems that don't support nested data structures.",
    example: true,
  })
  flatten: boolean
}
48 |
// Request payload for creating/updating a webhook. Only `url` is required;
// the DAO applies the documented defaults for every omitted field.
export class WebhookDTO {
  @ApiProperty({
    description: 'The URL that the webhook will send events to.',
    example: 'https://your-webhook-url.com',
  })
  url: string

  @ApiProperty({
    description: 'Whether to enable batching for the webhook. Defaults to `false`.',
    example: true,
    required: false,
  })
  enableBatching?: boolean

  @ApiProperty({
    description:
      'The event types that the webhook will send. Defaults to `["*"] (all event types)`.',
    example: ['page', 'track', 'identify', 'group'],
    required: false,
  })
  eventTypes?: string[]

  @ApiProperty({
    description:
      'The event names that the webhook will send. Defaults to `["*"] (all event names)`.',
    example: ['UserSignedUp', 'UserLoggedIn'],
    required: false,
  })
  eventNames?: string[]

  @ApiProperty({
    description:
      "Whether to flatten the event data. This is useful for downstream systems that don't support nested data structures. Defaults to `false`.",
    example: true,
    required: false,
  })
  flatten?: boolean
}
87 |
// Paginated list envelope for GET /webhooks; inherits limit/offset/total from
// the shared PaginatedResponse model.
export class PaginatedWebhookResponse extends PaginatedResponse {
  @ApiProperty({ type: [Webhook] })
  results: Webhook[]
}
92 |
--------------------------------------------------------------------------------
/apps/trench/src/webhooks/webhooks.module.ts:
--------------------------------------------------------------------------------
1 | import { Module } from '@nestjs/common'
2 | import { WebhooksService } from 'src/webhooks/webhooks.service'
3 | import { ClickHouseModule } from 'src/services/data/click-house/click-house.module'
4 | import { KafkaModule } from 'src/services/data/kafka/kafka.module'
5 | import { EventsModule } from 'src/events/events.module'
6 | import { WebhooksDao } from 'src/webhooks/webhooks.dao'
7 | import { WebhooksController } from 'src/webhooks/webhooks.controller'
8 | import { ApiKeysService } from 'src/api-keys/api-keys.service'
9 | import { ApiKeysModule } from 'src/api-keys/api-keys.module'
10 | import { CacheModule } from '@nestjs/cache-manager'
11 | import { WorkspacesModule } from 'src/workspaces/workspaces.module'
12 |
13 | @Module({
14 | imports: [
15 | KafkaModule,
16 | ClickHouseModule,
17 | EventsModule,
18 | WebhooksModule,
19 | ApiKeysModule,
20 | WorkspacesModule,
21 | CacheModule.register({
22 | ttl: 1000 * 60 * 10, // 10 minutes (in milliseconds)
23 | max: 100000, // maximum number of items in cache
24 | }),
25 | ],
26 | providers: [WebhooksService, WebhooksDao],
27 | controllers: [WebhooksController],
28 | })
29 | export class WebhooksModule {}
30 |
--------------------------------------------------------------------------------
/apps/trench/src/webhooks/webhooks.service.ts:
--------------------------------------------------------------------------------
1 | import { Injectable, Logger, OnModuleInit } from '@nestjs/common'
2 | import { KafkaService } from 'src/services/data/kafka/kafka.service'
3 | import { WebhooksDao } from 'src/webhooks/webhooks.dao'
4 | import { DEFAULT_KAFKA_TOPIC } from 'src/common/constants'
5 | import { KafkaEvent } from 'src/services/data/kafka/kafka.interface'
6 | import { Webhook, WebhookDTO } from 'src/webhooks/webhooks.interface'
7 |
8 | import { EventsService } from 'src/events/events.service'
9 | import { Event } from 'src/events/events.interface'
10 | import { flatten } from 'src/common/utils'
11 | import { Workspace } from 'src/workspaces/workspaces.interface'
12 | import { WorkspacesService } from 'src/workspaces/workspaces.service'
13 | import { getKafkaTopicFromWorkspace } from 'src/services/data/kafka/kafka.util'
14 | import { shouldProcessEvent } from 'src/webhooks/webhooks.util'
15 | import { Consumer } from 'kafkajs'
16 | @Injectable()
17 | export class WebhooksService implements OnModuleInit {
18 | private readonly logger = new Logger(WebhooksService.name)
19 | constructor(
20 | private readonly webhooksDao: WebhooksDao,
21 | private readonly kafkaService: KafkaService,
22 | private readonly eventsService: EventsService,
23 | private readonly workspacesService: WorkspacesService
24 | ) {}
25 |
26 | async onModuleInit() {
27 | this.logger.log('Starting Kafka consumers... this might take a while...')
28 | const workspaces = await this.workspacesService.getWorkspaces()
29 | for (const workspace of workspaces) {
30 | const webhooks = await this.webhooksDao.getWebhooks(workspace)
31 | for (const webhook of webhooks) {
32 | this.logger.log('Initiating consumer for webhook:', webhook.uuid, webhook.url)
33 | this.initiateConsumer(webhook, workspace)
34 | .then(() => {
35 | this.logger.log(`Consumer for webhook ${webhook.uuid} has been initiated.`)
36 | })
37 | .catch((e) => {
38 | this.logger.error(`Error initiating consumer for webhook ${webhook.uuid}.`, e.message, e.stack)
39 | })
40 | }
41 | }
42 | }
43 |
44 | private getGroupId(webhookUUID: string) {
45 | return `${webhookUUID.substring(0, 6)}-webhook-group`
46 | }
47 |
48 | async initiateConsumer(webhook: Webhook, workspace: Workspace) {
49 | await this.kafkaService.initiateConsumer(
50 | getKafkaTopicFromWorkspace(workspace),
51 | this.getGroupId(webhook.uuid),
52 | (payloads, consumer) => this.processMessages(payloads, webhook.uuid, workspace, consumer),
53 | webhook.enableBatching
54 | )
55 | }
56 |
57 | async processMessages(
58 | payloads: KafkaEvent[],
59 | webhookUUID: string,
60 | workspace: Workspace,
61 | consumer: Consumer
62 | ) {
63 | const webhooks = await this.webhooksDao.getWebhooks(workspace)
64 | const thisWebhook = webhooks.find((webhook) => webhook.uuid === webhookUUID)
65 |
66 | if (!thisWebhook) {
67 | this.logger.error(
68 | `Webhook not found. Skipping processing for ${webhookUUID} and disconnecting consumer.`
69 | )
70 | await consumer.stop()
71 | await consumer.disconnect()
72 | return
73 | }
74 |
75 | payloads = payloads.filter((payload) => shouldProcessEvent(payload, thisWebhook))
76 |
77 | if (payloads.length === 0) {
78 | return
79 | }
80 |
81 | const maxRetries = 8
82 | const delay = (ms: number) => new Promise((resolve) => setTimeout(resolve, ms))
83 | const numberOfEventsToFind = payloads.length
84 | let retries = 0
85 |
86 | let eventsFound: Event[] = []
87 |
88 | while (eventsFound.length < numberOfEventsToFind && retries < maxRetries) {
89 | const events = await this.eventsService.getEventsByUUIDs(
90 | workspace,
91 | payloads.map((payload) => payload.uuid)
92 | )
93 | if (events.length > 0) {
94 | eventsFound = eventsFound.concat(events)
95 | } else {
96 | retries++
97 | const backoffTime = Math.pow(2, retries) * 1000 // Exponential backoff
98 | await delay(backoffTime)
99 | }
100 | }
101 |
102 | if (eventsFound.length < numberOfEventsToFind) {
103 | this.logger.error(
104 | `Error: Not all events found after ${maxRetries} retries for webhook ${webhookUUID}.`
105 | )
106 | }
107 |
108 | if (eventsFound.length > 0) {
109 | await this.sendWebhook(thisWebhook, eventsFound)
110 | }
111 | }
112 |
113 | async sendWebhook(webhook: Webhook, events: Event[]) {
114 | try {
115 | const payload = {
116 | data: events,
117 | }
118 | const response = await fetch(webhook.url, {
119 | method: 'POST',
120 | headers: {
121 | 'Content-Type': 'application/json',
122 | },
123 | body: JSON.stringify(webhook.flatten ? flatten(payload) : payload),
124 | })
125 | if (!response.ok) {
126 | this.logger.error('Error sending webhook:', webhook.url, response.statusText)
127 | }
128 | } catch (error) {
129 | this.logger.error('Error sending webhook:', webhook.url, error.message, error.stack)
130 | }
131 | }
132 |
133 | async getWebhooks(workspace: Workspace): Promise {
134 | return await this.webhooksDao.getWebhooks(workspace)
135 | }
136 |
137 | async createWebhook(workspace: Workspace, webhookDTO: WebhookDTO, uuid?: string) {
138 | const newWebhook = await this.webhooksDao.createWebhook(workspace, webhookDTO, uuid)
139 | await this.initiateConsumer(newWebhook, workspace)
140 | return newWebhook
141 | }
142 |
143 | async deleteWebhook(workspace: Workspace, uuid: string) {
144 | await this.webhooksDao.deleteWebhook(workspace, uuid)
145 | }
146 |
147 | async updateWebhook(workspace: Workspace, uuid: string, webhookDTO: WebhookDTO) {
148 | await this.deleteWebhook(workspace, uuid)
149 | return await this.createWebhook(workspace, webhookDTO, uuid)
150 | }
151 | }
152 |
--------------------------------------------------------------------------------
/apps/trench/src/webhooks/webhooks.util.ts:
--------------------------------------------------------------------------------
1 | import { KafkaEvent } from 'src/services/data/kafka/kafka.interface'
2 | import { Webhook } from 'src/webhooks/webhooks.interface'
3 |
4 | export function shouldProcessEvent(event: KafkaEvent, webhook: Webhook): boolean {
5 | const typeMatches = webhook.eventTypes.includes('*') || webhook.eventTypes.includes(event.type)
6 | if (!typeMatches) {
7 | return false
8 | }
9 |
10 | const nameMatches = webhook.eventNames.includes('*') || webhook.eventNames.includes(event.event)
11 | if (!nameMatches) {
12 | return false
13 | }
14 |
15 | return true
16 | }
17 |
--------------------------------------------------------------------------------
/apps/trench/src/workspaces/workspaces.controller.ts:
--------------------------------------------------------------------------------
1 | import {
2 | Controller,
3 | Post,
4 | Body,
5 | UseGuards,
6 | Delete,
7 | Param,
8 | Put,
9 | NotFoundException,
10 | Get,
11 | } from '@nestjs/common'
12 |
13 | import { AdminApiGuard } from 'src/middlewares/admin-api.guard'
14 | import { WorkspacesService } from 'src/workspaces/workspaces.service'
15 | import { CreateWorkspaceDto, Workspace } from 'src/workspaces/workspaces.interface'
16 | import { ApiOperation, ApiResponse } from '@nestjs/swagger'
17 |
18 | @Controller('workspaces')
19 | @UseGuards(AdminApiGuard)
20 | export class WorkspacesController {
21 | constructor(private readonly workspacesService: WorkspacesService) {}
22 | @Post()
23 | @ApiOperation({ summary: 'Create a workspace' })
24 | @ApiResponse({
25 | status: 200,
26 | description:
27 | 'The workspace has been successfully created. Requires private API key in Bearer token.',
28 | type: Workspace,
29 | })
30 | async create(@Body() createWorkspaceDto: CreateWorkspaceDto) {
31 | const newWorkspace = await this.workspacesService.createNewWorkspace(createWorkspaceDto)
32 |
33 | return newWorkspace
34 | }
35 |
36 | @Delete(':workspaceId')
37 | async delete(@Param('workspaceId') workspaceId: string) {
38 | await this.workspacesService.deleteWorkspace(workspaceId)
39 | }
40 |
41 | @Put(':workspaceId')
42 | @ApiOperation({ summary: 'Update a workspace' })
43 | @ApiResponse({
44 | status: 200,
45 | description:
46 | 'The workspace has been successfully updated. Requires private API key in Bearer token.',
47 | type: Workspace,
48 | })
49 | async update(
50 | @Param('workspaceId') workspaceId: string,
51 | @Body() updateWorkspaceDto: CreateWorkspaceDto
52 | ) {
53 | // Assuming the method name should be 'updateWorkspace' based on the error
54 | const updatedWorkspace = await this.workspacesService.updateWorkspace(
55 | workspaceId,
56 | updateWorkspaceDto
57 | )
58 |
59 | return updatedWorkspace
60 | }
61 |
62 | @Get(':workspaceId')
63 | @ApiOperation({ summary: 'Get a workspace by ID' })
64 | @ApiResponse({
65 | status: 200,
66 | description: 'The workspace has been successfully retrieved.',
67 | type: Workspace,
68 | })
69 | async getById(@Param('workspaceId') workspaceId: string) {
70 | const workspace = await this.workspacesService.getWorkspaceById(workspaceId)
71 |
72 | if (!workspace) {
73 | throw new NotFoundException('Workspace not found')
74 | }
75 |
76 | return workspace
77 | }
78 | }
79 |
--------------------------------------------------------------------------------
/apps/trench/src/workspaces/workspaces.interface.ts:
--------------------------------------------------------------------------------
1 | import { ApiProperty, ApiPropertyOptional } from '@nestjs/swagger'
2 |
3 | export class Workspace {
4 | @ApiProperty({
5 | description: 'The unique identifier of the workspace.',
6 | example: 'workspace-1234',
7 | })
8 | workspaceId: string
9 |
10 | @ApiProperty({
11 | description: 'The name of the workspace.',
12 | example: 'Development Workspace',
13 | })
14 | name: string
15 |
16 | @ApiProperty({
17 | description: 'Indicates if this is the default workspace.',
18 | example: true,
19 | })
20 | isDefault: boolean
21 |
22 | @ApiProperty({
23 | description: 'The name of the database associated with the workspace.',
24 | example: 'default',
25 | })
26 | databaseName: string
27 |
28 | @ApiProperty({
29 | description: 'The date and time when the workspace was created.',
30 | example: '2023-01-01T00:00:00.000Z',
31 | })
32 | createdAt: Date
33 |
34 | @ApiPropertyOptional({
35 | description: 'The properties of the workspace.',
36 | example: '{}',
37 | })
38 | properties: Record
39 | }
40 |
41 | export class CreateWorkspaceDto {
42 | @ApiProperty({
43 | description: 'The name of the workspace to be created.',
44 | example: 'New Workspace',
45 | })
46 | name: string
47 |
48 | @ApiPropertyOptional({
49 | description: 'The name of the database associated with the workspace.',
50 | example: 'workspace_db',
51 | })
52 | databaseName?: string
53 |
54 | @ApiPropertyOptional({
55 | description: 'Indicates if the new workspace should be set as default. Defaults to `false`.',
56 | example: false,
57 | })
58 | isDefault?: boolean
59 |
60 | @ApiPropertyOptional({
61 | description: 'The properties of the workspace.',
62 | example: '{}',
63 | })
64 | properties?: Record
65 | }
66 |
67 | export class UpdateWorkspaceDto {
68 | @ApiPropertyOptional({
69 | description: 'The name of the workspace to be updated.',
70 | example: 'Updated Workspace',
71 | })
72 | name?: string
73 |
74 | @ApiPropertyOptional({
75 | description: 'The properties of the workspace.',
76 | example: '{}',
77 | })
78 | properties?: Record
79 | }
80 |
// Returned only from workspace creation: the stored workspace plus the two
// freshly minted API keys (this is the only time the keys are exposed).
export class WorkspaceCreationResult extends Workspace {
  @ApiProperty({
    description: 'The private API key for the workspace.',
    example: 'private-api-key-1234',
  })
  privateApiKey: string

  @ApiProperty({
    description: 'The public API key for the workspace.',
    example: 'public-api-key-1234',
  })
  publicApiKey: string
}
94 |
--------------------------------------------------------------------------------
/apps/trench/src/workspaces/workspaces.module.ts:
--------------------------------------------------------------------------------
1 | import { Module } from '@nestjs/common'
2 | import { WorkspacesService } from 'src/workspaces/workspaces.service'
3 | import { ClickHouseModule } from 'src/services/data/click-house/click-house.module'
4 | import { WorkspacesController } from 'src/workspaces/workspaces.controller'
5 | import { ApiKeysModule } from 'src/api-keys/api-keys.module'
6 | import { BootstrapModule } from 'src/services/data/bootstrap/bootstrap.module'
7 |
// Wires the workspaces feature: ClickHouse access, API-key management, and the
// bootstrap service that initializes a new workspace's database objects.
// WorkspacesService is exported for use by other modules (e.g. webhooks).
@Module({
  imports: [ClickHouseModule, ApiKeysModule, BootstrapModule],
  controllers: [WorkspacesController],
  providers: [WorkspacesService],
  exports: [WorkspacesService],
})
export class WorkspacesModule {}
15 |
--------------------------------------------------------------------------------
/apps/trench/src/workspaces/workspaces.service.ts:
--------------------------------------------------------------------------------
1 | import { BadRequestException, Injectable } from '@nestjs/common'
2 | import { ClickHouseService } from 'src/services/data/click-house/click-house.service'
3 | import {
4 | CreateWorkspaceDto,
5 | UpdateWorkspaceDto,
6 | Workspace,
7 | WorkspaceCreationResult,
8 | } from 'src/workspaces/workspaces.interface'
9 | import { escapeString } from 'src/services/data/click-house/click-house.util'
10 | import { v4 as uuidv4 } from 'uuid'
11 | import { ApiKeysService } from 'src/api-keys/api-keys.service'
12 | import { mapRowToWorkspace, mapWorkspaceToRow } from 'src/workspaces/workspaces.util'
13 | import { BootstrapService } from 'src/services/data/bootstrap/bootstrap.service'
14 | @Injectable()
15 | export class WorkspacesService {
16 | constructor(
17 | private readonly clickhouseService: ClickHouseService,
18 | private readonly apiKeysService: ApiKeysService,
19 | private readonly bootstrapService: BootstrapService
20 | ) {}
21 |
22 | async createNewWorkspace(
23 | createWorkspaceDto: CreateWorkspaceDto
24 | ): Promise {
25 | let { name, databaseName, isDefault, properties } = createWorkspaceDto
26 | this.validateInputs(name, createWorkspaceDto.properties)
27 |
28 | name = (name ?? '').trim()
29 |
30 | const uuid = uuidv4()
31 |
32 | if (!databaseName) {
33 | databaseName = `trench_workspace_${name
34 | .replace(/[^a-zA-Z0-9-_]/g, '')
35 | .toLowerCase()
36 | .replace(/[\s-]+/g, '_')}`
37 | }
38 |
39 | const existingWorkspace = await this.getWorkspaceByName(name)
40 |
41 | if (existingWorkspace) {
42 | throw new BadRequestException(`Workspace name '${name}' already taken`)
43 | }
44 |
45 | // create the database
46 | try {
47 | await this.clickhouseService.command(
48 | `CREATE DATABASE IF NOT EXISTS ${escapeString(databaseName)};`
49 | )
50 | } catch (error) {
51 | throw new BadRequestException(
52 | `Failed to create database ${databaseName} for workspace ${name}: ${error}`
53 | )
54 | }
55 |
56 | await this.clickhouseService.insert('workspaces', [
57 | {
58 | workspace_id: uuid,
59 | name,
60 | database_name: databaseName,
61 | is_default: isDefault,
62 | properties: JSON.stringify(properties),
63 | },
64 | ])
65 |
66 | const privateApiKey = await this.apiKeysService.createApiKey(uuid, 'private')
67 | const publicApiKey = await this.apiKeysService.createApiKey(uuid, 'public')
68 |
69 | const workspace = await this.getWorkspaceById(uuid)
70 |
71 | await this.bootstrapService.bootstrapWorkspace(workspace)
72 |
73 | return {
74 | ...workspace,
75 | privateApiKey,
76 | publicApiKey,
77 | }
78 | }
79 |
80 | async getWorkspaceById(workspaceId: string): Promise {
81 | const result = await this.clickhouseService.queryResults(`
82 | SELECT *
83 | FROM workspaces
84 | WHERE workspace_id = '${escapeString(workspaceId)}'
85 | `)
86 |
87 | if (!result || result.length === 0) {
88 | return null
89 | }
90 |
91 | return mapRowToWorkspace(result[0])
92 | }
93 |
94 | async getWorkspaceByName(name: string): Promise {
95 | const result = await this.clickhouseService.queryResults(`
96 | SELECT *
97 | FROM workspaces
98 | WHERE name = '${escapeString(name)}'
99 | `)
100 |
101 | if (!result || result.length === 0) {
102 | return null
103 | }
104 |
105 | return mapRowToWorkspace(result[0])
106 | }
107 |
108 | async getDefaultWorkspace(): Promise {
109 | const query = `
110 | SELECT *
111 | FROM workspaces
112 | WHERE is_default = true
113 | ORDER BY created_at ASC
114 | LIMIT 1
115 | `
116 | const result = await this.clickhouseService.queryResults(query)
117 |
118 | if (!result || result.length === 0) {
119 | throw new Error('No workspace found')
120 | }
121 |
122 | return mapRowToWorkspace(result[0])
123 | }
124 |
125 | async deleteWorkspace(workspaceId: string): Promise {
126 | const query = `
127 | DELETE FROM workspaces
128 | WHERE workspace_id = '${escapeString(workspaceId)}'
129 | `
130 | await this.clickhouseService.command(query)
131 | }
132 |
133 | async getWorkspaces(): Promise {
134 | const result = await this.clickhouseService.queryResults(`
135 | SELECT *
136 | FROM workspaces
137 | `)
138 | return result.map((row) => mapRowToWorkspace(row))
139 | }
140 |
141 | async updateWorkspace(
142 | workspaceId: string,
143 | updateWorkspaceDto: UpdateWorkspaceDto
144 | ): Promise {
145 | this.validateInputs(updateWorkspaceDto.name, updateWorkspaceDto.properties)
146 |
147 | const existingWorkspace = await this.getWorkspaceById(workspaceId)
148 | if (!existingWorkspace) {
149 | throw new Error('Workspace not found')
150 | }
151 |
152 | const { name, properties } = updateWorkspaceDto
153 | const updatedWorkspace = {
154 | ...existingWorkspace,
155 | name: name || existingWorkspace.name,
156 | properties: properties || existingWorkspace.properties,
157 | }
158 |
159 | await this.deleteWorkspace(workspaceId)
160 |
161 | await this.clickhouseService.insert('workspaces', [mapWorkspaceToRow(updatedWorkspace)])
162 |
163 | return updatedWorkspace
164 | }
165 |
166 | private validateInputs(name?: string, properties?: Record) {
167 | if (!name || name.trim().length === 0) {
168 | throw new BadRequestException('Workspace name is required')
169 | }
170 |
171 | if (properties && typeof properties !== 'object') {
172 | throw new BadRequestException('Properties must be a valid JSON object')
173 | }
174 | }
175 | }
176 |
--------------------------------------------------------------------------------
/apps/trench/src/workspaces/workspaces.util.ts:
--------------------------------------------------------------------------------
1 | import { Workspace } from './workspaces.interface'
2 |
3 | export function mapRowToWorkspace(result: any): Workspace {
4 | return {
5 | workspaceId: result.workspace_id,
6 | name: result.name,
7 | isDefault: result.is_default,
8 | databaseName: result.database_name,
9 | createdAt: result.created_at,
10 | properties: result.properties ? JSON.parse(result.properties) : undefined,
11 | }
12 | }
13 |
14 | export function mapWorkspaceToRow(workspace: Workspace): any {
15 | return {
16 | workspace_id: workspace.workspaceId,
17 | name: workspace.name,
18 | is_default: workspace.isDefault,
19 | database_name: workspace.databaseName,
20 | created_at: workspace.createdAt,
21 | properties: JSON.stringify(workspace.properties),
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/apps/trench/test/e2e/app.e2e.test.ts:
--------------------------------------------------------------------------------
1 | import { authenticatedGet } from './utils'
2 |
3 | describe('/', () => {
4 | test('should return a 200 on /', async () => {
5 | const res = await authenticatedGet('/')
6 | expect(res.statusCode).toEqual(200)
7 | })
8 | })
9 |
--------------------------------------------------------------------------------
/apps/trench/test/e2e/events.e2e.test.ts:
--------------------------------------------------------------------------------
1 | import { authenticatedPost, PUBLIC_API_KEY, waitForQueryResults } from './utils'
2 |
// End-to-end coverage for POST /events (public API key) and GET /events lookup.
// waitForQueryResults polls because event ingestion is asynchronous.
describe('events/', () => {
  test('should create a new event and fetch it', async () => {
    const newEvent = {
      type: 'track',
      event: 'User SignedUp',
      timestamp: new Date().toISOString(),
    }

    // Create a new event
    const createRes = await authenticatedPost('/events', PUBLIC_API_KEY).send({
      events: [newEvent],
    })
    expect(createRes.statusCode).toEqual(201)
    expect(createRes.body.results).toHaveLength(1)
    expect(createRes.body.total).toEqual(1)
    expect(createRes.body.results[0].uuid).toBeDefined()
    const uuid = createRes.body.results[0].uuid

    // Fetch the created event using the newly created util function
    const queryResults = await waitForQueryResults(`uuid=${uuid}`)
    expect(queryResults.results).toHaveLength(1)
    expect(queryResults.results[0].uuid).toEqual(uuid)
  })

  test('should create a new event with instanceId, event name with spaces, and userId, then fetch it', async () => {
    // A fixed UUID is supplied by the client here (server accepts caller UUIDs).
    const newEvent = {
      uuid: '123e4567-e89b-12d3-a456-426614174001',
      type: 'track',
      event: 'User Logged In',
      userId: 'user-123',
      instanceId: 'instance-456',
      timestamp: new Date().toISOString(),
    }

    // Create a new event
    const createRes = await authenticatedPost('/events', PUBLIC_API_KEY).send({
      events: [newEvent],
    })
    expect(createRes.statusCode).toEqual(201)
    expect(createRes.body.results).toHaveLength(1)
    expect(createRes.body.total).toEqual(1)
    expect(createRes.body.results[0].uuid).toBeDefined()
    const eventUuid = createRes.body.results[0].uuid

    // Fetch the created event using the instanceId, event name, and userId
    // (event name is URL-encoded because it contains spaces).
    const results = await waitForQueryResults(
      `uuid=${eventUuid}&event=User%20Logged%20In&userId=user-123&instanceId=instance-456`
    )
    expect(results.total).toEqual(1)
    expect(results.results[0].uuid).toEqual(eventUuid)
    expect(results.results[0].event).toEqual('User Logged In')
    expect(results.results[0].userId).toEqual('user-123')
    expect(results.results[0].instanceId).toEqual('instance-456')
  })

  test('should create a new event with properties and fetch it using properties', async () => {
    const newEvent = {
      uuid: '123e4567-e89b-12d3-a456-426614174002',
      type: 'track',
      event: 'User Updated Profile',
      properties: {
        plan: 'premium',
        country: 'USA',
      },
      timestamp: new Date().toISOString(),
    }

    // Create a new event
    const createRes = await authenticatedPost('/events', PUBLIC_API_KEY).send({
      events: [newEvent],
    })
    expect(createRes.statusCode).toEqual(201)
    expect(createRes.body.results).toHaveLength(1)
    expect(createRes.body.total).toEqual(1)
    expect(createRes.body.results[0].uuid).toBeDefined()
    const eventUuid = createRes.body.results[0].uuid

    // Fetch the created event using the properties
    // (dot-notation query params filter on nested property values).
    const queryResults = await waitForQueryResults(
      `uuid=${eventUuid}&properties.plan=premium&properties.country=USA`
    )
    expect(queryResults.total).toEqual(1)
    expect(queryResults.results[0].uuid).toEqual(eventUuid)
    expect(queryResults.results[0].event).toEqual('User Updated Profile')
    expect(queryResults.results[0].properties.plan).toEqual('premium')
    expect(queryResults.results[0].properties.country).toEqual('USA')
  })

  test('should return an error when querying for a non-existent event', async () => {
    // waitForQueryResults rejects after its poll timeout when nothing matches.
    await expect(waitForQueryResults('event=NonExistentEvent')).rejects.toThrow(
      'Timeout: No results found'
    )
  })
})
97 |
--------------------------------------------------------------------------------
/apps/trench/test/e2e/queries.e2e.test.ts:
--------------------------------------------------------------------------------
1 | import { authenticatedPost, PUBLIC_API_KEY, waitForQueryResults } from './utils'
2 |
3 | describe('queries/', () => {
4 | test('should create an event and execute a simple read-only query on it', async () => {
5 | const newEvent = {
6 | type: 'track',
7 | event: 'User SignedUp',
8 | timestamp: new Date().toISOString(),
9 | }
10 |
11 | // Create a new event
12 | const createRes = await authenticatedPost('/events', PUBLIC_API_KEY).send({
13 | events: [newEvent],
14 | })
15 | expect(createRes.statusCode).toEqual(201)
16 | expect(createRes.body.results).toHaveLength(1)
17 | expect(createRes.body.total).toEqual(1)
18 | const eventUuid = createRes.body.results[0].uuid
19 | // Wait for the event to be created
20 | const queryResults = await waitForQueryResults(`uuid=${eventUuid}`)
21 | expect(queryResults.results).toHaveLength(1)
22 | expect(queryResults.results[0].uuid).toEqual(eventUuid)
23 |
24 | // Execute the query
25 | const query = `SELECT * FROM events WHERE uuid = '${eventUuid}'`
26 | const executeRes = await authenticatedPost('/queries').send({
27 | queries: [query],
28 | })
29 | expect(executeRes.statusCode).toEqual(201)
30 | expect(executeRes.body.results).toHaveLength(1)
31 | expect(executeRes.body.results[0][0].uuid).toEqual(eventUuid)
32 | })
33 | })
34 |
--------------------------------------------------------------------------------
/apps/trench/test/e2e/utils.ts:
--------------------------------------------------------------------------------
import { randomUUID } from 'crypto'
import * as request from 'supertest'
import { v4 as uuidv4 } from 'uuid'
3 |
4 | export const API_ROOT = 'http://127.0.0.1:4000'
5 |
6 | export const PRIVATE_API_KEY = 'private-d613be4e-di03-4b02-9058-70aa4j04ff28'
7 | export const PUBLIC_API_KEY = 'public-d613be4e-di03-4b02-9058-70aa4j04ff28'
8 |
9 | export function authenticatedGet(path: string, apiKey?: string) {
10 | return request(API_ROOT)
11 | .get(path)
12 | .trustLocalhost()
13 | .set('Content-Type', 'application/json')
14 | .set('Accept', 'application/json')
15 | .set('Authorization', 'Bearer ' + (apiKey ?? PRIVATE_API_KEY))
16 | }
17 |
18 | export function authenticatedPost(path: string, apiKey?: string) {
19 | return request(API_ROOT)
20 | .post(path)
21 | .trustLocalhost()
22 | .set('Content-Type', 'application/json')
23 | .set('Accept', 'application/json')
24 | .set('Authorization', 'Bearer ' + (apiKey ?? PRIVATE_API_KEY))
25 | }
26 |
27 | export function authenticatedPut(path: string, apiKey?: string) {
28 | return request(API_ROOT)
29 | .put(path)
30 | .trustLocalhost()
31 | .set('Content-Type', 'application/json')
32 | .set('Accept', 'application/json')
33 | .set('Authorization', 'Bearer ' + (apiKey ?? PRIVATE_API_KEY))
34 | }
35 |
36 | export function authenticatedDelete(path: string, apiKey?: string) {
37 | return request(API_ROOT)
38 | .delete(path)
39 | .trustLocalhost()
40 | .set('Content-Type', 'application/json')
41 | .set('Authorization', 'Bearer ' + (apiKey ?? PRIVATE_API_KEY))
42 | }
43 |
44 | export function getRandomID(): string {
45 | return uuidv4()
46 | }
47 |
48 | export async function waitForQueryResults(query: string, privateApiKey?: string) {
49 | const pollInterval = 100 // 100 ms
50 | const maxWaitTime = 10000 // 10 seconds
51 | const startTime = Date.now()
52 |
53 | while (true) {
54 | const res = await authenticatedGet(`/events?${query}`, privateApiKey)
55 | if (res.body.results && res.body.results.length > 0) {
56 | return res.body
57 | }
58 | if (Date.now() - startTime > maxWaitTime) {
59 | throw new Error(`Timeout: No results found within ${maxWaitTime / 1000} seconds`)
60 | }
61 | await new Promise((resolve) => setTimeout(resolve, pollInterval))
62 | }
63 | }
64 |
--------------------------------------------------------------------------------
/apps/trench/test/e2e/workspaces.e2e.test.ts:
--------------------------------------------------------------------------------
1 | import {
2 | authenticatedDelete,
3 | authenticatedGet,
4 | authenticatedPost,
5 | authenticatedPut,
6 | getRandomID,
7 | waitForQueryResults,
8 | } from './utils'
9 |
10 | describe('workspaces/', () => {
11 | test('should create a new workspace', async () => {
12 | const res = await authenticatedPost('/workspaces').send({ name: getRandomID() })
13 | expect(res.statusCode).toEqual(201)
14 | expect(res.body.workspaceId).toBeDefined()
15 | })
16 |
17 | test('should create a new workspace, create events, and query them', async () => {
18 | // Create a new workspace and get API keys
19 | const workspaceRes = await authenticatedPost('/workspaces').send({
20 | name: getRandomID(),
21 | properties: { test: 'test' },
22 | })
23 | expect(workspaceRes.body.properties).toEqual({ test: 'test' })
24 | expect(workspaceRes.statusCode).toEqual(201)
25 | expect(workspaceRes.body.workspaceId).toBeDefined()
26 | expect(workspaceRes.body.publicApiKey).toBeDefined()
27 | expect(workspaceRes.body.privateApiKey).toBeDefined()
28 | const newPublicApiKey = workspaceRes.body.publicApiKey
29 | const newPrivateApiKey = workspaceRes.body.privateApiKey
30 |
31 | // Create a new event using the private API key
32 | const newEvent = {
33 | type: 'track',
34 | event: 'User Created Workspace',
35 | }
36 | const createEventRes = await authenticatedPost('/events', newPublicApiKey).send({
37 | events: [newEvent],
38 | })
39 | expect(createEventRes.statusCode).toEqual(201)
40 | expect(createEventRes.body.results).toHaveLength(1)
41 | expect(createEventRes.body.results[0].uuid).toBeDefined()
42 | const eventUuid = createEventRes.body.results[0].uuid
43 |
44 | // Query the created event using the public API key
45 | const queryResults = await waitForQueryResults(`uuid=${eventUuid}`, newPrivateApiKey)
46 | expect(queryResults.results).toHaveLength(1)
47 | expect(queryResults.results[0].uuid).toEqual(eventUuid)
48 |
49 | // Ensure the new private api key cannot be used to create new workspaces
50 | const createWorkspaceRes = await authenticatedPost('/workspaces', newPrivateApiKey).send({
51 | name: getRandomID(),
52 | })
53 | expect(createWorkspaceRes.statusCode).toEqual(401)
54 | })
55 |
56 | test('should update an existing workspace', async () => {
57 | // Create a new workspace
58 | const createRes = await authenticatedPost('/workspaces').send({
59 | name: getRandomID(),
60 | properties: { test: 'test' },
61 | })
62 | expect(createRes.statusCode).toEqual(201)
63 | const workspaceId = createRes.body.workspaceId
64 | expect(workspaceId).toBeDefined()
65 |
66 | // Update the workspace
67 | const updatedName = getRandomID()
68 | const updatedProperties = { test: 'test2' }
69 | const updateRes = await authenticatedPut(`/workspaces/${workspaceId}`).send({
70 | name: updatedName,
71 | properties: updatedProperties,
72 | })
73 | expect(updateRes.statusCode).toEqual(200)
74 | expect(updateRes.body.name).toEqual(updatedName)
75 | expect(updateRes.body.properties).toEqual(updatedProperties)
76 | })
77 |
78 | test('should delete an existing workspace', async () => {
79 | // Create a new workspace
80 | const createRes = await authenticatedPost('/workspaces').send({ name: getRandomID() })
81 | expect(createRes.statusCode).toEqual(201)
82 | const workspaceId = createRes.body.workspaceId
83 |
84 | // Delete the workspace
85 | const deleteRes = await authenticatedDelete(`/workspaces/${workspaceId}`)
86 | expect(deleteRes.statusCode).toEqual(200)
87 |
88 | // Verify the workspace has been deleted
89 | const getRes = await authenticatedGet(`/workspaces/${workspaceId}`)
90 | expect(getRes.statusCode).toEqual(404)
91 | })
92 | })
93 |
--------------------------------------------------------------------------------
/apps/trench/test/unit/queries.util.test.ts:
--------------------------------------------------------------------------------
1 | import {
2 | isReadOnlyQuery,
3 | convertToKebabCase,
4 | convertJsonKeysToCamelCase,
5 | } from '../../src/queries/queries.util'
6 |
7 | describe('queries.util', () => {
8 | describe('isReadOnlyQuery', () => {
9 | test('should identify read-only queries', () => {
10 | expect(isReadOnlyQuery('SELECT * FROM users')).toBe(true)
11 | expect(isReadOnlyQuery('SELECT id, name FROM events WHERE id = 1')).toBe(true)
12 | })
13 |
14 | test('should identify non-read-only queries', () => {
15 | expect(isReadOnlyQuery('INSERT INTO users VALUES (1)')).toBe(false)
16 | expect(isReadOnlyQuery('UPDATE users SET name = "test"')).toBe(false)
17 | expect(isReadOnlyQuery('DELETE FROM users')).toBe(false)
18 | })
19 |
20 | test('should handle queries with string literals correctly', () => {
21 | expect(isReadOnlyQuery("SELECT * FROM users WHERE name = 'DELETE'")).toBe(true)
22 | expect(isReadOnlyQuery("SELECT * FROM users WHERE name = 'INSERT'")).toBe(true)
23 | })
24 |
25 | test('should not allow listing databases', () => {
26 | expect(isReadOnlyQuery('SHOW DATABASES')).toBe(false)
27 | expect(isReadOnlyQuery('SHOW SCHEMAS')).toBe(false)
28 | expect(isReadOnlyQuery('SELECT name FROM system.databases')).toBe(false)
29 | })
30 | })
31 |
32 | describe('convertToKebabCase', () => {
33 | test('should convert camelCase IDs to snake_case', () => {
34 | const input = 'SELECT userId, groupId, instanceId FROM users'
35 | const expected = 'SELECT user_id, group_id, instance_id FROM users'
36 | expect(convertToKebabCase(input)).toBe(expected)
37 | })
38 |
39 | test('should not modify other parts of the query', () => {
40 | const input = 'SELECT name, userId FROM users WHERE age > 18'
41 | const expected = 'SELECT name, user_id FROM users WHERE age > 18'
42 | expect(convertToKebabCase(input)).toBe(expected)
43 | })
44 | })
45 |
46 | describe('convertJsonKeysToCamelCase', () => {
47 | test('should convert snake_case keys to camelCase', () => {
48 | const input = {
49 | user_id: 1,
50 | first_name: 'John',
51 | last_name: 'Doe',
52 | }
53 | const expected = {
54 | userId: 1,
55 | firstName: 'John',
56 | lastName: 'Doe',
57 | }
58 | expect(convertJsonKeysToCamelCase(input)).toEqual(expected)
59 | })
60 |
61 | test('should handle nested properties correctly', () => {
62 | const input = {
63 | user_id: 1,
64 | user_data: {
65 | first_name: 'John',
66 | },
67 | }
68 | const expected = {
69 | userId: 1,
70 | userData: {
71 | first_name: 'John',
72 | },
73 | }
74 | expect(convertJsonKeysToCamelCase(input)).toEqual(expected)
75 | })
76 | })
77 | })
78 |
--------------------------------------------------------------------------------
/apps/trench/test/unit/webhooks.util.test.ts:
--------------------------------------------------------------------------------
1 | import { shouldProcessEvent } from '../../src/webhooks/webhooks.util'
2 | import { KafkaEvent } from '../../src/services/data/kafka/kafka.interface'
3 | import { Webhook } from '../../src/webhooks/webhooks.interface'
4 |
5 | describe('shouldProcessEvent', () => {
6 | const mockEvent: KafkaEvent = {
7 | instance_id: 'test-instance',
8 | uuid: '123',
9 | type: 'track',
10 | event: 'button_clicked',
11 | timestamp: new Date(),
12 | }
13 |
14 | const mockWebhook: Webhook = {
15 | uuid: '456',
16 | url: 'http://test.com',
17 | enableBatching: false,
18 | createdAt: new Date(),
19 | eventTypes: ['track'],
20 | eventNames: ['button_clicked'],
21 | flatten: false,
22 | }
23 |
24 | it('should return true when event type and name match exactly', () => {
25 | expect(shouldProcessEvent(mockEvent, mockWebhook)).toBe(true)
26 | })
27 |
28 | it('should return true when webhook has wildcard event type', () => {
29 | const wildcardWebhook = {
30 | ...mockWebhook,
31 | eventTypes: ['*'],
32 | }
33 | expect(shouldProcessEvent(mockEvent, wildcardWebhook)).toBe(true)
34 | })
35 |
36 | it('should return true when webhook has wildcard event name', () => {
37 | const wildcardWebhook = {
38 | ...mockWebhook,
39 | eventNames: ['*'],
40 | }
41 | expect(shouldProcessEvent(mockEvent, wildcardWebhook)).toBe(true)
42 | })
43 |
44 | it('should return false when event type does not match', () => {
45 | const differentTypeWebhook = {
46 | ...mockWebhook,
47 | eventTypes: ['page'],
48 | }
49 | expect(shouldProcessEvent(mockEvent, differentTypeWebhook)).toBe(false)
50 | })
51 |
52 | it('should return false when event name does not match', () => {
53 | const differentNameWebhook = {
54 | ...mockWebhook,
55 | eventNames: ['form_submitted'],
56 | }
57 | expect(shouldProcessEvent(mockEvent, differentNameWebhook)).toBe(false)
58 | })
59 |
60 | it('should return true when both type and name use wildcards', () => {
61 | const allWildcardWebhook = {
62 | ...mockWebhook,
63 | eventTypes: ['*'],
64 | eventNames: ['*'],
65 | }
66 | expect(shouldProcessEvent(mockEvent, allWildcardWebhook)).toBe(true)
67 | })
68 | })
69 |
--------------------------------------------------------------------------------
/apps/trench/tsconfig.build.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": "./tsconfig.json",
3 | "exclude": ["node_modules", "test", "dist", "**/*spec.ts"]
4 | }
5 |
--------------------------------------------------------------------------------
/apps/trench/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "declaration": true,
5 | "removeComments": true,
6 | "emitDecoratorMetadata": true,
7 | "experimentalDecorators": true,
8 | "allowSyntheticDefaultImports": true,
9 | "target": "es2021",
10 | "sourceMap": true,
11 | "outDir": "./dist",
12 | "baseUrl": "./",
13 | "incremental": true,
14 | "skipLibCheck": true,
15 | "strictNullChecks": false,
16 | "noImplicitAny": false,
17 | "strictBindCallApply": false,
18 | "forceConsistentCasingInFileNames": false,
19 | "noFallthroughCasesInSwitch": false,
20 | "types": [
21 | "node",
22 | "jest"
23 | ]
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/img/trench-cover.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrigadeHQ/trench/61f60a34c7aaad9011e9b6cffac1f2593ec7166b/img/trench-cover.png
--------------------------------------------------------------------------------
/img/trench-dark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrigadeHQ/trench/61f60a34c7aaad9011e9b6cffac1f2593ec7166b/img/trench-dark.png
--------------------------------------------------------------------------------
/img/trench-dashboard-dark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrigadeHQ/trench/61f60a34c7aaad9011e9b6cffac1f2593ec7166b/img/trench-dashboard-dark.png
--------------------------------------------------------------------------------
/img/trench-dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrigadeHQ/trench/61f60a34c7aaad9011e9b6cffac1f2593ec7166b/img/trench-dashboard.png
--------------------------------------------------------------------------------
/img/trench-light.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/FrigadeHQ/trench/61f60a34c7aaad9011e9b6cffac1f2593ec7166b/img/trench-light.png
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@frigadehq/trench",
3 | "version": "0.0.1",
4 | "license": "MIT",
5 | "description": "Open-Source Infrastructure for Tracking Events",
6 | "workspaces": [
7 | "apps/*",
8 | "packages/*"
9 | ],
10 | "scripts": {
11 | "build": "turbo build",
12 | "test": "turbo test",
13 | "dev": "turbo dev --no-cache --continue",
14 | "lint": "turbo lint",
15 | "clean": "turbo clean && rm -rf node_modules",
16 | "format": "prettier --write \"**/*.{ts,tsx,md}\"",
17 | "changeset": "changeset",
18 | "version-packages": "changeset version",
19 | "release": "turbo build && changeset publish"
20 | },
21 | "devDependencies": {
22 | "@changesets/cli": "^2.22.0",
23 | "eslint": "^7.32.0",
24 | "prettier": "^2.5.1",
25 | "rimraf": "^5.0.0",
26 | "turbo": "^1.10.14"
27 | },
28 | "author": "Frigade Inc.",
29 | "packageManager": "pnpm@8.6.12"
30 | }
31 |
--------------------------------------------------------------------------------
/packages/analytics-plugin-trench/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | /dist
3 | .DS_Store
4 | .idea
5 |
--------------------------------------------------------------------------------
/packages/analytics-plugin-trench/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "singleQuote": true,
3 | "trailingComma": "es5",
4 | "printWidth": 120,
5 | "endOfLine": "auto"
6 | }
7 |
--------------------------------------------------------------------------------
/packages/analytics-plugin-trench/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # analytics-plugin-trench
2 |
3 | ## 0.0.9
4 |
5 | ### Patch Changes
6 |
7 | - f286470: Enables batching as well as click tracking in autocapture mode
8 |
9 | ## 0.0.8
10 |
11 | ### Patch Changes
12 |
13 | - 66e98aa: Fixes an issue where `identify` is not properly setting the traits
14 |
15 | ## 0.0.7
16 |
17 | ### Patch Changes
18 |
19 | - ba81a76: Adds missing `uuid` field to `BaseEvent` type
20 |
21 | ## 0.0.6
22 |
23 | ### Patch Changes
24 |
25 | - ed1cbe5: Adds support for the `page` event type
26 |
27 | ## 0.0.5
28 |
29 | ### Patch Changes
30 |
31 | - bc72637: Adds automatic deduplication of events
32 |
33 | ## 0.0.4
34 |
35 | ### Patch Changes
36 |
37 | - ccde48d: Adds the ability to auto capture events (implicit tracking), adds support for private API calls (queries, events)
38 |
39 | ## 0.0.3
40 |
41 | ### Patch Changes
42 |
43 | - c75b1e2: Fixes an issue where userId was not persisted after calling identify
44 |
45 | ## 0.0.2
46 |
47 | ### Patch Changes
48 |
49 | - 00316c6: Initial npm release
50 | - 1c7f432: Initial publish
51 |
--------------------------------------------------------------------------------
/packages/analytics-plugin-trench/README.md:
--------------------------------------------------------------------------------
1 | # analytics-plugin-trench
2 |
3 | A plugin for [analytics.js](https://github.com/DavidWells/analytics) that sends events to [Trench](https://github.com/frigadehq/trench).
4 |
5 | ## Installation
6 |
7 | 1. `npm i analytics-plugin-trench`
8 | 2. In `analytics` init, add Trench in the plugins array. Example config:
9 |
10 | ```ts
11 | import analytics from 'analytics';
12 | import trench from 'analytics-plugin-trench';
13 |
14 | analytics({
15 | plugins: [trench()],
16 | });
17 | ```
18 |
--------------------------------------------------------------------------------
/packages/analytics-plugin-trench/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "analytics-plugin-trench",
3 | "version": "0.0.9",
4 | "description": "Trench plugin for Analytics",
5 | "main": "dist/index.js",
6 | "types": "dist/index.d.ts",
7 | "files": [
8 | "dist"
9 | ],
10 | "scripts": {
11 | "build": "tsc && dts-bundle-generator -o dist/index.d.ts src/index.ts",
12 | "format": "prettier --write \"src/**/*.ts\"",
13 | "lint": "tslint -p tsconfig.json -c tslint.json --fix",
14 | "prepare": "npm run build",
15 | "prepublishOnly": "npm run lint",
16 | "preversion": "npm run lint",
17 | "version": "npm run format && git add -A src",
18 | "postversion": "git push && git push --tags"
19 | },
20 | "repository": {
21 | "type": "git",
22 | "url": "git+https://github.com/frigadehq/trench.git"
23 | },
24 | "keywords": [
25 | "analytics",
26 | "trench"
27 | ],
28 | "author": "Frigade Inc.",
29 | "license": "MIT",
30 | "bugs": {
31 | "url": "https://github.com/frigadehq/trench/issues"
32 | },
33 | "homepage": "https://github.com/frigadehq/trench/packages/analytics-plugin-trench#readme",
34 | "devDependencies": {
35 | "@types/jest": "29.5.13",
36 | "@types/node-fetch": "^2.6.11",
37 | "dts-bundle-generator": "^9.5.1",
38 | "jest": "^29.5.0",
39 | "jest-config": "^29.3.1",
40 | "jest-environment-jsdom": "^29.4.1",
41 | "prettier": "^2.8.8",
42 | "ts-jest": "^29.2.5",
43 | "tslint": "6.1.3",
44 | "tslint-plugin-prettier": "2.3.0",
45 | "tsup": "^8.1.0",
46 | "typescript": "^5.0.4"
47 | },
48 | "dependencies": {
49 | "node-fetch": "^2.6.6"
50 | },
51 | "jest": {
52 | "rootDir": ".",
53 | "moduleFileExtensions": [
54 | "js",
55 | "json",
56 | "ts",
57 | "tsx"
58 | ],
59 | "transform": {
60 | "^.+\\.(t|j)sx*$": "ts-jest"
61 | },
62 | "collectCoverageFrom": [
63 | "**/*.(t|j)s"
64 | ]
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/packages/analytics-plugin-trench/src/index.ts:
--------------------------------------------------------------------------------
1 | import fetch from 'node-fetch';
2 |
3 | export type TrenchConfig = {
4 | /**
5 | * The public API key.
6 | */
7 | publicApiKey: string;
8 | /**
9 | * Whether to enable the plugin.
10 | */
11 | enabled?: boolean;
12 | /**
13 | * The Trench API URL. E.g. https://api.trench.dev
14 | */
15 | serverUrl: string;
16 | /**
17 | * Whether to enable event batching. When enabled, events will be batched together
18 | * and sent periodically or when batch size is reached. Defaults to false.
19 | */
20 | batchingEnabled?: boolean;
21 | /**
22 | * Maximum number of events to collect before sending a batch. Default is 100.
23 | * Only applies when batchingEnabled is true.
24 | */
25 | batchSize?: number;
26 | /**
27 | * Maximum time in milliseconds to wait before sending a batch. Default is 5000ms.
28 | * Only applies when batchingEnabled is true.
29 | */
30 | batchTimeout?: number;
31 | };
32 |
33 | export interface BaseEvent {
34 | uuid?: string;
35 | anonymousId?: string;
36 | context?: {
37 | active?: boolean;
38 | app?: {
39 | name?: string;
40 | version?: string;
41 | build?: string;
42 | namespace?: string;
43 | };
44 | campaign?: {
45 | name?: string;
46 | source?: string;
47 | medium?: string;
48 | term?: string;
49 | content?: string;
50 | };
51 | device?: {
52 | id?: string;
53 | advertisingId?: string;
54 | adTrackingEnabled?: boolean;
55 | manufacturer?: string;
56 | model?: string;
57 | name?: string;
58 | type?: string;
59 | token?: string;
60 | };
61 | ip?: string;
62 | library?: {
63 | name?: string;
64 | version?: string;
65 | };
66 | locale?: string;
67 | network?: {
68 | bluetooth?: boolean;
69 | carrier?: string;
70 | cellular?: boolean;
71 | wifi?: boolean;
72 | };
73 | os?: {
74 | name?: string;
75 | version?: string;
76 | };
77 | page?: {
78 | path?: string;
79 | referrer?: string;
80 | search?: string;
81 | title?: string;
82 | url?: string;
83 | };
84 | referrer?: {
85 | id?: string;
86 | type?: string;
87 | };
88 | screen?: {
89 | width?: number;
90 | height?: number;
91 | density?: number;
92 | };
93 | groupId?: string;
94 | timezone?: string;
95 | userAgent?: string;
96 | userAgentData?: {
97 | brands?: {
98 | brand?: string;
99 | version?: string;
100 | }[];
101 | mobile?: boolean;
102 | platform?: string;
103 | };
104 | };
105 | integrations?: {
106 | All?: boolean;
107 | Mixpanel?: boolean;
108 | Salesforce?: boolean;
109 | };
110 | event?: string;
111 | messageId?: string;
112 | receivedAt?: string;
113 | sentAt?: string;
114 | timestamp?: string;
115 | type: 'page' | 'track' | 'identify' | 'group';
116 | userId?: string;
117 | groupId?: string;
118 | properties?: {
119 | [key: string]: any;
120 | };
121 | traits?: {
122 | [key: string]: any;
123 | };
124 | instanceId?: string;
125 | }
126 |
127 | const KEY_ANONYMOUS_ID = 'anonymousId';
128 | const KEY_TRAITS = 'traits';
129 | const DEFAULT_BATCH_SIZE = 100;
130 | const DEFAULT_BATCH_TIMEOUT = 5000;
131 |
132 | export function trench(config: TrenchConfig) {
133 | const globalPrefix = '__trench__';
134 | let isTrenchLoaded = false;
135 | let anonymousId: string | undefined;
136 | let currentUserId: string | undefined;
137 | let eventBatch: BaseEvent[] = [];
138 | let batchTimeout: NodeJS.Timeout | null = null;
139 |
140 | const batchSize = config.batchSize || DEFAULT_BATCH_SIZE;
141 | const batchTimeoutMs = config.batchTimeout || DEFAULT_BATCH_TIMEOUT;
142 |
143 | function setGlobalValue(key: string, value: any): void {
144 | const prefixedKey = `${globalPrefix}${key}`;
145 | if (typeof globalThis !== 'undefined') {
146 | (globalThis as any)[prefixedKey] = value;
147 | } else if (typeof window !== 'undefined') {
148 | (window as any)[prefixedKey] = value;
149 | } else if (typeof global !== 'undefined') {
150 | (global as any)[prefixedKey] = value;
151 | }
152 | }
153 |
154 | function getGlobalValue(key: string): T | undefined {
155 | const prefixedKey = `${globalPrefix}${key}`;
156 | if (typeof globalThis !== 'undefined') {
157 | return (globalThis as any)[prefixedKey] as T;
158 | } else if (typeof window !== 'undefined') {
159 | return (window as any)[prefixedKey] as T;
160 | } else if (typeof global !== 'undefined') {
161 | return (global as any)[prefixedKey] as T;
162 | }
163 | return undefined;
164 | }
165 |
166 | function setCurrentUserId(userId: string): void {
167 | currentUserId = userId;
168 | }
169 |
170 | function getCurrentUserId(): string | undefined {
171 | return currentUserId;
172 | }
173 |
174 | function getContext(): Record | undefined {
175 | if (getGlobalValue(KEY_TRAITS)) {
176 | return {
177 | traits: getGlobalValue(KEY_TRAITS),
178 | };
179 | }
180 | return undefined;
181 | }
182 |
183 | /* tslint:disable */
184 | function generateAnonymousId(): string {
185 | return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {
186 | const r = (Math.random() * 16) | 0,
187 | v = c === 'x' ? r : (r & 0x3) | 0x8;
188 | return v.toString(16);
189 | });
190 | }
191 | /* tslint:enable */
192 |
193 | function getAnonymousId(): string {
194 | if (typeof window !== 'undefined' && window.localStorage) {
195 | let storedAnonymousId = localStorage.getItem(KEY_ANONYMOUS_ID);
196 | if (!storedAnonymousId) {
197 | storedAnonymousId = generateAnonymousId();
198 | localStorage.setItem(KEY_ANONYMOUS_ID, storedAnonymousId);
199 | }
200 | return storedAnonymousId;
201 | } else {
202 | if (!anonymousId) {
203 | anonymousId = generateAnonymousId();
204 | }
205 | return anonymousId;
206 | }
207 | }
208 |
209 | async function flushEventBatch(): Promise {
210 | if (eventBatch.length === 0) return;
211 |
212 | const eventsToSend = [...eventBatch];
213 | eventBatch = [];
214 |
215 | if (batchTimeout) {
216 | clearTimeout(batchTimeout);
217 | batchTimeout = null;
218 | }
219 |
220 | await sendEvents(eventsToSend);
221 | }
222 |
223 | async function queueEvent(event: BaseEvent): Promise {
224 | if (config.enabled === false) {
225 | return;
226 | }
227 |
228 | if (!config.batchingEnabled) {
229 | await sendEvents([event]);
230 | return;
231 | }
232 |
233 | eventBatch.push(event);
234 |
235 | if (eventBatch.length >= batchSize) {
236 | await flushEventBatch();
237 | } else if (!batchTimeout) {
238 | batchTimeout = setTimeout(() => flushEventBatch(), batchTimeoutMs);
239 | }
240 | }
241 |
242 | async function sendEvents(events: BaseEvent[]): Promise {
243 | if (config.enabled === false) {
244 | return;
245 | }
246 |
247 | await fetch(`${removeTrailingSlash(config.serverUrl)}/events`, {
248 | method: 'POST',
249 | headers: {
250 | 'Content-Type': 'application/json',
251 | Authorization: `Bearer ${config.publicApiKey}`,
252 | },
253 | body: JSON.stringify({ events }),
254 | });
255 | }
256 |
257 | return {
258 | name: 'trench',
259 |
260 | initialize: (): void => {
261 | if (config.enabled !== false) {
262 | isTrenchLoaded = true;
263 | }
264 | },
265 |
266 | track: async ({ payload }: { payload: BaseEvent }): Promise => {
267 | if (config.enabled === false) {
268 | return;
269 | }
270 |
271 | await queueEvent({
272 | anonymousId: payload.userId ? undefined : getAnonymousId(),
273 | userId: payload.userId ?? getAnonymousId(),
274 | event: payload.event,
275 | properties: payload.properties,
276 | context: getContext(),
277 | type: 'track',
278 | });
279 | },
280 |
281 | page: async ({ payload }: { payload: BaseEvent }): Promise => {
282 | if (config.enabled === false) {
283 | return;
284 | }
285 |
286 | await queueEvent({
287 | anonymousId: payload.userId ? undefined : getAnonymousId(),
288 | userId: payload.userId ?? getAnonymousId(),
289 | event: '$pageview',
290 | properties: payload.properties,
291 | context: getContext(),
292 | type: 'page',
293 | });
294 | },
295 |
296 | identify: async ({
297 | payload,
298 | }: {
299 | payload: {
300 | userId: string;
301 | traits?: Record;
302 | };
303 | }): Promise => {
304 | if (config.enabled === false) {
305 | return;
306 | }
307 |
308 | const { userId } = payload;
309 |
310 | setCurrentUserId(userId);
311 |
312 | if (userId) {
313 | const traits = payload?.traits ?? {};
314 |
315 | setGlobalValue(KEY_TRAITS, traits);
316 |
317 | await queueEvent({
318 | anonymousId: getAnonymousId(),
319 | userId: payload.userId ?? getAnonymousId(),
320 | event: 'identify',
321 | traits,
322 | type: 'identify',
323 | });
324 | }
325 | },
326 |
327 | loaded: (): boolean => {
328 | return isTrenchLoaded;
329 | },
330 |
331 | // Custom Trench's functions to expose to analytics instance
332 | methods: {
333 | group: async (groupId: string, traits?: Record): Promise => {
334 | if (config.enabled === false) {
335 | return;
336 | }
337 |
338 | if (groupId) {
339 | await queueEvent({
340 | userId: getCurrentUserId() ?? getAnonymousId(),
341 | groupId,
342 | event: 'group',
343 | traits,
344 | type: 'group',
345 | });
346 | }
347 | },
348 | },
349 | };
350 | }
351 |
352 | function removeTrailingSlash(url: string): string {
353 | return url.endsWith('/') ? url.slice(0, -1) : url;
354 | }
355 |
--------------------------------------------------------------------------------
/packages/analytics-plugin-trench/test/analytics-plugin-trench.test.ts:
--------------------------------------------------------------------------------
1 | import { trench } from '../src';
2 | import fetch from 'node-fetch';
3 |
4 | jest.mock('node-fetch');
5 |
6 | const mockFetch = fetch as jest.MockedFunction;
7 |
8 | describe('analytics-plugin-trench', () => {
9 | const config = {
10 | publicApiKey: 'test-key',
11 | serverUrl: 'https://api.test.com',
12 | enabled: true,
13 | };
14 |
15 | beforeEach(() => {
16 | mockFetch.mockClear();
17 | mockFetch.mockResolvedValue({} as any);
18 | });
19 |
20 | it('should initialize correctly', () => {
21 | const plugin = trench(config);
22 | plugin.initialize();
23 | expect(plugin.loaded()).toBe(true);
24 | });
25 |
26 | it('should not initialize when disabled', () => {
27 | const plugin = trench({ ...config, enabled: false });
28 | plugin.initialize();
29 | expect(plugin.loaded()).toBe(false);
30 | });
31 |
32 | it('should track events', async () => {
33 | const plugin = trench(config);
34 | const payload = {
35 | event: 'test_event',
36 | properties: { foo: 'bar' },
37 | type: 'track' as const,
38 | };
39 |
40 | await plugin.track({ payload });
41 |
42 | expect(mockFetch).toHaveBeenCalledWith('https://api.test.com/events', {
43 | method: 'POST',
44 | headers: {
45 | 'Content-Type': 'application/json',
46 | Authorization: 'Bearer test-key',
47 | },
48 | body: expect.stringContaining('test_event'),
49 | });
50 | });
51 |
52 | it('should track page views', async () => {
53 | const plugin = trench(config);
54 | const payload = {
55 | properties: { path: '/test' },
56 | type: 'page' as const,
57 | };
58 |
59 | await plugin.page({ payload });
60 |
61 | expect(mockFetch).toHaveBeenCalledWith('https://api.test.com/events', {
62 | method: 'POST',
63 | headers: {
64 | 'Content-Type': 'application/json',
65 | Authorization: 'Bearer test-key',
66 | },
67 | body: expect.stringContaining('$pageview'),
68 | });
69 | });
70 |
71 | it('should identify users', async () => {
72 | const plugin = trench(config);
73 | const payload = {
74 | userId: 'test-user',
75 | traits: { name: 'Test User' },
76 | };
77 |
78 | await plugin.identify({ payload });
79 |
80 | expect(mockFetch).toHaveBeenCalledWith('https://api.test.com/events', {
81 | method: 'POST',
82 | headers: {
83 | 'Content-Type': 'application/json',
84 | Authorization: 'Bearer test-key',
85 | },
86 | body: expect.stringContaining('identify'),
87 | });
88 |
89 | expect(mockFetch).toHaveBeenCalledWith('https://api.test.com/events', {
90 | method: 'POST',
91 | headers: {
92 | 'Content-Type': 'application/json',
93 | Authorization: 'Bearer test-key',
94 | },
95 | body: expect.stringContaining('"traits":{"name":"Test User"}'),
96 | });
97 | });
98 |
99 | it('should handle group assignments', async () => {
100 | const plugin = trench(config);
101 | const groupId = 'test-group';
102 | const traits = { name: 'Test Group' };
103 |
104 | await plugin.methods.group(groupId, traits);
105 |
106 | expect(mockFetch).toHaveBeenCalledWith('https://api.test.com/events', {
107 | method: 'POST',
108 | headers: {
109 | 'Content-Type': 'application/json',
110 | Authorization: 'Bearer test-key',
111 | },
112 | body: expect.stringContaining('group'),
113 | });
114 |
115 | expect(mockFetch).toHaveBeenCalledWith('https://api.test.com/events', {
116 | method: 'POST',
117 | headers: {
118 | 'Content-Type': 'application/json',
119 | Authorization: 'Bearer test-key',
120 | },
121 | body: expect.stringContaining('"traits":{"name":"Test Group"}'),
122 | });
123 | });
124 |
125 | it('should not make requests when disabled', async () => {
126 | const plugin = trench({ ...config, enabled: false });
127 | const payload = {
128 | event: 'test_event',
129 | properties: { foo: 'bar' },
130 | type: 'track' as const,
131 | };
132 |
133 | await plugin.track({ payload });
134 | expect(mockFetch).not.toHaveBeenCalled();
135 | });
136 |
137 | it('should handle trailing slashes in serverUrl', async () => {
138 | const plugin = trench({
139 | ...config,
140 | serverUrl: 'https://api.test.com/',
141 | });
142 | const payload = {
143 | event: 'test_event',
144 | type: 'track' as const,
145 | };
146 |
147 | await plugin.track({ payload });
148 |
149 | expect(mockFetch).toHaveBeenCalledWith('https://api.test.com/events', expect.any(Object));
150 | });
151 |
152 | it('should not send duplicate events when batching is disabled', async () => {
153 | const plugin = trench({
154 | ...config,
155 | batchingEnabled: false,
156 | });
157 |
158 | const payload = {
159 | event: 'test_event',
160 | type: 'track' as const,
161 | };
162 |
163 | await plugin.track({ payload });
164 | await plugin.track({ payload }); // Send same event again
165 |
166 | expect(mockFetch).toHaveBeenCalledTimes(2); // Should only be called once
167 | });
168 |
169 | describe('event batching', () => {
170 | beforeEach(() => {
171 | jest.useFakeTimers();
172 | });
173 |
174 | afterEach(() => {
175 | jest.useRealTimers();
176 | });
177 |
178 | it('should batch events when batching is enabled', async () => {
179 | const plugin = trench({
180 | ...config,
181 | batchingEnabled: true,
182 | batchSize: 2,
183 | });
184 |
185 | const payload1 = {
186 | event: 'test_event_1',
187 | type: 'track' as const,
188 | };
189 |
190 | const payload2 = {
191 | event: 'test_event_2',
192 | type: 'track' as const,
193 | };
194 |
195 | await plugin.track({ payload: payload1 });
196 | expect(mockFetch).not.toHaveBeenCalled();
197 |
198 | await plugin.track({ payload: payload2 });
199 | expect(mockFetch).toHaveBeenCalledTimes(1);
200 | expect(mockFetch).toHaveBeenCalledWith('https://api.test.com/events', {
201 | method: 'POST',
202 | headers: {
203 | 'Content-Type': 'application/json',
204 | Authorization: 'Bearer test-key',
205 | },
206 | body: expect.stringContaining('"event":"test_event_1"'),
207 | });
208 | // @ts-ignore
209 | expect(mockFetch.mock.calls[0][1].body).toContain('"event":"test_event_2"');
210 | });
211 | it('should handle duplicate events in batches when interspersed with other events', async () => {
212 | const plugin = trench({
213 | ...config,
214 | batchingEnabled: true,
215 | batchSize: 3,
216 | });
217 |
218 | const duplicatePayload = {
219 | event: 'duplicate_event',
220 | type: 'track' as const,
221 | };
222 |
223 | const uniquePayload = {
224 | event: 'unique_event',
225 | type: 'track' as const,
226 | };
227 |
228 | await plugin.track({ payload: duplicatePayload });
229 | await plugin.track({ payload: uniquePayload });
230 | await plugin.track({ payload: duplicatePayload });
231 |
232 | expect(mockFetch).toHaveBeenCalledTimes(1);
233 |
234 | // @ts-ignore
235 | const requestBody = JSON.parse(mockFetch.mock.calls[0][1].body);
236 | expect(requestBody.events).toHaveLength(3);
237 | expect(requestBody.events[0].event).toBe('duplicate_event');
238 | expect(requestBody.events[1].event).toBe('unique_event');
239 | expect(requestBody.events[2].event).toBe('duplicate_event');
240 | });
241 |
242 | it('shoul not dedupe consecutive duplicate events in batch', async () => {
243 | const plugin = trench({
244 | ...config,
245 | batchingEnabled: true,
246 | batchSize: 2,
247 | });
248 |
249 | const duplicatePayload = {
250 | event: 'duplicate_event',
251 | type: 'track' as const,
252 | };
253 |
254 | // Send same event twice in a row
255 | await plugin.track({ payload: duplicatePayload });
256 | await plugin.track({ payload: duplicatePayload });
257 |
258 | expect(mockFetch).toHaveBeenCalledTimes(1);
259 |
260 | // @ts-ignore
261 | const requestBody = JSON.parse(mockFetch.mock.calls[0][1].body);
262 | expect(requestBody.events).toHaveLength(2); // Should dedupe to just 2 events
263 | expect(requestBody.events[0].event).toBe('duplicate_event'); // First duplicate
264 | expect(requestBody.events[1].event).toBe('duplicate_event'); // First duplicate
265 | });
266 |
267 | it('should flush batch after timeout', async () => {
268 | const plugin = trench({
269 | ...config,
270 | batchingEnabled: true,
271 | batchTimeout: 1000,
272 | });
273 |
274 | const payload = {
275 | event: 'test_event',
276 | type: 'track' as const,
277 | };
278 |
279 | await plugin.track({ payload });
280 | expect(mockFetch).not.toHaveBeenCalled();
281 |
282 | jest.advanceTimersByTime(1001);
283 |
284 | expect(mockFetch).toHaveBeenCalledTimes(1);
285 | expect(mockFetch).toHaveBeenCalledWith('https://api.test.com/events', {
286 | method: 'POST',
287 | headers: {
288 | 'Content-Type': 'application/json',
289 | Authorization: 'Bearer test-key',
290 | },
291 | body: expect.stringContaining('"event":"test_event"'),
292 | });
293 | });
294 |
295 | it('should not batch events when batching is disabled', async () => {
296 | const plugin = trench({
297 | ...config,
298 | batchingEnabled: false,
299 | });
300 |
301 | const payload = {
302 | event: 'test_event',
303 | type: 'track' as const,
304 | };
305 |
306 | await plugin.track({ payload });
307 | expect(mockFetch).toHaveBeenCalledTimes(1);
308 | });
309 | });
310 | });
311 |
--------------------------------------------------------------------------------
/packages/analytics-plugin-trench/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "target": "es2017",
4 | "module": "commonjs",
5 | "declaration": true,
6 | "outDir": "./dist",
7 | "rootDir": "./src",
8 | "strict": true
9 | },
10 | "include": ["src"],
11 | "exclude": ["node_modules"]
12 | }
13 |
--------------------------------------------------------------------------------
/packages/analytics-plugin-trench/tslint.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": ["tslint:latest"],
3 | "rulesDirectory": ["tslint-plugin-prettier"],
4 | "rules": {
5 | "prettier": true
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/packages/analytics-plugin-trench/tsup.config.ts:
--------------------------------------------------------------------------------
1 | import { defineConfig, Options } from 'tsup';
2 |
3 | const commonConfig: Options = {
4 | minify: true,
5 | dts: true, // Generate type declarations
6 | format: ['esm', 'cjs'],
7 | sourcemap: true, // Enable source maps for debugging
8 | clean: true, // Clean the output directory before building
9 | noExternal: [/(.*)/], // Bundle all dependencies, including local packages
10 | };
11 |
12 | export default defineConfig([
13 | {
14 | ...commonConfig,
15 | entry: ['src/index.ts'], // Main entry point of the package
16 | outDir: 'dist', // Output directory
17 | },
18 | ]);
19 |
--------------------------------------------------------------------------------
/packages/trench-js/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | /dist
3 | .DS_Store
4 | .idea
5 |
--------------------------------------------------------------------------------
/packages/trench-js/.prettierrc:
--------------------------------------------------------------------------------
1 | {
2 | "singleQuote": true,
3 | "trailingComma": "es5",
4 | "printWidth": 120,
5 | "endOfLine": "auto"
6 | }
7 |
--------------------------------------------------------------------------------
/packages/trench-js/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # trench-js
2 |
3 | ## 0.0.17
4 |
5 | ### Patch Changes
6 |
7 | - 9964a3f: Skip tracking clicks on events with no names
8 |
9 | ## 0.0.16
10 |
11 | ### Patch Changes
12 |
13 | - f286470: Enables batching as well as click tracking in autocapture mode
14 | - Updated dependencies [f286470]
15 | - analytics-plugin-trench@0.0.9
16 |
17 | ## 0.0.15
18 |
19 | ### Patch Changes
20 |
21 | - 66e98aa: Fixes an issue where `identify` is not properly setting the traits
22 | - Updated dependencies [66e98aa]
23 | - analytics-plugin-trench@0.0.8
24 |
25 | ## 0.0.14
26 |
27 | ### Patch Changes
28 |
29 | - 30910fe: Make all `properties` optional
30 |
31 | ## 0.0.13
32 |
33 | ### Patch Changes
34 |
35 | - 066265e: Bubble up errors from the backend to the frontend exception
36 |
37 | ## 0.0.12
38 |
39 | ### Patch Changes
40 |
41 | - ba81a76: Adds missing `uuid` field to `BaseEvent` type
42 | - 2306864: Adds missing typescript types
43 | - Updated dependencies [ba81a76]
44 | - analytics-plugin-trench@0.0.7
45 |
46 | ## 0.0.11
47 |
48 | ### Patch Changes
49 |
50 | - ed1cbe5: Adds support for the `page` event type
51 | - Updated dependencies [ed1cbe5]
52 | - analytics-plugin-trench@0.0.6
53 |
54 | ## 0.0.10
55 |
56 | ### Patch Changes
57 |
58 | - 2e4a39f: Adds `Trench` to the `globalThis` scope
59 |
60 | ## 0.0.9
61 |
62 | ### Patch Changes
63 |
64 | - bc72637: Adds automatic deduplication of events
65 | - Updated dependencies [bc72637]
66 | - analytics-plugin-trench@0.0.5
67 |
68 | ## 0.0.8
69 |
70 | ### Patch Changes
71 |
72 | - 90dbddb: Fixes a typescript bug in PaginatedQueryResponse
73 |
74 | ## 0.0.7
75 |
76 | ### Patch Changes
77 |
78 | - 4da2b7b: Disables automatic tracking of clicks due to high event volume
79 |
80 | ## 0.0.6
81 |
82 | ### Patch Changes
83 |
84 | - ccde48d: Adds the ability to auto capture events (implicit tracking), adds support for private API calls (queries, events)
85 | - Updated dependencies [ccde48d]
86 | - analytics-plugin-trench@0.0.4
87 |
88 | ## 0.0.5
89 |
90 | ### Patch Changes
91 |
92 | - e772aaa: Updated README
93 |
94 | ## 0.0.4
95 |
96 | ### Patch Changes
97 |
98 | - 4c31f10: Removes redundant logging and adds minified version
99 |
100 | ## 0.0.3
101 |
102 | ### Patch Changes
103 |
104 | - c75b1e2: Fixes an issue where userId was not persisted after calling identify
105 | - Updated dependencies [c75b1e2]
106 | - analytics-plugin-trench@0.0.3
107 |
108 | ## 0.0.2
109 |
110 | ### Patch Changes
111 |
112 | - 00316c6: Initial npm release
113 | - Updated dependencies [00316c6]
114 | - Updated dependencies [1c7f432]
115 | - analytics-plugin-trench@0.0.2
116 |
--------------------------------------------------------------------------------
/packages/trench-js/README.md:
--------------------------------------------------------------------------------
1 | # trench-js
2 |
3 | `trench-js` is a client library for both web and Node.js environments that allows you to send events to any Trench instance. This library is designed to be easy to use and integrate into your existing projects.
4 |
5 | ## Installation
6 |
7 | Install `trench-js` using your preferred package manager:
8 |
9 | ```bash
10 | npm install trench-js
11 | ```
12 |
13 | You can now initialize the client with your Trench instance URL and public API key:
14 |
15 | ```ts
16 | import Trench from 'trench-js';
17 |
18 | const trench = new Trench({
19 | serverUrl: 'https://trench.example.com',
20 | publicApiKey: 'your-public-api-key',
21 | });
22 | ```
23 |
24 | The client is built on top of [Analytics](https://github.com/DavidWells/analytics) and supports all of its [methods](https://github.com/DavidWells/analytics#usage).
25 |
26 | For example, you can identify a user like this:
27 |
28 | ```ts
29 | trench.identify('user-id', {
30 | email: 'test@example.com',
31 | });
32 | ```
33 |
34 | And track an event like this:
35 |
36 | ```ts
37 | trench.track('test-event', {
38 | test: 'test-value',
39 | });
40 | ```
41 |
42 | Or to record a page view:
43 |
44 | ```ts
45 | trench.page();
46 | ```
47 |
48 | You can find the full documentation [here](https://docs.trench.dev/client).
49 |
--------------------------------------------------------------------------------
/packages/trench-js/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "trench-js",
3 | "version": "0.0.17",
4 | "description": "Trench JS client for browser and Node.js",
5 | "main": "./dist/index.js",
6 | "types": "./dist/index.d.ts",
7 | "files": [
8 | "./dist"
9 | ],
10 | "exports": {
11 | ".": {
12 | "require": "./dist/index.cjs",
13 | "import": "./dist/index.js"
14 | }
15 | },
16 | "scripts": {
17 | "build": "tsc && esbuild src/index.ts --bundle --minify --target=chrome58 --minify-identifiers=false > dist/trench.min.js",
18 | "test": "jest"
19 | },
20 | "repository": {
21 | "type": "git",
22 | "url": "git+https://github.com/frigadehq/trench.git"
23 | },
24 | "keywords": [
25 | "analytics",
26 | "trench"
27 | ],
28 | "author": "Frigade Inc.",
29 | "license": "MIT",
30 | "bugs": {
31 | "url": "https://github.com/frigadehq/trench/issues"
32 | },
  "homepage": "https://github.com/frigadehq/trench/tree/main/packages/trench-js#readme",
34 | "devDependencies": {
35 | "@types/jest": "29.5.13",
36 | "esbuild": "^0.24.0",
37 | "jest": "^29.5.0",
38 | "jest-config": "^29.3.1",
39 | "jest-environment-jsdom": "^29.4.1",
40 | "prettier": "^2.8.8",
41 | "ts-jest": "^29.1.0",
42 | "tslint": "6.1.3",
43 | "tslint-plugin-prettier": "2.3.0",
44 | "tsup": "^8.1.0",
45 | "typescript": "^5.0.4"
46 | },
47 | "dependencies": {
48 | "analytics": "^0.8.14",
49 | "analytics-plugin-trench": "0.0.9"
50 | },
51 | "jest": {
52 | "rootDir": ".",
53 | "moduleFileExtensions": [
54 | "js",
55 | "json",
56 | "ts",
57 | "tsx"
58 | ],
59 | "transform": {
60 | "^.+\\.(t|j)sx*$": "ts-jest"
61 | },
62 | "collectCoverageFrom": [
63 | "**/*.(t|j)s"
64 | ]
65 | }
66 | }
67 |
--------------------------------------------------------------------------------
/packages/trench-js/src/index.ts:
--------------------------------------------------------------------------------
1 | import Analytics from 'analytics';
2 | import { trench, BaseEvent } from 'analytics-plugin-trench';
3 | import { PaginatedEventResponse, PaginatedQueryResponse, TrenchJSConfig } from './types';
4 |
5 | class Trench {
6 | private analytics: ReturnType;
7 | private config: TrenchJSConfig;
8 |
9 | constructor(config: TrenchJSConfig) {
10 | if (!config.serverUrl) {
11 | throw new Error('Trench serverUrl is required in the configuration.');
12 | }
13 |
14 | try {
15 | const url = new URL(config.serverUrl);
16 | } catch (error) {
17 | throw new Error(`Trench serverUrl '${config.serverUrl}' is not a valid URL.`, error);
18 | }
19 |
20 | if (!config.publicApiKey) {
21 | throw new Error('Trench publicApiKey is required.');
22 | }
23 |
24 | this.config = {
25 | ...config,
26 | serverUrl: this.removeTrailingSlash(config.serverUrl),
27 | };
28 | this.analytics = Analytics({
29 | app: 'trench-app',
30 | plugins: [trench(config)],
31 | });
32 |
33 | if (config.autoCaptureEvents) {
34 | if (config.batchingEnabled === undefined) {
35 | config.batchingEnabled = true;
36 | }
37 | this.page({});
38 | this.enableAutoCapture();
39 | }
40 | }
41 |
42 | private enableAutoCapture() {
43 | if (typeof window !== 'undefined') {
44 | let lastPage = '';
45 |
46 | const sendPageView = () => {
47 | const currentPage = window.location.href;
48 | if (currentPage !== lastPage) {
49 | this.page({});
50 | lastPage = currentPage;
51 | }
52 | };
53 |
54 | window.addEventListener('load', () => {
55 | sendPageView();
56 | });
57 |
58 | window.addEventListener('click', (event) => {
59 | const target = event.target as HTMLElement;
60 | const eventName = target.getAttribute('data-event-name') || 'click';
61 | function extractTextContent(element: HTMLElement): string | null {
62 | return (
63 | element.textContent?.trim() ||
64 | element.getAttribute('alt')?.trim() ||
65 | element.getAttribute('title')?.trim() ||
66 | null
67 | );
68 | }
69 |
70 | const textContent = extractTextContent(target);
71 | if (textContent && textContent.length < 30) {
72 | this.track(eventName, {
73 | tagName: target.tagName,
74 | id: target.id,
75 | className: target.className,
76 | textContent,
77 | });
78 | }
79 | });
80 |
81 | window.addEventListener('popstate', () => {
82 | sendPageView();
83 | });
84 |
85 | const originalPushState = history.pushState;
86 | history.pushState = function (...args) {
87 | originalPushState.apply(this, args);
88 | sendPageView();
89 | };
90 |
91 | const originalReplaceState = history.replaceState;
92 | history.replaceState = function (...args) {
93 | originalReplaceState.apply(this, args);
94 | sendPageView();
95 | };
96 | }
97 | }
98 |
99 | track(event: string, properties?: Record) {
100 | this.analytics.track(event, properties);
101 | }
102 |
103 | page(properties?: Record) {
104 | const mergedProperties = {
105 | referrer: typeof document !== 'undefined' ? document.referrer : undefined,
106 | userAgent: typeof navigator !== 'undefined' ? navigator.userAgent : undefined,
107 | ...properties,
108 | };
109 | this.analytics.page(mergedProperties);
110 | }
111 |
112 | identify(userId: string, traits?: Record) {
113 | this.analytics.identify(userId, traits);
114 | }
115 |
116 | group(groupId: string, traits?: Record) {
117 | // @ts-ignore
118 | this.analytics.plugins.trench.group(groupId, traits);
119 | }
120 | /**
121 | * Queries events from the Trench server.
122 | * Note: This method only works when a private API key is specified.
123 | * @param {object} queryParams - The query parameters to filter events.
124 | * @param {string} [queryParams.event] - The event name to filter by.
125 | * @param {string} [queryParams.userId] - The user ID to filter by.
126 | * @param {string} [queryParams.groupId] - The group ID to filter by.
127 | * @param {string} [queryParams.anonymousId] - The anonymous ID to filter by.
128 | * @param {string} [queryParams.instanceId] - The instance ID to filter by.
129 | * @param {string} [queryParams.startDate] - The start date to filter by.
130 | * @param {string} [queryParams.endDate] - The end date to filter by.
131 | * @param {number} [queryParams.limit] - The limit of records to return.
132 | * @param {number} [queryParams.offset] - The offset of records to return.
133 | * @param {string} [queryParams.orderByField] - The field to order by.
134 | * @param {string} [queryParams.orderByDirection] - The direction to order by. Available options: ASC, DESC.
135 | * @returns {Promise} - A promise that resolves to the queried events.
136 | */
137 | async getEvents(queryParams: {
138 | event?: string;
139 | userId?: string;
140 | groupId?: string;
141 | anonymousId?: string;
142 | instanceId?: string;
143 | startDate?: string;
144 | endDate?: string;
145 | limit?: number;
146 | offset?: number;
147 | orderByField?: string;
148 | orderByDirection?: 'ASC' | 'DESC';
149 | }): Promise {
150 | this.assertPrivateApiKey();
151 | const queryString = new URLSearchParams(queryParams as any).toString();
152 | const response = await fetch(`${this.config.serverUrl}/events?${queryString}`, {
153 | method: 'GET',
154 | headers: {
155 | Authorization: `Bearer ${this.config.privateApiKey}`,
156 | 'Content-Type': 'application/json',
157 | },
158 | });
159 |
160 | if (!response.ok) {
161 | if (response.status === 400) {
162 | const errorResponse = await response.json();
163 | throw new Error(errorResponse.message);
164 | }
165 | throw new Error('Failed to get events');
166 | }
167 | return response.json();
168 | }
169 |
170 | /**
171 | * Executes one or more raw SQL queries on the Trench server.
172 | * Note: This method only works when a private API key is specified.
173 | * @param {string[]} queries - The SQL queries to execute.
174 | * @returns {Promise} - A promise that resolves to the query results.
175 | */
176 | async executeQueries(queries: string[]): Promise {
177 | this.assertPrivateApiKey();
178 | const response = await fetch(`${this.config.serverUrl}/queries`, {
179 | method: 'POST',
180 | headers: {
181 | Authorization: `Bearer ${this.config.privateApiKey}`,
182 | 'Content-Type': 'application/json',
183 | },
184 | body: JSON.stringify({ queries }),
185 | });
186 |
187 | if (!response.ok) {
188 | if (response.status === 400) {
189 | const errorResponse = await response.json();
190 | throw new Error(errorResponse.message);
191 | }
192 | throw new Error('Failed to execute query');
193 | }
194 |
195 | return response.json();
196 | }
197 |
198 | loaded() {
199 | return true;
200 | }
201 |
202 | private assertPrivateApiKey() {
203 | if (!this.config.privateApiKey) {
204 | throw new Error('Trench privateApiKey is required to access private endpoints.');
205 | }
206 | }
207 |
208 | private removeTrailingSlash(url: string): string {
209 | return url.endsWith('/') ? url.slice(0, -1) : url;
210 | }
211 | }
212 |
export default Trench;

// Expose the constructor on the global scope so script-tag consumers can call `new Trench(...)`.
globalThis.Trench = Trench;

export type { BaseEvent, PaginatedEventResponse, PaginatedQueryResponse, TrenchJSConfig };
218 |
--------------------------------------------------------------------------------
/packages/trench-js/src/types.ts:
--------------------------------------------------------------------------------
1 | import { TrenchConfig, BaseEvent } from 'analytics-plugin-trench';
2 |
3 | export type TrenchJSConfig = TrenchConfig & {
4 | /**
5 | * Private API key for authentication. WARNING: This should only be used if you're using this library from a backend environment.
6 | */
7 | privateApiKey?: string;
8 | /**
9 | * Boolean flag to enable or disable auto capturing of events. This will automatically record pageviews and track events if set to true in a web environment.
10 | * Defaults to false.
11 | */
12 | autoCaptureEvents?: boolean;
13 | /**
14 | * The base URL of the Trench server.
15 | */
16 | serverUrl?: string;
17 | };
18 | export type PaginatedResponse = {
19 | results: T[];
20 | limit: number | null;
21 | offset: number | null;
22 | total: number | null;
23 | };
24 |
25 | export type PaginatedQueryResponse = PaginatedResponse;
26 |
27 | export type PaginatedEventResponse = PaginatedResponse;
28 |
--------------------------------------------------------------------------------
/packages/trench-js/test/trench-js.test.ts:
--------------------------------------------------------------------------------
1 | import Trench from '../src';
2 |
3 | describe('Trench Analytics', () => {
4 | function getConfig() {
5 | return {
6 | publicApiKey: 'public-d613ee4e-d803-4b02-9058-70aa4a04ff28',
7 | enabled: true,
8 | serverUrl: 'https://sandbox.trench.dev',
9 | };
10 | }
11 |
12 | test('should identify a user', async () => {
13 | const trench = new Trench(getConfig());
14 | await trench.identify('user123', { email: 'user@example.com' });
15 | // Assuming there's a way to verify the user was identified
16 | });
17 |
18 | test('should initialize analytics', () => {
19 | const trench = new Trench(getConfig());
20 | expect(trench.loaded()).toBe(true);
21 | });
22 |
23 | test('should track an event', async () => {
24 | const trench = new Trench(getConfig());
25 | await trench.track('test_event', { key: 'value' });
26 | // Assuming there's a way to verify the event was tracked
27 | });
28 |
29 | test('should track a page view', async () => {
30 | const trench = new Trench(getConfig());
31 | await trench.page({ title: 'Test Page' });
32 | // Assuming there's a way to verify the page view was tracked
33 | });
34 |
35 | test('should group a user', async () => {
36 | const trench = new Trench(getConfig());
37 | await trench.group('group123', { groupName: 'Test Group' });
38 | // Assuming there's a way to verify the group was tracked
39 | });
40 | });
41 |
--------------------------------------------------------------------------------
/packages/trench-js/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "outDir": "./dist",
4 | "rootDir": "./src",
5 | "module": "esnext",
6 | "types": ["jest"],
7 | "lib": ["dom", "esnext"],
8 | "target": "ES6",
9 | "declaration": true,
10 | // output .js.map sourcemap files for consumers
11 | "sourceMap": true,
12 | // match output dir to input dir. e.g. dist/index instead of dist/src/index
13 | // stricter type-checking for stronger correctness. Recommended by TS
14 | "strict": true,
15 | "strictNullChecks": false,
16 | // linter checks for common issues
17 | "noImplicitReturns": false,
18 | "noFallthroughCasesInSwitch": true,
19 | "noImplicitAny": false,
20 | // noUnused* overlap with @typescript-eslint/no-unused-vars, can disable if duplicative
21 | "noUnusedParameters": true,
22 | // use Node's module resolution algorithm, instead of the legacy TS one
23 | "moduleResolution": "node",
24 | // transpile JSX to React.createElement
25 | "jsx": "react",
26 | // interop between ESM and CJS modules. Recommended by TS
27 | "esModuleInterop": true,
28 | // significant perf increase by skipping checking .d.ts files, particularly those in node_modules. Recommended by TS
29 | "skipLibCheck": true,
30 | // error out if import and file system have a casing mismatch. Recommended by TS
31 | "forceConsistentCasingInFileNames": true,
32 | "alwaysStrict": false
33 | },
34 | "include": ["src"]
35 | }
36 |
--------------------------------------------------------------------------------
/packages/trench-js/tslint.json:
--------------------------------------------------------------------------------
1 | {
2 | "extends": ["tslint:latest"],
3 | "rulesDirectory": ["tslint-plugin-prettier"],
4 | "rules": {
5 | "prettier": true
6 | }
7 | }
8 |
--------------------------------------------------------------------------------
/packages/trench-js/tsup.config.ts:
--------------------------------------------------------------------------------
1 | import { defineConfig, Options } from 'tsup';
2 |
3 | const commonConfig: Options = {
4 | minify: true,
5 | dts: true, // Generate type declarations
6 | format: ['esm', 'cjs'],
7 | sourcemap: true, // Enable source maps for debugging
8 | clean: true, // Clean the output directory before building
9 | noExternal: [/(.*)/], // Bundle all dependencies, including local packages
10 | };
11 |
12 | export default defineConfig([
13 | {
14 | ...commonConfig,
15 | entry: ['src/index.ts'], // Main entry point of the package
16 | outDir: 'dist', // Output directory
17 | },
18 | ]);
19 |
--------------------------------------------------------------------------------
/pnpm-workspace.yaml:
--------------------------------------------------------------------------------
1 | packages:
2 | - 'packages/*'
3 | - 'apps/*'
4 | # exclude packages that are inside test directories
5 | - '!**/test/**'
6 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "esnext",
4 | "target": "ES6",
5 | "lib": ["DOM", "esnext", "ES6", "DOM.Iterable", "ScriptHost", "ES2016.Array.Include"],
6 | "declaration": true,
7 | // output .js.map sourcemap files for consumers
8 | "sourceMap": true,
9 | // match output dir to input dir. e.g. dist/index instead of dist/src/index
10 | // stricter type-checking for stronger correctness. Recommended by TS
11 | "strict": true,
12 | "strictNullChecks": false,
13 | // linter checks for common issues
14 | "noImplicitReturns": false,
15 | "noFallthroughCasesInSwitch": true,
16 | "noImplicitAny": false,
17 | // noUnused* overlap with @typescript-eslint/no-unused-vars, can disable if duplicative
18 | "noUnusedParameters": true,
19 | // use Node's module resolution algorithm, instead of the legacy TS one
20 | "moduleResolution": "node",
21 | // transpile JSX to React.createElement
22 | "jsx": "react-jsx",
23 | // interop between ESM and CJS modules. Recommended by TS
24 | "esModuleInterop": true,
25 | // significant perf increase by skipping checking .d.ts files, particularly those in node_modules. Recommended by TS
26 | "skipLibCheck": true,
27 | // error out if import and file system have a casing mismatch. Recommended by TS
28 | "forceConsistentCasingInFileNames": true,
29 | "alwaysStrict": false,
30 | "noEmit": true,
31 | "baseUrl": ".",
32 | "paths": {
33 | "shared-models": ["packages/shared-models/src"]
34 | }
35 | },
36 | "exclude": ["**/__tests__/*", "node_modules", "dist", "**/dist/**", "packages/*/dist/**"]
37 | }
38 |
--------------------------------------------------------------------------------
/turbo.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://turbo.build/schema.json",
3 | "pipeline": {
4 | "build": {
5 | "outputs": ["dist/**"],
6 | "dependsOn": ["^build"]
7 | },
8 | "test": {
9 | "dependsOn": ["build"]
10 | },
11 | "lint": {},
12 | "dev": {
13 | "cache": false,
14 | "persistent": true
15 | },
16 | "clean": {
17 | "cache": false
18 | },
19 | "copy-version-number": {
20 | "steps": ["pnpm copy-version-number"],
21 | "outputs": ["src/**"]
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------