├── .gitignore
├── .npmignore
├── LICENSE
├── README.md
├── TODO.md
├── archive
└── call-ai.ts
├── bun.lockb
├── package.json
├── src
├── apiKeyUtils.ts
├── app.ts
├── askAI.ts
├── askPrompt.ts
├── call-ai-claude.ts
├── call-ai-gpt.ts
├── call-fireworks.ts
├── edit-history.ts
├── edit-processor.ts
├── github-utils.ts
├── models.ts
├── process-files.ts
├── prompt.ts
├── repo-analyzer.ts
├── types.ts
└── verify-edits.ts
├── tests
└── call-ai.test.ts
├── tsconfig.json
└── tsup.config.js
/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | zig-cache
3 | packages/*/*.wasm
4 | *.o
5 | *.a
6 | profile.json
7 | dist
8 |
9 | mandark-history.json
10 |
11 | compiled-code.txt
12 |
13 | .env
14 | node_modules
15 | .envrc
16 | .swcrc
17 | yarn.lock
18 | *.tmp
19 | *.log
20 | *.out.js
21 | *.out.refresh.js
22 | **/package-lock.json
23 | build
24 | *.wat
25 | zig-out
26 | pnpm-lock.yaml
27 | README.md.template
28 | src/deps/zig-clap/example
29 | src/deps/zig-clap/README.md
30 | src/deps/zig-clap/.github
31 | src/deps/zig-clap/.gitattributes
32 | out
33 | outdir
34 |
35 | .trace
36 | cover
37 | coverage
38 | coverv
39 | *.trace
40 | github
41 | out.*
42 | out
43 | .parcel-cache
44 | esbuilddir
45 | *.bun
46 | parceldist
47 | esbuilddir
48 | outdir/
49 | outcss
50 | .next
51 | txt.js
52 | .idea
53 | .vscode/cpp*
54 | .vscode/clang*
55 |
56 | node_modules_*
57 | *.jsb
58 | *.zip
59 | bun-zigld
60 | bun-singlehtreaded
61 | bun-nomimalloc
62 | bun-mimalloc
63 | examples/lotta-modules/bun-yday
64 | examples/lotta-modules/bun-old
65 | examples/lotta-modules/bun-nofscache
66 |
67 | src/node-fallbacks/out/*
68 | src/node-fallbacks/node_modules
69 | sign.json
70 | release/
71 | *.dmg
72 | sign.*.json
73 | packages/debug-*
74 | packages/bun-cli/postinstall.js
75 | packages/bun-*/bun
76 | packages/bun-*/bun-profile
77 | packages/bun-*/debug-bun
78 | packages/bun-*/*.o
79 | packages/bun-cli/postinstall.js
80 |
81 | packages/bun-cli/bin/*
82 | bun-test-scratch
83 | misctools/fetch
84 |
85 | src/deps/libiconv
86 | src/deps/openssl
87 | src/tests.zig
88 | *.blob
89 | src/deps/s2n-tls
90 | .npm
91 | .npm.gz
92 |
93 | bun-binary
94 |
95 | src/deps/PLCrashReporter/
96 |
97 | *.dSYM
98 | *.crash
99 | misctools/sha
100 | packages/bun-wasm/*.mjs
101 | packages/bun-wasm/*.cjs
102 | packages/bun-wasm/*.map
103 | packages/bun-wasm/*.js
104 | packages/bun-wasm/*.d.ts
105 | packages/bun-wasm/*.d.cts
106 | packages/bun-wasm/*.d.mts
107 | *.bc
108 |
109 | src/fallback.version
110 | src/runtime.version
111 | *.sqlite
112 | *.database
113 | *.db
114 | misctools/machbench
115 | *.big
116 | .eslintcache
117 |
118 | /bun-webkit
119 |
120 | src/deps/c-ares/build
121 | src/bun.js/bindings-obj
122 | src/bun.js/debug-bindings-obj
123 |
124 | failing-tests.txt
125 | test.txt
126 | myscript.sh
127 |
128 | cold-jsc-start
129 | cold-jsc-start.d
130 |
131 | bun.lockb
132 |
133 | /testdir
134 | /test.ts
135 | /test.js
136 |
137 | src/js/out/modules*
138 | src/js/out/functions*
139 | src/js/out/tmp
140 | src/js/out/DebugPath.h
141 |
142 | make-dev-stats.csv
143 |
144 | .uuid
145 | tsconfig.tsbuildinfo
146 |
147 | test/js/bun/glob/fixtures
148 | *.lib
149 | *.pdb
150 | CMakeFiles
151 | build.ninja
152 | .ninja_deps
153 | .ninja_log
154 | CMakeCache.txt
155 | cmake_install.cmake
156 | compile_commands.json
157 |
158 | *.lib
159 | x64
160 | **/*.vcxproj*
161 | **/*.sln*
162 | **/*.dir
163 | **/*.pdb
164 |
165 | /.webkit-cache
166 | /.cache
167 | /src/deps/libuv
168 | /build-*/
169 | /kcov-out
170 |
171 | .vs
172 |
173 | **/.verdaccio-db.json
174 | /test-report.md
175 | /test-report.json
176 |
177 | ########################### MY STUFF
178 |
179 | test
180 |
181 | tests/test-code.txt
182 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | archive
2 | tests
3 |
4 | .DS_Store
5 | zig-cache
6 | packages/*/*.wasm
7 | *.o
8 | *.a
9 | profile.json
10 |
11 | .env
12 | node_modules
13 | .envrc
14 | .swcrc
15 | yarn.lock
16 | *.tmp
17 | *.log
18 | *.out.js
19 | *.out.refresh.js
20 | **/package-lock.json
21 | build
22 | *.wat
23 | zig-out
24 | pnpm-lock.yaml
25 | README.md.template
26 | src/deps/zig-clap/example
27 | src/deps/zig-clap/README.md
28 | src/deps/zig-clap/.github
29 | src/deps/zig-clap/.gitattributes
30 | out
31 | outdir
32 |
33 | .trace
34 | cover
35 | coverage
36 | coverv
37 | *.trace
38 | github
39 | out.*
40 | out
41 | .parcel-cache
42 | esbuilddir
43 | *.bun
44 | parceldist
45 | esbuilddir
46 | outdir/
47 | outcss
48 | .next
49 | txt.js
50 | .idea
51 | .vscode/cpp*
52 | .vscode/clang*
53 |
54 | node_modules_*
55 | *.jsb
56 | *.zip
57 | bun-zigld
58 | bun-singlehtreaded
59 | bun-nomimalloc
60 | bun-mimalloc
61 | examples/lotta-modules/bun-yday
62 | examples/lotta-modules/bun-old
63 | examples/lotta-modules/bun-nofscache
64 |
65 | src/node-fallbacks/out/*
66 | src/node-fallbacks/node_modules
67 | sign.json
68 | release/
69 | *.dmg
70 | sign.*.json
71 | packages/debug-*
72 | packages/bun-cli/postinstall.js
73 | packages/bun-*/bun
74 | packages/bun-*/bun-profile
75 | packages/bun-*/debug-bun
76 | packages/bun-*/*.o
77 | packages/bun-cli/postinstall.js
78 |
79 | packages/bun-cli/bin/*
80 | bun-test-scratch
81 | misctools/fetch
82 |
83 | src/deps/libiconv
84 | src/deps/openssl
85 | src/tests.zig
86 | *.blob
87 | src/deps/s2n-tls
88 | .npm
89 | .npm.gz
90 |
91 | bun-binary
92 |
93 | src/deps/PLCrashReporter/
94 |
95 | *.dSYM
96 | *.crash
97 | misctools/sha
98 | packages/bun-wasm/*.mjs
99 | packages/bun-wasm/*.cjs
100 | packages/bun-wasm/*.map
101 | packages/bun-wasm/*.js
102 | packages/bun-wasm/*.d.ts
103 | packages/bun-wasm/*.d.cts
104 | packages/bun-wasm/*.d.mts
105 | *.bc
106 |
107 | src/fallback.version
108 | src/runtime.version
109 | *.sqlite
110 | *.database
111 | *.db
112 | misctools/machbench
113 | *.big
114 | .eslintcache
115 |
116 | /bun-webkit
117 |
118 | src/deps/c-ares/build
119 | src/bun.js/bindings-obj
120 | src/bun.js/debug-bindings-obj
121 |
122 | failing-tests.txt
123 | test.txt
124 | myscript.sh
125 |
126 | cold-jsc-start
127 | cold-jsc-start.d
128 |
129 | /testdir
130 | /test.ts
131 | /test.js
132 |
133 | src/js/out/modules*
134 | src/js/out/functions*
135 | src/js/out/tmp
136 | src/js/out/DebugPath.h
137 |
138 | make-dev-stats.csv
139 |
140 | .uuid
141 | tsconfig.tsbuildinfo
142 |
143 | test/js/bun/glob/fixtures
144 | *.lib
145 | *.pdb
146 | CMakeFiles
147 | build.ninja
148 | .ninja_deps
149 | .ninja_log
150 | CMakeCache.txt
151 | cmake_install.cmake
152 | compile_commands.json
153 |
154 | *.lib
155 | x64
156 | **/*.vcxproj*
157 | **/*.sln*
158 | **/*.dir
159 | **/*.pdb
160 |
161 | /.webkit-cache
162 | /.cache
163 | /src/deps/libuv
164 | /build-*/
165 | /kcov-out
166 |
167 | .vs
168 |
169 | **/.verdaccio-db.json
170 | /test-report.md
171 | /test-report.json
172 |
173 | ########################### MY STUFF
174 |
175 | test
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | npx mandark <folder or file>
6 |
7 |
8 |
9 | Mandark is a bog-simple (~80kb) AI intern that can do a lot, including building himself.
10 |
11 | https://github.com/user-attachments/assets/f3aff778-0839-4bb2-9fe3-c8c6b98434a5
12 |
13 | ## Features
14 |
15 | - Run without installation
16 | - **Ask questions about code:** `npx mandark ask <github-url> ... "Your question here"`
17 | - **Copy code to clipboard:** `npx mandark copy <github-url> ...`
18 | - **Pipe code to another command:** `npx mandark pipe <github-url> ... | another-command`
19 | - Supports Claude-3.5 Sonnet, Haiku, GPT-4o-mini and GPT-4-turbo (now with llama405b from Fireworks)
20 | - Edit and create multiple files
21 | - Verify diffs from the command line
22 | - Install new packages as needed
23 | - Token and cost estimation **before execution**
24 | - Works with any codebase
25 |
26 | ## Usage
27 |
28 | Run mandark with:
29 |
30 | ```bash
31 | npx mandark folder1 file1 folder2 <-a> <-p>
32 | ```
33 |
34 | or
35 |
36 | ```bash
37 | npx mandark ask <github-url> ... "Your question here"
38 | npx mandark copy <github-url> ...
39 | npx mandark pipe <github-url> ... | another-command
40 | ```
41 |
42 | - `[folder1]`, `[file1]`, `[file2]`, etc.: Paths to individual files or folders you want to process
43 | - `<github-url> ...`: One or more GitHub repository URLs.
44 | - `[modelName]`: (Optional) Nickname of the AI model to use (e.g., 'sonnet35', 'haiku', '4omini', '4turbo', 'llama405b'). Defaults to the first model if not provided.
45 | - `-p`: Print the line-tagged compiled code to a file and exit. Useful if you want to copy-paste into other assistants.
46 | - `-c`: Copy the line-tagged compiled code to the clipboard and exit.
47 | - `-a`: Include import statements when processing files. This is skipped by default to save tokens.
48 |
49 | ### New Modes:
50 |
51 | - **`ask` mode:** Allows you to ask a question about one or more GitHub repositories. The response will be streamed to your console.
52 | ```bash
53 | npx mandark ask https://github.com/hrishioa/mandark "What does the verifyEdit function do?"
54 | npx mandark ask https://github.com/hrishioa/mandark https://github.com/vercel/next.js "What is the main purpose of the app.ts file in each of these repos?"
55 | ```
56 | - **`copy` mode:** Copies the combined, line-tagged code from one or more GitHub repositories to your clipboard.
57 | ```bash
58 | npx mandark copy https://github.com/hrishioa/mandark
59 | npx mandark copy https://github.com/hrishioa/mandark https://github.com/vercel/next.js
60 | ```
61 | - **`pipe` mode:** Pipes the combined, line-tagged code from one or more GitHub repositories to stdout, allowing you to use it with other commands.
62 | ```bash
63 | npx mandark pipe https://github.com/hrishioa/mandark | wc -l
64 | npx mandark pipe https://github.com/hrishioa/mandark https://github.com/vercel/next.js | grep "L1:"
65 | ```
66 |
67 | ## Contributing
68 |
69 | Contributions are welcome! Please feel free to submit a Pull Request. `TODO.md` is a good place to start.
70 |
--------------------------------------------------------------------------------
/TODO.md:
--------------------------------------------------------------------------------
1 | 1. Ask the user if we can install a new package once one is found, get confirmation and use bun
2 | 2. Consider reimplementing continue functionality for claude from the archive
3 |
--------------------------------------------------------------------------------
/archive/call-ai.ts:
--------------------------------------------------------------------------------
1 | // Implements continue on long responses for claude, working perfectly, just thought we'd leave it out for simplicity for now
2 |
3 | import Anthropic from "@anthropic-ai/sdk";
4 | import { z } from "zod";
5 | import {
6 | MessageCreateParams,
7 | MessageParam,
8 | } from "@anthropic-ai/sdk/resources/index.mjs";
9 | import { Readable, Transform } from "stream";
10 | import oboe from "oboe";
11 | import { taskPrompt } from "./prompt.ts";
12 |
13 | const anthropic = new Anthropic({
14 | apiKey: process.env.ANTHROPIC_API_KEY,
15 | });
16 |
17 | const EditTypeSchema = z.discriminatedUnion("type", [
18 | z.object({
19 | type: z.literal("addition"),
20 | atLine: z.number(),
21 | }),
22 | z.object({
23 | type: z.literal("replacement"),
24 | fromLineNumber: z.number(),
25 | toLineNumber: z.number(),
26 | }),
27 | z.object({
28 | type: z.literal("npm_install"),
29 | packageName: z.string(),
30 | }),
31 | ]);
32 |
33 | const EditSchema = z.object({
34 | explain: z.string(),
35 | filename: z.string(),
36 | type: EditTypeSchema,
37 | code: z.string(),
38 | });
39 |
40 | type Edit = z.infer<typeof EditSchema>;
41 |
42 | export async function* getAIEdits(
43 | fileContent: string,
44 | task: string,
45 | model: MessageCreateParams["model"],
46 | maxCalls: number,
47 | collectedJSON?: string
48 | ): AsyncGenerator<
49 | | { type: "edit"; edit: Edit }
50 | | { type: "error"; error: string }
51 | | {
52 | type: "alledits";
53 | edits: Edit[];
54 | }
55 | | {
56 | type: "continuing";
57 | callsLeft: number;
58 | },
59 | void,
60 | undefined
61 | > {
62 | const jsonStart = collectedJSON ?? "[";
63 |
64 | let messages: MessageParam[] = [
65 | {
66 | role: "user",
67 | content: taskPrompt(task),
68 | },
69 | {
70 | role: "assistant",
71 | content: jsonStart,
72 | },
73 | ];
74 |
75 | console.log("Getting edits...");
76 |
77 | const stream = await anthropic.messages.create({
78 | messages,
79 | model,
80 | max_tokens: 4096,
81 | stream: true,
82 | temperature: 0,
83 | system: `CODE:\n${fileContent}\n`,
84 | });
85 |
86 | const tokenStream = new Readable({
87 | read() {},
88 | });
89 |
90 | const jsonStream = new Transform({
91 | transform(chunk, encoding, callback) {
92 | this.push(chunk);
93 | callback();
94 | },
95 | });
96 |
97 | tokenStream.pipe(jsonStream);
98 |
99 | let fullJSON = jsonStart;
100 | let collectedEdits: Edit[] = [];
101 | let latestEdits: Edit[] = [];
102 |
103 | const parsePromise = new Promise<void>((resolve, reject) => {
104 | oboe(jsonStream)
105 | .node("!.*", (edit) => {
106 | try {
107 | const validatedEdit = EditSchema.parse(edit);
108 | // console.log("Valid edit object found:", validatedEdit);
109 | collectedEdits.push(validatedEdit);
110 | latestEdits.push(validatedEdit);
111 | } catch (error) {
112 | if (error instanceof z.ZodError) {
113 | console.warn("Invalid edit object encountered:", error.issues);
114 | }
115 | }
116 | })
117 | .done(() => {
118 | resolve();
119 | })
120 | .fail((error) => {
121 | console.error("JSON parsing error:", error);
122 | resolve();
123 | });
124 | });
125 |
126 | tokenStream.push(jsonStart);
127 |
128 | for await (const chunk of stream) {
129 | if (latestEdits.length > 0) {
130 | for (const edit of latestEdits) {
131 | yield { type: "edit", edit };
132 | }
133 | latestEdits = [];
134 | }
135 |
136 | if (
137 | chunk.type === "content_block_delta" &&
138 | chunk.delta?.type === "text_delta"
139 | ) {
140 | const text = chunk.delta.text;
141 | fullJSON += text;
142 | tokenStream.push(text);
143 | }
144 | }
145 |
146 | await parsePromise;
147 |
148 | try {
149 | const allEdits = JSON.parse(fullJSON);
150 | yield { type: "alledits", edits: allEdits };
151 | } catch (error) {
152 | // console.error("Failed to parse JSON:", error);
153 | if (collectedEdits.length > 0 && maxCalls > 0) {
154 | console.log("\n\n########### Will continue. ##########\n");
155 |
156 | const packetStream = await getAIEdits(
157 | fileContent,
158 | task,
159 | model,
160 | maxCalls - 1,
161 | fullJSON
162 | );
163 |
164 | for await (const packet of packetStream) {
165 | if (packet.type === "edit") {
166 | if (
167 | collectedEdits.find(
168 | (edit) =>
169 | edit.filename === packet.edit.filename &&
170 | JSON.stringify(edit.type) === JSON.stringify(packet.edit.type)
171 | )
172 | ) {
173 | console.warn("Duplicate edit found. Skipping...");
174 | continue;
175 | }
176 | collectedEdits.push(packet.edit);
177 | }
178 |
179 | yield packet;
180 | }
181 | } else {
182 | console.error("Failed to parse JSON:", error);
183 | yield { type: "error", error: "Failed to parse JSON." };
184 | }
185 | }
186 | }
187 |
--------------------------------------------------------------------------------
/bun.lockb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hrishioa/mandark/2a64e3b5a1f0c797c1f85c7490d34aaec1cb6b96/bun.lockb
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "mandark",
3 | "version": "0.2.0",
4 | "author": "Hrishi Olickel (https://olickel.com)",
5 | "description": "Super simple, easily modifiable AI Coder",
6 | "repository": {
7 | "type": "git",
8 | "url": "git+https://github.com/hrishioa/mandark.git"
9 | },
10 | "license": "Apache-2.0",
11 | "type": "module",
12 | "dependencies": {
13 | "@anthropic-ai/sdk": "^0.24.3",
14 | "@anthropic-ai/tokenizer": "^0.0.4",
15 | "chalk": "4.1.2",
16 | "clipboardy": "^4.0.0",
17 | "fast-glob": "^3.3.2",
18 | "inquirer": "^10.0.3",
19 | "oboe": "^2.1.5",
20 | "openai": "^4.52.7",
21 | "ora": "5.4.1",
22 | "zod": "^3.23.8"
23 | },
24 | "scripts": {
25 | "package": "tsup"
26 | },
27 | "files": [
28 | "dist"
29 | ],
30 | "engines": {
31 | "node": ">=18.17.0"
32 | },
33 | "main": "./dist/app.cjs",
34 | "bin": {
35 | "mandark": "dist/app.cjs"
36 | },
37 | "engineStrict": true,
38 | "devDependencies": {
39 | "@swc/core": "^1.7.0",
40 | "@types/bun": "^1.1.6",
41 | "@types/node": "^20.14.11",
42 | "tsup": "^8.2.0",
43 | "typescript": "^5.5.3"
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/src/apiKeyUtils.ts:
--------------------------------------------------------------------------------
1 | import chalk from "chalk";
2 | import { password } from "@inquirer/prompts";
3 |
4 | export function checkAPIKey(provider: string): boolean {
5 | const envVar = getEnvVarName(provider);
6 | return !!process.env[envVar];
7 | }
8 |
9 | export async function getAndSetAPIKey(provider: string): Promise<string> {
10 | const envVar = getEnvVarName(provider);
11 | if (!process.env[envVar]) {
12 | console.log(chalk.yellow(`\n${envVar} is not set in your environment.`));
13 | const apiKey = await password({
14 | message: `Please enter your ${
15 | provider.charAt(0).toUpperCase() + provider.slice(1)
16 | } API key:`,
17 | mask: "*",
18 | });
19 | process.env[envVar] = apiKey;
20 | }
21 | return process.env[envVar]!;
22 | }
23 |
24 | function getEnvVarName(provider: string): string {
25 | switch (provider) {
26 | case "anthropic":
27 | return "ANTHROPIC_API_KEY";
28 | case "openai":
29 | return "OPENAI_API_KEY";
30 | case "fireworks":
31 | return "FIREWORKS_API_KEY";
32 | default:
33 | throw new Error(`Unsupported provider: ${provider}`);
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/src/app.ts:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | import { askAI } from "./askAI";
3 | import { input, confirm, password } from "@inquirer/prompts";
4 | import chalk from "chalk";
5 | import { getAIEditsFromClaude } from "./call-ai-claude";
6 | import { getAIEditsFromGPT } from "./call-ai-gpt";
7 | import { processFiles } from "./process-files";
8 | import fs from "node:fs";
9 | import { EditProcessor } from "./edit-processor";
10 | import { countTokens } from "@anthropic-ai/tokenizer";
11 | import { taskPrompt } from "./prompt";
12 | import { models, preferredVerifierModel } from "./models";
13 | import { getAIEditsFromFireworks } from "./call-fireworks";
14 | import { verifyEditStream } from "./verify-edits";
15 | import { checkAPIKey, getAndSetAPIKey } from "./apiKeyUtils";
16 | import { revertLastChanges } from "./edit-history";
17 | import { extractGitHubUrl } from "./github-utils";
18 |
19 | function listAvailableModels() {
20 | console.log(
21 | "\nAvailable models:",
22 | models.map((model) => model.nickName).join(", ")
23 | );
24 | console.log(
25 | "\nYou can append the model nickname to the end of your command to use a specific model."
26 | );
27 | }
28 |
29 | async function checkAndSetAPIKey(selectedModel: (typeof models)[number]) {
30 | const provider = selectedModel.provider;
31 | const apiKeyPresent = checkAPIKey(provider);
32 |
33 | if (!apiKeyPresent) {
34 | await getAndSetAPIKey(provider);
35 | }
36 |
37 | console.log(chalk.green(`API key for ${provider} has been set.`));
38 | }
39 |
40 | function checkContextWindowOverflow(
41 | inputTokens: number,
42 | selectedModel: (typeof models)[number]
43 | ): { overflow: boolean; overflowTokens?: number; overflowPercentage?: number } {
44 | const availableTokens =
45 | selectedModel.contextWindow - selectedModel.outputLength;
46 | if (inputTokens > availableTokens) {
47 | const overflowTokens = inputTokens - availableTokens;
48 | const overflowPercentage = (overflowTokens / availableTokens) * 100;
49 | return { overflow: true, overflowTokens, overflowPercentage };
50 | }
51 | return { overflow: false };
52 | }
53 |
54 | function displayHelp() {
55 | const helpText = `
56 | Mandark - Super simple, easily modifiable AI Coder
57 |
58 | Usage:
59 | npx mandark <file(s)/folder(s)> [options] [model-nickname]
60 | npx mandark ask <github-url> ... "Your question here"
61 | npx mandark pipe <github-url> ... | another-command
62 | npx mandark copy <github-url> ...
63 |
64 | Options:
65 | --include-imports, -a Include import statements in the code
66 | --print, -p Save processed code to compiled-code.txt and exit
67 | --copy, -c Copy processed code to clipboard and exit
68 | --no-line-numbers, -n Don't add line numbers to the code
69 | --pattern= Specify the glob pattern for file types (default: **/*.{ts,tsx,js,jsx,mjs,cjs,py,rs,go,c,cpp,h,hpp,java,rb,php,cs,swift,kt,scala,sh,md,json,yaml,yml,html,css,scss,less,txt})
70 | --help, -h Show this help message
71 |
72 | Models:
73 | `;
74 | console.log(helpText);
75 | listAvailableModels();
76 | }
77 |
78 | async function main() {
79 | let inputs = process.argv.slice(2);
80 |
81 | // Show help if no arguments or help flag is present
82 | if (
83 | inputs.length === 0 ||
84 | inputs.includes("--help") ||
85 | inputs.includes("-h")
86 | ) {
87 | displayHelp();
88 | return;
89 | }
90 |
91 | if (inputs[0] === "revert") {
92 | revertLastChanges();
93 | return;
94 | }
95 |
96 | let includeImports = false;
97 | let printCodeAndExit = false;
98 | let copyToClipboard = false;
99 | let noLineNumbers = false;
100 | let filePattern: string | undefined = undefined;
101 |
102 | // Parse special arguments
103 | inputs = inputs.filter((input) => {
104 | if (input === "--include-imports" || input === "-a") {
105 | includeImports = true;
106 | return false;
107 | }
108 | if (input === "--print" || input === "-p") {
109 | printCodeAndExit = true;
110 | return false;
111 | }
112 | if (input === "--copy" || input === "-c") {
113 | copyToClipboard = true;
114 | return false;
115 | }
116 | if (input === "--no-line-numbers" || input === "-n") {
117 | noLineNumbers = true;
118 | return false;
119 | }
120 | if (input.startsWith("--pattern=")) {
121 | filePattern = input.slice("--pattern=".length);
122 | return false;
123 | }
124 | if (input === "--help" || input === "-h") {
125 | displayHelp();
126 | process.exit(0);
127 | }
128 | return true;
129 | });
130 |
131 | // Handle new modes: ask, copy, pipe
132 | if (inputs[0] === "ask") {
133 | const githubUrls = inputs.slice(1).filter(extractGitHubUrl);
134 | const question = inputs.slice(githubUrls.length + 1).join(" ");
135 |
136 | if (githubUrls.length === 0 || !question) {
137 | console.error(
138 | 'Usage: npx mandark ask ... "Your question here"'
139 | );
140 | process.exit(1);
141 | }
142 | let combinedCode = "";
143 | for (const url of githubUrls) {
144 | const processedFiles = await processFiles(
145 | [url],
146 | includeImports,
147 | noLineNumbers,
148 | filePattern
149 | );
150 | combinedCode += processedFiles.code;
151 | }
152 |
153 | const selectedModel = models[0]; // Default model for ask
154 | await checkAndSetAPIKey(selectedModel);
155 |
156 | const answerStream = askAI(
157 | combinedCode,
158 | question,
159 | selectedModel.name,
160 | selectedModel.provider
161 | );
162 | for await (const chunk of answerStream) {
163 | process.stdout.write(chunk);
164 | }
165 | console.log("\n");
166 | return;
167 | }
168 |
169 | if (inputs[0] === "copy") {
170 | const githubUrls = inputs.slice(1).filter(extractGitHubUrl);
171 | if (githubUrls.length === 0) {
172 | console.error("Usage: npx mandark copy ...");
173 | process.exit(1);
174 | }
175 | let combinedCode = "";
176 | for (const url of githubUrls) {
177 | const processedFiles = await processFiles(
178 | [url],
179 | includeImports,
180 | noLineNumbers,
181 | filePattern
182 | );
183 | combinedCode += processedFiles.code;
184 | }
185 | await import("clipboardy").then((clipboardy) =>
186 | clipboardy.default.writeSync(combinedCode)
187 | );
188 | console.log("Line tagged code copied to clipboard");
189 | return;
190 | }
191 |
192 | if (inputs[0] === "pipe") {
193 | const githubUrls = inputs.slice(1).filter(extractGitHubUrl);
194 | if (githubUrls.length === 0) {
195 | console.error(
196 | "Usage: npx mandark pipe ... | another-command"
197 | );
198 | process.exit(1);
199 | }
200 |
201 | let combinedCode = "";
202 | for (const url of githubUrls) {
203 | const processedFiles = await processFiles(
204 | [url],
205 | includeImports,
206 | noLineNumbers,
207 | filePattern
208 | );
209 | combinedCode += processedFiles.code;
210 | }
211 | process.stdout.write(combinedCode);
212 | return;
213 | }
214 |
215 | const modelNickname = inputs.pop()!;
216 | let selectedModel = models.find((model) => model.nickName === modelNickname);
217 |
218 | if (!selectedModel) {
219 | if (modelNickname) inputs.push(modelNickname);
220 | selectedModel = models[0];
221 | }
222 |
223 | if (inputs.length === 0) {
224 | console.error("Problem: No files or folders to process");
225 | process.exit(1);
226 | }
227 |
228 | const processedFiles = await processFiles(
229 | inputs,
230 | includeImports,
231 | noLineNumbers,
232 | filePattern
233 | );
234 |
235 | if (printCodeAndExit || copyToClipboard) {
236 | if (printCodeAndExit) {
237 | fs.writeFileSync("compiled-code.txt", processedFiles.code);
238 | console.log("Line tagged code saved to compiled-code.txt");
239 | }
240 | if (copyToClipboard) {
241 | await import("clipboardy").then((clipboardy) =>
242 | clipboardy.default.writeSync(processedFiles.code)
243 | );
244 | console.log("Line tagged code copied to clipboard");
245 | }
246 | process.exit(0);
247 | }
248 |
249 | console.log("\n\nWelcome to Mandark!");
250 |
251 | listAvailableModels();
252 |
253 | console.log(
254 | `Selected model: ${selectedModel.nickName} (${selectedModel.name} from ${selectedModel.provider})\n`
255 | );
256 |
257 | await checkAndSetAPIKey(selectedModel);
258 |
259 | const estimatedTokens = countTokens(
260 | processedFiles.code + taskPrompt("x".repeat(100))
261 | );
262 |
263 | const overflowCheck = checkContextWindowOverflow(
264 | estimatedTokens,
265 | selectedModel
266 | );
267 | if (overflowCheck.overflow) {
268 | console.log(
269 | chalk.yellow(
270 | `Warning: Input exceeds model's context window by ${
271 | overflowCheck.overflowTokens
272 | } tokens (${overflowCheck.overflowPercentage?.toFixed(2)}%).`
273 | )
274 | );
275 | const continueAnyway = await confirm({
276 | message:
277 | "Do you want to continue anyway? (This may result in incomplete processing)",
278 | default: false,
279 | });
280 | if (!continueAnyway) {
281 | console.log("Please reduce the input size and try again.");
282 | process.exit(0);
283 | }
284 | }
285 |
286 | const estimatedCosts =
287 | (estimatedTokens / 1000000) * selectedModel.inputCPM +
288 | selectedModel.outputCPM * (selectedModel.outputLength / 10000000);
289 |
290 | console.log(
291 | `Loaded ${
292 | processedFiles.count
293 | } files (${estimatedTokens} tokens). Estimated max cost: $${estimatedCosts.toFixed(
294 | 4
295 | )}`
296 | );
297 |
298 | const task = await input({
299 | message:
300 | "What do you need me to do? (Type 'ask' followed by your question to ask a question instead): ",
301 | validate: (input: string) => input.trim() !== "" || "Task cannot be empty",
302 | });
303 |
304 | if (task.toLowerCase().startsWith("ask ")) {
305 | const question = task.slice(4).trim();
306 | const answerStream = askAI(
307 | processedFiles.code,
308 | question,
309 | selectedModel.name,
310 | selectedModel.provider
311 | );
312 | for await (const chunk of answerStream) {
313 | process.stdout.write(chunk);
314 | }
315 | console.log("\n");
316 | } else {
317 | let editPacketStream;
318 | if (selectedModel.provider === "anthropic") {
319 | editPacketStream = await getAIEditsFromClaude(
320 | processedFiles.code,
321 | task,
322 | selectedModel.name as
323 | | "claude-3-5-sonnet-20240620"
324 | | "claude-3-haiku-20240307"
325 | );
326 | } else if (selectedModel.provider === "openai") {
327 | editPacketStream = await getAIEditsFromGPT(
328 | processedFiles.code,
329 | task,
330 | selectedModel.name
331 | );
332 | } else if (selectedModel.provider === "fireworks") {
333 | editPacketStream = await getAIEditsFromFireworks(
334 | processedFiles.code,
335 | task,
336 | selectedModel.name
337 | );
338 | } else {
339 | console.error(`Unsupported provider: ${selectedModel.provider}`);
340 | process.exit(1);
341 | }
342 |
343 | const editProcessor = new EditProcessor();
344 | const verifiedEditStream = await verifyEditStream(
345 | editPacketStream,
346 | checkAPIKey(preferredVerifierModel.provider)
347 | ? preferredVerifierModel.provider
348 | : selectedModel.provider
349 | );
350 | await editProcessor.processEditStream(verifiedEditStream);
351 | }
352 |
353 | console.log(
354 | chalk.cyan(
355 | "Leave a star if you like it! https://github.com/hrishioa/mandark"
356 | )
357 | );
358 | }
359 |
360 | main().catch(console.error);
361 |
--------------------------------------------------------------------------------
/src/askAI.ts:
--------------------------------------------------------------------------------
1 | import { Anthropic } from "@anthropic-ai/sdk";
2 | import OpenAI from "openai";
3 | import { askPrompt } from "./askPrompt";
4 | import { Readable } from "stream";
5 | import { models } from "./models";
6 |
7 | export async function* askAI(
8 | fileContent: string,
9 | question: string,
10 | model: string,
11 | provider: (typeof models)[number]["provider"]
12 | ): AsyncGenerator {
13 | if (provider === "anthropic") {
14 | const anthropic = new Anthropic();
15 | const stream = await anthropic.messages.create({
16 | messages: [{ role: "user", content: askPrompt(question) }],
17 | model: model,
18 | max_tokens: 4096,
19 | stream: true,
20 | system: `CODE:\n${fileContent}\n`,
21 | });
22 |
23 | for await (const chunk of stream) {
24 | if (
25 | chunk.type === "content_block_delta" &&
26 | chunk.delta?.type === "text_delta"
27 | ) {
28 | yield chunk.delta.text;
29 | }
30 | }
31 | } else if (provider === "openai") {
32 | const openai = new OpenAI();
33 | const stream = await openai.chat.completions.create({
34 | model: model,
35 | messages: [
36 | { role: "system", content: `CODE:\n${fileContent}\n` },
37 | { role: "user", content: askPrompt(question) },
38 | ],
39 | stream: true,
40 | });
41 |
42 | for await (const chunk of stream) {
43 | yield chunk.choices[0]?.delta?.content || "";
44 | }
45 | }
46 | }
47 |
--------------------------------------------------------------------------------
/src/askPrompt.ts:
--------------------------------------------------------------------------------
/**
 * Build the user-turn prompt for "ask" mode: the model is told to answer
 * from the provided code (sent separately as the system message) and to
 * format any snippets as `file:line` fenced blocks between `===` markers.
 */
// prettier-ignore
export const askPrompt = (question: string) =>
`Use the provided code to answer this question. Answer succinctly and provide code snippets if needed.

Use this format for code snippets:

===
filePath.ts:123
\`\`\`typescript
// code goes here
\`\`\`
===

Question: ${question}
`;
16 |
--------------------------------------------------------------------------------
/src/call-ai-claude.ts:
--------------------------------------------------------------------------------
1 | import Anthropic from "@anthropic-ai/sdk";
2 | import { z } from "zod";
3 | import {
4 | MessageCreateParams,
5 | MessageParam,
6 | } from "@anthropic-ai/sdk/resources/index.mjs";
7 | import { Readable, Transform } from "stream";
8 | import oboe from "oboe";
9 | import { taskPrompt } from "./prompt";
10 | import { AIEditGenerator, EditSchema, Edits } from "./types";
11 | import ora, { Ora } from "ora";
12 |
13 | export async function* getAIEditsFromClaude(
14 | fileContent: string,
15 | task: string,
16 | model: Extract<
17 | MessageCreateParams["model"],
18 | "claude-3-5-sonnet-20240620" | "claude-3-haiku-20240307"
19 | >
20 | ): AIEditGenerator {
21 | const anthropic = new Anthropic();
22 |
23 | const jsonStart = "[";
24 |
25 | let messages: MessageParam[] = [
26 | {
27 | role: "user",
28 | content:
29 | taskPrompt(task) +
30 | `\nRespond only with valid JSON array elements following the Edits typespec.`,
31 | },
32 | {
33 | role: "assistant",
34 | content: jsonStart,
35 | },
36 | ];
37 |
38 | console.log("\n\nGetting edits...");
39 |
40 | const tokens = model === "claude-3-5-sonnet-20240620" ? 8192 : 4096;
41 | const headers =
42 | model === "claude-3-5-sonnet-20240620"
43 | ? {
44 | "anthropic-beta": "max-tokens-3-5-sonnet-2024-07-15",
45 | }
46 | : {};
47 |
48 | const stream = await anthropic.messages.create(
49 | {
50 | messages,
51 | model,
52 | max_tokens: tokens,
53 | stream: true,
54 | temperature: 0,
55 | system: `CODE:\n${fileContent}\n`,
56 | },
57 | {
58 | headers,
59 | }
60 | );
61 |
62 | const tokenStream = new Readable({
63 | read() {},
64 | });
65 |
66 | const jsonStream = new Transform({
67 | transform(chunk, encoding, callback) {
68 | this.push(chunk);
69 | callback();
70 | },
71 | });
72 |
73 | tokenStream.pipe(jsonStream);
74 |
75 | let fullJSON = jsonStart;
76 | let collectedEdits: Edits = [];
77 | let latestEdits: Edits = [];
78 | let streamStatus: string = "notStarted";
79 | let codeStreamingSpinner: null | Ora = null;
80 | let codeTokens = 0;
81 | const codeStreamingMessage = `Streaming code...`;
82 |
83 | const parsePromise = new Promise((resolve, reject) => {
84 | oboe(jsonStream)
85 | .node("!.*", (edit) => {
86 | if (codeStreamingSpinner) {
87 | codeStreamingSpinner.stop();
88 | codeStreamingSpinner = null;
89 | }
90 |
91 | try {
92 | const validatedEdit = EditSchema.parse(edit);
93 | collectedEdits.push(validatedEdit);
94 | latestEdits.push(validatedEdit);
95 | } catch (error) {
96 | if (error instanceof z.ZodError) {
97 | console.warn("Invalid edit object encountered:", error.issues);
98 | }
99 | }
100 | })
101 | .node("!.*.filename", (filename: string) => {
102 | console.log("\nTo File: ", filename);
103 | })
104 | .path("!.*.explain", () => {
105 | streamStatus = "reasonEntered";
106 | })
107 | .path("!.*.code", () => {
108 | codeStreamingSpinner = ora(codeStreamingMessage).start();
109 | })
110 | .done(() => {
111 | resolve();
112 | })
113 | .fail((error) => {
114 | console.error("JSON parsing error:", error);
115 | resolve();
116 | });
117 | });
118 |
119 | tokenStream.push(jsonStart);
120 |
121 | for await (const chunk of stream) {
122 | if (latestEdits.length > 0) {
123 | for (const edit of latestEdits) {
124 | yield { type: "edit", edit };
125 | }
126 | latestEdits = [];
127 | }
128 |
129 | if (
130 | chunk.type === "content_block_delta" &&
131 | chunk.delta?.type === "text_delta"
132 | ) {
133 | const text = chunk.delta.text;
134 |
135 | if (codeStreamingSpinner) {
136 | codeTokens++;
137 | (
138 | codeStreamingSpinner as Ora
139 | ).text = `${codeStreamingMessage} (${codeTokens} tokens)`;
140 | }
141 |
142 | if (streamStatus === "reasonEntered") {
143 | if (text.includes(`"`)) {
144 | process.stdout.write("\nChange: ");
145 | streamStatus = "reasonStarted";
146 | }
147 | process.stdout.write(
148 | text.split(`"`).length > 1 ? text.split(`"`)[1] : ""
149 | );
150 | } else if (streamStatus === "reasonStarted") {
151 | process.stdout.write(text.split(`"`)[0]);
152 | if (text.includes(`"`)) {
153 | streamStatus = "notStarted";
154 | }
155 | }
156 |
157 | fullJSON += text;
158 | tokenStream.push(text);
159 | }
160 | }
161 |
162 | await parsePromise;
163 |
164 | try {
165 | const allEdits = JSON.parse(fullJSON);
166 | yield { type: "alledits", edits: allEdits };
167 | } catch (error) {
168 | console.error("Failed to parse JSON:", error);
169 | yield { type: "error", error: "Failed to parse JSON." };
170 | }
171 | }
172 |
--------------------------------------------------------------------------------
/src/call-ai-gpt.ts:
--------------------------------------------------------------------------------
1 | import OpenAI from "openai";
2 | import { z } from "zod";
3 | import { Readable, Transform } from "stream";
4 | import oboe from "oboe";
5 | import { taskPrompt } from "./prompt";
6 | import { ChatCompletionMessageParam } from "openai/resources/index.mjs";
7 | import { AIEditGenerator, EditSchema, Edits } from "./types";
8 |
9 | export async function* getAIEditsFromGPT(
10 | fileContent: string,
11 | task: string,
12 | model: string
13 | ): AIEditGenerator {
14 | const openai = new OpenAI();
15 |
16 | let messages: ChatCompletionMessageParam[] = [
17 | {
18 | role: "system",
19 | content: `CODE:\n${fileContent}\n`,
20 | },
21 | {
22 | role: "user",
23 | content:
24 | taskPrompt(task) +
25 | `\nRespond with a valid JSON object with the key 'edits' which contains an array of edit objects following the Edits typespec.`,
26 | },
27 | ];
28 |
29 | const tokens = model.includes("mini") ? 16384 : 4096;
30 |
31 | const stream = await openai.chat.completions.create({
32 | model: model,
33 | messages: messages,
34 | stream: true,
35 | temperature: 0,
36 | response_format: { type: "json_object" },
37 | max_tokens: tokens,
38 | });
39 |
40 | const tokenStream = new Readable({
41 | read() {},
42 | });
43 |
44 | const jsonStream = new Transform({
45 | transform(chunk, encoding, callback) {
46 | this.push(chunk);
47 | callback();
48 | },
49 | });
50 |
51 | tokenStream.pipe(jsonStream);
52 |
53 | let fullJSON = "";
54 | let collectedEdits: Edits = [];
55 | let latestEdits: Edits = [];
56 | let streamStatus: string = "notStarted";
57 |
58 | const parsePromise = new Promise((resolve, reject) => {
59 | oboe(jsonStream)
60 | .node("edits.*", (edit) => {
61 | try {
62 | const validatedEdit = EditSchema.parse(edit);
63 | collectedEdits.push(validatedEdit);
64 | latestEdits.push(validatedEdit);
65 | } catch (error) {
66 | if (error instanceof z.ZodError) {
67 | console.warn("Invalid edit object encountered:", error.issues);
68 | }
69 | }
70 | })
71 | .node("edits.*.filename", (filename: string) => {
72 | console.log("\nTo File: ", filename);
73 | })
74 | .path("edits.*.explain", () => {
75 | streamStatus = "reasonEntered";
76 | })
77 | .done(() => {
78 | resolve();
79 | })
80 | .fail((error) => {
81 | console.error("JSON parsing error:", error);
82 | resolve();
83 | });
84 | });
85 |
86 | for await (const chunk of stream) {
87 | if (latestEdits.length > 0) {
88 | for (const edit of latestEdits) {
89 | yield { type: "edit", edit };
90 | }
91 | latestEdits = [];
92 | }
93 |
94 | const text = chunk.choices[0]?.delta?.content || "";
95 | fullJSON += text;
96 |
97 | if (streamStatus === "reasonEntered") {
98 | if (text.includes('"')) {
99 | process.stdout.write("\nChange: ");
100 | streamStatus = "reasonStarted";
101 | }
102 | process.stdout.write(
103 | text.split('"').length > 1 ? text.split('"')[1] : ""
104 | );
105 | } else if (streamStatus === "reasonStarted") {
106 | process.stdout.write(text.split('"')[0]);
107 | if (text.includes('"')) {
108 | streamStatus = "reasonEnded";
109 | }
110 | }
111 |
112 | tokenStream.push(text);
113 | }
114 |
115 | await parsePromise;
116 |
117 | try {
118 | const parsedJSON = JSON.parse(fullJSON);
119 | const allEdits = parsedJSON.edits || [];
120 | yield { type: "alledits", edits: allEdits };
121 | } catch (error) {
122 | console.error("Failed to parse JSON:", error);
123 | yield { type: "error", error: "Failed to parse JSON." };
124 | }
125 | }
126 |
--------------------------------------------------------------------------------
/src/call-fireworks.ts:
--------------------------------------------------------------------------------
1 | import { z } from "zod";
2 | import { Readable, Transform } from "stream";
3 | import oboe from "oboe";
4 | import { taskPrompt } from "./prompt";
5 | import { AIEditGenerator, EditSchema, Edits } from "./types";
6 |
7 | export async function* getAIEditsFromFireworks(
8 | fileContent: string,
9 | task: string,
10 | model: string
11 | ): AIEditGenerator {
12 | const apiKey = process.env.FIREWORKS_API_KEY;
13 | if (!apiKey) {
14 | throw new Error(
15 | "FIREWORKS_API_KEY is not set in the environment variables"
16 | );
17 | }
18 |
19 | const messages = [
20 | {
21 | role: "user",
22 | content:
23 | taskPrompt(task) +
24 | `\nRespond with a valid JSON object with the key 'edits' which contains an array of edit objects following the Edits typespec.`,
25 | },
26 | ];
27 |
28 | console.log("Sending messages to Fireworks API...");
29 |
30 | const response = await fetch(
31 | "https://api.fireworks.ai/inference/v1/chat/completions",
32 | {
33 | method: "POST",
34 | headers: {
35 | Accept: "application/json",
36 | "Content-Type": "application/json",
37 | Authorization: `Bearer ${apiKey}`,
38 | },
39 | body: JSON.stringify({
40 | model: model,
41 | messages: [
42 | {
43 | role: "system",
44 | content: `You are a helpful code editor that can only respond with a valid JSON object with the key 'edits' which contains an array of edit objects following the Edits typespec.\nCODE:\n${fileContent}\n`,
45 | },
46 | ...messages,
47 | ],
48 | max_tokens: 16384,
49 | temperature: 0,
50 | top_p: 1,
51 | top_k: 40,
52 | presence_penalty: 0,
53 | frequency_penalty: 0,
54 | stream: true,
55 | response_format: { type: "json_object" },
56 | }),
57 | }
58 | );
59 |
60 | if (!response.ok) {
61 | throw new Error(`HTTP error! status: ${response.status}`);
62 | }
63 |
64 | if (!response.body) {
65 | throw new Error("Response body is null");
66 | }
67 |
68 | const reader = response.body.getReader();
69 | const decoder = new TextDecoder();
70 |
71 | const tokenStream = new Readable({
72 | read() {},
73 | });
74 |
75 | const jsonStream = new Transform({
76 | transform(chunk, encoding, callback) {
77 | this.push(chunk);
78 | callback();
79 | },
80 | });
81 |
82 | tokenStream.pipe(jsonStream);
83 |
84 | let fullJSON = "";
85 | let collectedEdits: Edits = [];
86 | let latestEdits: Edits = [];
87 |
88 | const parsePromise = new Promise((resolve, reject) => {
89 | oboe(jsonStream)
90 | .node("edits.*", (edit) => {
91 | try {
92 | const validatedEdit = EditSchema.parse(edit);
93 | collectedEdits.push(validatedEdit);
94 | latestEdits.push(validatedEdit);
95 | } catch (error) {
96 | if (error instanceof z.ZodError) {
97 | console.warn("Invalid edit object encountered:", error.issues);
98 | }
99 | }
100 | })
101 | .done(() => {
102 | resolve();
103 | })
104 | .fail((error) => {
105 | console.error("JSON parsing error:", error);
106 | resolve();
107 | });
108 | });
109 |
110 | while (true) {
111 | const { value, done } = await reader.read();
112 | if (done) break;
113 |
114 | const chunk = decoder.decode(value, { stream: true });
115 | const lines = chunk.split("\n");
116 |
117 | for (const line of lines) {
118 | if (line.startsWith("data: ")) {
119 | const data = line.slice(6);
120 | if (data === "[DONE]") break;
121 |
122 | try {
123 | const parsed = JSON.parse(data);
124 | if (
125 | parsed.choices &&
126 | parsed.choices[0] &&
127 | parsed.choices[0].delta &&
128 | parsed.choices[0].delta.content
129 | ) {
130 | const content = parsed.choices[0].delta.content;
131 | fullJSON += content;
132 | tokenStream.push(content);
133 |
134 | process.stdout.write(content);
135 |
136 | if (latestEdits.length > 0) {
137 | for (const edit of latestEdits) {
138 | yield { type: "edit", edit };
139 | }
140 | latestEdits = [];
141 | }
142 | }
143 | } catch (error) {
144 | console.error("Error parsing JSON:", error);
145 | }
146 | }
147 | }
148 | }
149 |
150 | await parsePromise;
151 |
152 | try {
153 | const parsedJSON = JSON.parse(fullJSON);
154 | const allEdits = parsedJSON.edits || [];
155 | yield { type: "alledits", edits: allEdits };
156 | } catch (error) {
157 | console.error("Failed to parse JSON:", error);
158 | yield { type: "error", error: "Failed to parse JSON." };
159 | }
160 | }
161 |
--------------------------------------------------------------------------------
/src/edit-history.ts:
--------------------------------------------------------------------------------
1 | import fs from "fs";
2 | import path from "path";
3 | import { Edits } from "./types";
4 | import { confirm } from "@inquirer/prompts";
5 |
6 | const HISTORY_FILE = "mandark-history.json";
7 |
8 | interface FileHistory {
9 | filename: string;
10 | originalContent: string;
11 | edits: Edits;
12 | }
13 |
14 | export function saveEdits(edits: Edits): void {
15 | const historyPath = path.join(process.cwd(), HISTORY_FILE);
16 | let history: FileHistory[] = [];
17 |
18 | if (fs.existsSync(historyPath)) {
19 | const historyContent = fs.readFileSync(historyPath, "utf-8");
20 | history = JSON.parse(historyContent);
21 | }
22 |
23 | for (const edit of edits) {
24 | const existingFileHistory = history.find(
25 | (h) => h.filename === edit.filename
26 | );
27 | if (existingFileHistory) {
28 | existingFileHistory.edits.push(edit);
29 | } else {
30 | const originalContent = fs.existsSync(edit.filename)
31 | ? fs.readFileSync(edit.filename, "utf-8")
32 | : "";
33 | history.push({
34 | filename: edit.filename,
35 | originalContent,
36 | edits: [edit],
37 | });
38 | }
39 | }
40 |
41 | fs.writeFileSync(historyPath, JSON.stringify(history, null, 2));
42 | }
43 |
44 | export async function revertLastChanges(): Promise {
45 | const historyPath = path.join(process.cwd(), HISTORY_FILE);
46 | if (!fs.existsSync(historyPath)) {
47 | console.log("No history file found. Nothing to revert.");
48 | return;
49 | }
50 |
51 | const historyContent = fs.readFileSync(historyPath, "utf-8");
52 | const history: FileHistory[] = JSON.parse(historyContent);
53 |
54 | if (history.length === 0) {
55 | console.log("No changes to revert.");
56 | return;
57 | }
58 |
59 | console.log("Recent changes:");
60 | history.forEach((fileHistory, index) => {
61 | console.log(
62 | `${index + 1}. ${fileHistory.filename} (${
63 | fileHistory.edits.length
64 | } edits)`
65 | );
66 | });
67 |
68 | const userResponse = await confirm({
69 | message: "Do you want to revert all changes?",
70 | default: false,
71 | });
72 |
73 | if (userResponse) {
74 | for (const fileHistory of history) {
75 | if (fileHistory.originalContent === "") {
76 | // This was a newly created file, so we should delete it
77 | if (fs.existsSync(fileHistory.filename)) {
78 | fs.unlinkSync(fileHistory.filename);
79 | console.log(`Deleted file: ${fileHistory.filename}`);
80 | }
81 | } else {
82 | // This was an existing file, so we should restore its original content
83 | fs.writeFileSync(fileHistory.filename, fileHistory.originalContent);
84 | console.log(`Reverted changes in: ${fileHistory.filename}`);
85 | }
86 | }
87 |
88 | // Clear the history file
89 | fs.writeFileSync(historyPath, "[]");
90 | console.log("All changes have been reverted and history has been cleared.");
91 | } else {
92 | console.log("Revert operation cancelled.");
93 | }
94 | }
95 |
--------------------------------------------------------------------------------
/src/edit-processor.ts:
--------------------------------------------------------------------------------
1 | import * as path from "path";
2 | import * as fs from "fs";
3 | import { execSync } from "child_process";
4 | import { confirm } from "@inquirer/prompts";
5 | import { EditPackets, Edits } from "./types";
6 | import chalk from "chalk";
7 | import { saveEdits } from "./edit-history";
8 |
9 | function createSeparator(text: string = ""): string {
10 | const separatorLength = 50;
11 | const paddedText = text ? ` ${text} ` : "";
12 | const remainingLength = Math.max(separatorLength - paddedText.length, 0);
13 | const leftPadding = "=".repeat(Math.floor(remainingLength / 2));
14 | const rightPadding = "=".repeat(Math.ceil(remainingLength / 2));
15 | return chalk.yellow(`${leftPadding}${paddedText}${rightPadding}`);
16 | }
17 |
18 | class FileManager {
19 | private files: Map = new Map();
20 |
21 | loadFile(filePath: string): void {
22 | const absolutePath = path.resolve(process.cwd(), filePath);
23 | if (!this.files.has(absolutePath)) {
24 | try {
25 | const content = fs.readFileSync(absolutePath, "utf-8").split("\n");
26 | this.files.set(absolutePath, content);
27 | } catch (error) {
28 | if (!fs.existsSync(absolutePath)) {
29 | this.files.set(absolutePath, []);
30 | } else {
31 | console.error(`Error reading file ${filePath}:`, error);
32 | }
33 | }
34 | }
35 | }
36 |
37 | getFileContent(filePath: string): string[] {
38 | const absolutePath = path.resolve(process.cwd(), filePath);
39 | return this.files.get(absolutePath) || [];
40 | }
41 |
42 | updateFile(filePath: string, newContent: string[]): void {
43 | const absolutePath = path.resolve(process.cwd(), filePath);
44 | this.files.set(absolutePath, newContent);
45 | }
46 |
47 | saveAllFiles(): void {
48 | for (const [filePath, content] of this.files.entries()) {
49 | const dir = path.dirname(filePath);
50 | if (!fs.existsSync(dir)) {
51 | fs.mkdirSync(dir, { recursive: true });
52 | }
53 | fs.writeFileSync(filePath, content.join("\n"));
54 | }
55 | console.log("All changes have been saved.");
56 | }
57 | }
58 |
export class EditProcessor {
  // Shared in-memory cache of the files the edits touch.
  private fileManager: FileManager;
  // Edits the user approved; applied in one batch after the stream drains.
  private confirmedEdits: Edits = [];

  constructor() {
    this.fileManager = new FileManager();
  }
66 |
67 | async processEditStream(
68 | editStream: AsyncGenerator
69 | ): Promise {
70 | for await (const editPacket of editStream) {
71 | if (editPacket.type === "edit") {
72 | const confirmed = await this.confirmEdit(editPacket.edit);
73 | if (confirmed) {
74 | this.confirmedEdits.push(editPacket.edit);
75 | }
76 | } else if (editPacket.type === "alledits") {
77 | console.log("All edits processed.");
78 | break;
79 | } else if (editPacket.type === "error") {
80 | console.error(`Error getting further edits: ${editPacket.error}`);
81 | break;
82 | }
83 | }
84 |
85 | if (this.confirmedEdits.length > 0) {
86 | const userResponse = await confirm({
87 | message: "\nDo you want to apply all confirmed edits?",
88 | default: true,
89 | transformer: (answer) => (answer ? "👍" : "👎"),
90 | });
91 |
92 | if (userResponse) {
93 | await this.applyConfirmedEdits();
94 | } else {
95 | console.log("All changes discarded.");
96 | }
97 | } else {
98 | console.log("No edits were confirmed.");
99 | }
100 | }
101 |
102 | private async confirmEdit(edit: Edits[number]): Promise {
103 | this.fileManager.loadFile(edit.filename);
104 | const fileContent = this.fileManager.getFileContent(edit.filename);
105 |
106 | let startLine: number, endLine: number;
107 | let newContent: string[];
108 |
109 | switch (edit.change.type) {
110 | case "addition":
111 | startLine = edit.change.atLine - 1;
112 | endLine = startLine;
113 | newContent = edit.code.split("\n");
114 | break;
115 | case "replacement":
116 | startLine = edit.change.fromLineNumber - 1;
117 | endLine = edit.change.toLineNumber;
118 | newContent = edit.code.split("\n");
119 | break;
120 | default:
121 | console.error("Unknown edit type");
122 | return false;
123 | }
124 |
125 | console.log(createSeparator("Proposed Change"));
126 | console.log(chalk.cyan(`\n${edit.explain}\n`));
127 | console.log(createSeparator(`Diff for ${edit.filename}`));
128 | this.printColoredDiff(fileContent, newContent, startLine, endLine);
129 | console.log(createSeparator());
130 |
131 | const userResponse = await confirm({
132 | message: "\nDo you want to confirm this change?",
133 | default: true,
134 | transformer: (answer) => (answer ? "👍" : "👎"),
135 | });
136 |
137 | if (userResponse && edit.newPackages && edit.newPackages.length > 0) {
138 | await this.confirmNewPackages(edit.newPackages);
139 | }
140 |
141 | return userResponse;
142 | }
143 |
144 | private async applyConfirmedEdits(): Promise {
145 | const sortedEdits = this.sortEdits(this.confirmedEdits); // Sort edits from top to bottom
146 | const totalLinesChanged = this.calculateTotalLinesChanged(sortedEdits);
147 |
148 | console.log(createSeparator("Applying Confirmed Edits"));
149 |
150 | for (const edit of sortedEdits) {
151 | this.fileManager.loadFile(edit.filename);
152 | const fileContent = this.fileManager.getFileContent(edit.filename);
153 |
154 | let startLine: number, endLine: number;
155 | let newContent: string[];
156 |
157 | switch (edit.change.type) {
158 | case "addition":
159 | startLine = edit.change.atLine - 1;
160 | endLine = startLine;
161 | newContent = edit.code.split("\n");
162 | break;
163 | case "replacement":
164 | startLine = edit.change.fromLineNumber - 1;
165 | endLine = edit.change.toLineNumber;
166 | newContent = edit.code.split("\n");
167 | break;
168 | }
169 |
170 | const updatedContent = [
171 | ...fileContent.slice(0, startLine),
172 | ...newContent,
173 | ...fileContent.slice(endLine),
174 | ];
175 | this.fileManager.updateFile(edit.filename, updatedContent);
176 | console.log(chalk.green(`✓ Applied change to ${edit.filename}`));
177 | if (edit.newPackages && edit.newPackages.length > 0) {
178 | const shouldInstall = await this.confirmNewPackages(edit.newPackages);
179 | if (shouldInstall) {
180 | await this.installNewPackages(edit.newPackages);
181 | }
182 | }
183 | }
184 |
185 | console.log(createSeparator("Summary"));
186 | console.log(chalk.cyan(`Total lines changed: ${totalLinesChanged}`));
187 | saveEdits(this.confirmedEdits);
188 |
189 | this.fileManager.saveAllFiles();
190 | console.log(chalk.green("All changes have been saved."));
191 | console.log(chalk.green("Edit history has been updated."));
192 | console.log(createSeparator());
193 | }
194 | private calculateTotalLinesChanged(edits: Edits): number {
195 | return edits.reduce((total, edit) => {
196 | if (edit.change.type === "addition") {
197 | return total + edit.code.split("\n").length;
198 | } else if (edit.change.type === "replacement") {
199 | const oldLines =
200 | edit.change.toLineNumber - edit.change.fromLineNumber + 1;
201 | const newLines = edit.code.split("\n").length;
202 | return total + Math.abs(newLines - oldLines);
203 | }
204 | return total;
205 | }, 0);
206 | }
207 |
208 | private sortEdits(edits: Edits): Edits {
209 | return edits.sort((a, b) => {
210 | const aLine =
211 | a.change.type === "addition"
212 | ? a.change.atLine
213 | : a.change.fromLineNumber;
214 | const bLine =
215 | b.change.type === "addition"
216 | ? b.change.atLine
217 | : b.change.fromLineNumber;
218 | return aLine - bLine; // Sort in ascending order (top to bottom)
219 | });
220 | }
221 |
222 | private printColoredDiff(
223 | oldLines: string[],
224 | newLines: string[],
225 | startLine: number,
226 | endLine: number
227 | ): void {
228 | const padding = 3;
229 |
230 | for (
231 | let i = Math.max(0, startLine - padding);
232 | i < Math.min(oldLines.length, endLine + padding);
233 | i++
234 | ) {
235 | if (i >= startLine && i < endLine) {
236 | console.log(chalk.red(`- ${oldLines[i]}`));
237 | } else if (i >= startLine - padding && i < startLine) {
238 | console.log(chalk.dim(` ${oldLines[i]}`));
239 | }
240 | }
241 |
242 | for (const newLine of newLines) {
243 | console.log(chalk.green(`+ ${newLine}`));
244 | }
245 |
246 | for (
247 | let i = endLine;
248 | i < Math.min(oldLines.length, endLine + padding);
249 | i++
250 | ) {
251 | console.log(chalk.dim(` ${oldLines[i]}`));
252 | }
253 | }
254 |
255 | private async confirmNewPackages(packages: string[]): Promise {
256 | const userResponse = await confirm({
257 | message: `This change requires the following new packages: ${packages.join(
258 | ", "
259 | )}. Do you want to install them? (Needs bun)`,
260 | default: true,
261 | transformer: (answer) => (answer ? "👍" : "👎"),
262 | });
263 | if (userResponse) {
264 | console.log("Packages will be installed when changes are applied.");
265 | } else {
266 | console.log("Package installation will be skipped.");
267 | }
268 | return userResponse;
269 | }
270 |
271 | private async installNewPackages(packages: string[]): Promise {
272 | if (packages.length === 0) return;
273 |
274 | console.log("Installing new packages...");
275 | try {
276 | execSync(`bun install ${packages.join(" ")}`, { stdio: "inherit" });
277 | console.log("Packages installed successfully.");
278 | } catch (error) {
279 | console.error("Failed to install packages:", error);
280 | }
281 | }
282 | }
283 |
--------------------------------------------------------------------------------
/src/github-utils.ts:
--------------------------------------------------------------------------------
import fs from "node:fs";
import path from "node:path";
import { execFileSync, execSync } from "child_process";
import os from "os";
import { analyzeRepoStructure } from "./repo-analyzer";
6 |
7 | const REPO_CACHE_DIR = path.join(os.tmpdir(), "mandark-repos");
8 |
9 | export function extractGitHubUrl(input: string): string | null {
10 | const githubUrlRegex =
11 | /(?:https?:\/\/)?(?:www\.)?github\.com\/([\w-]+)\/([\w.-]+)(?:\.git)?/;
12 | const match = input.match(githubUrlRegex);
13 | return match ? match[0] : null;
14 | }
15 |
16 | export function getRepoCacheDir(repoUrl: string): string {
17 | const repoName = repoUrl.split("/").pop()?.replace(".git", "") || "";
18 | return path.join(REPO_CACHE_DIR, repoName);
19 | }
20 |
/** A locally-cached repository clone and its likely source directories. */
export interface RepoInfo {
  /** Path of the local clone inside the cache directory. */
  path: string;
  /** Candidate source directories, highest-scoring first (see repo-analyzer). */
  sourceDirs: string[];
}
25 |
26 | export function cloneOrUpdateRepo(repoUrl: string): RepoInfo {
27 | if (!fs.existsSync(REPO_CACHE_DIR)) {
28 | fs.mkdirSync(REPO_CACHE_DIR, { recursive: true });
29 | }
30 |
31 | const repoCacheDir = getRepoCacheDir(repoUrl);
32 |
33 | if (fs.existsSync(repoCacheDir)) {
34 | console.log(
35 | `Repository cache exists at ${repoCacheDir}, pulling latest changes...`
36 | );
37 | try {
38 | execSync("git pull", { cwd: repoCacheDir, stdio: "inherit" });
39 | } catch (error) {
40 | console.warn("Failed to pull latest changes, using cached version");
41 | }
42 | } else {
43 | console.log(`Cloning repository to ${repoCacheDir}...`);
44 | execSync(`git clone ${repoUrl} ${repoCacheDir}`, { stdio: "inherit" });
45 | }
46 |
47 | const sourceDirs = analyzeRepoStructure(repoCacheDir);
48 | return { path: repoCacheDir, sourceDirs };
49 | }
50 |
--------------------------------------------------------------------------------
/src/models.ts:
--------------------------------------------------------------------------------
// Catalogue of the models mandark can drive, with per-model pricing and
// size limits.
// NOTE(review): CPM appears to mean cost per million tokens (USD) — confirm
// against each provider's current price sheet before relying on it.
export const models: {
  name: string;
  provider: "anthropic" | "openai" | "fireworks";
  nickName: string; // short alias used to pick the model
  outputCPM: number; // output token cost (per million — assumed; verify)
  inputCPM: number; // input token cost (per million — assumed; verify)
  outputLength: number; // maximum output tokens per response
  contextWindow: number; // maximum context size in tokens
  verifyModel?: boolean; // eligible as a cheap edit-verification model
}[] = [
  {
    name: "claude-3-5-sonnet-latest",
    provider: "anthropic",
    nickName: "sonnet35",
    outputCPM: 15,
    inputCPM: 3,
    outputLength: 8092,
    contextWindow: 200000,
    // verifyModel: true,
  },
  {
    name: "claude-3-haiku-20240307",
    provider: "anthropic",
    nickName: "haiku",
    outputCPM: 1.25,
    inputCPM: 0.25,
    outputLength: 4096,
    contextWindow: 200000,
    verifyModel: true,
  },
  {
    name: "gpt-4o-mini",
    provider: "openai",
    nickName: "4omini",
    outputCPM: 0.6,
    inputCPM: 0.15,
    outputLength: 16384,
    contextWindow: 128000,
    verifyModel: true,
  },
  {
    name: "gpt-4-turbo",
    provider: "openai",
    nickName: "4turbo",
    outputCPM: 30,
    inputCPM: 10,
    outputLength: 4096,
    contextWindow: 128000,
  },
  {
    name: "accounts/fireworks/models/llama-v3p1-405b-instruct",
    provider: "fireworks",
    nickName: "llama405b",
    outputCPM: 3,
    inputCPM: 3,
    outputLength: 16384,
    contextWindow: 262144,
  },
];

// Default model used to double-check generated edits. The non-null assertion
// relies on "4omini" staying present in the list above.
export const preferredVerifierModel = models.find(
  (model) => model.nickName === "4omini"
)!;
64 |
--------------------------------------------------------------------------------
/src/process-files.ts:
--------------------------------------------------------------------------------
1 | import fs from "node:fs";
2 | import path from "node:path";
3 | import fastGlob from "fast-glob";
4 | import { extractGitHubUrl, cloneOrUpdateRepo } from "./github-utils";
5 |
// Default glob pattern for code files: common source, config, markup, and
// documentation extensions anywhere under a directory.
const DEFAULT_FILE_PATTERN =
  "**/*.{ts,tsx,js,jsx,mjs,cjs,py,rs,go,c,cpp,h,hpp,java,rb,php,cs,swift,kt,scala,sh,md,json,yaml,yml,html,css,scss,less,txt}";
9 |
10 | export async function processFiles(
11 | inputs: string[],
12 | includeImports: boolean,
13 | noLineNumbers: boolean = false,
14 | filePattern: string = DEFAULT_FILE_PATTERN
15 | ): Promise<{
16 | code: string;
17 | count: number;
18 | }> {
19 | const allFiles: string[] = [];
20 |
21 | for (const input of inputs) {
22 | const githubUrl = extractGitHubUrl(input);
23 |
24 | if (githubUrl) {
25 | const repoInfo = cloneOrUpdateRepo(githubUrl);
26 |
27 | // First, check for README
28 | const readmePath = path.join(repoInfo.path, "README.md");
29 | if (fs.existsSync(readmePath)) {
30 | allFiles.push(readmePath);
31 | }
32 |
33 | if (repoInfo.sourceDirs.length > 0) {
34 | // If we found source directories, only search in those
35 | for (const sourceDir of repoInfo.sourceDirs) {
36 | const files = await fastGlob(`${sourceDir}/${filePattern}`, {
37 | absolute: true,
38 | ignore: ["**/node_modules/**", "**/.git/**"],
39 | });
40 | allFiles.push(...files);
41 | }
42 | } else {
43 | // Fallback to searching the entire repo if no source dirs found
44 | const files = await fastGlob(`${repoInfo.path}/${filePattern}`, {
45 | absolute: true,
46 | ignore: [
47 | "**/node_modules/**",
48 | "**/.git/**",
49 | "**/test/**",
50 | "**/tests/**",
51 | "**/dist/**",
52 | "**/build/**",
53 | ],
54 | });
55 | allFiles.push(...files);
56 | }
57 | continue;
58 | }
59 | const stat = fs.statSync(input);
60 | if (stat.isDirectory()) {
61 | const files = await fastGlob(`${input}/${filePattern}`, {
62 | absolute: true,
63 | ignore: ["**/node_modules/**"],
64 | });
65 | allFiles.push(...files);
66 | } else if (stat.isFile()) {
67 | allFiles.push(path.resolve(input));
68 | } else {
69 | console.warn(`Skipping invalid input: ${input}`);
70 | }
71 | }
72 |
73 | let combinedCode = "";
74 | for (const file of allFiles) {
75 | combinedCode += await loadNumberedFile(file, includeImports, noLineNumbers);
76 | }
77 |
78 | return {
79 | code: combinedCode,
80 | count: allFiles.length,
81 | };
82 | }
83 |
84 | export async function loadNumberedFile(
85 | filePath: string,
86 | includeImports: boolean,
87 | noLineNumbers: boolean = false
88 | ): Promise {
89 | const content = fs.readFileSync(filePath, "utf-8");
90 | let processedContent = noLineNumbers
91 | ? content
92 | : content
93 | .split("\n")
94 | .map((line, index) => `L${index + 1}: ${line}`)
95 | .join("\n");
96 |
97 | if (!includeImports) {
98 | if (noLineNumbers) {
99 | processedContent = processedContent
100 | .replace(/import.*?;?\n/g, "")
101 | .replace(/import.*?{[\s\S]*?}\n/g, "");
102 | } else {
103 | processedContent = processedContent
104 | .replace(/L\d+:\s*import.*?;?\n/g, "")
105 | .replace(/L\d+:\s*import.*?{[\s\S]*?}\n/g, "");
106 | }
107 | }
108 |
109 | // Get relative path to file
110 | const relativePath = path.relative(process.cwd(), filePath);
111 |
112 | return `<${relativePath}>\n${processedContent}\n${relativePath}>\n\n`;
113 | }
114 |
--------------------------------------------------------------------------------
/src/prompt.ts:
--------------------------------------------------------------------------------
1 | import { EditTypeStr } from "./types";
2 |
// Builds the user prompt asking the model to emit edits as JSON matching the
// Edits typespec (EditTypeStr).
// NOTE(review): fact 2 says "Leave toLine empty for additions", but the
// typespec has no `toLine` field — additions carry only `atLine`, and
// replacements use `toLineNumber`. Confirm the wording still steers models
// correctly.
export const taskPrompt = (task: string) => `Task: ${task}

Follow this typespec and return ONLY VALID JSON to suggest additions or replacements in files to make this change in this codebase.

Facts:
1. You can provide a new filename to create a file.
2. Leave toLine empty for additions.
3. Make sure the code snippet in the edit is complete. Feel free to make multiple edits, avoid repeating existing code if you can.
4. Ensure the line numbers are accurate. Feel free to repeat existing code from previous or after lines to be sure.

\`\`\`typescript
${EditTypeStr}
\`\`\``;
16 |
--------------------------------------------------------------------------------
/src/repo-analyzer.ts:
--------------------------------------------------------------------------------
1 | import fs from "node:fs";
2 | import path from "node:path";
3 |
4 | interface DirAnalysis {
5 | path: string;
6 | score: number;
7 | }
8 |
9 | const COMMON_SOURCE_DIRS = ["src", "lib", "source", "app"];
10 | const IGNORE_DIRS = [
11 | "test",
12 | "tests",
13 | "node_modules",
14 | ".git",
15 | "build",
16 | "dist",
17 | "coverage",
18 | ];
19 |
20 | export function analyzeRepoStructure(repoPath: string): string[] {
21 | const sourceDirs: DirAnalysis[] = [];
22 |
23 | function exploreDirectory(dirPath: string, depth = 0): void {
24 | if (depth > 3) return; // Limit depth to avoid excessive recursion
25 |
26 | const items = fs.readdirSync(dirPath);
27 |
28 | for (const item of items) {
29 | const fullPath = path.join(dirPath, item);
30 | if (!fs.statSync(fullPath).isDirectory()) continue;
31 |
32 | // Skip ignored directories
33 | if (IGNORE_DIRS.includes(item)) continue;
34 |
35 | let score = 0;
36 |
37 | // Boost score for common source directory names
38 | if (COMMON_SOURCE_DIRS.includes(item)) {
39 | score += 10;
40 | }
41 |
42 | // Check for presence of source files
43 | const hasSourceFiles = fs
44 | .readdirSync(fullPath)
45 | .some(
46 | (file) =>
47 | file.endsWith(".ts") ||
48 | file.endsWith(".js") ||
49 | file.endsWith(".tsx") ||
50 | file.endsWith(".jsx") ||
51 | file.endsWith(".mjs") ||
52 | file.endsWith(".cjs") ||
53 | file.endsWith(".rs")
54 | );
55 |
56 | if (hasSourceFiles) {
57 | score += 5;
58 | }
59 |
60 | if (score > 0) {
61 | sourceDirs.push({
62 | path: fullPath,
63 | score,
64 | });
65 | }
66 |
67 | exploreDirectory(fullPath, depth + 1);
68 | }
69 | }
70 |
71 | exploreDirectory(repoPath);
72 |
73 | // Sort by score and return paths
74 | return sourceDirs.sort((a, b) => b.score - a.score).map((dir) => dir.path);
75 | }
76 |
--------------------------------------------------------------------------------
/src/types.ts:
--------------------------------------------------------------------------------
1 | import { z } from "zod";
2 |
// Runtime validator for the `change` variants: an addition pins a single
// 1-based insertion line; a replacement spans an inclusive 1-based range.
const EditTypeSchema = z.discriminatedUnion("type", [
  z.object({
    type: z.literal("addition"),
    atLine: z.number(),
  }),
  z.object({
    type: z.literal("replacement"),
    fromLineNumber: z.number(),
    toLineNumber: z.number(),
  }),
]);

// Runtime validator for a single model-proposed edit (mirrors Edits[number]).
export const EditSchema = z.object({
  explain: z.string(),
  filename: z.string(),
  change: EditTypeSchema,
  code: z.string(),
  newPackages: z.array(z.string()).optional(),
});
22 |
23 | export type EditPackets =
24 | | { type: "edit"; edit: Edits[number] }
25 | | { type: "error"; error: string }
26 | | {
27 | type: "alledits";
28 | edits: Edits;
29 | };
30 |
31 | export type AIEditGenerator = AsyncGenerator;
32 |
33 | export type Edits = {
34 | explain: string; // explain what you want to do and why you're making this change.
35 | filename: string;
36 | change:
37 | | {
38 | type: "addition";
39 | atLine: number;
40 | }
41 | | {
42 | type: "replacement";
43 | fromLineNumber: number;
44 | toLineNumber: number;
45 | };
46 | code: string; // Code to insert or replace
47 | newPackages?: string[]; // Does this code need new packages to be installed?
48 | }[];
49 |
// Prompt-embedded copy of the Edits type above — sent verbatim to the model
// as the required output typespec. Must stay in sync with `Edits`.
export const EditTypeStr = `export type Edits = {
  explain: string; // explain what you want to do and why you're making this change.
  filename: string;
  change:
    | {
        type: "addition";
        atLine: number;
      }
    | {
        type: "replacement";
        fromLineNumber: number;
        toLineNumber: number;
      };
  code: string; // Code to insert or replace
  newPackages?: string[]; // Does this code need new packages to be installed?
}[];
`;

// Edit correction

// The verifier model's corrected version of a change: a (possibly adjusted)
// addition or replacement, or "skip" to drop the edit entirely.
export type CorrectedEditChange =
  | {
      type: "addition";
      atLine: number;
    }
  | {
      type: "replacement";
      fromLineNumber: number;
      toLineNumber: number;
    }
  | {
      type: "skip";
    };

// Prompt-embedded typespec for the correction step.
// NOTE(review): this asks for a `reason` field that
// CorrectedEditChangeSchema does not declare — zod objects strip unknown
// keys on parse, so the reason is discarded; confirm that is intended.
export const CorrectedEditChangeTypeStr = `type CorrectedEditChange =
  {
    reason: string; // Explain why this change was made, write 'no change' if there's no need for a change
    type: "addition";
    atLine: number;
  } | {
    reason: string;
    type: "replacement";
    fromLineNumber: number;
    toLineNumber: number;
  } | {
    reason: string;
    type: 'skip' // Means to skip this edit, no need to apply
  };`;

// Runtime validator for CorrectedEditChange.
export const CorrectedEditChangeSchema = z.discriminatedUnion("type", [
  z.object({
    type: z.literal("addition"),
    atLine: z.number(),
  }),
  z.object({
    type: z.literal("replacement"),
    fromLineNumber: z.number(),
    toLineNumber: z.number(),
  }),
  z.object({
    type: z.literal("skip"),
  }),
]);
113 |
--------------------------------------------------------------------------------
/src/verify-edits.ts:
--------------------------------------------------------------------------------
1 | import Anthropic from "@anthropic-ai/sdk";
2 | import { models } from "./models";
3 | import {
4 | CorrectedEditChange,
5 | CorrectedEditChangeSchema,
6 | CorrectedEditChangeTypeStr,
7 | EditPackets,
8 | Edits,
9 | } from "./types";
10 | import OpenAI from "openai";
11 | import fs from "node:fs";
12 | import { loadNumberedFile } from "./process-files";
13 | import ora from "ora";
14 |
// Prompt asking the verifier model to double-check a single edit (the full
// edit JSON plus its `change`) and return a corrected change object
// following CorrectedEditChangeTypeStr.
// prettier-ignore
const verifyPrompt = (edit: Edits[number]) =>
`Edits:
\`\`\`json
${JSON.stringify(edit, null, 2)}
\`\`\`

Above is an edit to the provided code.
Verify this change:

\`\`\`json
${JSON.stringify(edit.change, null, 2)}
\`\`\`

and make sure it's needed and it's only replacing the correct lines, or adding the code to the correct place. Feel free to change additions to replacements or vice versa, or skip edits if they're not needed.
Return only the fixed change object following this typespec:
\`\`\`typescript
${CorrectedEditChangeTypeStr}
\`\`\`
`
35 |
36 | export async function verifyEdit(
37 | edit: Edits[number],
38 | fullCode: string,
39 | preferredProvider: (typeof models)[number]["provider"]
40 | ): Promise {
41 | let fixedEditJSON: string = "";
42 |
43 | const selectedModel: (typeof models)[number] =
44 | models.find(
45 | (model) => model.provider === preferredProvider && model.verifyModel
46 | ) || models.find((model) => model.verifyModel)!;
47 |
48 | // console.log("Full code: \n", fullCode);
49 |
50 | const verifySpinner = ora(
51 | "Verifying edit with " + selectedModel.name
52 | ).start();
53 |
54 | if (selectedModel.provider === "anthropic") {
55 | // prettier-ignore
56 | const jsonStart = `{`;
57 | // const jsonStart = `{
58 | // "type": "${edit.change.type}",`;
59 |
60 | const anthropic = new Anthropic();
61 | const params: any = {
62 | messages: [
63 | { role: "user", content: verifyPrompt(edit) },
64 | {
65 | role: "assistant",
66 | content: `\`\`\`\n` + jsonStart,
67 | },
68 | ],
69 | model: selectedModel.name,
70 | max_tokens: 4096,
71 | system: `CODE:\n${fullCode}\n`,
72 | };
73 |
74 | // console.log("params: ", params);
75 |
76 | const response = await anthropic.messages.create(params);
77 |
78 | fixedEditJSON =
79 | response.content[0].type === "text"
80 | ? jsonStart + response.content[0].text.split(`\`\`\``)[0]
81 | : "";
82 | } else if (selectedModel.provider === "openai") {
83 | const openai = new OpenAI();
84 |
85 | const response = await openai.chat.completions.create({
86 | model: selectedModel.name,
87 | messages: [
88 | { role: "system", content: `CODE:\n${fullCode}\n` },
89 | { role: "user", content: verifyPrompt(edit) },
90 | ],
91 | response_format: { type: "json_object" },
92 | });
93 | if (response.choices[0].message.content)
94 | fixedEditJSON = response.choices[0].message.content;
95 | }
96 |
97 | if (!fixedEditJSON) {
98 | verifySpinner.stop();
99 | return edit;
100 | }
101 |
102 | try {
103 | const fixedEdit: CorrectedEditChange = JSON.parse(fixedEditJSON);
104 | const verifiedFix: CorrectedEditChange =
105 | CorrectedEditChangeSchema.parse(fixedEdit);
106 |
107 | if (verifiedFix.type === "skip") {
108 | // console.log("Skipping edit");
109 | verifySpinner.stop();
110 | return null;
111 | }
112 | edit.change = verifiedFix;
113 |
114 | // console.log("Verified edit: ", JSON.stringify(verifiedFix, null, 2));
115 | verifySpinner.stop();
116 | return edit;
117 | } catch (error) {
118 | console.error(
119 | "Failed to parse Corrected edit:",
120 | error,
121 | " in received JSON: ",
122 | fixedEditJSON
123 | );
124 | verifySpinner.stop();
125 | return edit;
126 | }
127 | }
128 |
129 | export async function* verifyEditStream(
130 | // TODO: Type this better when we get a chance
131 | editStream: AsyncGenerator,
132 | provider: (typeof models)[number]["provider"]
133 | ): AsyncGenerator {
134 | for await (const editPacket of editStream) {
135 | if (editPacket.type === "edit") {
136 | if (!fs.existsSync(editPacket.edit.filename)) {
137 | yield { type: "edit", edit: editPacket.edit };
138 | continue;
139 | }
140 |
141 | const loadedCode = await loadNumberedFile(editPacket.edit.filename, true);
142 | const verifiedEdit = await verifyEdit(
143 | editPacket.edit,
144 | loadedCode,
145 | provider
146 | );
147 | if (verifiedEdit) yield { type: "edit", edit: verifiedEdit };
148 | } else {
149 | yield editPacket;
150 | }
151 | }
152 | }
153 |
--------------------------------------------------------------------------------
/tests/call-ai.test.ts:
--------------------------------------------------------------------------------
import fs from "node:fs";
import path from "node:path";
import { getAIEdits } from "../src/call-ai";

// Fixture: a pre-built code dump for the model to edit. Requires
// tests/test-code.txt to exist next to this file.
const testCode = fs.readFileSync(path.join(__dirname, "test-code.txt"), "utf8");

// Live smoke test: streams edits from the model and prints every packet.
// NOTE(review): presumably needs provider API credentials in the environment
// (a Claude model name is passed) — this is not an isolated unit test.
(async () => {
  console.log("Running AI tests...");

  const res = await getAIEdits(
    testCode,
    "Add support for openai and progress bars for everything",
    "claude-3-haiku-20240307",
    2
  );

  for await (const edit of res) {
    console.log("Packet - ", edit);
  }
})();
21 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "ts-node": {
3 | "files": true
4 | },
5 | "compilerOptions": {
6 | "baseUrl": ".",
7 | "target": "es2020",
8 | "module": "commonjs",
9 | "esModuleInterop": true,
10 | "forceConsistentCasingInFileNames": true,
11 | "strict": true,
12 | "strictPropertyInitialization": false,
13 | "skipLibCheck": true,
14 | // "strictNullChecks": true,
15 | "strictBindCallApply": false,
16 | "declaration": true,
17 | "emitDecoratorMetadata": true,
18 | "experimentalDecorators": true,
19 | "allowSyntheticDefaultImports": true,
20 | "moduleResolution": "node",
21 | "sourceMap": true,
22 | "outDir": "./dist",
23 | "incremental": true,
24 | "noImplicitAny": false,
25 | "noFallthroughCasesInSwitch": false,
26 | "resolveJsonModule": true
27 | },
28 | "include": ["src/**/*"]
29 | }
30 |
--------------------------------------------------------------------------------
/tsup.config.js:
--------------------------------------------------------------------------------
import { defineConfig } from "tsup";

// Bundle the CLI entry point as minified CommonJS for Node.
export default defineConfig({
  entry: ["src/app.ts"],
  publicDir: false,
  clean: true, // wipe the output directory before each build
  minify: true,
  format: ["cjs"], // 👈 Node
});
10 |
--------------------------------------------------------------------------------