├── .cursor
│   └── mcp.json
├── .github
│   └── workflows
│       └── rust.yml
├── .gitignore
├── Cargo.lock
├── Cargo.toml
├── README.md
├── assets
│   ├── OpenSans-Regular.ttf
│   └── dock_icon.png
├── examples
│   └── dev-client.rs
├── media
│   ├── cursor.png
│   ├── example.png
│   ├── icon.png
│   └── screenshot.png
└── src
    ├── .DS_Store
    ├── cargo_remote.rs
    ├── context.rs
    ├── docs
    │   ├── extract_md.rs
    │   ├── generate.rs
    │   ├── index.rs
    │   ├── mod.rs
    │   ├── utils.rs
    │   └── walk.rs
    ├── lsp
    │   ├── change_notifier.rs
    │   ├── client_state.rs
    │   ├── mod.rs
    │   ├── rust_analyzer_lsp.rs
    │   └── utils.rs
    ├── main.rs
    ├── mcp
    │   ├── cargo_check.rs
    │   ├── cargo_test.rs
    │   ├── crate_docs.rs
    │   ├── mod.rs
    │   ├── symbol_docs.rs
    │   ├── symbol_impl.rs
    │   ├── symbol_references.rs
    │   ├── symbol_resolve.rs
    │   └── utils.rs
    ├── project.rs
    └── ui
        ├── app.rs
        ├── log.rs
        ├── mod.rs
        └── theme.rs
/.cursor/mcp.json:
--------------------------------------------------------------------------------
1 | {
2 | "mcpServers": {
3 | "cursor_rust_tools": {
4 | "url": "http://localhost:4000/sse",
5 | "env": {
6 | "API_KEY": "value"
7 | }
8 | }
9 | }
10 | }
--------------------------------------------------------------------------------
/.github/workflows/rust.yml:
--------------------------------------------------------------------------------
1 | name: Rust
2 |
3 | on:
4 | push:
5 | branches: ["main"]
6 | pull_request:
7 | branches: ["main"]
8 |
9 | env:
10 | CARGO_TERM_COLOR: always
11 |
12 | jobs:
13 | build:
14 | runs-on: macos-latest
15 |
16 | steps:
17 | - uses: actions/checkout@v4
18 | - name: Build
19 | run: cargo build --verbose
20 | - name: Run tests
21 | run: cargo test --verbose
22 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /target
2 | .DS_Store
3 | .docs-cache
4 |
--------------------------------------------------------------------------------
/Cargo.toml:
--------------------------------------------------------------------------------
1 | [package]
2 | name = "cursor-rust-tools"
3 | version = "0.1.0"
4 | edition = "2024"
5 |
6 | [[bin]]
7 | name = "cursor-rust-tools"
8 | path = "src/main.rs"
9 |
10 | [dependencies]
11 | anyhow = "1.0.97"
12 | async-lsp = "0.2.2"
13 | async-process = "2.3.0"
14 | flume = "0.11.1"
15 | futures = "0.3.31"
16 | lsp-types = "0.95.1"
17 | mcp-core = { git = "https://github.com/terhechte/mcp-core", branch = "allow-tool-context", features = ["sse"]}
18 | serde = { version = "1.0.219", features = ["derive"] }
19 | serde_json = "1.0.140"
20 | tokio = { version = "1.44", features = ["full"] }
21 | tower = "0.5.2"
22 | tracing = "0.1.41"
23 | tracing-subscriber = { version = "0.3.19", features = ["env-filter"] }
24 | url = "2.5.4"
25 | schemars = "0.8.22"
26 | fuzzt = "0.3.1"
27 | regex = "1.11.1"
28 | glob = "0.3.2"
29 | toml = "0.8.20"
30 | sha2 = "0.10.8"
31 | ignore = "0.4.23"
32 | html2md = "0.2.15"
33 | egui = "0.31"
34 | eframe = "0.31"
35 | lazy_static = "1.5.0"
36 | egui-aesthetix = { git = "https://github.com/thebashpotato/egui-aesthetix", features = ["all_themes"]}
37 | rfd = "0.15.3"
38 | open = "5"
39 | shellexpand = "3.1.0"
40 | chrono = "0.4.40"
41 | notify = "8.0.0"
42 | notify-debouncer-mini = "0.6.0"
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | # Cursor Rust Tools
5 |
6 | An [MCP](https://www.anthropic.com/news/model-context-protocol) server to allow the LLM in Cursor to
7 | access Rust Analyzer, Crate Docs and Cargo Commands.
8 |
9 | Includes a UI for configuration.
10 |
11 | 
12 |
13 | ## What it does
14 |
15 | Currently, various AI agents don't offer the AI the ability to access Rust type information from the LSP.
16 | This is a hurdle because instead of seeing the type, the LLM has to reason about the potential type.
17 |
18 | In addition, the only information the LLM has about dependencies (say `tokio`) is what it was trained on, which is
19 | out of date and potentially for a different version. This can lead to all kinds of issues.
20 |
21 | `Cursor Rust Tools` makes these available over the Model Context Protocol (`MCP`).
22 |
23 | - Get the documentation for a `crate` or for a specific symbol in the `crate` (e.g. `tokio` or `tokio::spawn`)
24 | - Get the hover information (type, description) for a specific symbol in a file
25 | - Get a list of all the references for a specific symbol in a file
26 | - Get the implementation of a symbol in a file (retrieves the whole file that contains the implementation)
27 | - Find a type in the project just by its name and return the hover information
28 | - Get the output of `cargo test`
29 | - Get the output of `cargo check`
30 |
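For illustration, here is a minimal sketch of calling one of these tools over the SSE transport from Rust, adapted from `examples/dev-client.rs`; the file path, line and symbol are placeholders:

```rust
use anyhow::Result;
use mcp_core::{
    client::ClientBuilder,
    transport::ClientSseTransportBuilder,
    types::{ClientCapabilities, Implementation},
};
use serde_json::json;

#[tokio::main]
async fn main() -> Result<()> {
    // Connect to the running cursor-rust-tools server.
    let client = ClientBuilder::new(
        ClientSseTransportBuilder::new("http://localhost:4000/sse".to_string()).build(),
    )
    .build();
    client.open().await?;
    client
        .initialize(
            Implementation {
                name: "demo".to_string(),
                version: "1.0".to_string(),
            },
            ClientCapabilities::default(),
        )
        .await?;

    // Ask for all references of a symbol (placeholder file, line and symbol).
    let response = client
        .call_tool(
            "symbol_references",
            Some(json!({
                "file": "/path/to/project/src/main.rs",
                "line": 10,
                "symbol": "MyType"
            })),
        )
        .await?;
    println!("{:?}", response);
    Ok(())
}
```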
31 | 
32 |
33 | ## How it works
34 |
35 | For the LSP functionality (`src/lsp`) it spins up a new Rust Analyzer instance that indexes your codebase just like the one running in your editor. We can't query the editor's instance because Rust Analyzer is meant to be used by a single consumer (e.g. an `open document` action has to be followed by a matching `close document`, etc.).
36 |
37 | For documentation, it runs `cargo doc` and then parses the generated HTML documentation into Markdown locally.
38 | This information is stored in the project root in the `.docs-cache` folder.
39 |
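As a rough, hypothetical sketch of that pipeline (the real implementation lives in `src/docs` and its file layout and helpers differ), converting a single generated HTML page and caching it looks roughly like this:

```rust
use std::{fs, path::Path, process::Command};

// Hypothetical sketch: build the HTML docs, convert one page to Markdown with
// html2md, and store it under `.docs-cache`. Paths and file names are made up.
fn cache_docs_page(project_root: &Path, html_page: &Path) -> anyhow::Result<()> {
    // 1. Generate the HTML documentation (this also documents the dependencies).
    Command::new("cargo")
        .current_dir(project_root)
        .arg("doc")
        .status()?;

    // 2. Convert the generated HTML page into Markdown.
    let html = fs::read_to_string(html_page)?;
    let markdown = html2md::parse_html(&html);

    // 3. Cache the result in the project's `.docs-cache` folder.
    let cache_dir = project_root.join(".docs-cache");
    fs::create_dir_all(&cache_dir)?;
    fs::write(cache_dir.join("page.md"), markdown)?;
    Ok(())
}
```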
40 | ## Installation
41 |
42 | ```sh
43 | cargo install --git https://github.com/terhechte/cursor-rust-tools
44 | ```
45 |
46 | ### Run With UI
47 |
48 | ``` sh
49 | cursor-rust-tools
50 | ```
51 |
52 | This will bring up a UI in which you can add projects, install the `mcp.json` and see the activity.
53 |
54 | ### Run Without UI
55 |
56 | Alternatively, once you have a `~/.cursor-rust-tools` set up with projects, you can also just run it via
57 |
58 | ``` sh
59 | cursor-rust-tools --no-ui
60 | ```
61 |
62 | ## Configuration
63 |
64 | Instead of using the UI to create a configuration, you can also set up `~/.cursor-rust-tools` yourself:
65 |
66 | ``` toml
67 | [[projects]]
68 | root = "/Users/terhechte/Developer/Rust/example1"
69 | ignore_crates = []
70 |
71 | [[projects]]
72 | root = "/Users/terhechte/Developer/Rust/example2"
73 | ignore_crates = []
74 | ```
75 |
76 | `ignore_crates` is a list of crate dependency names that you don't want to be indexed for documentation, for example because they're too big.
77 |
78 | ## Configuring Cursor
79 |
80 | Once the app is running, you can configure Cursor to use it. This requires multiple steps.
81 |
82 | 1. Add a `project-dir/.cursor/mcp.json` to your project. The `Cursor Rust Tools` UI has a button to do that for you. Running it without UI will also show you the `mcp.json` contents in the terminal.
83 | 2. As soon as you save that file, Cursor will detect that a new MCP server has been added and ask you to enable it (in a dialog in the bottom right).
84 | 3. You can check the Cursor settings (under `MCP`) to see whether it is working correctly.
85 | 4. To test, make sure you have `Agent Mode` selected in the current `Chat`. Then ask it to use one of the new tools, for example the `cargo_check` tool.
86 | 5. [You might want to add cursor rules to tell the LLM to prefer using these tools whenever possible. I'm still experimenting with this.](https://docs.cursor.com/context/rules-for-ai)
87 |
88 | 
89 |
90 | **The contents of all the `mcp.json` files are the same. Cursor Rust Tools figures out the correct project via
91 | the file path.**
92 |
93 | ## Open Todos
94 |
95 | - [ ] Create a [Zed](https://zed.dev) extension to allow using this
96 | - [ ] Proper shutdown without errors
97 | - [ ] Removing a project is a bit frail right now (in the UI)
98 | - [ ] Expose more LSP commands
99 | - [ ] Allow the LLM to perform [Grit operations](https://docs.grit.io/patterns#Miscellaneous)
100 |
--------------------------------------------------------------------------------
/assets/OpenSans-Regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/terhechte/cursor-rust-tools/1aec56464b8fef08a53b40f48420468145e22e4c/assets/OpenSans-Regular.ttf
--------------------------------------------------------------------------------
/assets/dock_icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/terhechte/cursor-rust-tools/1aec56464b8fef08a53b40f48420468145e22e4c/assets/dock_icon.png
--------------------------------------------------------------------------------
/examples/dev-client.rs:
--------------------------------------------------------------------------------
1 | // Not really an example. Instead, just a small client to test the MCP server.
2 |
3 | use anyhow::Result;
4 | use mcp_core::{
5 | client::ClientBuilder,
6 | transport::ClientSseTransportBuilder,
7 | types::{ClientCapabilities, Implementation},
8 | };
9 | use serde_json::json;
10 |
11 | #[tokio::main]
12 | async fn main() -> Result<()> {
13 | let tool = std::env::args()
14 | .nth(1)
15 | .unwrap_or("symbol_references".to_string());
16 | let client = ClientBuilder::new(
17 | ClientSseTransportBuilder::new("http://localhost:4000/sse".to_string()).build(),
18 | )
19 | .build();
20 | client.open().await?;
21 |
22 | client
23 | .initialize(
24 | Implementation {
25 | name: "echo".to_string(),
26 | version: "1.0".to_string(),
27 | },
28 | ClientCapabilities::default(),
29 | )
30 | .await?;
31 |
32 | let response = match tool.as_str() {
33 | "symbol_references" => {
34 | client
35 | .call_tool(
36 | "symbol_references",
37 | Some(json!({
38 | "file": "/Users/terhechte/Developer/Rust/supatest/src/main.rs",
39 | "line": 26,
40 | "symbol": "ApiKey"
41 | })),
42 | )
43 | .await?
44 | }
45 | "cargo_check" => {
46 | client
47 | .call_tool(
48 | "cargo_check",
49 | Some(json!({
50 | "file": "/Users/terhechte/Developer/Rust/supatest/Cargo.toml",
51 | })),
52 | )
53 | .await?
54 | }
55 | _ => todo!(),
56 | };
57 | dbg!(&response);
58 | Ok(())
59 | }
60 |
--------------------------------------------------------------------------------
/media/cursor.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/terhechte/cursor-rust-tools/1aec56464b8fef08a53b40f48420468145e22e4c/media/cursor.png
--------------------------------------------------------------------------------
/media/example.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/terhechte/cursor-rust-tools/1aec56464b8fef08a53b40f48420468145e22e4c/media/example.png
--------------------------------------------------------------------------------
/media/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/terhechte/cursor-rust-tools/1aec56464b8fef08a53b40f48420468145e22e4c/media/icon.png
--------------------------------------------------------------------------------
/media/screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/terhechte/cursor-rust-tools/1aec56464b8fef08a53b40f48420468145e22e4c/media/screenshot.png
--------------------------------------------------------------------------------
/src/.DS_Store:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/terhechte/cursor-rust-tools/1aec56464b8fef08a53b40f48420468145e22e4c/src/.DS_Store
--------------------------------------------------------------------------------
/src/cargo_remote.rs:
--------------------------------------------------------------------------------
1 | use anyhow::Result;
2 | use serde::{Deserialize, Serialize};
3 | use serde_json as json;
4 | use tokio::process::Command;
5 |
6 | use crate::project::Project;
7 |
8 | #[derive(Clone, Debug, Deserialize, Serialize)]
9 | #[serde(tag = "reason", rename_all = "kebab-case")]
10 | pub enum CargoMessage {
11 | CompilerArtifact(json::Value),
12 | BuildScriptExecuted(json::Value),
13 | CompilerMessage { message: CompilerMessage },
14 | BuildFinished { success: bool },
15 | }
16 |
17 | #[derive(Clone, Debug, Deserialize, Serialize)]
18 | #[serde(rename_all = "snake_case")]
19 | pub struct CompilerMessage {
20 | pub rendered: String,
21 | pub code: Option<json::Value>,
22 | pub level: String,
23 | pub spans: Vec<CompilerMessageSpan>,
24 | }
25 |
26 | #[derive(Clone, Debug, Deserialize, Serialize)]
27 | #[serde(rename_all = "snake_case")]
28 | pub struct CompilerMessageSpan {
29 | pub column_start: usize,
30 | pub column_end: usize,
31 | pub file_name: String,
32 | pub line_start: usize,
33 | pub line_end: usize,
34 | }
35 |
36 | #[derive(Clone, Debug)]
37 | pub struct CargoRemote {
38 | repository: Project,
39 | }
40 |
41 | impl CargoRemote {
42 | pub fn new(repository: Project) -> Self {
43 | Self { repository }
44 | }
45 |
46 | async fn run_cargo_command(
47 | &self,
48 | args: &[&str],
49 | backtrace: bool,
50 | ) -> Result<(Vec<CargoMessage>, Vec<String>)> {
51 | let output = Command::new("cargo")
52 | .current_dir(self.repository.root())
53 | .args(args)
54 | .env("RUST_BACKTRACE", if backtrace { "full" } else { "0" })
55 | .output()
56 | .await?;
57 |
58 | let stdout = String::from_utf8(output.stdout)?;
59 |
60 | let mut messages = Vec::new();
61 | let mut test_messages = Vec::new();
62 | for line in stdout.lines().filter(|line| !line.is_empty()) {
63 | match json::from_str::<CargoMessage>(line) {
64 | Ok(message) => {
65 | messages.push(message);
66 | }
67 | Err(_) => {
68 | // Cargo test doesn't respect `message-format=json`
69 | test_messages.push(line.to_string());
70 | }
71 | }
72 | }
73 |
74 | Ok((messages, test_messages))
75 | }
76 |
77 | pub async fn check(&self, only_errors: bool) -> Result<Vec<String>> {
78 | let (messages, _) = self
79 | .run_cargo_command(&["check", "--message-format=json"], false)
80 | .await?;
81 | Ok(messages
82 | .into_iter()
83 | .filter_map(|message| match message {
84 | CargoMessage::CompilerMessage { message } => {
85 | if only_errors && message.level != "error" {
86 | return None;
87 | }
88 | Some(message.rendered)
89 | }
90 | _ => None,
91 | })
92 | .collect::<Vec<String>>())
93 | }
94 |
95 | pub async fn test(&self, test_name: Option<String>, backtrace: bool) -> Result<Vec<String>> {
96 | let mut args = vec!["test", "--message-format=json"];
97 | if let Some(ref test_name) = test_name {
98 | args.push("--");
99 | args.push("--nocapture");
100 | args.push(test_name);
101 | }
102 | let (_, messages) = self.run_cargo_command(&args, backtrace).await?;
103 | Ok(messages)
104 | }
105 | }
106 |
--------------------------------------------------------------------------------
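A minimal usage sketch for `CargoRemote` (hypothetical, not a file in the repository; `Project::new` is assumed to take a path and return `Result<Project>`, as its use in `src/context.rs` suggests):

```rust
use std::path::PathBuf;

use anyhow::Result;

use crate::cargo_remote::CargoRemote;
use crate::project::Project;

// Hypothetical sketch: run `cargo check` for a project and print only the
// rendered error messages.
async fn print_check_errors(root: PathBuf) -> Result<()> {
    let project = Project::new(&root)?;
    let cargo = CargoRemote::new(project);
    for rendered in cargo.check(true).await? {
        println!("{rendered}");
    }
    Ok(())
}
```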
/src/context.rs:
--------------------------------------------------------------------------------
1 | use std::collections::HashMap;
2 | use std::fs;
3 | use std::path::{Path, PathBuf};
4 | use std::sync::Arc;
5 | use std::sync::atomic::AtomicBool;
6 | use tokio::sync::{RwLock, RwLockWriteGuard};
7 |
8 | use crate::cargo_remote::CargoRemote;
9 | use crate::docs::{Docs, DocsNotification};
10 | use crate::lsp::LspNotification;
11 | use crate::mcp::McpNotification;
12 | use crate::ui::ProjectDescription;
13 | use crate::{
14 | lsp::RustAnalyzerLsp,
15 | project::{Project, TransportType},
16 | };
17 | use anyhow::Result;
18 | use flume::Sender;
19 | use serde::{Deserialize, Serialize};
20 |
21 | #[derive(Debug, Clone)]
22 | pub enum ContextNotification {
23 | Lsp(LspNotification),
24 | Docs(DocsNotification),
25 | Mcp(McpNotification),
26 | ProjectAdded(PathBuf),
27 | ProjectRemoved(PathBuf),
28 | ProjectDescriptions(Vec<ProjectDescription>),
29 | }
30 |
31 | impl ContextNotification {
32 | pub fn notification_path(&self) -> PathBuf {
33 | match self {
34 | ContextNotification::Lsp(LspNotification::Indexing { project, .. }) => project.clone(),
35 | ContextNotification::Docs(DocsNotification::Indexing { project, .. }) => {
36 | project.clone()
37 | }
38 | ContextNotification::Mcp(McpNotification::Request { project, .. }) => project.clone(),
39 | ContextNotification::Mcp(McpNotification::Response { project, .. }) => project.clone(),
40 | ContextNotification::ProjectAdded(project) => project.clone(),
41 | ContextNotification::ProjectRemoved(project) => project.clone(),
42 | ContextNotification::ProjectDescriptions(_) => PathBuf::from("project_descriptions"),
43 | }
44 | }
45 |
46 | pub fn description(&self) -> String {
47 | match self {
48 | ContextNotification::Lsp(LspNotification::Indexing { is_indexing, .. }) => {
49 | format!(
50 | "LSP Indexing: {}",
51 | if *is_indexing { "Started" } else { "Finished" }
52 | )
53 | }
54 | ContextNotification::Docs(DocsNotification::Indexing { is_indexing, .. }) => {
55 | format!(
56 | "Docs Indexing: {}",
57 | if *is_indexing { "Started" } else { "Finished" }
58 | )
59 | }
60 | ContextNotification::Mcp(McpNotification::Request { content, .. }) => {
61 | format!("MCP Request: {:?}", content)
62 | }
63 | ContextNotification::Mcp(McpNotification::Response { content, .. }) => {
64 | format!("MCP Response: {:?}", content)
65 | }
66 | ContextNotification::ProjectAdded(project) => {
67 | format!("Project Added: {:?}", project)
68 | }
69 | ContextNotification::ProjectRemoved(project) => {
70 | format!("Project Removed: {:?}", project)
71 | }
72 | ContextNotification::ProjectDescriptions(_) => "Project Descriptions".to_string(),
73 | }
74 | }
75 | }
76 |
77 | const HOSTNAME: &str = "localhost";
78 | const CONFIGURATION_FILE: &str = ".cursor-rust-tools";
79 |
80 | #[derive(Debug)]
81 | pub struct ProjectContext {
82 | pub project: Project,
83 | pub lsp: RustAnalyzerLsp,
84 | pub docs: Docs,
85 | pub cargo_remote: CargoRemote,
86 | pub is_indexing_lsp: AtomicBool,
87 | pub is_indexing_docs: AtomicBool,
88 | }
89 |
90 | #[derive(Clone)]
91 | pub struct Context {
92 | projects: Arc<RwLock<HashMap<PathBuf, Arc<ProjectContext>>>>,
93 | transport: TransportType,
94 | lsp_sender: Sender<LspNotification>,
95 | docs_sender: Sender<DocsNotification>,
96 | mcp_sender: Sender<McpNotification>,
97 | notifier: Sender<ContextNotification>,
98 | }
99 |
100 | impl Context {
101 | pub async fn new(port: u16, notifier: Sender<ContextNotification>) -> Self {
102 | let (lsp_sender, lsp_receiver) = flume::unbounded();
103 | let (docs_sender, docs_receiver) = flume::unbounded();
104 | let (mcp_sender, mcp_receiver) = flume::unbounded();
105 |
106 | let projects = Arc::new(RwLock::new(HashMap::new()));
107 |
108 | let cloned_projects = projects.clone();
109 | let cloned_notifier = notifier.clone();
110 | tokio::spawn(async move {
111 | loop {
112 | tokio::select! {
113 | Ok(notification) = mcp_receiver.recv_async() => {
114 | if let Err(e) = cloned_notifier.send(ContextNotification::Mcp(notification)) {
115 | tracing::error!("Failed to send MCP notification: {}", e);
116 | }
117 | }
118 | Ok(ref notification @ DocsNotification::Indexing { ref project, is_indexing }) = docs_receiver.recv_async() => {
119 | if let Err(e) = cloned_notifier.send(ContextNotification::Docs(notification.clone())) {
120 | tracing::error!("Failed to send docs notification: {}", e);
121 | }
122 | let mut projects: RwLockWriteGuard<'_, HashMap<PathBuf, Arc<ProjectContext>>> = cloned_projects.write().await;
123 | if let Some(project) = projects.get_mut(project) {
124 | project.is_indexing_docs.store(is_indexing, std::sync::atomic::Ordering::Relaxed);
125 | }
126 | }
127 | Ok(ref notification @ LspNotification::Indexing { ref project, is_indexing }) = lsp_receiver.recv_async() => {
128 | if let Err(e) = cloned_notifier.send(ContextNotification::Lsp(notification.clone())) {
129 | tracing::error!("Failed to send LSP notification: {}", e);
130 | }
131 | let mut projects: RwLockWriteGuard<'_, HashMap<PathBuf, Arc<ProjectContext>>> = cloned_projects.write().await;
132 | if let Some(project) = projects.get_mut(project) {
133 | project.is_indexing_lsp.store(is_indexing, std::sync::atomic::Ordering::Relaxed);
134 | }
135 | }
136 | }
137 | }
138 | });
139 |
140 | Self {
141 | projects,
142 | transport: TransportType::Sse {
143 | host: HOSTNAME.to_string(),
144 | port,
145 | },
146 | lsp_sender,
147 | docs_sender,
148 | mcp_sender,
149 | notifier,
150 | }
151 | }
152 |
153 | pub fn address_information(&self) -> (String, u16) {
154 | match &self.transport {
155 | TransportType::Stdio => ("stdio".to_string(), 0),
156 | TransportType::Sse { host, port } => (host.clone(), *port),
157 | }
158 | }
159 |
160 | pub fn mcp_configuration(&self) -> String {
161 | let (host, port) = self.address_information();
162 | CONFIG_TEMPLATE
163 | .replace("{{HOST}}", &host)
164 | .replace("{{PORT}}", &port.to_string())
165 | }
166 |
167 | pub fn configuration_file(&self) -> String {
168 | format!("~/{}", CONFIGURATION_FILE)
169 | }
170 |
171 | pub async fn project_descriptions(&self) -> Vec<ProjectDescription> {
172 | let projects_map = self.projects.read().await;
173 | project_descriptions(&projects_map).await
174 | }
175 |
176 | pub fn transport(&self) -> &TransportType {
177 | &self.transport
178 | }
179 |
180 | pub async fn send_mcp_notification(&self, notification: McpNotification) -> Result<()> {
181 | self.mcp_sender.send(notification)?;
182 | Ok(())
183 | }
184 |
185 | fn config_path(&self) -> PathBuf {
186 | let parsed = shellexpand::tilde(&self.configuration_file()).to_string();
187 | PathBuf::from(parsed)
188 | }
189 |
190 | async fn write_config(&self) -> Result<()> {
191 | let projects_map = self.projects.read().await;
192 | let projects_to_save: Vec<SerProject> = projects_map
193 | .values()
194 | .map(|pc| &pc.project)
195 | .map(|p| SerProject {
196 | root: p.root().to_string_lossy().to_string(),
197 | ignore_crates: p.ignore_crates().to_vec(),
198 | })
199 | .collect();
200 | let config = SerConfig {
201 | projects: projects_to_save,
202 | };
203 |
204 | let config_path = self.config_path();
205 |
206 | let toml_string = toml::to_string_pretty(&config)?;
207 | if let Some(parent) = config_path.parent() {
208 | fs::create_dir_all(parent)?;
209 | }
210 | fs::write(&config_path, toml_string)?;
211 | tracing::debug!("Wrote config file to {:?}", config_path);
212 | Ok(())
213 | }
214 |
215 | pub async fn load_config(&self) -> Result<()> {
216 | let config_path = self.config_path();
217 |
218 | if !config_path.exists() {
219 | tracing::warn!(
220 | "Configuration file not found at {:?}, skipping load.",
221 | config_path
222 | );
223 | return Ok(());
224 | }
225 |
226 | let toml_string = match fs::read_to_string(&config_path) {
227 | Ok(content) => content,
228 | Err(e) => {
229 | tracing::error!("Failed to read config file {:?}: {}", config_path, e);
230 | return Err(e.into()); // Propagate read error
231 | }
232 | };
233 |
234 | if toml_string.trim().is_empty() {
235 | tracing::warn!(
236 | "Configuration file {:?} is empty, skipping load.",
237 | config_path
238 | );
239 | return Ok(());
240 | }
241 |
242 | let loaded_config: SerConfig = match toml::from_str(&toml_string) {
243 | Ok(config) => config,
244 | Err(e) => {
245 | tracing::error!(
246 | "Failed to parse TOML from config file {:?}: {}",
247 | config_path,
248 | e
249 | );
250 | // Don't return error here, maybe the file is corrupt but we can continue
251 | return Ok(());
252 | }
253 | };
254 |
255 | for project in loaded_config.projects {
256 | let project = Project {
257 | root: PathBuf::from(&project.root),
258 | ignore_crates: project.ignore_crates,
259 | };
260 | // Validate project root before adding
261 | if !project.root().exists() || !project.root().is_dir() {
262 | tracing::warn!(
263 | "Project root {:?} from config does not exist or is not a directory, skipping.",
264 | project.root()
265 | );
266 | continue;
267 | }
268 | // We need to canonicalize again as the stored path might be relative or different
269 | match Project::new(project.root()) {
270 | Ok(new_project) => {
271 | if let Err(e) = self.add_project(new_project).await {
272 | tracing::error!(
273 | "Failed to add project {:?} from config: {}",
274 | project.root(),
275 | e
276 | );
277 | }
278 | }
279 | Err(e) => {
280 | tracing::error!(
281 | "Failed to create project for root {:?} from config: {}",
282 | project.root(),
283 | e
284 | );
285 | }
286 | }
287 | }
288 |
289 | Ok(())
290 | }
291 |
292 | /// Add a new project to the context
293 | pub async fn add_project(&self, project: Project) -> Result<()> {
294 | let root = project.root().clone();
295 | let lsp = RustAnalyzerLsp::new(&project, self.lsp_sender.clone()).await?;
296 | let docs = Docs::new(project.clone(), self.docs_sender.clone())?;
297 | docs.update_index().await?;
298 | let cargo_remote = CargoRemote::new(project.clone());
299 | let project_context = Arc::new(ProjectContext {
300 | project,
301 | lsp,
302 | docs,
303 | cargo_remote,
304 | is_indexing_lsp: AtomicBool::new(true),
305 | is_indexing_docs: AtomicBool::new(true),
306 | });
307 |
308 | let mut projects_map = self.projects.write().await;
309 | projects_map.insert(root.clone(), project_context);
310 | drop(projects_map);
311 |
312 | self.request_project_descriptions();
313 |
314 | // Write config after successfully adding
315 | if let Err(e) = self.write_config().await {
316 | tracing::error!("Failed to write config after adding project: {}", e);
317 | }
318 |
319 | if let Err(e) = self.notifier.send(ContextNotification::ProjectAdded(root)) {
320 | tracing::error!("Failed to send project added notification: {}", e);
321 | }
322 |
323 | Ok(())
324 | }
325 |
326 | /// Remove a project from the context
327 | pub async fn remove_project(&self, root: &PathBuf) -> Option<Arc<ProjectContext>> {
328 | let project = {
329 | let mut projects_map = self.projects.write().await;
330 | projects_map.remove(root)
331 | };
332 |
333 | if project.is_some() {
334 | if let Err(e) = self
335 | .notifier
336 | .send(ContextNotification::ProjectRemoved(root.clone()))
337 | {
338 | tracing::error!("Failed to send project removed notification: {}", e);
339 | }
340 | // Write config after successfully removing
341 | if let Err(e) = self.write_config().await {
342 | tracing::error!("Failed to write config after removing project: {}", e);
343 | }
344 | }
345 | project
346 | }
347 |
348 | pub fn request_project_descriptions(&self) {
349 | let projects = self.projects.clone();
350 | let notifier = self.notifier.clone();
351 | tokio::spawn(async move {
352 | let projects_map = projects.read().await;
353 | let project_descriptions = project_descriptions(&projects_map).await;
354 | if let Err(e) = notifier.send(ContextNotification::ProjectDescriptions(
355 | project_descriptions,
356 | )) {
357 | tracing::error!("Failed to send project descriptions: {}", e);
358 | }
359 | });
360 | }
361 |
362 | /// Get a reference to a project context by its root path
363 | pub async fn get_project(&self, root: &PathBuf) -> Option<Arc<ProjectContext>> {
364 | let projects_map = self.projects.read().await;
365 | projects_map.get(root).cloned()
366 | }
367 |
368 | /// Get a reference to a project context by any path within the project
369 | /// Will traverse up the path hierarchy until it finds a matching project root
370 | pub async fn get_project_by_path(&self, path: &Path) -> Option<Arc<ProjectContext>> {
371 | let mut current_path = path.to_path_buf();
372 |
373 | let projects_map = self.projects.read().await;
374 |
375 | if let Some(project) = projects_map.get(&current_path) {
376 | return Some(project.clone());
377 | }
378 |
379 | while let Some(parent) = current_path.parent() {
380 | current_path = parent.to_path_buf();
381 | if let Some(project) = projects_map.get(&current_path) {
382 | return Some(project.clone());
383 | }
384 | }
385 |
386 | None
387 | }
388 |
389 | pub async fn force_index_docs(&self, project: &PathBuf) -> Result<()> {
390 | let Some(project_context) = self.get_project(project).await else {
391 | return Err(anyhow::anyhow!("Project not found"));
392 | };
393 | let oldval = project_context
394 | .is_indexing_docs
395 | .load(std::sync::atomic::Ordering::Relaxed);
396 | project_context
397 | .is_indexing_docs
398 | .store(!oldval, std::sync::atomic::Ordering::Relaxed);
399 | Ok(())
400 | }
401 |
402 | pub async fn shutdown_all(&self) {
403 | let projects = self.projects.write().await;
404 | for p in projects.values() {
405 | if let Err(e) = p.lsp.shutdown().await {
406 | tracing::error!(
407 | "Failed to shutdown LSP for project {:?}: {}",
408 | p.project.root(),
409 | e
410 | );
411 | }
412 | }
413 | }
414 | }
415 |
416 | const CONFIG_TEMPLATE: &str = r#"
417 | {
418 | "mcpServers": {
419 | "cursor_rust_tools": {
420 | "url": "http://{{HOST}}:{{PORT}}/sse",
421 | "env": {
422 | "API_KEY": ""
423 | }
424 | }
425 | }
426 | }
427 | "#;
428 |
429 | #[derive(Serialize, Deserialize, Debug)]
430 | struct SerConfig {
431 | projects: Vec<SerProject>,
432 | }
433 |
434 | #[derive(Serialize, Deserialize, Debug)]
435 | struct SerProject {
436 | root: String,
437 | ignore_crates: Vec<String>,
438 | }
439 |
440 | async fn project_descriptions(
441 | projects: &HashMap<PathBuf, Arc<ProjectContext>>,
442 | ) -> Vec<ProjectDescription> {
443 | projects
444 | .values()
445 | .map(|project| ProjectDescription {
446 | root: project.project.root().clone(),
447 | name: project
448 | .project
449 | .root()
450 | .file_name()
451 | .unwrap()
452 | .to_string_lossy()
453 | .to_string(),
454 | is_indexing_lsp: project
455 | .is_indexing_lsp
456 | .load(std::sync::atomic::Ordering::Relaxed),
457 | is_indexing_docs: project
458 | .is_indexing_docs
459 | .load(std::sync::atomic::Ordering::Relaxed),
460 | })
461 | .collect()
462 | }
463 |
--------------------------------------------------------------------------------
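A hypothetical bootstrap sketch for `Context` (not a file in the repository; `Project::new` is assumed to take a path and return `Result<Project>`, as `load_config` above suggests):

```rust
use std::path::PathBuf;

use anyhow::Result;

use crate::context::{Context, ContextNotification};
use crate::project::Project;

// Hypothetical sketch: create a Context, restore the projects saved in
// ~/.cursor-rust-tools, add one more project, and log notifications.
async fn bootstrap(port: u16) -> Result<Context> {
    let (notifier, notifications) = flume::unbounded::<ContextNotification>();
    let context = Context::new(port, notifier).await;
    context.load_config().await?;

    let project = Project::new(&PathBuf::from("/path/to/another/project"))?;
    context.add_project(project).await?;

    // Print whatever the context reports (indexing progress, MCP activity, ...).
    tokio::spawn(async move {
        while let Ok(notification) = notifications.recv_async().await {
            println!("{}", notification.description());
        }
    });

    Ok(context)
}
```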
/src/docs/extract_md.rs:
--------------------------------------------------------------------------------
1 | use regex::Regex;
2 | use serde_json::Value;
3 |
4 | pub fn extract_md(html: &str) -> String {
5 | // Remove head section before processing
6 | let re = regex::Regex::new(r"(?s)<head>.*?</head>").unwrap();
7 | let html = re.replace(html, "");
8 | let re = regex::Regex::new(r"").unwrap();
9 | let html = re.replace(&html, "");
10 | let md = html2md::parse_html(&html);
11 | let md = extract_lines_after_package(&md);
12 | remove_backslashes(&remove_tags(&remove_markdown_links(&md)))
13 | }
14 |
15 | fn remove_markdown_links(input: &str) -> String {
16 | let re = regex::Regex::new(r"\[([^\[\]]+)\]\(([^)]+)\)").unwrap();
17 | let replaced = re.replace_all(input, |caps: &regex::Captures| {
18 | caps.get(1).unwrap().as_str().to_string()
19 | });
20 | replaced.to_string()
21 | }
22 |
23 | fn remove_backslashes(input: &str) -> String {
24 | input
25 | .lines() // Iterate over each line
26 | .map(|line| {
27 | if line.starts_with("//") || line.starts_with("///") {
28 | line.to_string() // Keep the line unchanged
29 | } else {
30 | line.replace("\\", "") // Remove all backslashes
31 | }
32 | })
33 | .collect::<Vec<String>>() // Collect the processed lines into a Vec
34 | .join("\n") // Recombine them into a single string
35 | }
36 |
37 | fn remove_tags(input: &str) -> String {
38 | // Regex to match <details>, <summary>, and <a> opening tags with any attributes
39 | let details_open_tag = Regex::new(r"<details[^>]*>").unwrap();
40 | let summary_open_tag = Regex::new(r"<summary[^>]*>").unwrap();
41 | let href_open_tag = Regex::new(r"<a[^>]*>").unwrap();
42 |
43 | // Regex to match </details>, </summary>, and </a>
44 | let other_tags = Regex::new(r"</?details>|</?summary>|</?a>").unwrap();
45 |
46 | // Remove <details> tags
47 | let without_details_open = details_open_tag.replace_all(input, "");
48 | // Remove <summary> tags
49 | let without_summary_open = summary_open_tag.replace_all(&without_details_open, "");
50 | // Remove <a> tags
51 | let without_href_open = href_open_tag.replace_all(&without_summary_open, "");
52 | // Remove </details>, </summary>, and </a> tags
53 | let result = other_tags.replace_all(&without_href_open, "");
54 |
55 | result.to_string()
56 | }
57 |
58 | fn extract_lines_after_package(input: &str) -> String {
59 | let mut lines = input
60 | .lines()
61 | .filter(|line| !line.trim().is_empty())
62 | .peekable();
63 | let mut name = String::new();
64 | let mut version = String::new();
65 | let mut line_cache = Vec::new();
66 |
67 | // Find the first line with `Docs.rs` and parse the next line
68 | while let Some(line) = lines.next() {
69 | line_cache.push(line);
70 | if line.contains("Docs.rs") {
71 | if let Some(next_line) = lines.next() {
72 | if let Ok(json) = serde_json::from_str::<Value>(next_line) {
73 | if let (Some(n), Some(v)) = (json.get("name"), json.get("version")) {
74 | name = n.as_str().map(|s| s.to_string()).unwrap_or_default();
75 | version = v.as_str().map(|s| s.to_string()).unwrap_or_default();
76 | }
77 | }
78 | }
79 | break;
80 | }
81 |
82 | // Fallback if we don't find name / version
83 | if line.contains(r#"