├── resources ├── default_scripts │ ├── python │ │ ├── __init__.py │ │ ├── timestamp_generator.py │ │ ├── logging_utils.py │ │ ├── content_metrics_generator.py │ │ ├── inline_tags.py │ │ ├── code_bookmarks.py │ │ ├── kanban_board.py │ │ └── time_tracker.py │ └── lua │ │ ├── metadata_generator.lua │ │ └── lua_executor.lua ├── icon.png └── banner.png ├── src ├── embedded.rs ├── lib.rs ├── bin │ ├── note_cli.rs │ ├── note_settings.rs │ └── note_tray.rs ├── event.rs ├── logging.rs ├── utils.rs ├── observers │ ├── mod.rs │ ├── timestamp.rs │ ├── tag_index.rs │ ├── toc.rs │ └── sqlite_store.rs ├── window_manager.rs ├── metadata.rs ├── observer_registry.rs ├── cli.rs ├── handlers.rs ├── editor.rs ├── watcher.rs ├── settings.rs ├── settings_dialog.rs └── script_loader.rs ├── .gitignore ├── configexample.toml ├── Cargo.toml ├── install.sh ├── README.md └── LICENSE /resources/default_scripts/python/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /resources/icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/realjockel/noorg/HEAD/resources/icon.png -------------------------------------------------------------------------------- /resources/banner.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/realjockel/noorg/HEAD/resources/banner.png -------------------------------------------------------------------------------- /src/embedded.rs: -------------------------------------------------------------------------------- 1 | use rust_embed::RustEmbed; 2 | 3 | #[derive(RustEmbed)] 4 | #[folder = "resources/default_scripts"] 5 | pub struct DefaultScripts; 6 | -------------------------------------------------------------------------------- /src/lib.rs: 
-------------------------------------------------------------------------------- 1 | pub mod cli; 2 | pub mod editor; 3 | pub mod embedded; 4 | pub mod event; 5 | pub mod handlers; 6 | pub mod logging; 7 | pub mod metadata; 8 | pub mod note; 9 | pub mod observer_registry; 10 | pub mod observers; 11 | pub mod script_loader; 12 | pub mod settings; 13 | pub mod settings_dialog; 14 | pub mod utils; 15 | pub mod watcher; 16 | pub mod window_manager; 17 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /target 2 | notes 3 | data/frontmatter.db 4 | db/ 5 | notes/ 6 | src/note_cli.code-workspace 7 | .DS_Store 8 | Cargo.lock 9 | /target 10 | /dist/* 11 | **/*.rs.bk 12 | Cargo.lock 13 | .DS_Store 14 | /python_build_env 15 | /build 16 | *.spec 17 | *.pyc 18 | __pycache__/ 19 | .env 20 | .venv 21 | /notes 22 | /data 23 | Cargo.lock 24 | dist/Note-0.1.0-mac.dmg 25 | scripts/* 26 | entitlements.plist 27 | scripts/build.sh 28 | src/.DS_Store -------------------------------------------------------------------------------- /configexample.toml: -------------------------------------------------------------------------------- 1 | file_type = 'md' 2 | timestamps = true 3 | note_dir = '/FULLPATH/notes' 4 | scripts_dir = 'scripts' 5 | enabled_observers = ['timestamp', 'sqlite', 'tag_index', 'similar_notes', 'toc'] 6 | obsidian_vault_path='/FULLPATH/iCloud~md~obsidian/Documents/Obsidian' 7 | [similar_notes] 8 | excluded_notes = [ 9 | "_kanban", 10 | "_tag_index", 11 | "project", 12 | # Add any other notes you want to exclude 13 | ] 14 | excluded_from_references = [ 15 | "_tag_index", 16 | "_kanban", 17 | # Add notes that shouldn't appear as similar 18 | ] -------------------------------------------------------------------------------- /resources/default_scripts/lua/metadata_generator.lua: 
-------------------------------------------------------------------------------- 1 | -- local json = require("json") 2 | 3 | -- function on_event(event_json) 4 | -- local event = json.decode(event_json) 5 | -- print("Debug - Received event in Lua:", event_json) -- Debug print 6 | 7 | -- -- Check for Created, Updated, or Synced in the event table 8 | -- if event.Created or event.Updated or event.Synced then 9 | -- local metadata = { 10 | -- tags = "example, lua, note", 11 | -- summary = "Generated by Lua script" 12 | -- } 13 | -- local result = json.encode(metadata) 14 | -- print("Debug - Returning metadata from Lua:", result) -- Debug print 15 | -- return result 16 | -- end 17 | 18 | -- return nil 19 | -- end 20 | -------------------------------------------------------------------------------- /resources/default_scripts/python/timestamp_generator.py: -------------------------------------------------------------------------------- 1 | """ 2 | import json 3 | from datetime import datetime 4 | 5 | def process_event(event_json): 6 | try: 7 | event = json.loads(event_json) 8 | print(f"Debug - Received event: {event}") # Debug print 9 | 10 | # Check if event is a dict and has the expected structure 11 | if isinstance(event, dict): 12 | event_type = event.get("Created") or event.get("Updated") or event.get("Synced") 13 | if event_type: 14 | metadata = { 15 | "created_at": datetime.now().isoformat(), 16 | } 17 | print(f"Debug - Returning metadata: {metadata}") # Debug print 18 | return json.dumps(metadata) 19 | 20 | print(f"Debug - No metadata generated") 21 | return None 22 | 23 | except Exception as e: 24 | print(f"Debug - Error processing event: {e}") 25 | return None 26 | """ -------------------------------------------------------------------------------- /src/bin/note_cli.rs: -------------------------------------------------------------------------------- 1 | use clap::Parser; 2 | use noorg::{ 3 | cli::Cli, handlers::handle_command, logging::init_logging, 
observer_registry::ObserverRegistry, 4 | script_loader::ScriptLoader, settings::Settings, 5 | }; 6 | use std::{io, sync::Arc}; 7 | 8 | #[tokio::main] 9 | async fn main() -> io::Result<()> { 10 | // Parse CLI args first to get debug flag 11 | let cli = Cli::parse(); 12 | 13 | // Initialize logging before any other operations 14 | init_logging(cli.debug); 15 | 16 | let settings = Settings::new(); 17 | let script_loader = ScriptLoader::new(settings.scripts_dir.clone(), settings.clone()); 18 | 19 | // Load observers asynchronously 20 | let observers = script_loader.load_observers(&settings.enabled_observers)?; 21 | let observer_registry = Arc::new(ObserverRegistry::new()); 22 | 23 | // Register observers 24 | for observer in observers { 25 | observer_registry.register(observer).await; 26 | } 27 | 28 | handle_command(cli.command, settings, observer_registry, None).await?; 29 | 30 | Ok(()) 31 | } 32 | -------------------------------------------------------------------------------- /resources/default_scripts/python/logging_utils.py: -------------------------------------------------------------------------------- 1 | from typing import Any 2 | import json 3 | import sys 4 | 5 | def log_error(message: str, *args: Any) -> None: 6 | """Log an error message that will be parsed by Rust.""" 7 | print(json.dumps({"level": "ERROR", "message": message.format(*args)}), file=sys.stderr) 8 | 9 | def log_warn(message: str, *args: Any) -> None: 10 | """Log a warning message that will be parsed by Rust.""" 11 | print(json.dumps({"level": "WARN", "message": message.format(*args)})) 12 | 13 | def log_info(message: str, *args: Any) -> None: 14 | """Log an info message that will be parsed by Rust.""" 15 | print(json.dumps({"level": "INFO", "message": message.format(*args)})) 16 | 17 | def log_debug(message: str, *args: Any) -> None: 18 | """Log a debug message that will be parsed by Rust.""" 19 | print(json.dumps({"level": "DEBUG", "message": message.format(*args)})) 20 | 21 | def 
log_trace(message: str, *args: Any) -> None: 22 | """Log a trace message that will be parsed by Rust.""" 23 | print(json.dumps({"level": "TRACE", "message": message.format(*args)})) -------------------------------------------------------------------------------- /src/event.rs: -------------------------------------------------------------------------------- 1 | use std::any::Any; 2 | use std::collections::HashMap; 3 | use std::future::Future; 4 | use std::io; 5 | use std::pin::Pin; 6 | 7 | use serde::{Deserialize, Serialize}; 8 | 9 | #[derive(Debug, Clone, Serialize, Deserialize)] 10 | pub enum NoteEvent { 11 | Created { 12 | title: String, 13 | content: String, 14 | file_path: String, 15 | frontmatter: HashMap, 16 | }, 17 | Updated { 18 | title: String, 19 | content: String, 20 | file_path: String, 21 | frontmatter: HashMap, 22 | }, 23 | Synced { 24 | title: String, 25 | content: String, 26 | file_path: String, 27 | frontmatter: HashMap, 28 | }, 29 | } 30 | 31 | #[derive(Debug, Clone)] 32 | pub struct ObserverResult { 33 | pub metadata: Option>, 34 | pub content: Option, 35 | } 36 | 37 | pub trait NoteObserver: Send + Sync + 'static { 38 | fn on_event_boxed( 39 | &self, 40 | event: NoteEvent, 41 | ) -> Pin>> + Send + '_>>; 42 | 43 | fn name(&self) -> String; 44 | fn priority(&self) -> i32 { 45 | 0 46 | } 47 | fn as_any(&self) -> &dyn Any; 48 | } 49 | -------------------------------------------------------------------------------- /src/bin/note_settings.rs: -------------------------------------------------------------------------------- 1 | use iced::{Application, Settings as IcedSettings}; 2 | use noorg::settings::Settings; 3 | use noorg::settings_dialog::SettingsDialog; 4 | 5 | fn main() { 6 | let args: Vec = std::env::args().collect(); 7 | if args.len() != 2 { 8 | eprintln!("Usage: note_settings "); 9 | std::process::exit(1); 10 | } 11 | 12 | let settings_path = &args[1]; 13 | let settings = match std::fs::read_to_string(settings_path) { 14 | Ok(content) => match 
toml::from_str::(&content) { 15 | Ok(settings) => settings, 16 | Err(e) => { 17 | eprintln!("Failed to parse settings: {}", e); 18 | std::process::exit(1); 19 | } 20 | }, 21 | Err(e) => { 22 | eprintln!("Failed to read settings file: {}", e); 23 | std::process::exit(1); 24 | } 25 | }; 26 | 27 | let iced_settings = IcedSettings { 28 | flags: settings, 29 | window: iced::window::Settings { 30 | size: (600, 800), 31 | ..Default::default() 32 | }, 33 | ..Default::default() 34 | }; 35 | 36 | if let Err(e) = SettingsDialog::run(iced_settings) { 37 | eprintln!("Failed to run settings dialog: {}", e); 38 | std::process::exit(1); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "noorg" 3 | version = "0.1.0" 4 | edition = "2021" 5 | 6 | [[bin]] 7 | name = "note_cli" 8 | path = "src/bin/note_cli.rs" 9 | 10 | [[bin]] 11 | name = "note_tray" 12 | path = "src/bin/note_tray.rs" 13 | 14 | [[bin]] 15 | name = "note_settings" 16 | path = "src/bin/note_settings.rs" 17 | 18 | 19 | [dependencies] 20 | 21 | config = "0.14.0" 22 | serde = { version = "1.0", features = ["derive"] } 23 | clap = { version = "4.0", features = ["derive"] } 24 | chrono = { version = "0.4", features = ["serde"] } 25 | tempfile = "3.2.0" 26 | serde_yaml = "0.9" 27 | serde_json = "1.0" 28 | tokio = { version = "1.0", features = ["full"] } 29 | mlua = { version = "0.10.0", features = ["lua54", "async", "send","serialize"] } 30 | pyo3 = { version = "0.22.5", features = ["auto-initialize", "gil-refs"] } 31 | lazy_static = "1.4" 32 | rusqlite = "0.32.1" 33 | futures = "0.3" 34 | once_cell = "1.8" 35 | notify = "6.1.1" 36 | url = "2.4.0" 37 | urlencoding = "2.1.2" 38 | percent-encoding = "2.3.0" 39 | pulldown-cmark = "0.9.1" 40 | sha2 = "0.10.8" 41 | tracing = "0.1" 42 | tracing-subscriber = { version = "0.3", features = ["env-filter"] } 43 | 
directories = "5.0" 44 | dirs = "5.0" 45 | toml = "0.8" 46 | rust-embed = "8.0" 47 | tray-icon = "0.19.2" 48 | tao = "0.30.8" 49 | rfd = "0.15.1" 50 | image = "0.24.6" 51 | iced = { version = "0.10", features = ["tokio","debug"] } 52 | tracing-appender = "0.2" 53 | [build] 54 | rustflags = ["-C", "target-cpu=native"] 55 | 56 | [target.x86_64-apple-darwin] 57 | rustflags = ["-C", "target-feature=-avx,-avx2"] 58 | 59 | [target.'cfg(target_os = "macos")'.dependencies] 60 | cocoa = "0.24.0" 61 | objc = "0.2" 62 | core-foundation = "0.10.0" 63 | 64 | [profile.release] 65 | lto = true -------------------------------------------------------------------------------- /src/logging.rs: -------------------------------------------------------------------------------- 1 | use std::fs; 2 | use tracing_appender::rolling::{RollingFileAppender, Rotation}; 3 | use tracing_subscriber::{fmt, prelude::*, EnvFilter}; 4 | 5 | pub fn init_logging(debug: bool) { 6 | // Determine the log directory 7 | let log_dir = dirs::home_dir() 8 | .unwrap_or_else(|| std::path::PathBuf::from(".")) 9 | .join("Library") 10 | .join("Logs") 11 | .join("noorg"); 12 | 13 | // Ensure the log directory exists 14 | fs::create_dir_all(&log_dir).expect("Failed to create log directory"); 15 | 16 | // Set up file appender 17 | let file_appender = RollingFileAppender::new(Rotation::DAILY, log_dir.clone(), "note_app.log"); 18 | 19 | // Create the file layer 20 | let file_layer = fmt::layer() 21 | .with_file(true) 22 | .with_line_number(true) 23 | .with_thread_ids(true) 24 | .with_target(true) 25 | .with_writer(file_appender) 26 | .with_filter(if debug { 27 | EnvFilter::new("debug") 28 | } else { 29 | EnvFilter::new("info") 30 | }); 31 | 32 | // Create the terminal layer 33 | let terminal_layer = fmt::layer() 34 | .with_file(true) 35 | .with_line_number(true) 36 | .with_thread_ids(true) 37 | .with_target(true) 38 | .with_filter(if debug { 39 | EnvFilter::new("debug") 40 | } else { 41 | EnvFilter::new("info") 42 | }); 43 
| 44 | // Combine both layers 45 | tracing_subscriber::registry() 46 | .with(terminal_layer) 47 | .with(file_layer) 48 | .init(); 49 | 50 | tracing::info!("Logging initialized"); 51 | tracing::info!( 52 | "Log file location: {}", 53 | log_dir.join("note_app.log").display() 54 | ); 55 | } 56 | -------------------------------------------------------------------------------- /src/utils.rs: -------------------------------------------------------------------------------- 1 | use crate::settings::Settings; 2 | use std::path::{Path, PathBuf}; 3 | use tracing::{debug, error, warn}; 4 | 5 | pub fn get_absolute_note_path(title: &str, settings: &Settings) -> String { 6 | debug!("Getting absolute note path for title: {}", title); 7 | let path = PathBuf::from(&settings.note_dir).join(format!( 8 | "{}.{}", 9 | title.replace(" ", "%20"), 10 | settings.file_type 11 | )); 12 | 13 | match path.to_str() { 14 | Some(p) => { 15 | debug!("Generated absolute path: {}", p); 16 | p.to_string() 17 | } 18 | None => { 19 | error!("Failed to convert path to string for title: {}", title); 20 | format!( 21 | "{}/{}.{}", 22 | settings.note_dir, 23 | title.replace(" ", "%20"), 24 | settings.file_type 25 | ) 26 | } 27 | } 28 | } 29 | 30 | pub fn get_fs_path(title: &str, settings: &Settings) -> PathBuf { 31 | debug!("Getting filesystem path for title: {}", title); 32 | let path = PathBuf::from(&settings.note_dir).join(format!( 33 | "{}.{}", 34 | title.replace(" ", "%20"), 35 | settings.file_type 36 | )); 37 | debug!("Generated filesystem path: {}", path.display()); 38 | path 39 | } 40 | 41 | pub fn get_note_title_from_path(path: &str) -> String { 42 | debug!("Extracting note title from path: {}", path); 43 | let title = Path::new(path) 44 | .file_stem() 45 | .and_then(|s| s.to_str()) 46 | .map(|s| s.replace("%20", " ")) 47 | .unwrap_or_default(); 48 | 49 | if title.is_empty() { 50 | warn!("Could not extract title from path: {}", path); 51 | } else { 52 | debug!("Extracted title: {}", title); 53 | } 
54 | 55 | title 56 | } 57 | -------------------------------------------------------------------------------- /src/observers/mod.rs: -------------------------------------------------------------------------------- 1 | use crate::event::NoteObserver; 2 | use crate::settings::Settings; 3 | use std::collections::HashMap; 4 | use std::sync::Arc; 5 | 6 | pub mod sqlite_store; 7 | pub mod tag_index; 8 | pub mod timestamp; 9 | mod toc; 10 | use toc::TocObserver; 11 | 12 | // Update the type to include Settings 13 | type ObserverConstructor = fn(Arc) -> Box; 14 | 15 | // Function to create TimestampObserver 16 | fn create_timestamp_observer(_settings: Arc) -> Box { 17 | Box::new(timestamp::TimestampObserver) 18 | } 19 | 20 | // Function to create SqliteObserver 21 | fn create_sqlite_observer(settings: Arc) -> Box { 22 | Box::new(sqlite_store::SqliteObserver::new(settings).unwrap()) 23 | } 24 | 25 | // Function to create TagIndexObserver 26 | fn create_tag_index_observer(settings: Arc) -> Box { 27 | Box::new(tag_index::TagIndexObserver::new(settings).unwrap()) 28 | } 29 | 30 | // Function to create TocObserver 31 | fn create_toc_observer(_settings: Arc) -> Box { 32 | Box::new(TocObserver::new()) 33 | } 34 | 35 | // Static registry of available Rust observers 36 | lazy_static::lazy_static! 
{
    // Map of observer name -> constructor. The name strings are the values
    // users list under `enabled_observers` in the config file.
    static ref OBSERVER_REGISTRY: HashMap<&'static str, ObserverConstructor> = {
        let mut m = HashMap::new();
        m.insert("timestamp", create_timestamp_observer as ObserverConstructor);
        // m.insert("llm_metadata", create_llm_metadata_observer as ObserverConstructor);
        // m.insert("similar_notes", create_similar_notes_observer as ObserverConstructor);
        m.insert("sqlite", create_sqlite_observer as ObserverConstructor);
        m.insert("tag_index", create_tag_index_observer as ObserverConstructor);
        m.insert("toc", create_toc_observer as ObserverConstructor);
        m
    };
}

/// Names of all built-in (Rust) observers that can be enabled via config.
pub fn get_available_observers() -> Vec<&'static str> {
    OBSERVER_REGISTRY.keys().cloned().collect()
}

/// Look up a built-in observer by name and construct it with `settings`.
/// Returns `None` when `name` is not a known built-in observer.
// NOTE(review): the generic parameters below were stripped by the HTML
// extraction of this dump; `Arc<Settings>` / `Option<Box<dyn NoteObserver>>`
// are reconstructed from the `ObserverConstructor` alias and the registry
// map type above — confirm against the original file.
pub fn create_observer(name: &str, settings: Arc<Settings>) -> Option<Box<dyn NoteObserver>> {
    OBSERVER_REGISTRY
        .get(name)
        .map(|constructor| constructor(settings))
}

/// Construct every observer enabled in `settings.enabled_observers`.
pub fn create_observers(settings: Settings) -> Vec<Box<dyn NoteObserver>> {
    let settings = Arc::new(settings);
    let mut observers: Vec<Box<dyn NoteObserver>> = Vec::new();

    // ... other observers ...
65 | 66 | if settings.enabled_observers.contains(&"toc".to_string()) { 67 | observers.push(create_toc_observer(settings.clone())); 68 | } 69 | 70 | observers 71 | } 72 | -------------------------------------------------------------------------------- /src/window_manager.rs: -------------------------------------------------------------------------------- 1 | use crate::settings::Settings; 2 | use directories::ProjectDirs; 3 | use std::path::PathBuf; 4 | use std::process::Command; 5 | use std::sync::Arc; 6 | use tokio::sync::Mutex; 7 | use tracing::{error, info}; 8 | 9 | pub fn open_settings(settings: Arc>) { 10 | // Get the project directory for settings 11 | let config_path = if let Some(proj_dirs) = ProjectDirs::from("", "norg", "norg") { 12 | proj_dirs.config_dir().join("config.toml") 13 | } else { 14 | error!("Could not determine config directory"); 15 | return; 16 | }; 17 | 18 | // Get settings in a separate thread to avoid blocking 19 | std::thread::spawn(move || { 20 | // Create a new runtime for this thread 21 | let rt = tokio::runtime::Runtime::new().unwrap(); 22 | 23 | // Get settings 24 | let settings_clone = rt.block_on(async { 25 | let settings = settings.lock().await; 26 | settings.clone() 27 | }); 28 | 29 | // Save to config file 30 | if let Ok(config_str) = toml::to_string_pretty(&settings_clone) { 31 | if let Err(e) = std::fs::write(&config_path, &config_str) { 32 | error!("Failed to save settings: {}", e); 33 | return; 34 | } 35 | 36 | // Get the path to the settings binary 37 | let settings_binary = if let Ok(exe_path) = std::env::current_exe() { 38 | let mut path = exe_path 39 | .parent() 40 | .unwrap_or(&PathBuf::from(".")) 41 | .to_path_buf(); 42 | path.push("note_settings"); 43 | #[cfg(target_os = "windows")] 44 | path.set_extension("exe"); 45 | path 46 | } else { 47 | PathBuf::from("note_settings") 48 | }; 49 | 50 | // Launch settings dialog 51 | match Command::new(&settings_binary).arg(&config_path).spawn() { 52 | Ok(_) => { 53 | 
info!("Settings dialog opened with binary: {:?}", settings_binary); 54 | } 55 | Err(e) => { 56 | error!( 57 | "Failed to open settings using binary {:?}: {}", 58 | settings_binary, e 59 | ); 60 | rfd::MessageDialog::new() 61 | .set_title("Error") 62 | .set_description(&format!("Failed to open settings: {}", e)) 63 | .set_level(rfd::MessageLevel::Error) 64 | .show(); 65 | } 66 | } 67 | } 68 | }); 69 | } 70 | -------------------------------------------------------------------------------- /src/observers/timestamp.rs: -------------------------------------------------------------------------------- 1 | use crate::event::{NoteEvent, NoteObserver, ObserverResult}; 2 | use chrono::Local; 3 | use std::any::Any; 4 | use std::collections::HashMap; 5 | use std::future::Future; 6 | use std::pin::Pin; 7 | use tracing::{debug, info}; 8 | 9 | pub struct TimestampObserver; 10 | 11 | impl NoteObserver for TimestampObserver { 12 | fn on_event_boxed( 13 | &self, 14 | event: NoteEvent, 15 | ) -> Pin>> + Send + '_>> { 16 | Box::pin(async move { 17 | let mut metadata = HashMap::new(); 18 | match event { 19 | NoteEvent::Created { 20 | title, frontmatter, .. 21 | } => { 22 | debug!("Processing creation timestamp for '{}'", title); 23 | 24 | if !frontmatter.contains_key("created_at") { 25 | let created_at = Local::now().format("%Y-%m-%d %H:%M:%S %z").to_string(); 26 | debug!("Setting initial created_at: {}", created_at); 27 | metadata.insert("created_at".to_string(), created_at); 28 | } 29 | 30 | let updated_at = Local::now().format("%Y-%m-%d %H:%M:%S.%f %z").to_string(); 31 | debug!("Setting updated_at: {}", updated_at); 32 | metadata.insert("updated_at".to_string(), updated_at); 33 | 34 | info!("✨ Timestamps initialized for new note '{}'", title); 35 | } 36 | NoteEvent::Updated { 37 | title, frontmatter, .. 38 | } 39 | | NoteEvent::Synced { 40 | title, frontmatter, .. 
41 | } => { 42 | debug!("Processing update timestamp for '{}'", title); 43 | 44 | if let Some(created) = frontmatter.get("created_at") { 45 | debug!("Preserving existing created_at: {}", created); 46 | metadata.insert("created_at".to_string(), created.clone()); 47 | } 48 | 49 | let updated_at = Local::now().format("%Y-%m-%d %H:%M:%S.%f %z").to_string(); 50 | debug!("Setting updated_at: {}", updated_at); 51 | metadata.insert("updated_at".to_string(), updated_at); 52 | 53 | info!("✨ Updated timestamp for '{}'", title); 54 | } 55 | } 56 | 57 | Ok(Some(ObserverResult { 58 | metadata: Some(metadata), 59 | content: None, 60 | })) 61 | }) 62 | } 63 | 64 | fn name(&self) -> String { 65 | "timestamp".to_string() 66 | } 67 | 68 | fn as_any(&self) -> &dyn Any { 69 | self 70 | } 71 | 72 | fn priority(&self) -> i32 { 73 | 0 // Run after metadata generation but before storage 74 | } 75 | } 76 | -------------------------------------------------------------------------------- /src/metadata.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use tracing::{debug, trace}; 3 | 4 | pub fn merge_metadata(existing: &mut HashMap, new: HashMap) { 5 | debug!("Merging metadata with {} new fields", new.len()); 6 | trace!("Existing metadata: {:?}", existing); 7 | trace!("New metadata: {:?}", new); 8 | 9 | for (key, value) in new { 10 | match key.as_str() { 11 | "tags" => { 12 | debug!("Merging tags field"); 13 | let existing_tags: Vec = existing 14 | .get("tags") 15 | .map(|t| { 16 | trace!("Existing tags: {}", t); 17 | t.split(',').map(|s| s.trim().to_string()).collect() 18 | }) 19 | .unwrap_or_default(); 20 | 21 | let new_tags: Vec = 22 | value.split(',').map(|s| s.trim().to_string()).collect(); 23 | trace!("New tags: {:?}", new_tags); 24 | 25 | let mut combined_tags: Vec = 26 | existing_tags.into_iter().chain(new_tags).collect(); 27 | 28 | combined_tags.sort(); 29 | combined_tags.dedup(); 30 | trace!("Combined and 
deduplicated tags: {:?}", combined_tags); 31 | 32 | existing.insert(key, combined_tags.join(", ")); 33 | } 34 | "topics" => { 35 | debug!("Merging topics field"); 36 | let existing_items: Vec = existing 37 | .get("topics") 38 | .map(|t| { 39 | trace!("Existing topics: {}", t); 40 | t.split(',').map(|s| s.trim().to_string()).collect() 41 | }) 42 | .unwrap_or_default(); 43 | 44 | let new_items: Vec = 45 | value.split(',').map(|s| s.trim().to_string()).collect(); 46 | trace!("New topics: {:?}", new_items); 47 | 48 | let mut combined: Vec = 49 | existing_items.into_iter().chain(new_items).collect(); 50 | 51 | combined.sort(); 52 | combined.dedup(); 53 | trace!("Combined and deduplicated topics: {:?}", combined); 54 | 55 | existing.insert(key, combined.join(", ")); 56 | } 57 | "created_at" => { 58 | debug!("Processing created_at field"); 59 | if !existing.contains_key(&key) { 60 | trace!("Setting initial created_at: {}", value); 61 | existing.insert(key, value); 62 | } else { 63 | trace!("Keeping existing created_at timestamp"); 64 | } 65 | } 66 | "updated_at" => { 67 | debug!("Updating updated_at field to: {}", value); 68 | existing.insert(key, value); 69 | } 70 | "timestamp" => { 71 | debug!("Skipping redundant timestamp field"); 72 | } 73 | _ => { 74 | debug!("Setting field '{}' to '{}'", key, value); 75 | existing.insert(key, value); 76 | } 77 | } 78 | } 79 | 80 | debug!("Metadata merge completed"); 81 | trace!("Final metadata state: {:?}", existing); 82 | } 83 | -------------------------------------------------------------------------------- /src/observer_registry.rs: -------------------------------------------------------------------------------- 1 | use crate::event::{NoteEvent, NoteObserver}; 2 | use crate::metadata::merge_metadata; 3 | use std::collections::HashMap; 4 | use std::io; 5 | use std::sync::Arc; 6 | use tokio::sync::RwLock; 7 | use tracing::{debug, error, info, trace}; 8 | 9 | pub struct ObserverRegistry { 10 | observers: RwLock>>>, 11 | } 12 | 13 | 
    /// Adds `observer` to the registry so it receives future note events.
    ///
    /// Takes the write half of the `RwLock`, briefly blocking concurrent
    /// readers (`notify`, `get_observers`). The boxed observer is wrapped in
    /// an `Arc` because the registry shares it per-event during notification.
    pub async fn register(&self, observer: Box<dyn NoteObserver>) {
        // Capture the name before `observer` is moved into the Arc below.
        let name = observer.name();
        debug!("Registering new observer: {}", name);
        let mut observers = self.observers.write().await;
        observers.push(Arc::new(observer));
        info!("✅ Observer '{}' registered successfully", name);
    }
def calculate_metrics(content: str) -> dict:
    """Compute basic structure/readability metrics for a markdown note.

    Args:
        content: Raw note text (markdown; may still include frontmatter).

    Returns:
        dict[str, str]: All values stringified so they can be stored
        directly as frontmatter metadata.
    """
    log_debug("Calculating content metrics")

    # Basic counts
    word_count = len(content.split())
    char_count = len(content)
    log_trace("Basic counts - words: {}, chars: {}", word_count, char_count)

    # Count sentences (basic approximation: split on terminal punctuation)
    sentences = re.split(r'[.!?]+', content)
    sentence_count = len([s for s in sentences if s.strip()])

    # Average words per sentence; guard against division by zero
    avg_words_per_sentence = round(word_count / sentence_count if sentence_count > 0 else 0, 2)
    log_debug("Sentence analysis - count: {}, avg words: {}",
              sentence_count, avg_words_per_sentence)

    # Count markdown links of the form [text](target)
    markdown_links = len(re.findall(r'\[([^\]]+)\]\(([^\)]+)\)', content))
    log_trace("Found {} markdown links", markdown_links)

    # Count ATX headers (1-6 leading '#')
    headers = len(re.findall(r'^#{1,6}\s+.+$', content, re.MULTILINE))
    log_trace("Found {} headers", headers)

    # Count bullet points (-, *, + list markers)
    bullet_points = len(re.findall(r'^\s*[-*+]\s+', content, re.MULTILINE))
    log_trace("Found {} bullet points", bullet_points)

    # Find most common words (excluding common stop words)
    stop_words = {'the', 'a', 'an', 'and', 'or', 'but', 'in', 'on',
                  'at', 'to', 'for', 'of', 'with', 'by'}
    words = [word.lower() for word in re.findall(r'\b\w+\b', content)]
    word_freq = Counter(w for w in words if w not in stop_words)
    top_words = ', '.join([word for word, _ in word_freq.most_common(5)])
    log_debug("Top words found: {}", top_words)

    metrics = {
        "word_count": str(word_count),
        "char_count": str(char_count),
        "sentence_count": str(sentence_count),
        "avg_words_per_sentence": str(avg_words_per_sentence),
        "link_count": str(markdown_links),
        "header_count": str(headers),
        "bullet_point_count": str(bullet_points),
        "top_words": top_words,
        # Fix: use an aware local timestamp so %z actually renders an offset.
        # With a naive datetime.now(), %z expands to '' and the format string
        # leaves a dangling trailing space in the stored value.
        "last_analyzed": datetime.now().astimezone().strftime("%Y-%m-%d %H:%M:%S %z"),
    }

    log_debug("Generated metrics: {}", metrics)
    return metrics
/// Top-level command-line interface, parsed with `clap` derive macros.
#[derive(Parser, Debug, Clone)]
#[clap(author, version, about)]
pub struct Cli {
    /// Subcommand to run (list, add, delete, sync, query, watch, ...).
    #[clap(subcommand)]
    pub command: Command,

    /// Enable debug logging
    #[arg(long, global = true, help = "Enable verbose debug output")]
    pub debug: bool,
}
tags:rust)")] 39 | frontmatter: Vec<(String, String)>, 40 | }, 41 | /// Delete a note 42 | Delete { 43 | /// Title of the note to delete 44 | #[arg(short, long)] 45 | title: String, 46 | }, 47 | #[clap(name = "observers")] 48 | ListObservers, 49 | /// Sync all notes with observers 50 | #[clap(name = "sync")] 51 | Sync, 52 | /// Query notes using natural language or SQL 53 | Query { 54 | /// Query string (natural language or SQL) 55 | #[arg(short, long)] 56 | query: String, 57 | }, 58 | Watch, 59 | } 60 | 61 | /// Helper function to parse key-value pairs. 62 | pub fn parse_key_val(s: &str) -> Result<(String, String), String> { 63 | debug!("Parsing key-value pair: {}", s); 64 | 65 | let parts: Vec<&str> = s.splitn(2, ':').collect(); 66 | if parts.len() == 2 { 67 | let key = parts[0].trim(); 68 | let value = parts[1].trim(); 69 | 70 | if key.is_empty() { 71 | error!("Empty key in key-value pair: {}", s); 72 | return Err("Key cannot be empty".to_string()); 73 | } 74 | 75 | if value.is_empty() { 76 | error!("Empty value in key-value pair: {}", s); 77 | return Err("Value cannot be empty".to_string()); 78 | } 79 | 80 | debug!("Successfully parsed key-value pair: {}:{}", key, value); 81 | Ok((key.to_string(), value.to_string())) 82 | } else { 83 | error!("Invalid key-value format: {}", s); 84 | Err(format!( 85 | "'{}' is not a valid key:value pair (use 'key:value' format)", 86 | s 87 | )) 88 | } 89 | } 90 | 91 | #[cfg(test)] 92 | mod tests { 93 | use super::*; 94 | 95 | #[test] 96 | fn test_parse_key_val() { 97 | // Valid cases 98 | assert_eq!( 99 | parse_key_val("tags:rust").unwrap(), 100 | ("tags".to_string(), "rust".to_string()) 101 | ); 102 | assert_eq!( 103 | parse_key_val("title:My Note").unwrap(), 104 | ("title".to_string(), "My Note".to_string()) 105 | ); 106 | 107 | // Invalid cases 108 | assert!(parse_key_val("invalid").is_err()); 109 | assert!(parse_key_val(":empty_key").is_err()); 110 | assert!(parse_key_val("empty_value:").is_err()); 111 | 
# Abort unless running as root (effective UID 0); install/uninstall write
# into /usr/local and therefore need elevated permissions.
check_permissions() {
    if [ "$EUID" -ne 0 ]; then
        echo "Please run as root or use sudo"
        exit 1
    fi
}
# Remove every installed noorg artifact for the current platform.
# Fix: previously the unsupported-platform arm fell through and the script
# still printed the success message; now it fails fast with a non-zero exit
# so "✅ noorg uninstalled successfully" is only printed when something was
# actually removed.
uninstall() {
    check_permissions
    case "$(uname -s)" in
        Darwin*|Linux*)
            echo "Uninstalling noorg..."
            rm -f /usr/local/bin/noorg
            rm -rf /usr/local/share/noorg
            ;;
        MINGW*|MSYS*|CYGWIN*)
            echo "Uninstalling noorg..."
            rm -rf "C:/Program Files/noorg"
            ;;
        *)
            echo "Unsupported platform"
            exit 1
            ;;
    esac
    echo "✅ noorg uninstalled successfully"
}
def process_event(event_json):
    """Host entry point: extract inline #tags from a note event.

    Args:
        event_json: JSON-encoded note event (``Created``/``Updated``/``Synced``).

    Returns:
        JSON string carrying a ``metadata`` dict (merged ``tags`` plus
        diagnostics), or a ``{"metadata": {}, "error": ...}`` payload on
        any failure; never raises.
    """
    try:
        log_info(" Starting Python observer execution")
        log_debug("Received event: {}", event_json)

        # Log the interpreter environment for debugging. Fix: the return
        # value was previously bound to an unused local; only the logging
        # side effect is wanted, so call without binding.
        get_environment_info()

        # Parse event
        event = json.loads(event_json)
        log_info("📝 Processing event for tag extraction")

        if not isinstance(event, dict):
            log_error("Event is not a dictionary: {}", type(event))
            return json.dumps({"metadata": {}, "error": "Invalid event format"})

        # Extract event type (first of Created/Updated/Synced that is present)
        event_type = event.get("Created") or event.get("Updated") or event.get("Synced")
        if not event_type:
            log_error("No valid event type found in: {}", event.keys())
            return json.dumps({"metadata": {}, "error": "Invalid event type"})

        # Process content
        content = event_type.get("content")
        if not content:
            log_warn("No content found in event")
            return json.dumps({"metadata": {}, "error": "No content found"})

        # Extract tags
        inline_tags = extract_inline_tags(content)
        if inline_tags:
            log_info("🏷️ Found {} inline tags", len(inline_tags))
            log_debug("Tags: {}", inline_tags)
        else:
            log_info("ℹ️ No inline tags found")

        # Pull any tags already declared in the note's frontmatter
        existing_tags = ""
        if "frontmatter" in event_type:
            existing_tags = event_type["frontmatter"].get("tags", "")
            log_debug("Existing tags: {}", existing_tags)

        # Merge frontmatter tags with the inline ones (deduplicated, sorted)
        combined_tags = merge_tags(existing_tags, inline_tags)

        # Prepare response in the shape the host expects
        metadata = {
            "metadata": {
                "tags": combined_tags,
                "inline_tags_found": str(len(inline_tags)),
                "python_version": sys.version.split()[0],
                "script_path": __file__,
            }
        }

        log_debug("Generated metadata: {}", metadata)
        return json.dumps(metadata)

    except Exception as e:
        # Rich diagnostics payload: the host surfaces this in its own logs.
        error_info = {
            "error": str(e),
            "traceback": traceback.format_exc(),
            "python_version": sys.version,
            "python_path": sys.path,
            "working_dir": os.getcwd(),
            "env_vars": {k: v for k, v in os.environ.items() if k.startswith(("PYTHON", "PATH"))}
        }
        log_error("Error processing event: {}\nFull error info: {}",
                  str(e), json.dumps(error_info, indent=2))
        return json.dumps({"metadata": {}, "error": error_info})
-- Extract and process Lua code blocks.
-- Scans `content` for fenced ```lua blocks, executes each one via
-- safe_execute, and rewrites the block as code + a quoted "> Output:" line.
-- Blocks that already carry an output section are re-executed and their
-- output refreshed in place.
-- Returns: (modified_content, has_changes).
function process_lua_blocks(content)
    log.debug("Processing Lua code blocks in content")
    local modified_content = content
    local has_changes = false
    local blocks_processed = 0

    -- Process each Lua code block; `pos` advances past each rewritten block
    -- so a block's own output is never re-matched as new input.
    local pos = 1
    while true do
        -- First try to find a code block with existing output
        local start, finish = modified_content:find("```lua\n.-\n```\n\n> Output:\n>[^\n]*\n", pos)

        if not start then
            -- Try to find a lone code block without output
            local code_start, code_finish, code = modified_content:find("```lua\n(.-)\n```", pos)
            if not code_start then break end

            log.debug("Found new Lua code block at position {}", code_start)

            -- Execute the code
            local output = safe_execute(code)

            -- Format the output block: original code followed by its output
            local output_block = string.format("```lua\n%s\n```\n\n> Output:\n> %s\n",
                code,
                output)

            -- Replace the code block with code + output
            modified_content = modified_content:sub(1, code_start-1) .. output_block .. modified_content:sub(code_finish+1)
            pos = code_start + #output_block
            has_changes = true
            blocks_processed = blocks_processed + 1
            log.trace("Processed new code block: {}", code)
        else
            -- Extract the code and existing output
            local code_block = modified_content:sub(start, finish)
            local _, _, code = code_block:find("```lua\n(.-)\n```")

            log.debug("Found existing Lua code block at position {}", start)

            -- Execute the code
            local output = safe_execute(code)

            -- Format the new block (stale output is discarded)
            local output_block = string.format("```lua\n%s\n```\n\n> Output:\n> %s\n",
                code,
                output)

            -- Replace the entire block
            modified_content = modified_content:sub(1, start-1) .. output_block .. modified_content:sub(finish+1)
            pos = start + #output_block
            has_changes = true
            blocks_processed = blocks_processed + 1
            log.trace("Re-processed existing code block: {}", code)
        end
    end

    -- Clean up any extra newlines introduced by the rewrites
    modified_content = modified_content:gsub("\n\n\n+", "\n\n")

    if blocks_processed > 0 then
        log.info("✨ Processed {} Lua code blocks", blocks_processed)
    else
        log.debug("No Lua code blocks found in content")
    end

    return modified_content, has_changes
end
/// Dispatch a parsed CLI `Command` against the note store.
///
/// Builds a `NoteManager` from `settings` + `observer_registry`, then runs
/// the matching subcommand. `stop_signal` is only consulted by
/// `Command::Watch`; when `None`, a fresh never-set flag is created.
///
/// Errors: propagates `io::Error` from the note manager, date parsing
/// (mapped to `InvalidInput`), the SQLite observer lookup (`NotFound`),
/// and the directory watcher.
pub async fn handle_command(
    command: Command,
    settings: Settings,
    observer_registry: Arc<ObserverRegistry>,
    stop_signal: Option<Arc<AtomicBool>>,
) -> io::Result<()> {
    debug!("Initializing note manager");
    let note_manager = NoteManager::new(settings.clone(), observer_registry.clone()).await?;

    match command {
        Command::List { from, to, filter } => {
            debug!("Handling list command with filters: {:?}", filter);

            // Optional date bounds: `transpose` turns Option<Result<..>>
            // into Result<Option<..>> so `?` can surface parse failures.
            let from_date = from
                .map(|d| NoteManager::parse_date_string(&d))
                .transpose()
                .map_err(|e| {
                    error!("Invalid 'from' date format: {}", e);
                    io::Error::new(io::ErrorKind::InvalidInput, e)
                })?;

            let to_date = to
                .map(|d| NoteManager::parse_date_string(&d))
                .transpose()
                .map_err(|e| {
                    error!("Invalid 'to' date format: {}", e);
                    io::Error::new(io::ErrorKind::InvalidInput, e)
                })?;

            let filters: HashMap<String, String> = filter.into_iter().collect();
            debug!(
                "Listing notes with date range {:?} to {:?}",
                from_date, to_date
            );
            note_manager.list_notes_with_filter(from_date, to_date, filters)?;
        }
        Command::Add {
            title,
            body,
            frontmatter,
        } => {
            debug!("Handling add command for note '{}'", title);

            // Fall back to an interactive editor when no body was supplied.
            let content = match body {
                Some(text) => {
                    debug!("Using provided content for note");
                    text
                }
                None => {
                    info!("Opening editor for note content...");
                    open_editor("", &settings)?
                }
            };

            // An empty body (e.g. editor closed without writing) cancels.
            if content.trim().is_empty() {
                warn!("Note creation cancelled - empty content");
                return Ok(());
            }

            let mut frontmatter_data = HashMap::new();
            for (key, value) in frontmatter {
                match key.as_str() {
                    // Tags get normalized: split on commas, trim, drop empties.
                    "tags" => {
                        let tags: Vec<String> = value
                            .split(',')
                            .map(|s| s.trim().to_string())
                            .filter(|s| !s.is_empty())
                            .collect();
                        debug!("Processing tags: {:?}", tags);
                        frontmatter_data.insert(key, tags.join(", "));
                    }
                    _ => {
                        debug!("Adding frontmatter: {} = {}", key, value);
                        frontmatter_data.insert(key, value);
                    }
                }
            }

            note_manager
                .add_note(title.clone(), content, frontmatter_data)
                .await?;
            info!("✨ Note '{}' added successfully", title);
        }
        Command::Delete { title } => {
            debug!("Handling delete command for note '{}'", title);
            note_manager.delete_note(&title)?;
            info!("🗑️ Note '{}' deleted successfully", title);
        }
        Command::Sync => {
            info!("🔄 Syncing all notes with observers...");
            note_manager.sync_notes().await?;
            info!("✨ Sync completed successfully");
        }
        Command::ListObservers => {
            info!("📋 Available Rust observers:");
            for observer in observers::get_available_observers() {
                info!("- {}", observer);
            }
        }
        Command::Query { query } => {
            debug!("Handling query command: {}", query);
            // Query needs the concrete SqliteObserver, so locate it by name
            // and downcast from the trait object held by the registry.
            let observers = observer_registry.get_observers().await;
            let sqlite_observer = observers
                .iter()
                .find(|o| o.name() == "sqlite")
                .and_then(|o| o.as_any().downcast_ref::<SqliteObserver>())
                .ok_or_else(|| {
                    error!("SQLite observer not found in registry");
                    io::Error::new(
                        io::ErrorKind::NotFound,
                        "SQLite observer not found in registry",
                    )
                })?;

            debug!("Executing {} query", &query);
            let results = sqlite_observer.query(&query).await?;

            if results.rows.is_empty() {
                info!("No matching notes found");
            } else {
                debug!("Found {} matching notes", results.rows.len());
                print_query_results(&results);
                info!("📊 Found {} notes", results.rows.len());
            }
        }
        Command::Watch => {
            // Default to a fresh stop flag when the caller did not supply one.
            let stop = stop_signal.unwrap_or_else(|| Arc::new(AtomicBool::new(false)));
            watch_directory(settings, observer_registry, stop).await?
        }
    }

    Ok(())
}
def generate_bookmarks_content(data: Dict[str, List[Dict[str, str]]]) -> str:
    """Render stored code blocks as the markdown body of the bookmarks file.

    Languages appear as ``##`` sections in alphabetical order; languages with
    no blocks are omitted. Each block gets an optional ``###`` title, a
    "From:" link back to its source note, and a fenced code snippet. Only the
    body is returned — the file header is produced separately.
    """
    out: List[str] = []

    for language in sorted(data):
        blocks = data[language]
        if not blocks:
            # Nothing recorded for this language: skip the section entirely.
            continue

        out.append(f"## {language.upper()}")
        out.append("")

        for entry in blocks:
            heading = entry['title']
            snippet = entry['code']
            origin = entry['source']
            # Only emit a sub-heading when the block actually has a title.
            if heading.strip():
                out.append(f"### {heading}")
            out.append(f"From: [{origin}](./{origin}.md)")
            out.append("```" + language)
            out.append(snippet)
            out.append("```")
            out.append("")

    return "\n".join(out)
exist 161 | bookmarks_file = get_bookmarks_file(note_dir) 162 | if not bookmarks_file.exists(): 163 | with open(bookmarks_file, 'w') as f: 164 | f.write("\n".join(generate_header()) + "\n" + content) # Fix concatenation 165 | else: 166 | # Read existing header 167 | with open(bookmarks_file, 'r') as f: 168 | existing_content = f.read() 169 | header_end = existing_content.find("# 📚 Code Bookmarks") + len("# 📚 Code Bookmarks") 170 | header = existing_content[:header_end] 171 | 172 | # Write updated content with preserved header 173 | with open(bookmarks_file, 'w') as f: 174 | f.write(header + "\n" + content) 175 | 176 | return json.dumps({ 177 | "metadata": { 178 | "code_blocks_added": str(len(new_blocks)), 179 | "total_code_blocks": str(sum(len(blocks) for blocks in data.values())), 180 | "languages": ", ".join(sorted(data.keys())), 181 | "bookmarks_updated": "true" 182 | } 183 | }) 184 | 185 | except Exception as e: 186 | log_error(f"Failed to process code bookmarks: {e}") 187 | return json.dumps({"metadata": {}, "error": str(e)}) -------------------------------------------------------------------------------- /src/editor.rs: -------------------------------------------------------------------------------- 1 | use percent_encoding::{utf8_percent_encode, NON_ALPHANUMERIC}; 2 | use std::env; 3 | use std::fs; 4 | use std::io; 5 | use std::path::PathBuf; 6 | use std::process::Command; 7 | use std::thread; 8 | use std::time::Duration; 9 | use tempfile::NamedTempFile; 10 | use tracing::{debug, error, info, warn}; 11 | 12 | use crate::settings::Settings; 13 | 14 | pub fn open_editor(initial_content: &str, settings: &Settings) -> io::Result { 15 | debug!( 16 | "Opening editor with {} bytes of initial content", 17 | initial_content.len() 18 | ); 19 | 20 | let editor = env::var("EDITOR").unwrap_or_else(|_| { 21 | debug!("No EDITOR environment variable found, checking common editors"); 22 | for editor in ["nvim", "vim", "nano"] { 23 | if command_exists(editor) { 24 | 
/// Open `initial_content` in the user's text editor and return the edited text.
///
/// Editor resolution order: `$EDITOR`, then the first of `nvim`/`vim`/`nano`
/// found on PATH, then `vim` as a last resort. The special value
/// `"obsidian"` (case-insensitive) routes through `open_in_obsidian`
/// instead of spawning a terminal editor on a temp file.
///
/// # Errors
/// Fails when the temp file cannot be created/read, the editor cannot be
/// spawned, or the editor exits with a non-zero status.
pub fn open_editor(initial_content: &str, settings: &Settings) -> io::Result<String> {
    debug!(
        "Opening editor with {} bytes of initial content",
        initial_content.len()
    );

    let editor = env::var("EDITOR").unwrap_or_else(|_| {
        debug!("No EDITOR environment variable found, checking common editors");
        // `return` here exits the closure, yielding the first editor on PATH.
        for editor in ["nvim", "vim", "nano"] {
            if command_exists(editor) {
                debug!("Found editor: {}", editor);
                return editor.to_string();
            }
        }
        warn!("No common editors found, defaulting to vim");
        "vim".to_string()
    });

    if editor.to_lowercase() == "obsidian" {
        debug!("Using Obsidian as editor");
        return open_in_obsidian(initial_content, settings);
    }

    debug!("Creating temporary file for editing");
    // NamedTempFile keeps the file alive (and deletes it) for this scope.
    let temp_file = NamedTempFile::new().map_err(|e| {
        error!("Failed to create temporary file: {}", e);
        io::Error::new(io::ErrorKind::Other, e)
    })?;

    if !initial_content.is_empty() {
        debug!("Writing initial content to temporary file");
        fs::write(&temp_file, initial_content).map_err(|e| {
            error!("Failed to write initial content: {}", e);
            e
        })?;
    }

    info!("🖊️ Opening {} editor", editor);
    // Blocks until the editor process exits; the user edits the temp file.
    let result = Command::new(&editor)
        .arg(temp_file.path())
        .status()
        .map_err(|e| {
            error!("Failed to open editor '{}': {}", editor, e);
            io::Error::new(
                io::ErrorKind::NotFound,
                format!("Failed to open editor '{}': {}. Please ensure it's installed or set a different editor using the EDITOR environment variable.", editor, e)
            )
        })?;

    if !result.success() {
        error!("Editor '{}' returned non-zero status", editor);
        return Err(io::Error::new(
            io::ErrorKind::Other,
            format!("Editor '{}' returned non-zero status", editor),
        ));
    }

    debug!("Reading edited content from temporary file");
    let content = fs::read_to_string(temp_file.path()).map_err(|e| {
        error!("Failed to read edited content: {}", e);
        e
    })?;

    info!("✨ Editor closed successfully");
    Ok(content)
}

/// Edit a note through Obsidian instead of a terminal editor.
///
/// Writes `initial_content` to a timestamped file under `<vault>/_temp`,
/// launches Obsidian via its `obsidian://open` URL scheme, then blocks on
/// stdin until the user presses Enter, reads the file back, and cleans up.
///
/// # Errors
/// Fails when the configured vault directory is missing, temp-file I/O
/// fails, or Obsidian cannot be launched.
fn open_in_obsidian(initial_content: &str, settings: &Settings) -> io::Result<String> {
    debug!("Opening note in Obsidian");
    let notes_dir = settings.obsidian_vault_path.clone().unwrap_or_else(|| {
        warn!("No Obsidian vault path found in config, using default path");
        "./notes".to_string()
    });

    info!("📂 Using Obsidian vault path: {}", notes_dir);
    let notes_path = PathBuf::from(&notes_dir);
    if !notes_path.exists() {
        error!("Obsidian vault directory not found: {}", notes_dir);
        return Err(io::Error::new(
            io::ErrorKind::NotFound,
            format!("Obsidian vault directory not found: {}", notes_dir),
        ));
    }

    debug!("Creating temporary directory for Obsidian");
    // The temp file lives INSIDE the vault so Obsidian can open it by path.
    let temp_dir = notes_path.join("_temp");
    fs::create_dir_all(&temp_dir).map_err(|e| {
        error!("Failed to create temp directory: {}", e);
        e
    })?;

    // Timestamp suffix keeps concurrent edits from colliding on a name.
    let temp_filename = format!("temp_{}.md", chrono::Utc::now().timestamp());
    let temp_path = temp_dir.join(&temp_filename);
    debug!("Created temporary file at: {}", temp_path.display());

    if !initial_content.is_empty() {
        debug!("Writing initial content to temporary file");
        fs::write(&temp_path, initial_content).map_err(|e| {
            error!("Failed to write initial content: {}", e);
            e
        })?;
    }

    info!("🚀 Launching Obsidian...");
    // Platform-specific launchers for the obsidian:// URL scheme.
    let launch_status = if cfg!(target_os = "macos") {
        Command::new("open").arg("obsidian://open").status()
    } else if cfg!(target_os = "windows") {
        Command::new("cmd")
            .args(["/C", "start", "obsidian://open"])
            .status()
    } else {
        Command::new("xdg-open").arg("obsidian://open").status()
    }
    .map_err(|e| {
        error!("Failed to launch Obsidian: {}", e);
        e
    })?;

    if !launch_status.success() {
        error!("Failed to launch Obsidian");
        return Err(io::Error::new(
            io::ErrorKind::Other,
            "Failed to launch Obsidian",
        ));
    }

    debug!("Waiting for Obsidian to start...");
    // NOTE(review): fixed 1 s startup delay is a race on slow machines —
    // confirm it is sufficient in practice.
    thread::sleep(Duration::from_secs(1));

    // Obsidian needs an absolute, percent-encoded path in the open URL.
    let absolute_path = temp_path.canonicalize().map_err(|e| {
        error!("Failed to get absolute path: {}", e);
        e
    })?;
    let path_str = absolute_path.to_string_lossy();
    let encoded_path = utf8_percent_encode(&path_str, NON_ALPHANUMERIC).to_string();
    let obsidian_url = format!("obsidian://open?path={}", encoded_path);

    debug!("Opening note with URL: {}", obsidian_url);

    let status = if cfg!(target_os = "macos") {
        Command::new("open").arg(&obsidian_url).status()
    } else if cfg!(target_os = "windows") {
        Command::new("cmd")
            .args(["/C", "start", "", &obsidian_url])
            .status()
    } else {
        Command::new("xdg-open").arg(&obsidian_url).status()
    }
    .map_err(|e| {
        error!("Failed to open note in Obsidian: {}", e);
        e
    })?;

    if !status.success() {
        error!("Failed to open note in Obsidian");
        return Err(io::Error::new(
            io::ErrorKind::Other,
            "Failed to open note in Obsidian",
        ));
    }

    // There is no "file closed" signal from Obsidian, so wait for the user.
    info!("📝 Note opened in Obsidian. Press Enter when you're done editing...");
    let mut input = String::new();
    io::stdin().read_line(&mut input)?;

    debug!("Reading edited content");
    let content = fs::read_to_string(&temp_path).map_err(|e| {
        error!("Failed to read edited content: {}", e);
        e
    })?;

    debug!("Cleaning up temporary files");
    // Best-effort cleanup: remove_dir only succeeds once the dir is empty.
    if let Err(e) = fs::remove_file(&temp_path) {
        warn!("Failed to remove temporary file: {}", e);
    }
    if let Err(e) = fs::remove_dir(&temp_dir) {
        warn!("Failed to remove temporary directory: {}", e);
    }

    info!("✨ Successfully saved changes from Obsidian");
    Ok(content)
}
Press Enter when you're done editing..."); 176 | let mut input = String::new(); 177 | io::stdin().read_line(&mut input)?; 178 | 179 | debug!("Reading edited content"); 180 | let content = fs::read_to_string(&temp_path).map_err(|e| { 181 | error!("Failed to read edited content: {}", e); 182 | e 183 | })?; 184 | 185 | debug!("Cleaning up temporary files"); 186 | if let Err(e) = fs::remove_file(&temp_path) { 187 | warn!("Failed to remove temporary file: {}", e); 188 | } 189 | if let Err(e) = fs::remove_dir(&temp_dir) { 190 | warn!("Failed to remove temporary directory: {}", e); 191 | } 192 | 193 | info!("✨ Successfully saved changes from Obsidian"); 194 | Ok(content) 195 | } 196 | 197 | fn command_exists(command: &str) -> bool { 198 | debug!("Checking if command exists: {}", command); 199 | let exists = if cfg!(target_os = "windows") { 200 | Command::new("where") 201 | .arg(command) 202 | .output() 203 | .map(|output| output.status.success()) 204 | .unwrap_or(false) 205 | } else { 206 | Command::new("which") 207 | .arg(command) 208 | .output() 209 | .map(|output| output.status.success()) 210 | .unwrap_or(false) 211 | }; 212 | 213 | if exists { 214 | debug!("Command '{}' found", command); 215 | } else { 216 | debug!("Command '{}' not found", command); 217 | } 218 | exists 219 | } 220 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ![banner](resources/banner.png) 2 | 3 | [Website 🚀](https://noorg.dev) [Docs ✍️](https://noorg.dev/docs/intro) 4 | 5 | ⚠️ Currently only buildable on MacOS and Linux, still experimental 6 | 7 | Capture everything, organize nothing. 8 | 9 | Noorg is not just another note-taking tool—it's an editor-agnostic platform designed to integrate seamlessly with any text editor you prefer. 
Whether you're a fan of Vim, Emacs, VSCode, Obsidian or any other editor, Noorg empowers you to focus on what truly matters: capturing your thoughts and ideas without the burden of organization. 10 | 11 | ## A New Paradigm in Note Management 12 | 13 | For those of us who struggle with organization, and dedicate too much time into creating the perfect system and note structure, **Noorg** offers a liberating approach. It shifts the focus from organizing to capturing thoughts and ideas effortlessly. By leveraging the power of Markdown and its frontmatter capabilities, Noorg allows you to annotate your notes with metadata, making them easily searchable and sortable without the need for manual organization. 14 | 15 | But that's not all, Noorg is also a runtime that allows you to extend its functionality using Python, Lua, and Rust. This flexibility means you can tailor Noorg to fit unlimited use cases, from simple note-taking to complex data processing tasks. 16 | 17 | ## Extensible Runtime 18 | 19 | At its core, Noorg is a highly extensible runtime, allowing you to enhance its capabilities using `Python` , `Lua` , and `Rust`. This flexibility means you can tailor Noorg to fit unlimited use cases, from simple note-taking to complex data processing tasks. 20 | 21 | ### Observer Pattern 22 | 23 | Noorg employs the observer pattern to provide dynamic, real-time processing of your notes. Current observers, such as the time tracker and inline tags, are just examples of what's possible. These observers automatically process your notes, adding context and metadata without interrupting your flow. Imagine a daily journal that compiles all notes created on a specific day, or a system that tags notes based on content—these are just a few possibilities. 24 | 25 | ## Use Cases 26 | 27 | - **Journal Creation**: Automatically compile a daily journal from notes created throughout the day. 
28 | - **Time Tracking**: Integrate time tracking to monitor how much time you spend on different topics. 29 | - **Tagging System**: Use inline tags to categorize notes on-the-fly and to create dynamic views. 30 | - **Dynamic Views**: Create dynamic views utilizing SQL to filter, sort and display your notes. (Comparable to Obsidian's Dataview plugin) 31 | - **Kanban Board**: Create a kanban board to visualize your notes and tasks. 32 | - **Custom Processing**: Use Python, Lua or Rust to process your notes and add custom metadata. 33 | - **Lua executor**: Execute Lua inside your notes. 34 | - **Unlimited Possibilities**: The possibilities are endless. You could build a system to automatically transcribe your notes, built presentations, call external APIs, integrate LLMs, and more. 35 | 36 | 37 | 38 | ## Caution: Pre-Alpha Software 39 | 40 | Noorg is currently in a pre-alpha stage. While it offers powerful features, it is still under active development and may not be stable. We strongly advise starting slowly and backing up your note directory regularly. Experiment with Noorg in a safe environment to discover its potential without risking your important data. 41 | 42 | ## Join the Community 43 | 44 | Noorg is for those minds, who want to break free from the constraints of traditional note-taking systems. It's editor agnostic, offline first, free, open source, community driven, and extensible. It's for thos who want to capture their thoughts, and not built the perfect organization system. Join the community in redefining how we manage our knowledge. Start using, contributing and building. 
45 | 46 | 47 | ## Features 48 | - Editor agnostic 49 | - Runs as a system tray application which "watches" your note directory and automatically processes your notes 50 | - Offline first, no cloud dependencies 51 | - Extensible with Python, Lua and Rust 52 | - SQL based dynamic views 53 | - Kanban board 54 | - Time tracker 55 | - Inline tag detection and creation of Tag index 56 | - Lua executor to execute Lua inside your notes 57 | 58 | ## Installation 59 | 60 | ### Prerequisites 61 | #### macOS 62 | 1. Install Rust and Cargo: 63 | ```bash 64 | # macOS 65 | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh 66 | ``` 67 | 68 | 2. Install Python dependencies: 69 | ```bash 70 | brew install python@3.9 71 | 72 | # Add to ~/.zshrc or ~/.bashrc 73 | export PYTHON_CONFIGURE_OPTS="--enable-framework" 74 | export PYO3_PYTHON="/opt/homebrew/opt/python@3.9/bin/python3.9" 75 | ``` 76 | 3. Install Lua 77 | 78 | ```bash 79 | brew install lua # macOS 80 | 81 | # find lua path 82 | lua -e "print(package.path:match('([^;]+)/?.lua'))" 83 | 84 | # download json.lua dependency 85 | curl -O https://raw.githubusercontent.com/rxi/json.lua/master/json.lua 86 | 87 | # macOS: Copy to Lua package path 88 | cp json.lua /opt/homebrew/share/lua/5.4/json.lua 89 | 90 | # Verify installation 91 | lua -e "require('json')" 92 | ``` 93 | #### Linux 94 | 1. Install Rust and Cargo: 95 | ```bash 96 | curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh 97 | ``` 98 | 99 | 2. Install Python Dependencies: 100 | ```bash 101 | add-apt-repository ppa:deadsnakes/ppa 102 | apt-get install -y python3.9 python3.9-distutils python3.9-venv 103 | apt-get install -y python3.12-dev 104 | ``` 105 | 106 | 3. 
Install Lua Dependencies
```bash
apt-get install -y lua5.4 liblua5.4-dev liblua5.1-0-dev
curl -O https://raw.githubusercontent.com/rxi/json.lua/master/json.lua
mkdir /usr/local/share/lua/5.4/ -p
cp json.lua /usr/local/share/lua/5.4/json.lua
```

4. Install additional dependencies
172 | ```bash 173 | noorg note_cli sync 174 | ``` 175 | 176 | Add a note: 177 | ```bash 178 | noorg note_cli add --title "My Note" --body "Content" --frontmatter "tags:rust" 179 | 180 | # Or without body (will open editor defined as EDITOR env variable to edit note) 181 | noorg note_cli add --title "My Note" --frontmatter "tags:rust" 182 | 183 | # Or with multiple frontmatter fields 184 | noorg note_cli add -t "Rust Notes" -b "Discussed lifetimes" -f "priority:high" -f "project:X" 185 | ``` 186 | 187 | 188 | ### System Tray Application 189 | 190 | The system tray application provides quick access to: 191 | - Note creation 192 | - Settings 193 | - Starting a watch to automatically process your notes on change 194 | 195 | ## Configuration 196 | The configuration file (`config.toml`) is automatically created in the following location: 197 | 198 | ### Config Location 199 | - **macOS**: `~/Library/Application Support/norg/config.toml` 200 | 201 | ### Data Directory 202 | Application data is stored in: 203 | 204 | - **macOS**: `~/Library/Application Support/norg/` 205 | 206 | ## Development Roadmap 207 | 208 | - [ ] Fix query and list cli commands 209 | - [ ] Add more tests 210 | - [ ] Add more examples 211 | - [ ] fix similar notes observer (RAG) 212 | 213 | ## License 214 | 215 | This project is licensed under Apache 2.0. See the [LICENSE](LICENSE) file for details. 
use tracing::{debug, info};

use crate::event::{NoteEvent, NoteObserver, ObserverResult};
use crate::settings::Settings;
use std::any::Any;
use std::collections::{BTreeMap, HashMap};
use std::fs::File;
use std::future::Future;
use std::io::{self, Read, Write};
use std::path::Path;
use std::pin::Pin;
use std::sync::Arc;

/// Maintains `_tag_index.md` in the notes directory: a generated markdown
/// index with one `## tag` section per tag, each listing `- [title](path)`
/// links to the notes carrying that tag.
pub struct TagIndexObserver {
    /// Full path of `_tag_index.md` inside the configured notes directory.
    index_path: String,
    /// Shared application settings; `note_dir` and `file_type` are read here.
    settings: Arc<Settings>,
}

impl TagIndexObserver {
    /// Build the observer, creating an empty `_tag_index.md` on first run.
    pub fn new(settings: Arc<Settings>) -> io::Result<Self> {
        // Use the configured notes directory
        let index_path = Path::new(&settings.note_dir).join("_tag_index.md");

        // Create empty index file if it doesn't exist
        if !index_path.exists() {
            let mut file = File::create(&index_path)?;
            writeln!(file, "# Tag Index\n")?;
        }

        Ok(Self {
            index_path: index_path.to_str().unwrap_or("_tag_index.md").to_string(),
            settings,
        })
    }

    /// Re-read the index file into a `tag -> [(title, path)]` map.
    ///
    /// Only understands the exact shape `write_index` emits: `## tag`
    /// section headers followed by `- [title](path)` bullet lines.
    fn parse_index(&self) -> io::Result<BTreeMap<String, Vec<(String, String)>>> {
        let mut content = String::new();
        File::open(&self.index_path)?.read_to_string(&mut content)?;

        let mut index: BTreeMap<String, Vec<(String, String)>> = BTreeMap::new();
        let mut current_tag = String::new();

        for line in content.lines() {
            if line.starts_with("## ") {
                // Section header: everything after "## " is the tag name.
                current_tag = line[3..].trim().to_string();
            } else if line.starts_with("- ") && !current_tag.is_empty() {
                // Bullet line: recover "[title](path)" by delimiter positions.
                if let Some(link_start) = line.find('[') {
                    if let Some(link_end) = line.find(']') {
                        if let Some(path_start) = line.find('(') {
                            if let Some(path_end) = line.find(')') {
                                let title = line[link_start + 1..link_end].to_string();
                                let path = line[path_start + 1..path_end].to_string();
                                index
                                    .entry(current_tag.clone())
                                    .or_default()
                                    .push((title, path));
                            }
                        }
                    }
                }
            }
        }

        Ok(index)
    }

    /// Rewrite the whole index file from `index`, preserving any existing
    /// frontmatter block at the top of the file.
    fn write_index(&self, index: &BTreeMap<String, Vec<(String, String)>>) -> io::Result<()> {
        // First read existing content to preserve frontmatter
        let existing_content = if let Ok(mut content) =
            File::open(&self.index_path).and_then(|mut f| {
                let mut content = String::new();
                f.read_to_string(&mut content)?;
                Ok(content)
            }) {
            content
        } else {
            String::new()
        };

        // Extract frontmatter if it exists.
        // Offsets: `end + 4` skips past the opening "---\n"; `second` is the
        // position of the closing "---" within the remainder; +7 (= 4 + 3)
        // keeps the closing delimiter inside the preserved slice.
        let frontmatter = if existing_content.starts_with("---") {
            if let Some(end) = existing_content.find("---\n") {
                if let Some(second) = existing_content[end + 4..].find("---") {
                    Some(existing_content[..end + second + 7].to_string())
                } else {
                    None
                }
            } else {
                None
            }
        } else {
            None
        };

        let mut file = File::create(&self.index_path)?;

        // Write frontmatter if it exists
        if let Some(fm) = frontmatter {
            writeln!(file, "{}\n", fm)?;
        }

        writeln!(file, "# _tag_index\n")?;

        for (tag, entries) in index {
            writeln!(file, "## {}\n", tag)?;

            for (title, path) in entries {
                writeln!(file, "- [{}]({})", title, path)?;
            }
            writeln!(file)?;
        }

        Ok(())
    }

    /// Replace all of `title`'s rows in the index with one row per tag in
    /// `tags`, then rewrite the index file.
    fn update_index(&self, title: &str, _file_path: &str, tags: &[String]) -> io::Result<()> {
        let mut index = self.parse_index()?;

        // Remove existing entries for this note
        for entries in index.values_mut() {
            entries.retain(|(t, _)| t != title);
        }

        // Add new entries with relative paths
        for tag in tags {
            let tag = tag.trim();
            if !tag.is_empty() && !tag.starts_with("tags:") {
                // Use relative path from the notes directory; the link is
                // rebuilt from title + configured extension, not `_file_path`.
                let file_path = format!("./{}.{}", title, self.settings.file_type);
                index
                    .entry(tag.to_string())
                    .or_default()
                    .push((title.to_string(), file_path));
            }
        }

        // Sort entries within each tag
        for entries in index.values_mut() {
            entries.sort_by(|a, b| a.0.cmp(&b.0));
        }

        // Remove empty tags
        index.retain(|_, entries| !entries.is_empty());

        self.write_index(&index)
    }

    /// Pull the comma-separated `tags:` line out of a raw frontmatter string.
    ///
    /// NOTE(review): appears unused in this file — confirm it has callers
    /// elsewhere before removing.
    fn parse_frontmatter_tags(&self, frontmatter: &str) -> Vec<String> {
        frontmatter
            .lines()
            .find(|line| line.trim().starts_with("tags:"))
            .map(|tags_line| {
                tags_line
                    .trim_start_matches("tags:")
                    .split(',')
                    .map(|s| s.trim().to_string())
                    .filter(|s| !s.is_empty() && !s.starts_with("tags:"))
                    .collect()
            })
            .unwrap_or_default()
    }
}

impl NoteObserver for TagIndexObserver {
    /// On any note event, rebuild this note's rows in the tag index from the
    /// event's frontmatter `tags` value and report the tags as metadata.
    fn on_event_boxed(
        &self,
        event: NoteEvent,
    ) -> Pin<Box<dyn Future<Output = io::Result<Option<ObserverResult>>> + Send + '_>> {
        Box::pin(async move {
            match event {
                NoteEvent::Created {
                    title,
                    file_path,
                    frontmatter,
                    ..
                }
                | NoteEvent::Updated {
                    title,
                    file_path,
                    frontmatter,
                    ..
                }
                | NoteEvent::Synced {
                    title,
                    file_path,
                    frontmatter,
                    ..
                } => {
                    // Extract tags from frontmatter directly
                    if let Some(tags) = frontmatter.get("tags") {
                        let tags: Vec<String> = tags
                            .split(',')
                            .map(|s| s.trim().to_string())
                            .filter(|s| !s.is_empty() && !s.starts_with("tags:"))
                            .collect();

                        debug!(
                            "🏷️ Updating tag index for '{}' with tags: {:?}",
                            title, tags
                        );
                        self.update_index(&title, &file_path, &tags)?;
                        info!("✅ Tag index updated successfully");

                        // Return the tags in the metadata
                        let mut metadata = HashMap::new();
                        metadata.insert("tags".to_string(), tags.join(", "));

                        Ok(Some(ObserverResult {
                            metadata: Some(metadata),
                            content: None,
                        }))
                    } else {
                        // No tags on this note: nothing to index.
                        Ok(None)
                    }
                }
            }
        })
    }

    fn name(&self) -> String {
        "tag_index".to_string()
    }

    fn as_any(&self) -> &dyn Any {
        self
    }

    fn priority(&self) -> i32 {
        -99 // Run after metadata generation but before storage
    }
}
use crate::event::{NoteEvent, NoteObserver, ObserverResult};
use pulldown_cmark::{Event as MarkdownEvent, HeadingLevel, Parser, Tag};
use std::any::Any;
use std::collections::HashMap;
use std::future::Future;
use std::io;
use std::pin::Pin;
use tracing::{debug, info};

/// Observer that inserts or refreshes a "## Contents" table of contents
/// directly after a note's first top-level heading.
pub struct TocObserver;

impl TocObserver {
    pub fn new() -> Self {
        debug!("Initializing TOC observer");
        TocObserver
    }

    /// Build TOC markdown from the headings in `content`, or `None` when
    /// there are no listable headings.
    ///
    /// The first H1 is treated as the note title and excluded from the TOC;
    /// any later H1s are included.
    fn generate_toc(&self, content: &str) -> Option<String> {
        let mut headings = Vec::new();
        let parser = Parser::new(content);
        let mut in_heading = false;
        let mut current_level = 0;
        let mut current_heading = String::new();
        let mut first_h1_seen = false;

        debug!("Collecting headings from content");
        for event in parser {
            match event {
                MarkdownEvent::Start(Tag::Heading(level, ..)) => {
                    in_heading = true;
                    current_level = match level {
                        HeadingLevel::H1 => 1,
                        HeadingLevel::H2 => 2,
                        HeadingLevel::H3 => 3,
                        HeadingLevel::H4 => 4,
                        HeadingLevel::H5 => 5,
                        HeadingLevel::H6 => 6,
                    };
                }
                // Heading text can arrive as several Text/Code fragments;
                // accumulate them until the matching End event.
                MarkdownEvent::Text(text) | MarkdownEvent::Code(text) if in_heading => {
                    current_heading.push_str(&text);
                }
                MarkdownEvent::End(Tag::Heading(..)) => {
                    if !current_heading.is_empty() {
                        if current_level == 1 {
                            if !first_h1_seen {
                                first_h1_seen = true;
                                debug!("Skipping first H1 heading: {}", current_heading);
                            } else {
                                let anchor = self.create_anchor(&current_heading);
                                debug!("Adding H1 heading: {} ({})", current_heading, anchor);
                                headings.push((current_level, current_heading.clone(), anchor));
                            }
                        } else {
                            let anchor = self.create_anchor(&current_heading);
                            debug!(
                                "Adding H{} heading: {} ({})",
                                current_level, current_heading, anchor
                            );
                            headings.push((current_level, current_heading.clone(), anchor));
                        }
                        current_heading.clear();
                    }
                    in_heading = false;
                }
                _ => {}
            }
        }

        if headings.is_empty() {
            debug!("No headings found, skipping TOC generation");
            return None;
        }

        debug!("Generating TOC with {} headings", headings.len());
        let mut toc = String::from("## Contents\n\n");

        for (level, heading, anchor) in headings {
            // Indent nested entries one list level per heading level.
            let indent = "  ".repeat(level - 1);
            toc.push_str(&format!("{}* [{}](#{})\n", indent, heading, anchor));
        }

        Some(toc.to_string())
    }

    /// GitHub-style anchor: lowercase, spaces to '-', strip other symbols.
    fn create_anchor(&self, heading: &str) -> String {
        heading
            .to_lowercase()
            .replace(' ', "-")
            .replace(|c: char| !c.is_alphanumeric() && c != '-', "")
    }

    /// Splice the generated TOC into `content` right after the first `# `
    /// line, preserving frontmatter and dropping any previous
    /// "## Contents" / "## Table of Contents" section.
    ///
    /// NOTE(review): assumes the note has a `# ` heading; if only `##`+
    /// headings exist (or there is no frontmatter), `first_heading_pos` /
    /// `frontmatter_end` stay 0 and line 0 can be emitted twice — confirm
    /// inputs always match the expected shape.
    fn insert_toc(&self, content: &str) -> Option<String> {
        let toc = self.generate_toc(content)?;
        debug!("Generated TOC content:\n{}", toc);
        debug!("Processing content for TOC insertion");

        let lines: Vec<&str> = content.lines().collect();
        let mut output = Vec::new();
        let mut in_frontmatter = false;
        let mut frontmatter_end = 0;
        let mut first_heading_found = false;
        let mut first_heading_pos = 0;

        // Find frontmatter end and first heading
        for (i, line) in lines.iter().enumerate() {
            if line.trim() == "---" {
                if !in_frontmatter {
                    in_frontmatter = true;
                    debug!("Found start of frontmatter at line {}", i);
                } else {
                    frontmatter_end = i;
                    debug!("Found end of frontmatter at line {}", i);
                }
            }

            if line.starts_with("# ") && !first_heading_found {
                first_heading_found = true;
                first_heading_pos = i;
                debug!("Found first heading at line {}", i);
            }
        }

        // Copy frontmatter
        for i in 0..=frontmatter_end {
            output.push(lines[i]);
        }
        output.push(""); // Add blank line after frontmatter

        // Copy content up to first heading
        for i in (frontmatter_end + 1)..first_heading_pos {
            output.push(lines[i]);
        }

        // Add first heading
        output.push(lines[first_heading_pos]);
        output.push(""); // Add blank line after heading

        // Add TOC after first heading
        output.extend(toc.lines());
        output.push(""); // Add blank line after TOC

        // Add remaining content, skipping old TOC if present
        let mut skip_old_toc = false;
        for i in (first_heading_pos + 1)..lines.len() {
            let line = lines[i];

            if line.starts_with("## Contents") || line.starts_with("## Table of Contents") {
                skip_old_toc = true;
                continue;
            }

            if skip_old_toc {
                // The old TOC section ends at the next "## " heading.
                if line.starts_with("## ") {
                    skip_old_toc = false;
                } else {
                    continue;
                }
            }

            output.push(line);
        }

        Some(output.join("\n") + "\n")
    }
}
impl NoteObserver for TocObserver {
    /// Regenerate the TOC for created/updated/synced notes and return the
    /// rewritten content only when the text actually changed.
    fn on_event_boxed(
        &self,
        event: NoteEvent,
    ) -> Pin<Box<dyn Future<Output = io::Result<Option<ObserverResult>>> + Send + '_>> {
        Box::pin(async move {
            match event {
                NoteEvent::Created { content, title, .. }
                | NoteEvent::Updated { content, title, .. }
                | NoteEvent::Synced { content, title, .. } => {
                    debug!("Processing TOC for note '{}'", title);

                    // Cheap pre-filter: tiny notes or notes without any '#'
                    // cannot need a TOC.
                    if content.len() < 50 || !content.contains('#') {
                        debug!(
                            "Skipping TOC generation for '{}' (too short or no headers)",
                            title
                        );
                        return Ok(None);
                    }

                    if let Some(updated_content) = self.insert_toc(&content) {
                        // Report a result only on real change so watchers
                        // don't loop on no-op rewrites.
                        if updated_content != content {
                            info!("📚 Generated table of contents for '{}'", title);
                            debug!("Updated content:\n{}", updated_content);
                            Ok(Some(ObserverResult {
                                content: Some(updated_content),
                                metadata: Some(HashMap::from([(
                                    "toc_generated".to_string(),
                                    "true".to_string(),
                                )])),
                            }))
                        } else {
                            debug!("No changes needed for TOC in '{}'", title);
                            Ok(None)
                        }
                    } else {
                        debug!("No TOC generated for '{}'", title);
                        Ok(None)
                    }
                }
            }
        })
    }

    fn name(&self) -> String {
        "toc".to_string()
    }

    fn as_any(&self) -> &dyn Any {
        self
    }

    /// Neutral priority: runs in the default position of the pipeline.
    fn priority(&self) -> i32 {
        0
    }
}
use std::time::{Duration, Instant}; 10 | use tokio::runtime::Handle; 11 | use tokio::sync::mpsc; 12 | use tracing::{debug, error, info, trace, warn}; 13 | 14 | use crate::note::{Note, NoteManager}; 15 | use crate::observer_registry::ObserverRegistry; 16 | use crate::settings::Settings; 17 | 18 | fn convert_notify_error(e: notify::Error) -> io::Error { 19 | error!("Notify error: {}", e); 20 | io::Error::new(io::ErrorKind::Other, e) 21 | } 22 | 23 | pub async fn watch_directory( 24 | settings: Settings, 25 | observer_registry: Arc, 26 | stop_signal: Arc, 27 | ) -> io::Result<()> { 28 | debug!("Initializing directory watcher"); 29 | 30 | // Test write permissions 31 | let note_dir = Path::new(&settings.note_dir); 32 | if !note_dir.exists() { 33 | debug!("Creating notes directory: {}", settings.note_dir); 34 | fs::create_dir_all(note_dir).map_err(|e| { 35 | error!("Failed to create notes directory. This might be a permissions issue: {}", e); 36 | io::Error::new( 37 | io::ErrorKind::PermissionDenied, 38 | format!("Cannot create or access notes directory: {}. Please check app permissions in System Settings > Privacy & Security > Files and Folders.", e) 39 | ) 40 | })?; 41 | } 42 | 43 | // Test write permissions with a temporary file 44 | let test_file = note_dir.join(".permissions_test"); 45 | match fs::write(&test_file, "") { 46 | Ok(_) => { 47 | fs::remove_file(test_file).ok(); // Clean up test file 48 | debug!("Successfully verified write permissions"); 49 | } 50 | Err(e) => { 51 | error!( 52 | "Failed to write test file. This might be a permissions issue: {}", 53 | e 54 | ); 55 | return Err(io::Error::new( 56 | io::ErrorKind::PermissionDenied, 57 | "Cannot write to notes directory. Please check app permissions in System Settings > Privacy & Security > Files and Folders." 
58 | )); 59 | } 60 | } 61 | 62 | let (tx, mut rx) = mpsc::channel(100); 63 | let note_manager = NoteManager::new(settings.clone(), observer_registry.clone()).await?; 64 | 65 | // Track recently processed files to avoid loops 66 | let processing_files = Arc::new(Mutex::new(HashSet::new())); 67 | let debounce_duration = Duration::from_millis(100); 68 | debug!("Using debounce duration: {:?}", debounce_duration); 69 | 70 | let runtime_handle = Handle::current(); 71 | let processing_files_clone = processing_files.clone(); 72 | 73 | let mut watcher = RecommendedWatcher::new( 74 | move |res: Result| { 75 | let tx = tx.clone(); 76 | let _processing_files = processing_files_clone.clone(); 77 | 78 | if let Ok(event) = res { 79 | trace!("Received file system event: {:?}", event); 80 | let handle = runtime_handle.clone(); 81 | std::thread::spawn(move || { 82 | handle.block_on(async { 83 | if let Err(e) = tx.send(event).await { 84 | error!("Failed to send event: {}", e); 85 | } 86 | }); 87 | }); 88 | } 89 | }, 90 | Config::default(), 91 | ) 92 | .map_err(convert_notify_error)?; 93 | 94 | watcher 95 | .watch(Path::new(&settings.note_dir), RecursiveMode::Recursive) 96 | .map_err(convert_notify_error)?; 97 | 98 | info!("🔍 Watching directory: {}", settings.note_dir); 99 | 100 | let _watcher = watcher; 101 | let mut last_events = std::collections::HashMap::new(); 102 | 103 | while let Some(event) = rx.recv().await { 104 | // Check if we should stop 105 | if stop_signal.load(Ordering::SeqCst) { 106 | info!("Stop signal received, shutting down watcher"); 107 | break; 108 | } 109 | 110 | match event.kind { 111 | notify::EventKind::Modify(_) | notify::EventKind::Create(_) => { 112 | for path in event.paths { 113 | if path.extension().and_then(|s| s.to_str()) == Some(&settings.file_type) { 114 | if let Some(title) = path.file_stem().and_then(|s| s.to_str()) { 115 | // Decode any percent-encoded characters in the title 116 | let decoded_title = percent_decode_str(title) 117 | 
.decode_utf8() 118 | .unwrap_or_else(|_| title.into()) 119 | .into_owned(); 120 | 121 | let path_str = path.to_string_lossy().to_string(); 122 | debug!("Processing change for note: {}", decoded_title); 123 | 124 | // Check if we recently processed this file 125 | let mut processing = processing_files.lock().unwrap(); 126 | if !processing.contains(&path_str) { 127 | // Check if we need to debounce 128 | let now = Instant::now(); 129 | if let Some(last_time) = last_events.get(&path_str) { 130 | if now.duration_since(*last_time) < debounce_duration { 131 | trace!("Debouncing change for: {}", decoded_title); 132 | continue; 133 | } 134 | } 135 | 136 | // Mark file as being processed 137 | processing.insert(path_str.clone()); 138 | last_events.insert(path_str.clone(), now); 139 | 140 | info!("📝 Change detected in note: {}", decoded_title); 141 | 142 | // Read the file content first to check if it really changed 143 | match Note::from_file(&path) { 144 | Ok(Some((content, _frontmatter))) => { 145 | debug!("Successfully read note content"); 146 | // Only sync if content changed 147 | if note_manager 148 | .should_process_note(&decoded_title, &content) 149 | .await 150 | { 151 | debug!("Content changed, syncing note"); 152 | if let Err(e) = note_manager 153 | .sync_single_note(&decoded_title, true) 154 | .await 155 | { 156 | error!( 157 | "Failed to sync note '{}': {}", 158 | decoded_title, e 159 | ); 160 | } 161 | } else { 162 | info!( 163 | "⏭️ Content unchanged for '{}', skipping sync", 164 | decoded_title 165 | ); 166 | } 167 | } 168 | Ok(None) => warn!("Could not parse note: {}", decoded_title), 169 | Err(e) => { 170 | error!("Error reading note '{}': {}", decoded_title, e) 171 | } 172 | } 173 | 174 | // Remove from processing after a delay 175 | let processing = processing_files.clone(); 176 | let path_str = path_str.clone(); 177 | tokio::spawn(async move { 178 | trace!("Starting debounce timer for: {}", path_str); 179 | tokio::time::sleep(debounce_duration).await; 
180 | processing.lock().unwrap().remove(&path_str); 181 | trace!("Removed {} from processing list", path_str); 182 | }); 183 | } else { 184 | trace!("Note already being processed: {}", decoded_title); 185 | } 186 | } 187 | } 188 | } 189 | } 190 | _ => { 191 | trace!("Ignoring non-modify/create event: {:?}", event.kind); 192 | } 193 | } 194 | } 195 | 196 | info!("Watcher stopped"); 197 | Ok(()) 198 | } 199 | -------------------------------------------------------------------------------- /src/settings.rs: -------------------------------------------------------------------------------- 1 | use crate::embedded::DefaultScripts; 2 | use config::{Config, ConfigError}; 3 | use directories::ProjectDirs; 4 | use serde::{Deserialize, Serialize}; 5 | use std::fs; 6 | use std::path::PathBuf; 7 | use tracing::{debug, error, info}; 8 | 9 | #[derive(Debug, Deserialize, Serialize, Default, Clone)] 10 | pub struct SimilarNotesConfig { 11 | pub excluded_notes: Option>, 12 | pub excluded_from_references: Option>, 13 | } 14 | 15 | #[derive(Debug, Deserialize, Serialize, Default, Clone)] 16 | #[allow(dead_code)] 17 | pub struct Settings { 18 | pub file_type: String, 19 | pub timestamps: bool, 20 | pub note_dir: String, 21 | pub scripts_dir: String, 22 | pub obsidian_vault_path: Option, 23 | pub enabled_observers: Vec, 24 | pub similar_notes: SimilarNotesConfig, 25 | } 26 | 27 | impl Settings { 28 | pub fn new() -> Self { 29 | debug!("Loading application settings"); 30 | 31 | let config_path = match Self::ensure_config_exists() { 32 | Ok(path) => { 33 | debug!("Using config file at: {:?}", path); 34 | path 35 | } 36 | Err(e) => { 37 | error!("Failed to initialize config: {}", e); 38 | panic!("Failed to initialize config: {}", e); 39 | } 40 | }; 41 | 42 | let config_result = Config::builder() 43 | .add_source(config::File::from(config_path).required(true)) 44 | .add_source(config::Environment::with_prefix("NOTE_CLI")) 45 | .build(); 46 | 47 | let settings = match config_result { 48 | 
Ok(config) => { 49 | debug!("Configuration sources loaded successfully"); 50 | match config.try_deserialize::() { 51 | Ok(settings) => { 52 | debug!("Settings deserialized successfully"); 53 | trace_settings(&settings); 54 | settings 55 | } 56 | Err(e) => { 57 | error!("Failed to deserialize configuration: {}", e); 58 | panic!("Failed to deserialize configuration: {}", e); 59 | } 60 | } 61 | } 62 | Err(e) => { 63 | error!("Failed to load configuration: {}", e); 64 | panic!("Failed to load configuration: {}", e); 65 | } 66 | }; 67 | 68 | Self::ensure_directories_exist(&settings); 69 | 70 | info!("✨ Settings loaded successfully"); 71 | settings 72 | } 73 | 74 | fn ensure_config_exists() -> Result { 75 | let proj_dirs = ProjectDirs::from("", "norg", "norg") 76 | .ok_or_else(|| ConfigError::NotFound("Could not determine config directory".into()))?; 77 | 78 | let config_dir = proj_dirs.config_dir(); 79 | debug!("Config directory: {:?}", config_dir); 80 | 81 | // Create the config directory if it does not exist 82 | if !config_dir.exists() { 83 | fs::create_dir_all(config_dir).map_err(|e| { 84 | ConfigError::NotFound(format!("Failed to create config directory: {}", e)) 85 | })?; 86 | } 87 | 88 | // Create base directories 89 | let norg_base_dir = dirs::document_dir() 90 | .map(|d| d.join("norg")) 91 | .unwrap_or_else(|| PathBuf::from("./norg")); 92 | 93 | let scripts_dir = norg_base_dir.join("scripts"); 94 | 95 | // Copy default scripts before creating config 96 | Self::copy_default_scripts(&scripts_dir)?; 97 | 98 | let config_path = config_dir.join("config.toml"); 99 | debug!("Config file path: {:?}", config_path); 100 | 101 | if !config_path.exists() { 102 | debug!("Creating default config file"); 103 | let norg_base_dir = dirs::document_dir() 104 | .map(|d| d.join("norg")) 105 | .unwrap_or_else(|| PathBuf::from("./norg")); 106 | 107 | let default_settings = Settings { 108 | file_type: "md".to_string(), 109 | timestamps: true, 110 | note_dir: 
norg_base_dir.join("notes").to_string_lossy().into_owned(), 111 | scripts_dir: norg_base_dir.join("scripts").to_string_lossy().into_owned(), 112 | obsidian_vault_path: Some( 113 | dirs::home_dir() 114 | .map(|h| { 115 | h.join("Library/Mobile Documents/iCloud~md~obsidian/Documents/Obsidian") 116 | }) 117 | .unwrap_or_default() 118 | .to_string_lossy() 119 | .into_owned(), 120 | ), 121 | enabled_observers: vec![ 122 | "timestamp".to_string(), 123 | "sqlite".to_string(), 124 | "tag_index".to_string(), 125 | "toc".to_string(), 126 | ], 127 | similar_notes: SimilarNotesConfig { 128 | excluded_notes: Some(vec![ 129 | "_kanban".to_string(), 130 | "_tag_index".to_string(), 131 | "project".to_string(), 132 | ]), 133 | excluded_from_references: Some(vec![ 134 | "_tag_index".to_string(), 135 | "_kanban".to_string(), 136 | ]), 137 | }, 138 | }; 139 | 140 | let config_str = toml::to_string_pretty(&default_settings).map_err(|e| { 141 | ConfigError::NotFound(format!("Failed to serialize default config: {}", e)) 142 | })?; 143 | 144 | fs::write(&config_path, config_str).map_err(|e| { 145 | ConfigError::NotFound(format!("Failed to write default config: {}", e)) 146 | })?; 147 | 148 | debug!("Created default config at {:?}", config_path); 149 | } 150 | 151 | Ok(config_path) 152 | } 153 | 154 | fn ensure_directories_exist(settings: &Settings) { 155 | if let Err(e) = fs::create_dir_all(&settings.note_dir) { 156 | error!("Failed to create note directory: {}", e); 157 | panic!("Failed to create note directory: {}", e); 158 | } 159 | 160 | if let Err(e) = fs::create_dir_all(&settings.scripts_dir) { 161 | error!("Failed to create scripts directory: {}", e); 162 | panic!("Failed to create scripts directory: {}", e); 163 | } 164 | } 165 | 166 | fn copy_default_scripts(target_dir: &PathBuf) -> Result<(), ConfigError> { 167 | fs::create_dir_all(target_dir).map_err(|e| { 168 | ConfigError::NotFound(format!("Failed to create scripts directory: {}", e)) 169 | })?; 170 | 171 | for file in 
DefaultScripts::iter() { 172 | let file_path = PathBuf::from(file.as_ref()); 173 | let script_path = target_dir.join(&file_path); 174 | 175 | // Create parent directories if they don't exist 176 | if let Some(parent) = script_path.parent() { 177 | fs::create_dir_all(parent).map_err(|e| { 178 | ConfigError::NotFound(format!( 179 | "Failed to create directory {}: {}", 180 | parent.display(), 181 | e 182 | )) 183 | })?; 184 | } 185 | 186 | if !script_path.exists() { 187 | if let Some(content) = DefaultScripts::get(&file) { 188 | fs::write(&script_path, content.data).map_err(|e| { 189 | ConfigError::NotFound(format!("Failed to write script {}: {}", file, e)) 190 | })?; 191 | 192 | #[cfg(unix)] 193 | { 194 | use std::os::unix::fs::PermissionsExt; 195 | fs::set_permissions(&script_path, fs::Permissions::from_mode(0o755)) 196 | .map_err(|e| { 197 | ConfigError::NotFound(format!( 198 | "Failed to make script {} executable: {}", 199 | file, e 200 | )) 201 | })?; 202 | } 203 | 204 | debug!("Created script {} at {:?}", file, script_path); 205 | } 206 | } 207 | } 208 | 209 | Ok(()) 210 | } 211 | 212 | pub fn get_data_dir() -> PathBuf { 213 | ProjectDirs::from("", "norg", "norg") 214 | .map(|proj_dirs| proj_dirs.data_dir().to_path_buf()) 215 | .unwrap_or_else(|| PathBuf::from("./data")) 216 | } 217 | } 218 | 219 | fn trace_settings(settings: &Settings) { 220 | debug!("Loaded settings:"); 221 | debug!(" File type: {}", settings.file_type); 222 | debug!(" Timestamps enabled: {}", settings.timestamps); 223 | debug!(" Note directory: {}", settings.note_dir); 224 | debug!(" Scripts directory: {}", settings.scripts_dir); 225 | 226 | if let Some(ref vault_path) = settings.obsidian_vault_path { 227 | debug!(" Obsidian vault path: {}", vault_path); 228 | } else { 229 | debug!(" No Obsidian vault path configured"); 230 | } 231 | 232 | debug!(" Enabled observers: {:?}", settings.enabled_observers); 233 | 234 | if let Some(ref excluded) = settings.similar_notes.excluded_notes { 235 | 
import json
import re
from datetime import datetime
from pathlib import Path
import os
import sys
from typing import Dict, List, Any, Optional
from logging_utils import log_debug, log_info, log_error, log_warn, log_trace

# Kanban workflow states, in column/display order. Tags in notes are
# spelled '#planned', '#todo', '#doing', '#done'.
KANBAN_STATES = ['planned', 'todo', 'doing', 'done']

def get_app_data_dir(event_json: str) -> Path:
    """Return the application data directory.

    Prefers the ``data_dir`` field carried inside the event payload;
    otherwise falls back to the platform-specific default location,
    and finally to ``./data``.
    """
    try:
        event = json.loads(event_json)
        if isinstance(event, dict):
            event_type = event.get("Created") or event.get("Updated") or event.get("Synced")
            if event_type and "data_dir" in event_type:
                return Path(event_type["data_dir"])
    except Exception as e:
        log_error("Failed to get data dir from event: {}", str(e))

    # Fallback to default paths
    if sys.platform == 'darwin':  # macOS
        return Path.home() / 'Library' / 'Application Support' / 'norg' / 'norg'
    elif sys.platform == 'win32':  # Windows
        app_data = os.getenv('APPDATA')
        if app_data:
            return Path(app_data) / 'norg' / 'norg'
    else:  # Linux and others
        return Path.home() / '.config' / 'norg' / 'norg'
    return Path('./data')  # Final fallback (e.g. Windows without APPDATA set)

def get_cache_file(event_json: str) -> Path:
    """Return the path to the kanban cache file, creating the cache dir."""
    cache_dir = get_app_data_dir(event_json) / 'cache'
    cache_dir.mkdir(parents=True, exist_ok=True)
    return cache_dir / 'kanban_cache.json'

def get_kanban_file(note_dir: str) -> Path:
    """Return the path of the generated kanban board note inside *note_dir*."""
    return Path(note_dir) / '_kanban.md'

def load_tasks_cache(event_json: str) -> Dict[str, Any]:
    """Load the per-note task cache from disk.

    Returns an empty dict when the cache file is missing or unreadable,
    so a corrupt cache never blocks board regeneration.
    """
    cache_file = get_cache_file(event_json)
    log_debug("Loading tasks cache from: {}", cache_file)
    if cache_file.exists():
        try:
            with open(cache_file, 'r') as f:
                cache = json.load(f)
            log_debug("Loaded {} notes from cache", len(cache))
            return cache
        except Exception as e:
            log_error("Failed to load tasks cache: {}", str(e))
            return {}
    log_debug("No existing cache found")
    return {}

def save_tasks_cache(cache: Dict[str, Any], event_json: str) -> None:
    """Persist the per-note task cache as pretty-printed JSON (best effort)."""
    cache_file = get_cache_file(event_json)
    log_debug("Saving tasks cache with {} notes", len(cache))
    try:
        with open(cache_file, 'w') as f:
            json.dump(cache, f, indent=2)
        log_debug("Cache saved successfully")
    except Exception as e:
        log_error("Failed to save tasks cache: {}", str(e))

def get_context_for_tag(content: str, tag_position: int) -> str:
    """Extract the context in parentheses after the tag.

    Looks within the 200 characters following *tag_position* for the
    pattern ``#tag (context)`` and returns the stripped context, or ""
    when none is found.
    """
    text_after = content[tag_position:tag_position + 200]
    match = re.search(r'#\w+\s*\((.*?)\)', text_after)
    if match:
        context = match.group(1).strip()
        log_trace("Found task context: {}", context)
        return context

    log_trace("No context found for tag at position {}", tag_position)
    return ""

def extract_tasks(content: str, note_title: str, note_path: str) -> Dict[str, List[Dict[str, Any]]]:
    """Extract tasks and their context based on kanban tags.

    Returns a mapping of state -> list of task dicts with 'context',
    'links' (deduplicated (title, path) pairs found near the tag) and
    'source' (title/path of the originating note). Tasks without a
    parenthesized context are ignored.
    """
    log_debug("Extracting tasks from note: {}", note_title)
    tasks = {state: [] for state in KANBAN_STATES}

    # Never harvest tasks from the board itself, or we would feed the
    # board's own cells back into the cache on the next sync.
    if note_title == "_kanban" or "📋 Kanban Board" in content:
        log_debug("Skipping kanban board itself")
        return tasks

    # Strip the auto-generated References section so its links are not
    # misattributed to nearby tags.
    content = re.sub(r'\n## References\n.*$', '', content, flags=re.DOTALL)
    relative_path = f"./{note_title}.md"

    total_tasks = 0
    for state in KANBAN_STATES:
        tag = f'#{state}'
        for match in re.finditer(rf'{tag}\b', content, re.IGNORECASE):
            tag_pos = match.start()
            context = get_context_for_tag(content, tag_pos)

            if not context:
                continue

            # Collect markdown links from the surrounding paragraph only.
            surrounding_text = content[max(0, tag_pos-200):min(len(content), tag_pos+200)]
            paragraph_end = surrounding_text.find('\n\n')
            if paragraph_end != -1:
                surrounding_text = surrounding_text[:paragraph_end]

            links = re.findall(r'\[([^\]]+)\]\(([^\)]+)\)', surrounding_text)

            seen_links = set()
            processed_links = []
            for link_title, link_path in links:
                # Skip self-referential kanban links and References headers.
                if ('_kanban' not in link_title.lower() and
                        '_kanban' not in link_path.lower() and
                        'kanban board' not in link_title.lower() and
                        'References' not in link_title):
                    link_title = link_title.strip()
                    link_path = f"./{Path(link_path).stem}.md"
                    link_key = (link_title, link_path)
                    if link_key not in seen_links:
                        processed_links.append(link_key)
                        seen_links.add(link_key)

            tasks[state].append({
                'context': context,
                'links': processed_links,
                'source': {
                    'title': note_title,
                    'path': relative_path
                }
            })
            total_tasks += 1
            log_trace("Added task in state '{}': {}", state, context)

    log_debug("Found {} tasks in note '{}'", total_tasks, note_title)
    return tasks

def generate_kanban_board(tasks: Dict[str, List[Dict[str, Any]]]) -> str:
    """Generate a markdown kanban board.

    Each task occupies its own table row, placed in the column matching
    its state. Cell content is joined with ``<br>`` because markdown
    table cells cannot contain raw newlines.
    """
    log_debug("Generating kanban board")
    current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    board = [
        "---",
        "kanban: true",
        f"last_updated: {current_time}",
        "---",
        "",
        "# 📋 Kanban Board",
        f"\nLast updated: {current_time}\n",
        "| 📅 Planned | ✅ Todo | 🏃 Doing | ✨ Done |",
        "|------------|---------|----------|---------|",
    ]

    total_tasks = 0
    for state in KANBAN_STATES:
        for task in tasks[state]:
            row = [" "] * len(KANBAN_STATES)

            cell_parts = [task['context']]
            cell_parts.append(f"📎 [View in {task['source']['title']}]({task['source']['path']})")

            if task['links']:
                cell_parts.append("🔗 Related:")
                for title, url in task['links']:
                    cell_parts.append(f"- [{title}]({url})")

            # '<br>' keeps multi-line cell content on one table row; a raw
            # newline would terminate the markdown row prematurely.
            row[KANBAN_STATES.index(state)] = "<br>".join(cell_parts)
            board.append(f"| {' | '.join(row)} |")
            total_tasks += 1

    if total_tasks == 0:
        # Keep the table syntactically valid when there are no tasks;
        # with tasks present this placeholder row would be a stray blank row.
        board.append("| | | | |")

    log_debug("Generated board with {} total tasks", total_tasks)
    return "\n".join(board)

def process_event(event_json: str) -> Optional[str]:
    """Handle a note Created/Updated/Synced event.

    Updates the per-note task cache, regenerates the `_kanban.md` board
    from all cached notes, and returns a JSON metadata string for the
    observer pipeline (or None when the event is irrelevant or fails).
    """
    try:
        event = json.loads(event_json)
        log_info("📋 Processing event for kanban board")

        if isinstance(event, dict):
            event_type = event.get("Created") or event.get("Updated") or event.get("Synced")
            if event_type and "content" in event_type:
                content = event_type["content"]
                title = event_type["title"]
                file_path = event_type.get("file_path", f"/notes/{title}.md")
                note_dir = str(Path(file_path).parent)

                log_debug("Processing note: {}", title)
                tasks_cache = load_tasks_cache(event_json)
                new_tasks = extract_tasks(content, title, file_path)

                task_count = sum(len(items) for items in new_tasks.values())
                if task_count > 0:
                    log_debug("Updating cache with {} tasks from '{}'", task_count, title)
                    tasks_cache[title] = new_tasks
                else:
                    # A note whose tasks were all removed must drop out of
                    # the board as well.
                    if title in tasks_cache:
                        log_debug("Removing '{}' from cache (no tasks)", title)
                        tasks_cache.pop(title, None)

                save_tasks_cache(tasks_cache, event_json)

                # Merge tasks across every cached note, per state.
                combined_tasks = {state: [] for state in KANBAN_STATES}
                for note_tasks in tasks_cache.values():
                    for state in KANBAN_STATES:
                        combined_tasks[state].extend(note_tasks[state])

                kanban_file = get_kanban_file(note_dir)
                board_content = generate_kanban_board(combined_tasks)
                with open(kanban_file, 'w', encoding='utf-8') as f:
                    f.write(board_content)

                total_tasks = sum(len(items) for items in combined_tasks.values())
                log_info("✨ Updated kanban board with {} tasks from {} notes",
                         total_tasks, len(tasks_cache))

                return json.dumps({
                    "metadata": {
                        "kanban_tasks": str(total_tasks),
                        "kanban_updated": datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                    }
                })

        return None

    except Exception as e:
        log_error("Failed to process kanban board: {}", str(e))
        return None
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /src/settings_dialog.rs: -------------------------------------------------------------------------------- 1 | use crate::settings::Settings; 2 | use iced::widget::{button, checkbox, column, container, row, text, text_input, Space}; 3 | use iced::{ 4 | executor, theme, window, Alignment, Application, Command, Element, Length, 5 | Settings as IcedSettings, Theme, 6 | }; 7 | use rfd::FileDialog; 8 | use std::sync::Arc; 9 | use tokio::sync::Mutex; 10 | use tracing::error; 11 | 12 | pub struct SettingsDialog { 13 | settings: Arc>, 14 | temp_settings: Settings, 15 | save_message: Option<(String, bool)>, // (message, is_success) 16 | } 17 | 18 | #[derive(Debug, Clone)] 19 | pub enum Message { 20 | FileTypeChanged(String), 21 | TimestampsToggled(bool), 22 | SelectNoteDir, 23 | SelectScriptsDir, 24 | SelectObsidianVault, 25 | ObserverToggled(String, bool), 26 | SaveSettings, 27 | DismissMessage, 28 | } 29 | 30 | impl Application for SettingsDialog { 31 | type Message = Message; 32 | type Theme = Theme; 33 | type Executor = executor::Default; 34 | type 
Flags = Settings; 35 | 36 | fn new(settings: Settings) -> (Self, Command) { 37 | ( 38 | Self { 39 | settings: Arc::new(Mutex::new(settings.clone())), 40 | temp_settings: settings, 41 | save_message: None, 42 | }, 43 | Command::none(), 44 | ) 45 | } 46 | 47 | fn title(&self) -> String { 48 | String::from("Note CLI Settings") 49 | } 50 | 51 | fn update(&mut self, message: Message) -> Command { 52 | match message { 53 | Message::FileTypeChanged(value) => { 54 | self.temp_settings.file_type = value; 55 | } 56 | Message::TimestampsToggled(value) => { 57 | self.temp_settings.timestamps = value; 58 | } 59 | Message::SelectNoteDir => { 60 | if let Some(path) = FileDialog::new().pick_folder() { 61 | self.temp_settings.note_dir = path.to_string_lossy().to_string(); 62 | } 63 | } 64 | Message::SelectScriptsDir => { 65 | if let Some(path) = FileDialog::new().pick_folder() { 66 | self.temp_settings.scripts_dir = path.to_string_lossy().to_string(); 67 | } 68 | } 69 | Message::SelectObsidianVault => { 70 | if let Some(path) = FileDialog::new().pick_folder() { 71 | self.temp_settings.obsidian_vault_path = 72 | Some(path.to_string_lossy().to_string()); 73 | } 74 | } 75 | Message::ObserverToggled(observer, enabled) => { 76 | if enabled { 77 | self.temp_settings.enabled_observers.push(observer); 78 | } else { 79 | self.temp_settings 80 | .enabled_observers 81 | .retain(|x| x != &observer); 82 | } 83 | } 84 | Message::SaveSettings => match self.save_settings() { 85 | Ok(_) => { 86 | self.save_message = Some(("Settings saved successfully!".into(), true)); 87 | } 88 | Err(e) => { 89 | self.save_message = Some((format!("Error saving settings: {}", e), false)); 90 | } 91 | }, 92 | Message::DismissMessage => { 93 | self.save_message = None; 94 | } 95 | } 96 | Command::none() 97 | } 98 | 99 | fn view(&self) -> Element { 100 | let title = text("Note CLI Settings") 101 | .size(24) 102 | .style(theme::Text::Default); 103 | 104 | let section_title_style = |title: &str| 
text(title).size(16).style(theme::Text::Default); 105 | 106 | let label_style = |label: &str| { 107 | text(label) 108 | .style(theme::Text::Default) 109 | .width(Length::Fixed(120.0)) 110 | }; 111 | 112 | let mut content = column![ 113 | title, 114 | Space::with_height(20), 115 | // Basic Settings 116 | section_title_style("Basic Settings"), 117 | container( 118 | column![ 119 | row![ 120 | label_style("File Type"), 121 | text_input("md", &self.temp_settings.file_type) 122 | .padding(6) 123 | .on_input(Message::FileTypeChanged) 124 | ] 125 | .spacing(10) 126 | .align_items(Alignment::Center), 127 | Space::with_height(10), 128 | checkbox( 129 | "Enable Timestamps", 130 | self.temp_settings.timestamps, 131 | Message::TimestampsToggled 132 | ) 133 | .text_size(14), 134 | ] 135 | .spacing(5) 136 | ) 137 | .padding(10), 138 | Space::with_height(20), 139 | // Directory Settings 140 | section_title_style("Directories"), 141 | container( 142 | column![ 143 | row![ 144 | label_style("Note Directory"), 145 | text(&self.temp_settings.note_dir).width(Length::Fill), 146 | button(text("Choose").size(14)) 147 | .padding([5, 10]) 148 | .on_press(Message::SelectNoteDir) 149 | ] 150 | .spacing(10) 151 | .align_items(Alignment::Center), 152 | Space::with_height(10), 153 | row![ 154 | label_style("Scripts Directory"), 155 | text(&self.temp_settings.scripts_dir).width(Length::Fill), 156 | button(text("Choose").size(14)) 157 | .padding([5, 10]) 158 | .on_press(Message::SelectScriptsDir) 159 | ] 160 | .spacing(10) 161 | .align_items(Alignment::Center), 162 | ] 163 | .spacing(5) 164 | ) 165 | .padding(10), 166 | Space::with_height(20), 167 | // Obsidian Settings 168 | section_title_style("Obsidian Integration"), 169 | container( 170 | row![ 171 | label_style("Vault Path"), 172 | text( 173 | self.temp_settings 174 | .obsidian_vault_path 175 | .as_deref() 176 | .unwrap_or("") 177 | ) 178 | .width(Length::Fill), 179 | button(text("Choose").size(14)) 180 | .padding([5, 10]) 181 | 
.on_press(Message::SelectObsidianVault) 182 | ] 183 | .spacing(10) 184 | .align_items(Alignment::Center) 185 | ) 186 | .padding(10), 187 | Space::with_height(20), 188 | // Observers 189 | section_title_style("Enabled Observers"), 190 | container( 191 | column( 192 | vec!["timestamp", "sqlite", "tag_index", "toc"] 193 | .into_iter() 194 | .map(|observer| { 195 | checkbox( 196 | observer, 197 | self.temp_settings 198 | .enabled_observers 199 | .contains(&observer.to_string()), 200 | move |checked| { 201 | Message::ObserverToggled(observer.to_string(), checked) 202 | }, 203 | ) 204 | .text_size(14) 205 | .spacing(10) 206 | .into() 207 | }) 208 | .collect() 209 | ) 210 | .spacing(10) 211 | ) 212 | .padding(10), 213 | Space::with_height(20), 214 | // Save Button 215 | button( 216 | row![text("Save Settings").size(14),] 217 | .spacing(10) 218 | .align_items(Alignment::Center) 219 | ) 220 | .padding([8, 16]) 221 | .style(theme::Button::Primary) 222 | .on_press(Message::SaveSettings), 223 | ] 224 | .spacing(5) 225 | .padding(20); 226 | 227 | // Add save message if present 228 | if let Some((message, is_success)) = &self.save_message { 229 | content = content.push(Space::with_height(10)).push( 230 | container( 231 | row![ 232 | text(if *is_success { "✅ " } else { "❌ " }), 233 | text(message), 234 | Space::with_width(Length::Fill), 235 | button(text("×").size(16)) 236 | .style(theme::Button::Text) 237 | .on_press(Message::DismissMessage) 238 | ] 239 | .spacing(10) 240 | .align_items(Alignment::Center), 241 | ) 242 | .padding(10) 243 | .style(if *is_success { 244 | theme::Container::Custom(Box::new(SuccessStyle)) 245 | } else { 246 | theme::Container::Custom(Box::new(ErrorStyle)) 247 | }), 248 | ); 249 | } 250 | 251 | container(content) 252 | .width(Length::Fill) 253 | .height(Length::Fill) 254 | .center_x() 255 | .into() 256 | } 257 | } 258 | 259 | // Add custom styles for success/error messages 260 | struct SuccessStyle; 261 | struct ErrorStyle; 262 | 263 | impl 
container::StyleSheet for SuccessStyle { 264 | type Style = Theme; 265 | 266 | fn appearance(&self, _style: &Self::Style) -> container::Appearance { 267 | container::Appearance { 268 | background: Some(iced::Color::from_rgb(0.0, 0.8, 0.0).into()), 269 | text_color: Some(iced::Color::WHITE), 270 | ..Default::default() 271 | } 272 | } 273 | } 274 | 275 | impl container::StyleSheet for ErrorStyle { 276 | type Style = Theme; 277 | 278 | fn appearance(&self, _style: &Self::Style) -> container::Appearance { 279 | container::Appearance { 280 | background: Some(iced::Color::from_rgb(0.8, 0.0, 0.0).into()), 281 | text_color: Some(iced::Color::WHITE), 282 | ..Default::default() 283 | } 284 | } 285 | } 286 | 287 | impl SettingsDialog { 288 | pub fn show(settings: Settings) { 289 | let iced_settings = IcedSettings { 290 | flags: settings, 291 | window: window::Settings { 292 | size: (600, 800), 293 | position: window::Position::Centered, 294 | resizable: false, 295 | decorations: true, 296 | transparent: false, 297 | ..Default::default() 298 | }, 299 | default_text_size: default_text_size(), 300 | ..Default::default() 301 | }; 302 | 303 | // Run the application 304 | if let Err(e) = ::run(iced_settings) { 305 | error!("Failed to start settings window: {}", e); 306 | rfd::MessageDialog::new() 307 | .set_title("Error") 308 | .set_description(&format!("Failed to start settings window: {}", e)) 309 | .set_level(rfd::MessageLevel::Error) 310 | .show(); 311 | } 312 | } 313 | 314 | fn save_settings(&self) -> Result<(), Box> { 315 | // Get the config file path 316 | let config_path = 317 | if let Some(proj_dirs) = directories::ProjectDirs::from("", "norg", "norg") { 318 | proj_dirs.config_dir().join("config.toml") 319 | } else { 320 | return Err("Could not determine config directory".into()); 321 | }; 322 | 323 | // Serialize and save the settings 324 | let config_str = toml::to_string_pretty(&self.temp_settings)?; 325 | std::fs::write(config_path, config_str)?; 326 | 327 | // Update 
the shared settings 328 | let mut settings = self.settings.blocking_lock(); 329 | *settings = self.temp_settings.clone(); 330 | 331 | Ok(()) 332 | } 333 | } 334 | 335 | #[cfg(target_os = "macos")] 336 | fn default_text_size() -> f32 { 337 | 13.0 338 | } 339 | 340 | #[cfg(not(target_os = "macos"))] 341 | fn default_text_size() -> f32 { 342 | 14.0 343 | } 344 | -------------------------------------------------------------------------------- /src/bin/note_tray.rs: -------------------------------------------------------------------------------- 1 | use image::io::Reader as ImageReader; 2 | use noorg::{ 3 | cli::Command, handlers::handle_command, logging::init_logging, 4 | observer_registry::ObserverRegistry, script_loader::ScriptLoader, settings::Settings, 5 | window_manager, 6 | }; 7 | use std::io::Cursor; 8 | use std::path::PathBuf; 9 | use std::sync::atomic::{AtomicBool, Ordering}; 10 | use std::{io, sync::Arc}; 11 | use tao::event_loop::{ControlFlow, EventLoop}; 12 | use tokio::sync::mpsc; 13 | use tokio::sync::Mutex; 14 | use tracing::{error, info}; 15 | use tray_icon::{ 16 | menu::{Menu, MenuEvent, MenuItem}, 17 | TrayIconBuilder, 18 | }; 19 | 20 | #[derive(Debug)] 21 | enum TrayCommand { 22 | ToggleWatch, 23 | AddNote, 24 | Quit, 25 | UpdateWatchStatus(bool), 26 | OpenSettings, 27 | ShowInfo, 28 | } 29 | 30 | struct MenuItems { 31 | watch_item: MenuItem, 32 | add_note_item: MenuItem, 33 | settings_item: MenuItem, 34 | info_item: MenuItem, 35 | quit_item: MenuItem, 36 | } 37 | 38 | impl MenuItems { 39 | fn update_watch_status(&self, is_watching: bool) { 40 | self.watch_item.set_text(if is_watching { 41 | "🟢 Stop Watching" 42 | } else { 43 | "🔴 Start Watching" 44 | }); 45 | } 46 | } 47 | 48 | fn show_error(title: &str, message: &str) { 49 | rfd::MessageDialog::new() 50 | .set_title(title) 51 | .set_description(message) 52 | .set_level(rfd::MessageLevel::Error) 53 | .show(); 54 | } 55 | 56 | fn show_input(title: &str, message: &str) -> Option { 57 | 
rfd::FileDialog::new() 58 | .set_title(title) 59 | .set_file_name(message) 60 | .save_file() 61 | .map(|path| { 62 | path.file_stem() 63 | .unwrap_or_default() 64 | .to_string_lossy() 65 | .to_string() 66 | }) 67 | } 68 | 69 | #[cfg(not(target_os = "windows"))] 70 | const BASE_PATH: &str = "/usr/local/share/noorg"; 71 | #[cfg(target_os = "windows")] 72 | const BASE_PATH: &str = "C:\\Program Files\\noorg"; 73 | 74 | fn get_base_path() -> PathBuf { 75 | PathBuf::from(BASE_PATH) 76 | } 77 | 78 | fn get_bin_path() -> PathBuf { 79 | let mut path = get_base_path(); 80 | path.push("bin"); 81 | path 82 | } 83 | 84 | fn get_cli_path() -> PathBuf { 85 | let mut path = get_bin_path(); 86 | #[cfg(target_os = "windows")] 87 | path.push("note_cli.exe"); 88 | #[cfg(not(target_os = "windows"))] 89 | path.push("note_cli"); 90 | path 91 | } 92 | 93 | #[tokio::main] 94 | async fn main() -> io::Result<()> { 95 | init_logging(true); 96 | 97 | // Create channels for menu events 98 | let (tx_watch, mut rx) = mpsc::unbounded_channel(); 99 | let tx_add = tx_watch.clone(); 100 | let tx_quit = tx_watch.clone(); 101 | 102 | let event_loop = EventLoop::new(); 103 | let menu = Menu::new(); 104 | let menu_items = MenuItems { 105 | watch_item: MenuItem::new("🔴 Start Watching", true, None), 106 | add_note_item: MenuItem::new("Add Note", true, None), 107 | settings_item: MenuItem::new("⚙️ Settings", true, None), 108 | info_item: MenuItem::new("ℹ️ Show Info", true, None), 109 | quit_item: MenuItem::new("Quit", true, None), 110 | }; 111 | 112 | // Set up menu event handlers 113 | let watch_id = menu_items.watch_item.id().clone(); 114 | let add_id = menu_items.add_note_item.id().clone(); 115 | let settings_id = menu_items.settings_item.id().clone(); 116 | let info_id = menu_items.info_item.id().clone(); 117 | let quit_id = menu_items.quit_item.id().clone(); 118 | 119 | menu.append(&menu_items.watch_item).unwrap(); 120 | menu.append(&menu_items.add_note_item).unwrap(); 121 | 
menu.append(&menu_items.settings_item).unwrap(); 122 | menu.append(&menu_items.info_item).unwrap(); 123 | menu.append(&menu_items.quit_item).unwrap(); 124 | 125 | // Register menu event handlers 126 | let tx_watch_clone = tx_watch.clone(); 127 | let tx_settings = tx_watch.clone(); 128 | let tx_info = tx_watch.clone(); 129 | 130 | MenuEvent::set_event_handler(Some(move |event: MenuEvent| { 131 | let menu_id = event.id(); 132 | let _ = if *menu_id == watch_id { 133 | tx_watch_clone.send(TrayCommand::ToggleWatch) 134 | } else if *menu_id == add_id { 135 | tx_add.send(TrayCommand::AddNote) 136 | } else if *menu_id == settings_id { 137 | tx_settings.send(TrayCommand::OpenSettings) 138 | } else if *menu_id == info_id { 139 | tx_info.send(TrayCommand::ShowInfo) 140 | } else if *menu_id == quit_id { 141 | tx_quit.send(TrayCommand::Quit) 142 | } else { 143 | Ok(()) 144 | }; 145 | })); 146 | 147 | // Create tray icon 148 | let icon = include_bytes!("../../resources/icon.png"); 149 | let image = ImageReader::new(Cursor::new(icon)) 150 | .with_guessed_format() 151 | .expect("Failed to guess image format") 152 | .decode() 153 | .expect("Failed to decode image"); 154 | 155 | let rgba = image.into_rgba8(); 156 | let (width, height) = (rgba.width() as u32, rgba.height() as u32); 157 | let rgba = rgba.into_raw(); 158 | 159 | let icon = match tray_icon::Icon::from_rgba(rgba, width, height) { 160 | Ok(icon) => icon, 161 | Err(err) => { 162 | error!("Failed to create tray icon: {}", err); 163 | return Err(io::Error::new(io::ErrorKind::Other, err)); 164 | } 165 | }; 166 | 167 | let _tray = TrayIconBuilder::new() 168 | .with_icon(icon) 169 | .with_menu(Box::new(menu)) 170 | .with_tooltip("Note CLI") 171 | .build() 172 | .unwrap(); 173 | 174 | // Create settings wrapped in Arc 175 | let settings = Arc::new(Mutex::new(Settings::new())); 176 | 177 | // Create script loader with settings 178 | let settings_guard = settings.lock().await; 179 | let script_loader = 180 | 
ScriptLoader::new(settings_guard.scripts_dir.clone(), settings_guard.clone()); 181 | 182 | // Load observers 183 | let observers = script_loader.load_observers(&settings_guard.enabled_observers)?; 184 | drop(settings_guard); // Release the lock 185 | 186 | // Create observer registry 187 | let observer_registry = Arc::new(ObserverRegistry::new()); 188 | 189 | // Load and register observers 190 | for observer in observers { 191 | observer_registry.register(observer).await; 192 | } 193 | 194 | // Command handler 195 | let settings_clone = Arc::clone(&settings); 196 | let observer_registry_clone = Arc::clone(&observer_registry); 197 | let is_watching = Arc::new(AtomicBool::new(false)); 198 | let stop_signal = Arc::new(AtomicBool::new(false)); 199 | 200 | event_loop.run(move |_event, _event_loop, control_flow| { 201 | *control_flow = ControlFlow::Wait; 202 | 203 | while let Ok(cmd) = rx.try_recv() { 204 | match cmd { 205 | TrayCommand::ToggleWatch => { 206 | if !is_watching.load(Ordering::SeqCst) { 207 | info!("Starting file watcher..."); 208 | 209 | // Get the path to the note_cli binary using our new path functions 210 | let note_cli = get_cli_path(); 211 | 212 | info!("Using note_cli binary at: {:?}", note_cli); 213 | if !note_cli.exists() { 214 | error!("note_cli binary not found at {:?}", note_cli); 215 | show_error( 216 | "Failed to start watcher", 217 | &format!("note_cli binary not found at {:?}", note_cli), 218 | ); 219 | return; 220 | } 221 | 222 | let settings = settings_clone.clone(); 223 | let observer_registry = Arc::clone(&observer_registry_clone); 224 | let is_watching_clone = Arc::clone(&is_watching); 225 | let tx = tx_watch.clone(); 226 | let stop_signal = Arc::clone(&stop_signal); 227 | 228 | stop_signal.store(false, Ordering::SeqCst); 229 | 230 | std::thread::spawn(move || { 231 | if let Err(e) = 232 | tokio::runtime::Runtime::new().unwrap().block_on(async { 233 | let settings = settings.lock().await; 234 | handle_command( 235 | Command::Watch, 236 
| settings.clone(), 237 | observer_registry, 238 | Some(Arc::clone(&stop_signal)), 239 | ) 240 | .await 241 | }) 242 | { 243 | error!("Failed to start watcher: {}", e); 244 | show_error("Failed to start watcher", &e.to_string()); 245 | is_watching_clone.store(false, Ordering::SeqCst); 246 | let _ = tx.send(TrayCommand::UpdateWatchStatus(false)); 247 | return; 248 | } 249 | is_watching_clone.store(false, Ordering::SeqCst); 250 | let _ = tx.send(TrayCommand::UpdateWatchStatus(false)); 251 | }); 252 | 253 | // Update UI immediately when starting 254 | is_watching.store(true, Ordering::SeqCst); 255 | menu_items.update_watch_status(true); 256 | } else { 257 | info!("Stopping file watcher..."); 258 | stop_signal.store(true, Ordering::SeqCst); 259 | is_watching.store(false, Ordering::SeqCst); 260 | menu_items.update_watch_status(false); 261 | } 262 | } 263 | TrayCommand::UpdateWatchStatus(watching) => { 264 | menu_items.update_watch_status(watching); 265 | } 266 | TrayCommand::AddNote => { 267 | if let Some(title) = show_input("New Note", "Enter note title") { 268 | let settings = settings_clone.clone(); 269 | let observer_registry = Arc::clone(&observer_registry_clone); 270 | let title_clone = title.clone(); 271 | 272 | std::thread::spawn(move || { 273 | if let Err(e) = 274 | tokio::runtime::Runtime::new().unwrap().block_on(async { 275 | let settings = settings.lock().await; 276 | handle_command( 277 | Command::Add { 278 | title: title_clone, 279 | body: None, 280 | frontmatter: vec![], 281 | }, 282 | settings.clone(), 283 | observer_registry, 284 | None, 285 | ) 286 | .await 287 | }) 288 | { 289 | error!("Failed to create note: {}", e); 290 | show_error("Failed to create note", &e.to_string()); 291 | } 292 | }); 293 | } 294 | } 295 | TrayCommand::Quit => { 296 | info!("Quitting..."); 297 | std::process::exit(0); 298 | } 299 | TrayCommand::OpenSettings => { 300 | let settings = Arc::clone(&settings_clone); 301 | window_manager::open_settings(settings); 302 | } 303 | 
TrayCommand::ShowInfo => { 304 | let settings = settings_clone.clone(); 305 | std::thread::spawn(move || { 306 | let rt = tokio::runtime::Runtime::new().unwrap(); 307 | let settings_guard = rt.block_on(async { 308 | let settings = settings.lock().await; 309 | settings.clone() 310 | }); 311 | 312 | let message = format!( 313 | "Watched Directory: {}\n\ 314 | File Type: {}\n\ 315 | Active Observers: {}", 316 | settings_guard.note_dir, 317 | settings_guard.file_type, 318 | settings_guard.enabled_observers.join(", ") 319 | ); 320 | 321 | rfd::MessageDialog::new() 322 | .set_title("Note Watcher Info") 323 | .set_description(&message) 324 | .set_level(rfd::MessageLevel::Info) 325 | .show(); 326 | }); 327 | } 328 | } 329 | } 330 | }); 331 | } 332 | -------------------------------------------------------------------------------- /src/observers/sqlite_store.rs: -------------------------------------------------------------------------------- 1 | use crate::event::*; 2 | use crate::settings::Settings; 3 | use rusqlite::{Connection, Result as SqlResult}; 4 | use std::any::Any; 5 | use std::collections::HashMap; 6 | use std::future::Future; 7 | use std::io; 8 | use std::path::Path; 9 | use std::pin::Pin; 10 | use std::sync::Arc; 11 | use tokio::sync::Mutex; 12 | use tracing::{debug, error, info}; 13 | 14 | pub struct SqliteObserver { 15 | conn: Arc>, 16 | settings: Arc, 17 | } 18 | #[derive(Debug)] 19 | pub struct NoteResult { 20 | pub id: i64, 21 | pub title: String, 22 | pub filepath: String, // Changed from Option to String since it's required 23 | } 24 | 25 | #[derive(Debug)] 26 | pub struct QueryResult { 27 | pub columns: Vec, 28 | pub rows: Vec>, 29 | } 30 | 31 | #[derive(Debug)] 32 | struct SqlBlock { 33 | sql: String, 34 | range: (usize, usize), 35 | } 36 | 37 | impl SqliteObserver { 38 | pub fn new(settings: Arc) -> io::Result { 39 | let data_dir = Settings::get_data_dir(); 40 | let sqlite_dir = data_dir.join("sqlite"); 41 | let db_path = 
sqlite_dir.join("frontmatter.db"); 42 | 43 | debug!("Creating SQLite directory at {:?}", sqlite_dir); 44 | std::fs::create_dir_all(&sqlite_dir)?; 45 | 46 | debug!("Initializing SQLite database at {:?}", db_path); 47 | let conn = Connection::open(&db_path).map_err(|e| { 48 | error!("Failed to open SQLite database: {}", e); 49 | io::Error::new(io::ErrorKind::Other, e) 50 | })?; 51 | 52 | debug!("Creating database schema"); 53 | conn.execute( 54 | "CREATE TABLE IF NOT EXISTS notes ( 55 | id INTEGER PRIMARY KEY, 56 | title TEXT UNIQUE NOT NULL, 57 | path TEXT NOT NULL 58 | )", 59 | [], 60 | ) 61 | .map_err(|e| { 62 | error!("Failed to create notes table: {}", e); 63 | io::Error::new(io::ErrorKind::Other, e) 64 | })?; 65 | 66 | conn.execute( 67 | "CREATE TABLE IF NOT EXISTS frontmatter ( 68 | file_id INTEGER, 69 | key TEXT, 70 | value TEXT, 71 | PRIMARY KEY (file_id, key), 72 | FOREIGN KEY (file_id) REFERENCES notes(id) 73 | )", 74 | [], 75 | ) 76 | .map_err(|e| { 77 | error!("Failed to create frontmatter table: {}", e); 78 | io::Error::new(io::ErrorKind::Other, e) 79 | })?; 80 | 81 | info!("✨ SQLite observer initialized successfully"); 82 | Ok(Self { 83 | conn: Arc::new(Mutex::new(conn)), 84 | settings, 85 | }) 86 | } 87 | 88 | async fn store_frontmatter( 89 | &self, 90 | title: &str, 91 | frontmatter: &HashMap, 92 | file_path: String, 93 | ) -> SqlResult<()> { 94 | debug!("Storing frontmatter for note: {}", title); 95 | let conn = self.conn.lock().await; 96 | 97 | conn.execute( 98 | "INSERT OR REPLACE INTO notes (title, path) VALUES (?1, ?2)", 99 | [title, &file_path], 100 | )?; 101 | 102 | let file_id: i64 = 103 | conn.query_row("SELECT id FROM notes WHERE title = ?1", [title], |row| { 104 | row.get(0) 105 | })?; 106 | 107 | debug!("Updating frontmatter for note ID: {}", file_id); 108 | conn.execute("DELETE FROM frontmatter WHERE file_id = ?1", [file_id])?; 109 | 110 | let mut stmt = 111 | conn.prepare("INSERT INTO frontmatter (file_id, key, value) VALUES (?1, ?2, 
?3)")?; 112 | 113 | for (key, value) in frontmatter { 114 | stmt.execute(rusqlite::params![file_id, key.as_str(), value.as_str()])?; 115 | } 116 | 117 | debug!("Successfully stored frontmatter for '{}'", title); 118 | Ok(()) 119 | } 120 | 121 | pub async fn query(&self, sql: &str) -> io::Result { 122 | debug!("Executing SQL query: {}", sql); 123 | let conn = self.conn.lock().await; 124 | let mut stmt = conn.prepare(sql).map_err(|e| { 125 | error!("Failed to prepare SQL statement: {}", e); 126 | io::Error::new(io::ErrorKind::Other, e) 127 | })?; 128 | 129 | let columns: Vec = stmt 130 | .column_names() 131 | .into_iter() 132 | .map(|c| c.to_string()) 133 | .collect(); 134 | 135 | let rows = stmt 136 | .query_map([], |row| { 137 | let mut map = HashMap::new(); 138 | for (i, column) in columns.iter().enumerate() { 139 | let value: String = row.get(i).unwrap_or_else(|_| "".to_string()); 140 | map.insert(column.clone(), value); 141 | } 142 | Ok(map) 143 | }) 144 | .map_err(|e| { 145 | error!("Failed to execute query: {}", e); 146 | io::Error::new(io::ErrorKind::Other, e) 147 | })? 
148 | .collect::, _>>() 149 | .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?; 150 | 151 | debug!("Query returned {} rows", rows.len()); 152 | Ok(QueryResult { columns, rows }) 153 | } 154 | 155 | pub async fn print_all_frontmatter(&self) -> io::Result<()> { 156 | debug!("Retrieving all frontmatter data"); 157 | let conn = self.conn.lock().await; 158 | let sql = " 159 | SELECT n.title, f.key, f.value 160 | FROM notes n 161 | JOIN frontmatter f ON n.id = f.file_id 162 | ORDER BY n.title, f.key"; 163 | 164 | let mut stmt = conn.prepare(sql).unwrap(); 165 | let rows = stmt 166 | .query_map([], |row| { 167 | Ok(( 168 | row.get::<_, String>(0)?, 169 | row.get::<_, String>(1)?, 170 | row.get::<_, String>(2)?, 171 | )) 172 | }) 173 | .unwrap(); 174 | 175 | info!("📊 Current Database Contents:"); 176 | let mut current_title = String::new(); 177 | for row in rows { 178 | if let Ok((title, key, value)) = row { 179 | if title != current_title { 180 | info!("📝 {}", title); 181 | current_title = title; 182 | } 183 | debug!(" {} = {}", key, value); 184 | } 185 | } 186 | 187 | Ok(()) 188 | } 189 | 190 | pub async fn process_sql_blocks(&self, content: &str) -> io::Result { 191 | let sql_blocks = self.extract_sql_blocks(content); 192 | 193 | if sql_blocks.is_empty() { 194 | debug!("No SQL blocks found in content"); 195 | return Ok(content.to_string()); 196 | } 197 | 198 | let mut new_content = content.to_string(); 199 | 200 | debug!("Processing {} SQL blocks", sql_blocks.len()); 201 | 202 | // Process blocks in reverse to maintain correct positions 203 | for block in sql_blocks.into_iter().rev() { 204 | let results = self.query(&block.sql).await?; 205 | 206 | // Build the replacement content 207 | let mut output = String::new(); 208 | output.push_str("```sql\n"); 209 | output.push_str(&block.sql); 210 | output.push_str("\n```\n"); 211 | output.push_str("\n"); 212 | 213 | // Add table header 214 | output.push_str("| "); 215 | output.push_str(&results.columns.join(" | ")); 216 
| output.push_str(" |\n|"); 217 | output.push_str(&vec!["---"; results.columns.len()].join("|")); 218 | output.push_str("|\n"); 219 | 220 | let default_string = String::new(); 221 | // Add table rows 222 | for row in &results.rows { 223 | output.push_str("| "); 224 | let values: Vec = results 225 | .columns 226 | .iter() 227 | .map(|col| { 228 | let val = row.get(col.as_str()).unwrap_or(&default_string); 229 | if col == "path" { 230 | // Extract title from the full path 231 | let path = Path::new(&val); 232 | let title = path.file_stem().and_then(|s| s.to_str()).unwrap_or(""); 233 | 234 | // Create relative path for the link 235 | let relative_path = format!("./{}.{}", title, self.settings.file_type); 236 | 237 | // Format link using relative path 238 | format!("[{}]({})", title, relative_path) 239 | } else { 240 | val.trim().to_string() 241 | } 242 | }) 243 | .collect(); 244 | output.push_str(&values.join(" | ")); 245 | output.push_str(" |\n"); 246 | } 247 | 248 | output.push_str("\n\n"); 249 | 250 | // Check if the block is within the TOC section 251 | let is_in_toc = content[..block.range.0].contains("## Contents"); 252 | 253 | if !is_in_toc { 254 | // Replace the old content with the new only if not in TOC 255 | new_content.replace_range(block.range.0..block.range.1, &output); 256 | } 257 | } 258 | 259 | Ok(new_content) 260 | } 261 | 262 | fn extract_sql_blocks(&self, content: &str) -> Vec { 263 | let mut blocks = Vec::new(); 264 | let lines: Vec<&str> = content.lines().collect(); 265 | let mut i = 0; 266 | 267 | while i < lines.len() { 268 | if lines[i].trim().starts_with("```sql") && !content[..i].contains("## Contents") { 269 | let start_line = i; 270 | let mut sql = String::new(); 271 | 272 | // Find the exact byte position of the start of the SQL block 273 | let start_pos = content 274 | .chars() 275 | .take( 276 | content 277 | .lines() 278 | .take(start_line) 279 | .map(|l| l.chars().count() + 1) // +1 for newline 280 | .sum(), 281 | ) 282 | .map(|c| 
c.len_utf8()) 283 | .sum::(); 284 | 285 | i += 1; // Skip the opening ```sql line 286 | 287 | // Collect SQL until the closing backticks 288 | while i < lines.len() && !lines[i].trim().starts_with("```") { 289 | sql.push_str(lines[i]); 290 | sql.push('\n'); 291 | i += 1; 292 | } 293 | 294 | // Skip the closing ``` line 295 | if i < lines.len() { 296 | i += 1; 297 | } 298 | 299 | // Find the end of any existing results 300 | let mut end_line = i; 301 | while end_line < lines.len() { 302 | let line = lines[end_line].trim(); 303 | if line.starts_with("```sql") { 304 | // Next SQL block starts 305 | break; 306 | } 307 | if line == "" { 308 | // Current block results end 309 | end_line += 1; // Include the END SQL marker 310 | break; 311 | } 312 | end_line += 1; 313 | } 314 | 315 | // Calculate exact end position in bytes 316 | let end_pos = content 317 | .chars() 318 | .take( 319 | content 320 | .lines() 321 | .take(end_line) 322 | .map(|l| l.chars().count() + 1) 323 | .sum(), 324 | ) 325 | .map(|c| c.len_utf8()) 326 | .sum::(); 327 | 328 | debug!( 329 | "SQL block range: {} to {} (content len: {})", 330 | start_pos, 331 | end_pos, 332 | content.len() 333 | ); 334 | 335 | blocks.push(SqlBlock { 336 | sql: sql.trim().to_string(), 337 | range: (start_pos, end_pos), 338 | }); 339 | 340 | i = end_line; 341 | } else { 342 | i += 1; 343 | } 344 | } 345 | 346 | blocks 347 | } 348 | } 349 | 350 | impl NoteObserver for SqliteObserver { 351 | fn on_event_boxed( 352 | &self, 353 | event: NoteEvent, 354 | ) -> Pin>> + Send + '_>> { 355 | Box::pin(async move { 356 | match event { 357 | NoteEvent::Synced { 358 | content, 359 | title, 360 | file_path, 361 | frontmatter, 362 | .. 
363 | } => { 364 | info!("🔄 Processing note '{}' with SQLite observer", title); 365 | 366 | match self 367 | .store_frontmatter(&title, &frontmatter, file_path) 368 | .await 369 | { 370 | Ok(_) => debug!("Successfully stored frontmatter for '{}'", title), 371 | Err(e) => error!("Failed to store frontmatter for '{}': {}", title, e), 372 | } 373 | 374 | if self.extract_sql_blocks(&content).is_empty() { 375 | debug!( 376 | "No SQL blocks found in note '{}', skipping processing", 377 | title 378 | ); 379 | Ok(None) 380 | } else { 381 | match self.process_sql_blocks(&content).await { 382 | Ok(processed_content) => { 383 | info!("✨ Successfully processed SQL blocks for '{}'", title); 384 | debug!("SQL OBSERVER: Processed content:\n{}", processed_content); 385 | Ok(Some(ObserverResult { 386 | metadata: None, 387 | content: Some(processed_content), 388 | })) 389 | } 390 | Err(e) => { 391 | error!("Failed to process SQL blocks for '{}': {}", title, e); 392 | Err(e) 393 | } 394 | } 395 | } 396 | } 397 | _ => Ok(None), 398 | } 399 | }) 400 | } 401 | 402 | fn name(&self) -> String { 403 | "sqlite".to_string() 404 | } 405 | 406 | fn as_any(&self) -> &dyn Any { 407 | self 408 | } 409 | 410 | fn priority(&self) -> i32 { 411 | 100 // Make sure SQLite runs last 412 | } 413 | } 414 | -------------------------------------------------------------------------------- /src/script_loader.rs: -------------------------------------------------------------------------------- 1 | use crate::event::{NoteEvent, NoteObserver, ObserverResult}; 2 | use crate::observers; 3 | use crate::settings::Settings; 4 | use mlua::Lua; 5 | use pyo3::prelude::*; 6 | use pyo3::types::{PyDict, PyModule}; 7 | use std::any::Any; 8 | use std::collections::HashMap; 9 | use std::fs; 10 | use std::future::Future; 11 | use std::io; 12 | use std::path::Path; 13 | use std::pin::Pin; 14 | use std::sync::Arc; 15 | use tokio::task; 16 | use tracing::{debug, error, info, trace, warn}; 17 | 18 | #[derive(Clone)] 19 | pub struct 
LuaObserver {
    // Owned Lua interpreter state; `self.lua.clone()` is taken per event and
    // moved into a blocking task in `on_event_boxed`.
    lua: Lua,
    // Observer name, derived from the script file's stem in `new`.
    name: String,
}

impl LuaObserver {
    // NOTE(review): generic parameters in this file were stripped by the dump
    // this source was recovered from; `io::Result<Self>`, `eval::<mlua::Value>`
    // and the HashMap turbofish below are reconstructed from usage — confirm
    // against the original source. Whitespace inside the embedded Lua string
    // literals is likewise reconstructed.
    /// Load a Lua observer script from `script_path`.
    ///
    /// Before executing the user script this pre-registers two helpers in the
    /// fresh Lua state: a global `json` shim (delegating to `require("json")`)
    /// and a `logging_utils` module placed in `package.loaded` so user scripts
    /// can `require("logging_utils")`.
    pub fn new(script_path: &Path) -> io::Result<Self> {
        debug!("Creating new Lua observer from: {}", script_path.display());
        let lua = Lua::new();

        // First, register the json module
        lua.load(
            r#"
            json = {
                encode = function(v)
                    return require("json").encode(v)
                end,
                decode = function(v)
                    return require("json").decode(v)
                end
            }
            "#,
        )
        .exec()
        .map_err(|e| {
            error!("Failed to register json module: {}", e);
            io::Error::new(io::ErrorKind::Other, e.to_string())
        })?;

        // Register logging module directly. The log format mirrors the
        // tracing output of the Rust host (timestamp, padded level, message).
        let logging_utils = r#"
            local M = {}

            local function format_log(level, message, ...)
                local formatted = string.format(message, ...)
                io.write(string.format(" %s %s %s\n",
                    os.date("%Y-%m-%dT%H:%M:%S.000000Z"),
                    string.format("%-5s", level),
                    formatted
                ))
            end

            function M.error(message, ...)
                format_log("ERROR", message, ...)
            end

            function M.warn(message, ...)
                format_log("WARN", message, ...)
            end

            function M.info(message, ...)
                format_log("INFO", message, ...)
            end

            function M.debug(message, ...)
                format_log("DEBUG", message, ...)
            end

            function M.trace(message, ...)
                format_log("TRACE", message, ...)
            end

            return M
        "#;

        let package: mlua::Table = lua.globals().get("package").map_err(|e| {
            error!("Failed to get package table: {}", e);
            io::Error::new(io::ErrorKind::Other, e.to_string())
        })?;

        let loaded: mlua::Table = package.get("loaded").map_err(|e| {
            error!("Failed to get loaded table: {}", e);
            io::Error::new(io::ErrorKind::Other, e.to_string())
        })?;

        // Load and execute the logging module (the chunk returns table M).
        let logging_module = lua
            .load(logging_utils)
            .set_name("logging_utils")
            .eval::<mlua::Value>() // NOTE(review): turbofish reconstructed
            .map_err(|e| {
                error!("Failed to load logging module: {}", e);
                io::Error::new(io::ErrorKind::Other, e.to_string())
            })?;

        // Register it in package.loaded so require() finds it without a loader.
        loaded.set("logging_utils", logging_module).map_err(|e| {
            error!("Failed to register logging module: {}", e);
            io::Error::new(io::ErrorKind::Other, e.to_string())
        })?;

        // Load the main script
        let script_content = fs::read_to_string(script_path).map_err(|e| {
            error!("Failed to read Lua script: {}", e);
            e
        })?;

        // Execute the user script; it is expected to define a global
        // `on_event` function (looked up per event in on_event_boxed).
        lua.load(&script_content)
            .set_name(script_path.to_str().unwrap_or("script"))
            .exec()
            .map_err(|e| {
                error!("Failed to execute Lua script: {}", e);
                io::Error::new(io::ErrorKind::Other, e.to_string())
            })?;

        let name = script_path
            .file_stem()
            .unwrap_or_default()
            .to_string_lossy()
            .to_string();

        info!("✨ Loaded Lua observer: {}", name);
        Ok(Self { lua, name })
    }
}

impl NoteObserver for LuaObserver {
    /// Dispatch a note event to the script's global `on_event(json_string)`.
    ///
    /// The Lua call runs on a blocking thread via `task::spawn_blocking`.
    /// The script may return nil (no changes) or a JSON string with optional
    /// "metadata" and "content" keys; anything else is an error.
    fn on_event_boxed(
        &self,
        event: NoteEvent,
    ) -> Pin<Box<dyn Future<Output = io::Result<Option<ObserverResult>>> + Send + '_>> {
        let lua = self.lua.clone();
        let observer_name = self.name.clone();

        Box::pin(async move {
            debug!("Processing event in Lua observer: {}", observer_name);
            task::spawn_blocking(move || {
                let globals = lua.globals();
                let on_event: mlua::Function = globals.get("on_event").map_err(|e| {
                    error!("Failed to get on_event function: {}", e);
                    io::Error::new(io::ErrorKind::Other, e.to_string())
                })?;

                // Events cross the Rust/Lua boundary as JSON strings.
                let event_str = serde_json::to_string(&event)?;
                trace!("Sending event to Lua: {}", event_str);

                let result: mlua::Value = on_event.call(event_str).map_err(|e| {
                    error!("Failed to execute Lua on_event: {}", e);
                    io::Error::new(io::ErrorKind::Other, e.to_string())
                })?;

                match result {
                    mlua::Value::Nil => {
                        debug!("Lua observer returned no changes");
                        Ok(None)
                    }
                    mlua::Value::String(s) => {
                        debug!("Processing Lua observer result");
                        let result: serde_json::Value = serde_json::from_str(&s.to_string_lossy())?;

                        // "metadata" that fails to deserialize is silently
                        // dropped (ok()) rather than failing the event.
                        let metadata = result.get("metadata").and_then(|m| {
                            serde_json::from_value::<HashMap<String, String>>(m.clone()).ok()
                        });
                        let content = result
                            .get("content")
                            .and_then(|c| c.as_str())
                            .map(|s| s.to_string());

                        trace!(
                            "Lua observer returned - metadata: {:?}, content modified: {}",
                            metadata,
                            content.is_some()
                        );
                        Ok(Some(ObserverResult { metadata, content }))
                    }
                    _ => {
                        error!("Invalid return type from Lua script");
                        Err(io::Error::new(
                            io::ErrorKind::Other,
                            "Invalid return type from Lua script",
                        ))
                    }
                }
            })
            .await
            // Outer `?` propagates the JoinError of the blocking task itself;
            // the inner io::Result is the async block's return value.
            .map_err(|e| {
                error!("Task execution failed: {}", e);
                io::Error::new(io::ErrorKind::Other, e.to_string())
            })?
        })
    }

    /// Observer name: the script file stem captured in `new`.
    fn name(&self) -> String {
        self.name.clone()
    }

    fn as_any(&self) -> &dyn Any {
        self
    }
}

/// Observer backed by a user-supplied Python script. Unlike `LuaObserver`,
/// only the script *source* is stored; it is re-compiled into a module under
/// the GIL on every event.
pub struct PythonObserver {
    code: String,
    name: String,
}

impl PythonObserver {
    // NOTE(review): generic parameters in this file were stripped by the dump
    // this source was recovered from; `io::Result<Self>`, `extract::<String>`
    // and the HashMap turbofish below are reconstructed from usage — confirm
    // against the original source.
    /// Read a Python observer script and prepare its environment.
    ///
    /// Prepends the scripts directory and its `python/` subdirectory to
    /// `sys.path`, and pre-registers an in-memory `logging_utils` module so
    /// scripts can `from logging_utils import log_info, ...`.
    pub fn new(script_path: &Path) -> io::Result<Self> {
        debug!(
            "Creating new Python observer from: {}",
            script_path.display()
        );

        Python::with_gil(|py| {
            // Get the scripts directory (two levels up: <scripts>/python/x.py)
            let scripts_dir = script_path
                .parent()
                .and_then(|p| p.parent())
                .ok_or_else(|| {
                    io::Error::new(io::ErrorKind::NotFound, "Could not find scripts directory")
                })?;

            // Add both the scripts dir and the python subdir to Python path.
            // NOTE(review): this inserts again for every observer constructed
            // in the same interpreter — duplicates accumulate in sys.path.
            let sys_path = py.import("sys")?.getattr("path")?;
            sys_path.call_method1("insert", (0, scripts_dir.join("python")))?;
            sys_path.call_method1("insert", (0, scripts_dir))?;

            // Create the logging module (format mirrors the host's tracing output)
            let logging_utils = r#"
from typing import Any
import json
import sys
from datetime import datetime, timezone

def _format_log(level: str, message: str) -> None:
    timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ")
    print(f" {timestamp} {level:<5} {message}")

def log_error(message: str, *args: Any) -> None:
    _format_log("ERROR", message.format(*args))

def log_warn(message: str, *args: Any) -> None:
    _format_log("WARN", message.format(*args))

def log_info(message: str, *args: Any) -> None:
    _format_log("INFO", message.format(*args))

def log_debug(message: str, *args: Any) -> None:
    _format_log("DEBUG", message.format(*args))

def log_trace(message: str, *args: Any) -> None:
    _format_log("TRACE", message.format(*args))
"#;

            // Create logging_utils module (registered under that module name)
            PyModule::from_code(py, logging_utils, "logging_utils.py", "logging_utils")?;

            // Now load the actual script source; compilation is deferred to
            // on_event_boxed.
            let code = fs::read_to_string(script_path)?;
            let name = script_path
                .file_stem()
                .unwrap_or_default()
                .to_string_lossy()
                .to_string();

            info!("✨ Loaded Python observer: {}", name);
            Ok(Self { code, name })
        })
        // Inside the closure errors surface as PyErr (io::Error converts via
        // From); here they are mapped back to io::Error for the caller.
        .map_err(|e: PyErr| {
            error!("Failed to initialize Python observer: {}", e);
            io::Error::new(io::ErrorKind::Other, e.to_string())
        })
    }
}

impl NoteObserver for PythonObserver {
    /// Dispatch a note event to the script's `process_event(json_string)`.
    ///
    /// The event JSON is augmented with a `data_dir` field inside the
    /// Created/Updated/Synced payload before being handed to Python. The call
    /// runs on a blocking thread while holding the GIL. A missing
    /// `process_event`, a non-string result, or unparseable JSON all degrade
    /// to "no changes" (Ok(None)) rather than an error.
    fn on_event_boxed(
        &self,
        event: NoteEvent,
    ) -> Pin<Box<dyn Future<Output = io::Result<Option<ObserverResult>>> + Send + '_>> {
        let code = self.code.clone();
        let observer_name = self.name.clone();

        Box::pin(async move {
            debug!("Processing event in Python observer: {}", observer_name);
            task::spawn_blocking(move || {
                Python::with_gil(|py| {
                    let mut event_json = serde_json::to_value(&event)?;
                    if let serde_json::Value::Object(ref mut map) = event_json {
                        // Locate whichever event variant is present.
                        let event_type = match map {
                            m if m.contains_key("Created") => m.get_mut("Created"),
                            m if m.contains_key("Updated") => m.get_mut("Updated"),
                            m if m.contains_key("Synced") => m.get_mut("Synced"),
                            _ => None,
                        };

                        // Inject data_dir so scripts can locate app storage.
                        if let Some(serde_json::Value::Object(ref mut event_map)) = event_type {
                            event_map.insert(
                                "data_dir".to_string(),
                                serde_json::Value::String(
                                    Settings::get_data_dir().to_string_lossy().to_string(),
                                ),
                            );
                        }
                    }
                    let event_json = serde_json::to_string(&event_json)?;
                    trace!("Sending event to Python: {}", event_json);

                    // NOTE(review): `locals` is populated but never passed to
                    // the module/call below — it appears to be vestigial.
                    let locals = PyDict::new_bound(py);
                    locals
                        .set_item("event_json", event_json.clone())
                        .map_err(|e| {
                            error!("Failed to set event_json in Python context: {}", e);
                            io::Error::new(io::ErrorKind::Other, e.to_string())
                        })?;

                    // Compile the stored script source into an anonymous module.
                    let code = PyModule::from_code_bound(py, &code, "", "").map_err(|e| {
                        error!("Failed to create Python module: {}", e);
                        io::Error::new(io::ErrorKind::Other, e.to_string())
                    })?;

                    if let Ok(func) = code.getattr("process_event") {
                        let result = func.call1((event_json,)).map_err(|e| {
                            error!("Failed to execute Python process_event: {}", e);
                            io::Error::new(io::ErrorKind::Other, e.to_string())
                        })?;

                        if let Ok(result_str) = result.extract::<String>() {
                            if let Ok(result) = serde_json::from_str(&result_str) {
                                let result: serde_json::Value = result;

                                let metadata = result.get("metadata").and_then(|m| {
                                    serde_json::from_value::<HashMap<String, String>>(m.clone())
                                        .ok()
                                });

                                // Only get content if it exists, don't fall back to original
                                let content = result
                                    .get("content")
                                    .and_then(|c| c.as_str())
                                    .map(|s| s.to_string());

                                return Ok(Some(ObserverResult { metadata, content }));
                            }
                        }
                    }

                    debug!("Python observer returned no changes");
                    Ok(None)
                })
            })
            .await
            .map_err(|e| {
                error!("Task execution failed: {}", e);
                io::Error::new(io::ErrorKind::Other, e.to_string())
            })?
        })
    }

    fn name(&self) -> String {
        self.name.clone()
    }

    fn as_any(&self) -> &dyn Any {
        self
    }
}

/// Loads all observers for a run: built-in Rust observers (by name) plus every
/// Lua and Python script found under `scripts_dir`.
pub struct ScriptLoader {
    scripts_dir: String,
    settings: Settings,
}

impl ScriptLoader {
    pub fn new(scripts_dir: String, settings: Settings) -> Self {
        debug!("Creating new ScriptLoader with directory: {}", scripts_dir);
        Self {
            scripts_dir,
            settings,
        }
    }

    /// Build the observer list.
    ///
    /// Rust observers are gated by `enabled_observers`; script observers are
    /// loaded unconditionally from `<scripts_dir>/lua/*.lua` and
    /// `<scripts_dir>/python/*.py`. A script that fails to load aborts the
    /// whole call with its error.
    pub fn load_observers(
        &self,
        enabled_observers: &[String],
    ) -> io::Result<Vec<Box<dyn NoteObserver>>> {
        debug!("Loading observers. Enabled: {:?}", enabled_observers);
        let mut observers: Vec<Box<dyn NoteObserver>> = Vec::new();

        // Add enabled Rust observers
        for observer_name in enabled_observers {
            debug!("Loading Rust observer: {}", observer_name);
            if let Some(observer) =
                observers::create_observer(observer_name, Arc::new(self.settings.clone()))
            {
                info!("✨ Loaded Rust observer: {}", observer_name);
                observers.push(observer);
            } else {
                // Unknown names are warned about, not treated as errors.
                warn!("No Rust observer found for: {}", observer_name);
            }
        }

        // Load Lua scripts
        let lua_dir = Path::new(&self.scripts_dir).join("lua");
        if lua_dir.exists() {
            debug!("Loading Lua scripts from: {}", lua_dir.display());
            for entry in fs::read_dir(lua_dir)? {
                let path = entry?.path();
                if path.extension().map_or(false, |ext| ext == "lua") {
                    debug!("Loading Lua script: {}", path.display());
                    observers.push(Box::new(LuaObserver::new(&path)?));
                }
            }
        } else {
            debug!("No Lua scripts directory found");
        }

        // Load Python scripts
        let py_dir = Path::new(&self.scripts_dir).join("python");
        if py_dir.exists() {
            debug!("Loading Python scripts from: {}", py_dir.display());
            for entry in fs::read_dir(py_dir)?
{ 432 | let path = entry?.path(); 433 | if path.extension().map_or(false, |ext| ext == "py") { 434 | debug!("Loading Python script: {}", path.display()); 435 | observers.push(Box::new(PythonObserver::new(&path)?)); 436 | } 437 | } 438 | } else { 439 | debug!("No Python scripts directory found"); 440 | } 441 | 442 | info!("🔌 Loaded {} observers total", observers.len()); 443 | Ok(observers) 444 | } 445 | } 446 | -------------------------------------------------------------------------------- /resources/default_scripts/python/time_tracker.py: -------------------------------------------------------------------------------- 1 | import json 2 | import re 3 | from datetime import datetime, timedelta 4 | from pathlib import Path 5 | from typing import Dict, List, Optional, Tuple, Any 6 | from logging_utils import log_debug, log_info, log_error 7 | import os 8 | import sys 9 | 10 | DEFAULT_CONFIG = { 11 | "expected_hours_per_week": 40, 12 | "workdays": ["Monday", "Tuesday", "Wednesday", "Thursday", "Friday"], 13 | "vacation_days_per_year": 30 14 | } 15 | 16 | class TimeBlock: 17 | def __init__(self, start: str, end: str): 18 | self.start = start 19 | self.end = end 20 | 21 | def duration_minutes(self) -> float: 22 | start_time = parse_time(self.start) 23 | end_time = parse_time(self.end) 24 | if not (start_time and end_time): 25 | log_error(f"Invalid time format: start={self.start}, end={self.end}") 26 | return 0.0 27 | 28 | duration = (end_time - start_time).total_seconds() / 60 29 | log_debug(f"Duration calculation: {self.start} to {self.end} = {duration} minutes") 30 | return duration 31 | 32 | @staticmethod 33 | def parse(time_str: str) -> List['TimeBlock']: 34 | """Parse time blocks from string format '09:00-12:00,13:00-17:00'""" 35 | blocks = [] 36 | if not time_str or time_str.strip() in ['-', 'N/A']: 37 | return blocks 38 | 39 | parts = time_str.split(',') 40 | for part in parts: 41 | if '-' in part: 42 | start, end = part.strip().split('-') 43 | 
blocks.append(TimeBlock(start.strip(), end.strip())) 44 | return blocks 45 | 46 | def get_app_data_dir(event_json: str) -> Path: 47 | """Get the notes directory from the event filepath.""" 48 | try: 49 | event = json.loads(event_json) 50 | if isinstance(event, dict): 51 | event_type = event.get("Created") or event.get("Updated") or event.get("Synced") 52 | if event_type and "file_path" in event_type: 53 | filepath = Path(event_type["file_path"]) 54 | log_debug("Event filepath: {}", filepath) 55 | return filepath.parent 56 | else: 57 | log_error("No file_path found in event: {}", event) 58 | except Exception as e: 59 | log_error("Failed to get app data directory: {}", str(e)) 60 | 61 | log_error("Falling back to default location") 62 | return Path.home() / ".local" / "share" / "note_cli" 63 | 64 | def get_tracker_file(event_json: str) -> Path: 65 | """Get the path to the time tracker file.""" 66 | try: 67 | event = json.loads(event_json) 68 | if isinstance(event, dict): 69 | event_type = event.get("Created") or event.get("Updated") or event.get("Synced") 70 | if event_type and "file_path" in event_type: 71 | # Get the parent directory of the note's file path 72 | note_path = Path(event_type["file_path"]) 73 | tracker_path = note_path.parent / "_time_tracker.md" 74 | log_info(f"Creating time tracker at: {tracker_path}") 75 | return tracker_path 76 | else: 77 | log_error("No file_path found in event: {}", event) 78 | except Exception as e: 79 | log_error("Failed to get app data directory: {}", str(e)) 80 | 81 | # Fallback to default location 82 | fallback_path = Path.home() / ".local" / "share" / "note_cli" / "notes" / "_time_tracker.md" 83 | log_error(f"Falling back to default location: {fallback_path}") 84 | return fallback_path 85 | 86 | def parse_time(time_str: str) -> Optional[datetime]: 87 | """Parse time string in format HH:MM.""" 88 | try: 89 | return datetime.strptime(time_str.strip(), "%H:%M") 90 | except ValueError: 91 | return None 92 | 93 | def 
calculate_day_hours(work_blocks: List[TimeBlock], break_blocks: List[TimeBlock]) -> float: 94 | """Calculate working hours for a day considering multiple time blocks.""" 95 | total_work_minutes = sum(block.duration_minutes() for block in work_blocks) 96 | total_break_minutes = sum(block.duration_minutes() for block in break_blocks) 97 | 98 | working_minutes = total_work_minutes - total_break_minutes 99 | return round(working_minutes / 60, 2) 100 | 101 | def parse_config(content: str) -> Dict: 102 | """Parse configuration from the document.""" 103 | config = DEFAULT_CONFIG.copy() 104 | 105 | config_section = re.search(r'## Configuration\n(.*?)\n\n', content, re.DOTALL) 106 | if config_section: 107 | config_text = config_section.group(1) 108 | for line in config_text.splitlines(): 109 | line = line.strip() 110 | if ':' in line: 111 | key, value = [part.strip() for part in line.split(':', 1)] 112 | key = key.lower().replace(' ', '_') 113 | 114 | if key == 'expected_hours_per_week': 115 | config[key] = float(value) 116 | elif key == 'workdays': 117 | config[key] = [day.strip() for day in value.split(',')] 118 | elif key == 'vacation_days_per_year': 119 | config[key] = int(value) 120 | 121 | return config 122 | 123 | def parse_entries(content: str) -> List[Dict[str, Any]]: 124 | """Parse time entries from the content.""" 125 | entries = [] 126 | lines = content.split('\n') 127 | in_entries = False 128 | header_seen = False 129 | 130 | for line in lines: 131 | if line.startswith('## Time Entries'): 132 | in_entries = True 133 | continue 134 | if in_entries and line.startswith('|'): 135 | if not header_seen: 136 | header_seen = True # Skip the header row 137 | continue 138 | if line.startswith('|--'): # Skip the separator row 139 | continue 140 | if '|' not in line: # Skip empty or malformed lines 141 | continue 142 | 143 | parts = [p.strip() for p in line.split('|')] 144 | if len(parts) >= 6 and parts[1]: # Ensure we have enough parts and date is not empty 145 | 
entries.append({ 146 | 'date': parts[1], 147 | 'type': parts[2], 148 | 'work_times': TimeBlock.parse(parts[3]), # Convert to TimeBlock objects 149 | 'break_times': TimeBlock.parse(parts[4]), # Convert to TimeBlock objects 150 | 'notes': parts[5] 151 | }) 152 | 153 | return entries 154 | 155 | def calculate_balance(entries: List[Dict], config: Dict) -> Tuple[float, str]: 156 | total_worked = 0.0 157 | expected_per_week = config['expected_hours_per_week'] 158 | total_expected = 0.0 159 | 160 | # Group entries by month and week for summaries 161 | monthly_stats = {} 162 | weekly_stats = {} 163 | 164 | for entry in entries: 165 | try: 166 | if entry['date'] == 'Date': 167 | continue 168 | 169 | entry_date = datetime.strptime(entry['date'], "%Y-%m-%d") 170 | month_key = entry_date.strftime("%Y-%m") 171 | week_key = entry_date.strftime("%Y-W%W") # ISO week number 172 | 173 | # Initialize month stats 174 | if month_key not in monthly_stats: 175 | monthly_stats[month_key] = { 176 | 'worked': 0.0, 177 | 'expected': expected_per_week * 4, 178 | 'vacation_days': 0, 179 | 'sick_days': 0 180 | } 181 | total_expected += expected_per_week * 4 182 | 183 | # Initialize week stats 184 | if week_key not in weekly_stats: 185 | weekly_stats[week_key] = { 186 | 'worked': 0.0, 187 | 'expected': expected_per_week, 188 | 'start_date': entry_date - timedelta(days=entry_date.weekday()), 189 | 'vacation_days': 0, 190 | 'sick_days': 0 191 | } 192 | 193 | entry_type = entry['type'].lower() 194 | if entry_type == 'workday': 195 | hours = calculate_day_hours( 196 | entry['work_times'], 197 | entry['break_times'] 198 | ) 199 | if hours > 0: 200 | total_worked += hours 201 | monthly_stats[month_key]['worked'] += hours 202 | weekly_stats[week_key]['worked'] += hours 203 | elif entry_type in ['vacation', 'sick']: 204 | if entry_type == 'vacation': 205 | monthly_stats[month_key]['vacation_days'] += 1 206 | weekly_stats[week_key]['vacation_days'] += 1 207 | else: 208 | 
monthly_stats[month_key]['sick_days'] += 1 209 | weekly_stats[week_key]['sick_days'] += 1 210 | 211 | except Exception as e: 212 | log_error(f"Error processing entry {entry}: {str(e)}") 213 | continue 214 | 215 | balance = total_worked - total_expected 216 | 217 | # Build the summary text 218 | summary_parts = [ 219 | "### Overall Summary", 220 | f"Total hours worked: {total_worked:.2f}h", 221 | f"Expected hours: {total_expected:.2f}h", 222 | f"Balance: {balance:+.2f}h\n", 223 | f"Status: {'✅ On track' if balance >= 0 else '⚠️ Behind schedule'}\n", 224 | "### Weekly Summary\n", 225 | "| Week | Dates | Hours Worked | Expected Hours | Balance | Cumulative Balance |", 226 | "|------|-------|--------------|----------------|---------|-------------------|" 227 | ] 228 | 229 | # Add weekly summaries 230 | cumulative_balance = 0.0 231 | for week in sorted(weekly_stats.keys(), reverse=True): 232 | stats = weekly_stats[week] 233 | week_balance = stats['worked'] - stats['expected'] 234 | cumulative_balance += week_balance 235 | start_date = stats['start_date'] 236 | end_date = start_date + timedelta(days=6) 237 | 238 | summary_parts.append( 239 | f"| {week} | {start_date.strftime('%Y-%m-%d')} to {end_date.strftime('%Y-%m-%d')} | " 240 | f"{stats['worked']:.2f}h | {stats['expected']:.2f}h | {week_balance:+.2f}h | {cumulative_balance:+.2f}h |" 241 | ) 242 | 243 | summary_parts.extend([ 244 | "\n### Monthly Summary\n" 245 | ]) 246 | 247 | # Add monthly summaries 248 | for month in sorted(monthly_stats.keys()): 249 | stats = monthly_stats[month] 250 | month_balance = stats['worked'] - stats['expected'] 251 | 252 | summary_parts.extend([ 253 | f"#### {month}", 254 | f"Hours worked: {stats['worked']:.2f}h", 255 | f"Expected hours: {stats['expected']:.2f}h", 256 | f"Balance: {month_balance:+.2f}h", 257 | f"Vacation days: {stats['vacation_days']}", 258 | f"Sick days: {stats['sick_days']}\n" 259 | ]) 260 | 261 | return balance, "\n".join(summary_parts) 262 | 263 | def 
generate_tracker_content(original_content: str, config: Dict[str, Any], entries: List[Dict[str, str]]) -> str: 264 | """Generate the full content for the time tracker.""" 265 | try: 266 | # Keep existing entries if they exist 267 | existing_entries = parse_entries(original_content) if original_content else [] 268 | 269 | # Merge existing entries with any new ones, avoiding duplicates 270 | all_entries = existing_entries 271 | for new_entry in entries: 272 | if not any(e['date'] == new_entry['date'] for e in existing_entries): 273 | all_entries.append(new_entry) 274 | 275 | # Sort entries by date in reverse order 276 | all_entries.sort(key=lambda x: x['date'], reverse=True) 277 | 278 | # Calculate balances with all entries 279 | balance, summary = calculate_balance(all_entries, config) 280 | 281 | # Generate the content with all entries preserved 282 | content_lines = [ 283 | "---", 284 | "time_tracker: true", 285 | "---", 286 | "", 287 | "# ⏱️ Time Tracker", 288 | "", 289 | "## Configuration", 290 | f"Expected Hours per Week: {config['expected_hours_per_week']}", 291 | f"Workdays: {', '.join(config['workdays'])}", 292 | f"Vacation Days per Year: {config['vacation_days_per_year']}", 293 | "", 294 | summary, # This now includes both Overall and Monthly summaries 295 | "", 296 | "## Time Entries", 297 | "| Date | Type | Work Times | Break Times | Notes |", 298 | "|------|------|------------|-------------|--------|" 299 | ] 300 | 301 | # Add all entries to the table 302 | for entry in all_entries: 303 | work_times_str = ','.join(f"{b.start}-{b.end}" for b in entry['work_times']) or '-' 304 | break_times_str = ','.join(f"{b.start}-{b.end}" for b in entry['break_times']) or '-' 305 | content_lines.append( 306 | f"| {entry['date']} | {entry['type']} | {work_times_str} | {break_times_str} | {entry['notes']} |" 307 | ) 308 | 309 | content_lines.append("") 310 | 311 | return "\n".join(content_lines) 312 | 313 | except Exception as e: 314 | log_error(f"Failed to generate 
tracker content: {str(e)}") 315 | return original_content 316 | 317 | def process_event(event_json: str) -> Optional[str]: 318 | try: 319 | event = json.loads(event_json) 320 | 321 | # Get the tracker file path and check if it exists 322 | tracker_file = get_tracker_file(event_json) 323 | file_exists = tracker_file.exists() 324 | # Create default content if file doesn't exist 325 | default_content = "\n".join([ 326 | "---", 327 | "time_tracker: true", 328 | "---", 329 | "", 330 | "# ⏱️ Time Tracker", 331 | "", 332 | "## Configuration", 333 | f"Expected Hours per Week: {DEFAULT_CONFIG['expected_hours_per_week']}", 334 | f"Workdays: {', '.join(DEFAULT_CONFIG['workdays'])}", 335 | f"Vacation Days per Year: {DEFAULT_CONFIG['vacation_days_per_year']}", 336 | "", 337 | "## Summary", 338 | "### Overall Summary", 339 | "Total hours worked: 0.00h", 340 | "Expected hours: 0.00h", 341 | "Balance: +0.00h", 342 | "", 343 | "Status: ✅ On track", 344 | "", 345 | "### Monthly Summary", 346 | "", 347 | "## Time Entries", 348 | "| Date | Type | Work Times | Break Times | Notes |", 349 | "|------|------|------------|-------------|--------|", 350 | "" 351 | ]) 352 | # If file exists, we can do early return for non-time tracker files 353 | if file_exists: 354 | if isinstance(event, dict): 355 | event_type = event.get("Created") or event.get("Updated") or event.get("Synced") 356 | if event_type and "title" in event_type: 357 | title = event_type["title"] 358 | if title != "_time_tracker": 359 | log_debug("Skipping non-time tracker file: {}", title) 360 | return None 361 | 362 | # Get content from event or use default 363 | content = None 364 | for event_type in ["Created", "Updated", "Synced"]: 365 | if event_type in event: 366 | event_data = event[event_type] 367 | event_file = Path(event_data.get("file_path", "")) 368 | 369 | # If this is the time tracker file, use its content 370 | if event_file == tracker_file: 371 | content = event_data.get("content") 372 | break 373 | 374 | # If 
no content exists, create the file with default template 375 | if not content: 376 | log_info("Creating new time tracker with default template") 377 | tracker_file.parent.mkdir(parents=True, exist_ok=True) 378 | tracker_file.write_text(default_content) 379 | log_info(f"Created time tracker file at: {tracker_file}") 380 | content = default_content 381 | 382 | return json.dumps({ 383 | "metadata": { 384 | "time_tracker": "true", 385 | "hour_balance": "+0.00", 386 | "time_entries": "0", 387 | "tracker_updated": datetime.now().strftime("%Y-%m-%d %H:%M:%S") 388 | }, 389 | "content": content 390 | }) 391 | 392 | # Early return if this isn't the time tracker file 393 | if isinstance(event, dict): 394 | event_type = event.get("Created") or event.get("Updated") or event.get("Synced") 395 | if event_type and "title" in event_type: 396 | title = event_type["title"] 397 | if title != "_time_tracker": 398 | log_debug("Skipping non-time tracker file: {}", title) 399 | return None 400 | 401 | # Process existing content 402 | log_info("⏱️ Processing event for time tracker") 403 | entries = parse_entries(content) 404 | config = parse_config(content) or DEFAULT_CONFIG 405 | 406 | # Generate new content with updated calculations 407 | updated_content = generate_tracker_content(content, config, entries) 408 | balance, _ = calculate_balance(entries, config) 409 | 410 | # Only return new content if it's different from the original 411 | should_update = updated_content != content 412 | 413 | return json.dumps({ 414 | "metadata": { 415 | "time_entries": str(len(entries)), 416 | "hour_balance": f"{balance:+.2f}", 417 | "tracker_updated": datetime.now().strftime("%Y-%m-%d %H:%M:%S") 418 | }, 419 | "content": updated_content if should_update else content 420 | }) 421 | 422 | except Exception as e: 423 | log_error("Failed to process time tracker: {}", str(e)) 424 | return None --------------------------------------------------------------------------------