├── assets
├── cli.png
└── wechat.png
├── src
└── sleepless_agent
│ ├── interfaces
│ └── __init__.py
│ ├── monitoring
│ ├── __init__.py
│ ├── logging.py
│ └── monitor.py
│ ├── scheduling
│ ├── __init__.py
│ └── time_utils.py
│ ├── storage
│ ├── __init__.py
│ ├── sqlite.py
│ ├── results.py
│ ├── workspace.py
│ └── db_helpers.py
│ ├── utils
│ ├── __init__.py
│ ├── exceptions.py
│ ├── display.py
│ ├── metrics_aggregator.py
│ ├── config.py
│ ├── live_status.py
│ ├── directory_manager.py
│ └── readme_manager.py
│ ├── deployment
│ ├── sleepless-agent.service
│ └── com.sleepless-agent.plist
│ ├── core
│ ├── __init__.py
│ ├── timeout_manager.py
│ └── models.py
│ ├── tasks
│ ├── __init__.py
│ ├── utils.py
│ └── refinement.py
│ ├── __main__.py
│ ├── __init__.py
│ └── config.yaml
├── .env.example
├── docs
├── reference
│ └── index.md
├── tutorials
│ └── index.md
├── examples
│ └── index.md
├── concepts
│ ├── index.md
│ ├── architecture.md
│ └── task-lifecycle.md
├── guides
│ ├── index.md
│ └── slack-setup.md
├── quickstart.md
├── index.md
├── changelog.md
├── README.md
├── installation.md
└── faq.md
├── LICENSE
├── pyproject.toml
├── .github
└── workflows
│ └── deploy-docs.yml
├── Makefile
├── .gitignore
└── mkdocs.yml
/assets/cli.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/context-machine-lab/sleepless-agent/HEAD/assets/cli.png
--------------------------------------------------------------------------------
/assets/wechat.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/context-machine-lab/sleepless-agent/HEAD/assets/wechat.png
--------------------------------------------------------------------------------
/src/sleepless_agent/interfaces/__init__.py:
--------------------------------------------------------------------------------
1 | """External interfaces - Slack bot, CLI, and other integrations"""
2 |
3 | from .bot import SlackBot
4 | from .cli import main as cli_main
5 |
6 | __all__ = ["SlackBot", "cli_main"]
7 |
--------------------------------------------------------------------------------
/src/sleepless_agent/monitoring/__init__.py:
--------------------------------------------------------------------------------
1 | """Observability - monitoring, logging, and reporting."""
2 |
3 | from .logging import get_logger
4 | from .monitor import HealthMonitor, PerformanceLogger
5 | from .pro_plan_usage import ProPlanUsageChecker
6 | from .report_generator import ReportGenerator, TaskMetrics
7 |
8 | __all__ = ["get_logger", "HealthMonitor", "PerformanceLogger", "ProPlanUsageChecker", "ReportGenerator", "TaskMetrics"]
9 |
--------------------------------------------------------------------------------
/src/sleepless_agent/scheduling/__init__.py:
--------------------------------------------------------------------------------
1 | """Scheduling and prioritisation utilities."""
2 |
3 | from .auto_generator import AutoTaskGenerator
4 | from .scheduler import BudgetManager, SmartScheduler
5 | from .time_utils import current_period_start, get_time_label, is_nighttime
6 |
7 | __all__ = [
8 | "AutoTaskGenerator",
9 | "BudgetManager",
10 | "SmartScheduler",
11 | "current_period_start",
12 | "get_time_label",
13 | "is_nighttime",
14 | ]
15 |
--------------------------------------------------------------------------------
/src/sleepless_agent/storage/__init__.py:
--------------------------------------------------------------------------------
1 | """Storage layer - persistence, git, and workspace management."""
2 |
3 | from sleepless_agent.storage.git import GitManager
4 | from sleepless_agent.storage.workspace import WorkspaceSetup, WorkspaceConfigResult
5 | from sleepless_agent.storage.results import ResultManager
6 | from sleepless_agent.storage.sqlite import SQLiteStore
7 |
8 | __all__ = ["GitManager", "WorkspaceSetup", "WorkspaceConfigResult", "ResultManager", "SQLiteStore"]
9 |
--------------------------------------------------------------------------------
/.env.example:
--------------------------------------------------------------------------------
1 | # Slack Bot
2 | SLACK_BOT_TOKEN=xoxb-your-slack-bot-token
3 | SLACK_APP_TOKEN=xapp-your-slack-app-token
4 |
5 | # Claude Code CLI (optional - defaults to 'claude' from PATH)
6 | # CLAUDE_CODE_BINARY_PATH=/usr/local/bin/claude
7 |
8 | # Agent Config
9 | AGENT_WORKSPACE_ROOT=./workspace
10 | AGENT_DB_PATH=./workspace/data/tasks.db
11 | AGENT_RESULTS_PATH=./workspace/data/results
12 |
13 | # Git
14 | GIT_USER_NAME=Sleepless Agent
15 | GIT_USER_EMAIL=agent@sleepless.local
16 |
17 | # Optional
18 | LOG_LEVEL=INFO
19 | DEBUG=false
20 |
--------------------------------------------------------------------------------
/src/sleepless_agent/utils/__init__.py:
--------------------------------------------------------------------------------
1 | """Utility functions and helpers."""
2 |
3 | from .display import format_age_seconds, format_duration, relative_time, shorten
4 | from .live_status import LiveStatusTracker, LiveStatusEntry
5 | from .exceptions import PauseException
6 | from .config import Config, ConfigNode, get_config
7 |
8 | __all__ = [
9 | "format_age_seconds",
10 | "format_duration",
11 | "relative_time",
12 | "shorten",
13 | "LiveStatusTracker",
14 | "LiveStatusEntry",
15 | "PauseException",
16 | "Config",
17 | "ConfigNode",
18 | "get_config",
19 | ]
20 |
--------------------------------------------------------------------------------
/src/sleepless_agent/deployment/sleepless-agent.service:
--------------------------------------------------------------------------------
1 | [Unit]
2 | Description=Sleepless Agent - 24/7 AI Assistant
3 | After=network.target
4 |
5 | [Service]
6 | Type=simple
7 | User=sleepless
8 | WorkingDirectory=/opt/sleepless-agent
9 | ExecStart=/opt/sleepless-agent/venv/bin/sle daemon
10 | Restart=on-failure
11 | RestartSec=10
12 | StandardOutput=journal
13 | StandardError=journal
14 |
15 | # Environment
16 | EnvironmentFile=/opt/sleepless-agent/.env
17 |
18 | # Security
19 | NoNewPrivileges=true
20 | ProtectSystem=strict
21 | ProtectHome=true
22 | ReadWritePaths=/opt/sleepless-agent/workspace
23 |
24 | [Install]
25 | WantedBy=multi-user.target
26 |
--------------------------------------------------------------------------------
/src/sleepless_agent/core/__init__.py:
--------------------------------------------------------------------------------
1 | """Core agent runtime and execution - the kernel of the agent OS."""
2 |
3 | from sleepless_agent.core.executor import ClaudeCodeExecutor
4 | from sleepless_agent.core.models import Result, Task, TaskPriority, TaskStatus, init_db
5 | from sleepless_agent.core.queue import TaskQueue
6 | from sleepless_agent.core.task_runtime import TaskRuntime
7 | from sleepless_agent.core.timeout_manager import TaskTimeoutManager
8 |
9 | __all__ = [
10 | "ClaudeCodeExecutor",
11 | "Task",
12 | "Result",
13 | "TaskPriority",
14 | "TaskStatus",
15 | "init_db",
16 | "TaskQueue",
17 | "TaskRuntime",
18 | "TaskTimeoutManager",
19 | ]
20 |
--------------------------------------------------------------------------------
/src/sleepless_agent/tasks/__init__.py:
--------------------------------------------------------------------------------
1 | """Task generation, refinement, and utilities."""
2 |
3 | from sleepless_agent.core.models import (
4 | GenerationHistory,
5 | Result,
6 | Task,
7 | TaskPriority,
8 | TaskStatus,
9 | TaskType,
10 | TaskPool,
11 | init_db,
12 | )
13 | from sleepless_agent.core.queue import TaskQueue
14 | from .refinement import ensure_refinement_task
15 | from .utils import prepare_task_creation
16 |
17 | __all__ = [
18 | "Task",
19 | "Result",
20 | "GenerationHistory",
21 | "TaskPriority",
22 | "TaskStatus",
23 | "TaskType",
24 | "TaskQueue",
25 | "TaskPool",
26 | "init_db",
27 | "ensure_refinement_task",
28 | "prepare_task_creation",
29 | ]
30 |
--------------------------------------------------------------------------------
/src/sleepless_agent/utils/exceptions.py:
--------------------------------------------------------------------------------
1 | """Custom exceptions for Sleepless Agent"""
2 |
3 | from datetime import datetime
4 | from typing import Optional
5 |
6 |
class PauseException(Exception):
    """Signal that task execution must pause due to a Pro plan usage limit."""

    def __init__(
        self,
        message: str,
        reset_time: Optional[datetime],
        usage_percent: float,
    ):
        """Create the exception.

        Args:
            message: Human-readable reason for the pause.
            reset_time: Moment at which the usage limit resets, if known.
            usage_percent: Usage percentage reported by the CLI.
        """
        super().__init__(message)
        # Carried alongside the message so the caller can decide when to resume.
        self.reset_time = reset_time
        self.usage_percent = usage_percent
26 |
--------------------------------------------------------------------------------
/src/sleepless_agent/__main__.py:
--------------------------------------------------------------------------------
1 | """Unified CLI entry point for sleepless agent (daemon or CLI commands)."""
2 |
3 | from __future__ import annotations
4 |
5 | import sys
6 | from typing import Optional
7 |
8 | from sleepless_agent.interfaces.cli import main as cli_main
9 |
10 |
def main(argv: Optional[list[str]] = None) -> int:
    """Route to the daemon or the CLI based on the first argument."""

    args = sys.argv[1:] if argv is None else argv

    if args and args[0] == "daemon":
        # Lazy import: only pull in the daemon stack when actually requested.
        from sleepless_agent.core.daemon import main as daemon_main

        return daemon_main()

    # Everything else (including no arguments at all) is a plain CLI call.
    return cli_main(args)
23 |
24 |
25 | if __name__ == "__main__": # pragma: no cover - manual execution
26 | sys.exit(main())
27 |
--------------------------------------------------------------------------------
/docs/reference/index.md:
--------------------------------------------------------------------------------
1 | # Reference Documentation
2 |
3 | Complete technical reference for Sleepless Agent.
4 |
5 | ## API Reference
6 |
7 | ### 📟 [CLI Commands](api/cli-commands.md)
8 | Complete command-line interface reference.
9 |
10 | ### 💬 Slack Commands
11 | *Coming soon* - Slack slash command reference.
12 |
13 | ### 🐍 Python API
14 | *Coming soon* - Python module API reference.
15 |
16 | ## Configuration Reference
17 |
18 | ### ⚙️ Configuration
19 | *Coming soon* - All configuration options explained.
20 |
21 | ### 💾 Database Schema
22 | *Coming soon* - SQLite schema documentation.
23 |
24 | ### 🔧 Environment Variables
25 | *Coming soon* - Environment variable reference.
26 |
27 | ### ❌ Error Codes
28 | *Coming soon* - Error codes and their meanings.
29 |
30 | ## Quick Links
31 |
32 | - [CLI Commands](api/cli-commands.md) - Available now!
- [Guides](../guides/index.md) - Setup and configuration help
- [FAQ](../faq.md) - Common issues
--------------------------------------------------------------------------------
/docs/tutorials/index.md:
--------------------------------------------------------------------------------
1 | # Tutorials
2 |
3 | Step-by-step tutorials to learn Sleepless Agent through hands-on practice.
4 |
5 | ## Available Tutorials
6 |
7 | *Tutorials coming soon! These will include:*
8 |
9 | ### 🎯 [First Task](first-task.md)
10 | Create and monitor your first automated task.
11 |
12 | ### 💬 [Slack Workflows](slack-workflows.md)
13 | Build effective command workflows in Slack.
14 |
15 | ### 📊 [Monitoring Tasks](monitoring-tasks.md)
16 | Track task execution and performance.
17 |
18 | ### 📈 [Daily Reports](daily-reports.md)
19 | Understanding and using generated reports.
20 |
21 | ### 🏗️ [Workspace Management](workspace-management.md)
22 | Managing task workspaces effectively.
23 |
24 | ## Check Back Soon
25 |
26 | We're actively developing comprehensive tutorials. In the meantime:
27 | - Check the [Quickstart Guide](../quickstart.md) to get started
28 | - Review the [Guides](../guides/index.md) for configuration help
29 | - Explore [Examples](../examples/index.md) for code snippets
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2024 Context Machine Lab
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/docs/examples/index.md:
--------------------------------------------------------------------------------
1 | # Examples
2 |
3 | Code examples and practical use cases for Sleepless Agent.
4 |
5 | ## Available Examples
6 |
7 | *Examples coming soon! These will include:*
8 |
9 | ### 📝 Basic Usage
10 | Simple usage patterns and quick snippets.
11 |
12 | ### 💬 Slack Integration
13 | Real-world Slack workflow examples.
14 |
15 | ### 🔄 Advanced Workflows
16 | Complex task automation patterns.
17 |
18 | ### ⚙️ Custom Executors
19 | Extending functionality with custom code.
20 |
21 | ### 📊 Monitoring Scripts
22 | Performance monitoring and analysis.
23 |
24 | ## Quick Examples
25 |
26 | ### Submit a Task via CLI
27 | ```bash
28 | sle think "Research Python async patterns"
29 | ```
30 |
31 | ### Submit a Serious Task
32 | ```bash
33 | sle think "Implement caching layer" -p backend
34 | ```
35 |
36 | ### Check Status
37 | ```bash
38 | sle check
39 | ```
40 |
41 | ### View Report
42 | ```bash
43 | sle report
44 | ```
45 |
46 | ## More Resources
47 |
48 | - [Quickstart Guide](../quickstart.md) - Get started quickly
49 | - [CLI Commands](../reference/api/cli-commands.md) - Full command reference
50 | - [Guides](../guides/index.md) - Step-by-step configuration
--------------------------------------------------------------------------------
/src/sleepless_agent/deployment/com.sleepless-agent.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
    <key>Label</key>
    <string>com.sleepless-agent</string>
    <key>ProgramArguments</key>
    <array>
        <string>/usr/local/bin/sle</string>
        <string>daemon</string>
    </array>
    <key>WorkingDirectory</key>
    <string>/Users/YOUR_USERNAME/projects/sleepless-agent</string>
    <key>StandardOutPath</key>
    <string>/Users/YOUR_USERNAME/projects/sleepless-agent/workspace/data/daemon.log</string>
    <key>StandardErrorPath</key>
    <string>/Users/YOUR_USERNAME/projects/sleepless-agent/workspace/data/daemon.err</string>
    <key>EnvironmentVariables</key>
    <dict>
        <key>PATH</key>
        <string>/usr/local/bin:/usr/bin:/bin:/usr/sbin:/sbin</string>
        <key>PYTHONUNBUFFERED</key>
        <string>1</string>
    </dict>
    <key>RunAtLoad</key>
    <true/>
    <key>KeepAlive</key>
    <dict>
        <key>SuccessfulExit</key>
        <false/>
    </dict>
    <!-- NOTE(review): the stripped original showed "Restart 10"; launchd has no
         Restart key, so ThrottleInterval (restart delay in seconds) is assumed. -->
    <key>ThrottleInterval</key>
    <integer>10</integer>
</dict>
</plist>
--------------------------------------------------------------------------------
/src/sleepless_agent/__init__.py:
--------------------------------------------------------------------------------
1 | """Sleepless Agent - 24/7 AI Assistant"""
2 |
3 | from sleepless_agent.interfaces import SlackBot, cli_main
4 | from sleepless_agent.monitoring import HealthMonitor, PerformanceLogger
5 | from sleepless_agent.storage.results import ResultManager
6 | from sleepless_agent.core.daemon import SleeplessAgent
7 | from sleepless_agent.scheduling.auto_generator import AutoTaskGenerator
8 | from sleepless_agent.scheduling.scheduler import SmartScheduler
9 | from sleepless_agent.core.models import (
10 | Result,
11 | Task,
12 | TaskPriority,
13 | TaskStatus,
14 | init_db,
15 | )
16 | from sleepless_agent.core.queue import TaskQueue
17 | from sleepless_agent.core.executor import ClaudeCodeExecutor
18 | from sleepless_agent.storage.git import GitManager
19 |
20 | __version__ = "0.1.2"
21 |
22 | __all__ = [
23 | "SleeplessAgent",
24 | "Task",
25 | "TaskPriority",
26 | "TaskStatus",
27 | "TaskQueue",
28 | "SmartScheduler",
29 | "AutoTaskGenerator",
30 | "Result",
31 | "init_db",
32 | "SlackBot",
33 | "cli_main",
34 | "ClaudeCodeExecutor",
35 | "ResultManager",
36 | "GitManager",
37 | "HealthMonitor",
38 | "PerformanceLogger",
39 | ]
40 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools>=68", "wheel"]
3 | build-backend = "setuptools.build_meta"
4 |
5 | [project]
6 | name = "sleepless-agent"
7 | version = "0.1.2"
8 | description = "24/7 AI agent that maximizes Claude usage via Slack interface"
9 | readme = "README.md"
10 | authors = [{name = "Sleepless Agent Maintainers"}]
11 | license = {file = "LICENSE"}
12 | keywords = ["ai", "automation", "agents", "slack", "anthropic"]
13 | classifiers = [
14 | "Development Status :: 3 - Alpha",
15 | "Intended Audience :: Developers",
16 | "Operating System :: OS Independent",
17 | "Programming Language :: Python :: 3",
18 | "Programming Language :: Python :: 3.11",
19 | "License :: OSI Approved :: MIT License",
20 | "Topic :: Communications :: Chat",
21 | "Topic :: Office/Business",
22 | ]
23 | requires-python = ">=3.11"
24 | dependencies = [
25 | "anthropic",
26 | "slack-sdk",
27 | "sqlalchemy",
28 | "python-dotenv",
29 | "PyYAML",
30 | "gitpython",
31 | "aiosqlite",
32 | "loguru",
33 | "rich",
34 | "structlog",
35 | "psutil",
36 | "claude-agent-sdk",
37 | "requests",
38 | ]
39 |
40 | [project.scripts]
41 | sle = "sleepless_agent.__main__:main"
42 |
43 | [project.urls]
44 | Homepage = "https://github.com/context-machine-lab/sleepless-agent"
45 | Repository = "https://github.com/context-machine-lab/sleepless-agent"
46 |
47 | [tool.setuptools]
48 | package-dir = {"" = "src"}
49 |
50 | [tool.setuptools.package-data]
51 | "sleepless_agent" = ["config.yaml"]
52 |
53 | [tool.setuptools.packages.find]
54 | where = ["src"]
55 | include = ["sleepless_agent*"]
56 |
--------------------------------------------------------------------------------
/.github/workflows/deploy-docs.yml:
--------------------------------------------------------------------------------
1 | name: Deploy Documentation
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | paths:
8 | - 'docs/**'
9 | - 'mkdocs.yml'
10 | - '.github/workflows/deploy-docs.yml'
11 | workflow_dispatch: # Allow manual trigger
12 |
13 | permissions:
14 | contents: read
15 | pages: write
16 | id-token: write
17 |
18 | # Allow one concurrent deployment
19 | concurrency:
20 | group: "pages"
21 | cancel-in-progress: true
22 |
23 | jobs:
24 | build:
25 | runs-on: ubuntu-latest
26 | steps:
27 | - name: Checkout
28 | uses: actions/checkout@v4
29 | with:
30 | fetch-depth: 0 # Fetch all history for git-revision-date-localized-plugin
31 |
32 | - name: Setup Python
33 | uses: actions/setup-python@v5
34 | with:
35 | python-version: '3.11'
36 | cache: 'pip'
37 |
38 | - name: Install dependencies
39 | run: |
40 | pip install mkdocs mkdocs-material pymdown-extensions
41 | pip install mkdocs-git-revision-date-localized-plugin
42 | pip install mkdocs-minify-plugin
43 |
44 | - name: Build documentation
45 | run: mkdocs build
46 |
47 | - name: Upload artifact
48 | uses: actions/upload-pages-artifact@v3
49 | with:
50 | path: ./site
51 |
52 | deploy:
53 | if: github.event_name == 'push' || github.event_name == 'workflow_dispatch'
54 | needs: build
55 | runs-on: ubuntu-latest
56 | environment:
57 | name: github-pages
58 | url: ${{ steps.deployment.outputs.page_url }}
59 | steps:
60 | - name: Deploy to GitHub Pages
61 | id: deployment
62 | uses: actions/deploy-pages@v4
--------------------------------------------------------------------------------
/src/sleepless_agent/utils/display.py:
--------------------------------------------------------------------------------
1 | """Shared formatting utilities for human-readable status output."""
2 |
3 | from __future__ import annotations
4 |
5 | from datetime import datetime, timezone
6 | from typing import Optional
7 |
8 |
def format_duration(seconds: Optional[float]) -> str:
    """Turn seconds into a compact human-readable duration string.

    Args:
        seconds: Duration in seconds; negative values are treated as their
            absolute value, and None yields the em-dash placeholder.
    """
    if seconds is None:
        return "—"

    seconds = int(abs(seconds))
    hours, remainder = divmod(seconds, 3600)
    minutes, secs = divmod(remainder, 60)

    parts: list[str] = []
    if hours:
        parts.append(f"{hours}h")
    if minutes:
        parts.append(f"{minutes}m")
    if secs or not parts:
        # Always emit at least "0s" so the result is never empty.
        parts.append(f"{secs}s")
    return " ".join(parts)


def relative_time(dt: Optional[datetime], default: str = "—") -> str:
    """Return a relative time string compared to now (e.g., '5m ago').

    Args:
        dt: Timestamp to compare against now. Naive values are assumed to be
            UTC (the module's convention); aware values are converted.
        default: Placeholder returned when ``dt`` is falsy.
    """
    if not dt:
        return default

    # Fix: subtracting an aware datetime from the naive "now" below raised
    # TypeError. Normalize aware inputs to the naive-UTC convention first.
    if dt.tzinfo is not None:
        dt = dt.astimezone(timezone.utc).replace(tzinfo=None)

    now = datetime.now(timezone.utc).replace(tzinfo=None)
    delta = now - dt
    suffix = "ago"
    total_seconds = delta.total_seconds()

    if total_seconds < 0:
        # Future timestamps flip the wording instead of showing a negative.
        total_seconds = -total_seconds
        suffix = "from now"

    return f"{format_duration(total_seconds)} {suffix}"
43 |
44 |
def shorten(text: str, limit: int = 120) -> str:
    """Collapse runs of whitespace and truncate to `limit` with an ellipsis."""
    collapsed = " ".join(text.split())
    if len(collapsed) > limit:
        # Reserve one character for the ellipsis; drop trailing spaces first.
        return collapsed[: limit - 1].rstrip() + "…"
    return collapsed
51 |
52 |
def format_age_seconds(seconds: Optional[float], default: str = "N/A") -> str:
    """Format an elapsed-time value in seconds with an 'ago' suffix."""
    if seconds is None:
        return default
    return f"{format_duration(seconds)} ago"
58 |
59 |
60 | __all__ = [
61 | "format_duration",
62 | "relative_time",
63 | "shorten",
64 | "format_age_seconds",
65 | ]
66 |
--------------------------------------------------------------------------------
/src/sleepless_agent/scheduling/time_utils.py:
--------------------------------------------------------------------------------
1 | """Shared helpers for reasoning about day/night windows."""
2 |
3 | from __future__ import annotations
4 |
5 | from datetime import datetime, timedelta
6 | from typing import Optional
7 |
8 | NIGHT_START_HOUR = 20 # 8 PM
9 | NIGHT_END_HOUR = 8 # 8 AM
10 |
11 |
def is_nighttime(
    dt: Optional[datetime] = None,
    night_start_hour: int = NIGHT_START_HOUR,
    night_end_hour: int = NIGHT_END_HOUR,
) -> bool:
    """Return True when the provided datetime falls within the night window."""
    hour = (datetime.now() if dt is None else dt).hour

    # A window like 1 -> 9 stays within one day; 20 -> 8 wraps past midnight.
    if night_start_hour < night_end_hour:
        return night_start_hour <= hour < night_end_hour
    return hour >= night_start_hour or hour < night_end_hour
28 |
29 |
def get_time_label(
    dt: Optional[datetime] = None,
    night_start_hour: int = NIGHT_START_HOUR,
    night_end_hour: int = NIGHT_END_HOUR,
) -> str:
    """Return a human-readable label for the current time period."""
    if is_nighttime(dt, night_start_hour, night_end_hour):
        return "night"
    return "daytime"
37 |
38 |
def current_period_start(
    dt: Optional[datetime] = None,
    night_start_hour: int = NIGHT_START_HOUR,
    night_end_hour: int = NIGHT_END_HOUR,
) -> datetime:
    """Return the local timestamp marking the start of the current period."""
    dt = dt or datetime.now()
    midnight = dt.replace(hour=0, minute=0, second=0, microsecond=0)

    if not is_nighttime(dt, night_start_hour, night_end_hour):
        # Daytime always begins at night_end_hour on the current day.
        return midnight.replace(hour=night_end_hour)

    if night_start_hour < night_end_hour:
        # Same-day night window (e.g., 1 AM to 9 AM) starts today.
        return midnight.replace(hour=night_start_hour)

    # Cross-midnight window (e.g., 8 PM to 8 AM): before midnight the period
    # started today; after midnight it started yesterday.
    start_day = midnight if dt.hour >= night_start_hour else midnight - timedelta(days=1)
    return start_day.replace(hour=night_start_hour)
63 |
--------------------------------------------------------------------------------
/src/sleepless_agent/tasks/utils.py:
--------------------------------------------------------------------------------
1 | """Shared helpers for task creation and metadata handling."""
2 |
3 | from __future__ import annotations
4 |
5 | import re
6 | from typing import Optional, Tuple
7 |
8 |
def slugify_project(identifier: str) -> str:
    """Convert a project name/id into the kebab-case slug used as project_id."""
    lowered = identifier.lower()
    # NOTE(review): each disallowed character becomes its own dash, so
    # "a  b" -> "a--b"; kept as-is because slugs may already be persisted.
    return re.sub(r"[^a-z0-9-]", "-", lowered).strip("-")
12 |
13 |
def parse_task_description(description: str) -> Tuple[str, Optional[str], Optional[str]]:
    """Normalize a task description, extracting the project flag and notes.

    Returns:
        tuple of (clean_description, project_name, note)
    """
    working = description.strip()
    project_name: Optional[str] = None
    note: Optional[str] = None

    # Longest flag first so "--project" is never mistaken for "-p".
    # Accepted forms: --project value, --project=value, -p value, -p=value.
    for flag_pattern in (r"--project[=\s]+(\S+)", r"-p[=\s]+(\S+)"):
        match = re.search(flag_pattern, working)
        if match is None:
            continue
        project_name = match.group(1)
        working = working.replace(match.group(0), "").strip()
        break

    # Legacy flags are stripped and turned into informational notes.
    if "--serious" in working:
        working = working.replace("--serious", "").strip()
        note = "ℹ️ `--serious` flag no longer needed; `/task` and CLI tasks are serious by default."

    if "--random" in working:
        working = working.replace("--random", "").strip()
        warning = (
            "ℹ️ Thoughts belong in `/think`. Treating this as a serious task."
        )
        note = f"{note}\n{warning}" if note else warning

    return working, project_name, note
51 |
52 |
def prepare_task_creation(
    description: str,
    project_override: Optional[str] = None,
) -> Tuple[str, Optional[str], Optional[str], Optional[str]]:
    """Normalize task input and derive project metadata.

    Args:
        description: Raw task description (may include flags).
        project_override: Project name provided via CLI flag or Slack command.

    Returns:
        tuple of (cleaned_description, final_project_name, project_id, note)
    """
    cleaned, inline_project, note = parse_task_description(description)
    # An explicit override always wins over a flag embedded in the text.
    project = project_override if project_override else inline_project
    slug = slugify_project(project) if project else None
    return cleaned, project, slug, note
75 |
--------------------------------------------------------------------------------
/src/sleepless_agent/storage/sqlite.py:
--------------------------------------------------------------------------------
1 | """Shared SQLite utilities for task and result persistence."""
2 |
3 | from __future__ import annotations
4 |
5 | from typing import Callable, Optional, TypeVar
6 |
7 | from sqlalchemy import create_engine
8 | from sqlalchemy.exc import OperationalError
9 | from sqlalchemy.orm import Session, sessionmaker
10 |
11 | from sleepless_agent.monitoring.logging import get_logger
12 |
13 | logger = get_logger(__name__)
14 |
15 | T = TypeVar("T")
16 |
17 |
class SQLiteStore:
    """Base helper that encapsulates SQLite engine/session lifecycle.

    Subclasses run their queries through `_run_read`/`_run_write`, which own
    session creation, commit/rollback, and cleanup. Writes additionally retry
    after rebuilding the engine when SQLite reports a readonly/locked
    database — a symptom of a stale pooled connection.
    """

    def __init__(self, db_path: str, *, echo: bool = False):
        # echo=True makes SQLAlchemy log every emitted SQL statement.
        self.db_path = db_path
        self._echo = echo
        self._create_engine()

    def _create_engine(self) -> None:
        # expire_on_commit=False keeps ORM objects usable after the session
        # commits/closes, since callers return them out of _run_read/_run_write.
        self.engine = create_engine(f"sqlite:///{self.db_path}", echo=self._echo, future=True)
        self.SessionLocal = sessionmaker(bind=self.engine, expire_on_commit=False)

    def _reset_engine(self) -> None:
        # Drop every pooled connection, then rebuild from scratch; used to
        # recover from readonly/locked errors caused by stale handles.
        self.engine.dispose(close=True)
        self._create_engine()

    @staticmethod
    def _should_reset_on_error(exc: OperationalError) -> bool:
        # Heuristic string match: SQLAlchemy does not surface SQLite error
        # codes here, so inspect the message for the recoverable cases.
        message = str(exc).lower()
        return "readonly" in message or ("sqlite" in message and "locked" in message)

    def _run_write(
        self,
        operation: Callable[[Session], T],
        *,
        retries: int = 2,
    ) -> T:
        """Run `operation` in a fresh session and commit its work.

        Recoverable SQLite errors (readonly/locked) trigger an engine reset
        and a retry; `retries` is the total number of attempts, not extras.
        Any other exception is rolled back and re-raised unchanged.
        """
        last_exc: Optional[Exception] = None
        for attempt in range(retries):
            session = self.SessionLocal()
            try:
                result = operation(session)
                session.commit()
                return result
            except OperationalError as exc:
                session.rollback()
                last_exc = exc
                # Only retry when the error looks recoverable AND attempts remain.
                if self._should_reset_on_error(exc) and attempt < retries - 1:
                    logger.warning(
                        "sqlite.retry",
                        attempt=attempt + 1,
                        retries=retries,
                        error=str(exc),
                    )
                    self._reset_engine()
                    continue
                raise
            except Exception as exc:
                session.rollback()
                last_exc = exc
                raise
            finally:
                # Runs on every path (return, raise, continue): never leak a session.
                session.close()
        # Defensive tail: every loop iteration returns or raises, so this is
        # only reachable when retries <= 0.
        if last_exc:
            raise RuntimeError(f"SQLite operation failed after {retries} attempts") from last_exc
        raise RuntimeError("SQLite operation failed without raising an exception")

    def _run_read(self, operation: Callable[[Session], T]) -> T:
        """Run a read-only `operation` in a short-lived session (no commit)."""
        session = self.SessionLocal()
        try:
            return operation(session)
        finally:
            session.close()
81 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
.PHONY: help setup run dev logs clean test db db-reset install-service install-launchd uninstall-service uninstall-launchd stats status backup
2 |
3 | help:
4 | @echo "Sleepless Agent - Commands"
5 | @echo ""
6 | @echo "setup Install dependencies"
7 | @echo "run Run agent daemon"
8 | @echo "dev Run with debug logging"
9 | @echo "logs Follow live logs"
10 | @echo "test Run basic tests"
11 | @echo "db Query database"
12 | @echo "db-reset Clear database"
13 | @echo "clean Clean cache and logs"
14 | @echo "install-service Install as systemd service (Linux)"
15 | @echo "install-launchd Install as launchd service (macOS)"
16 |
17 | setup:
18 | python -m venv venv
19 | ./venv/bin/pip install -e .
20 | cp .env.example .env
21 | @echo "✓ Setup complete. Edit .env with your tokens"
22 |
23 | run:
24 | sle daemon
25 |
26 | dev:
27 | PYTHONUNBUFFERED=1 sle daemon
28 |
29 | logs:
30 | tail -f workspace/data/agent.log
31 |
# Sanity-check imports: SleeplessAgent is exported from the package root
# (sleepless_agent/__init__.py); a "sleepless_agent.runtime" module does not exist.
test:
	@echo "Testing imports..."
	python -c "from sleepless_agent import SleeplessAgent; print('✓ Imports OK')"
35 |
36 | db:
37 | sqlite3 workspace/data/tasks.db "SELECT id, description, status, priority FROM tasks LIMIT 10;"
38 |
39 | db-reset:
40 | rm -f workspace/data/tasks.db workspace/data/*.db
41 | @echo "✓ Database cleared"
42 |
43 | clean:
44 | find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true
45 | find . -type f -name "*.pyc" -delete
46 | rm -rf .pytest_cache
47 | @echo "✓ Cache cleaned"
48 |
49 | install-service:
50 | @echo "Installing systemd service..."
51 | sudo cp src/sleepless_agent/deployment/sleepless-agent.service /etc/systemd/system/
52 | sudo systemctl daemon-reload
53 | sudo systemctl enable sleepless-agent
54 | @echo "✓ Service installed. Start with: sudo systemctl start sleepless-agent"
55 |
56 | install-launchd:
57 | @echo "Installing launchd service..."
58 | @echo "Note: Update WorkingDirectory in src/sleepless_agent/deployment/com.sleepless-agent.plist first!"
59 | cp src/sleepless_agent/deployment/com.sleepless-agent.plist ~/Library/LaunchAgents/
60 | launchctl load ~/Library/LaunchAgents/com.sleepless-agent.plist
61 | @echo "✓ Service installed and running"
62 |
63 | uninstall-service:
64 | sudo systemctl stop sleepless-agent
65 | sudo systemctl disable sleepless-agent
66 | sudo rm /etc/systemd/system/sleepless-agent.service
67 | sudo systemctl daemon-reload
68 | @echo "✓ Service uninstalled"
69 |
70 | uninstall-launchd:
71 | launchctl unload ~/Library/LaunchAgents/com.sleepless-agent.plist
72 | rm ~/Library/LaunchAgents/com.sleepless-agent.plist
73 | @echo "✓ Service uninstalled"
74 |
75 | stats:
76 | @echo "=== Performance Metrics (last 24h) ==="
77 | @tail -1000 workspace/data/metrics.jsonl 2>/dev/null | jq -s 'length as $$count | [.[] | select(.success == true)] | {total: $$count, successful: length, failed: ($$count - length), avg_duration: (map(.duration_seconds) | add / length | round | . as $$t | if $$t > 60 then "\($$t / 60 | floor)m\($$t % 60)s" else "\($$t)s" end)}' || echo "No metrics available"
78 |
79 | status:
80 | @echo "=== Agent Status ==="
81 | @pgrep -f "sleepless_agent" > /dev/null && echo "✓ Daemon running" || echo "✗ Daemon not running"
82 | @test -f .env && echo "✓ .env configured" || echo "✗ .env missing"
83 | @test -f workspace/data/tasks.db && echo "✓ Database exists" || echo "✗ Database missing"
84 | @echo ""
85 | @echo "Queue status:"
86 | @sqlite3 workspace/data/tasks.db "SELECT status, COUNT(*) FROM tasks GROUP BY status;" 2>/dev/null || echo "(no database)"
87 |
88 | backup:
89 | @mkdir -p backups
90 | @tar czf backups/sleepless-agent-$$(date +%Y%m%d-%H%M%S).tar.gz workspace/data/
91 | @echo "✓ Backup created"
92 |
--------------------------------------------------------------------------------
/docs/concepts/index.md:
--------------------------------------------------------------------------------
1 | # Core Concepts
2 |
3 | Understanding the fundamental concepts behind Sleepless Agent will help you use it more effectively.
4 |
5 | ## Overview
6 |
7 | Sleepless Agent is built on several core principles:
8 |
9 | 1. **Continuous Operation** - Runs 24/7 as a daemon
10 | 2. **Workspace Isolation** - Each task executes in isolation
11 | 3. **Intelligent Scheduling** - Optimizes task execution
12 | 4. **Usage Management** - Maximizes Claude Pro value
13 |
14 | ## Key Concepts
15 |
16 | ### 📐 [Architecture](architecture.md)
17 | Learn how the system components work together to process tasks autonomously.
18 |
19 | - System components and their responsibilities
20 | - Data flow through the system
21 | - Communication patterns
22 | - Extension points
23 |
24 | ### 🔄 [Task Lifecycle](task-lifecycle.md)
25 | Understand how tasks move from creation to completion.
26 |
27 | - Task states and transitions
28 | - Execution phases
29 | - Error handling and recovery
30 | - Result storage
31 |
32 | ### 🏗️ [Workspace Isolation](workspace-isolation.md)
33 | Explore how tasks run in isolated environments for safety and parallelism.
34 |
35 | - Isolation principles
36 | - Workspace types
37 | - Security model
38 | - Resource management
39 |
40 | ### ⏰ [Task Scheduling](scheduling.md)
41 | Discover how the scheduler prioritizes and executes tasks.
42 |
43 | - Priority system
44 | - Scheduling algorithm
45 | - Queue management
46 | - Resource allocation
47 |
48 | ### 📊 [Pro Plan Management](pro-plan-management.md)
49 | Learn how the agent optimizes your Claude Pro subscription usage.
50 |
51 | - Usage tracking
52 | - Time-based thresholds
53 | - Window management
54 | - Optimization strategies
55 |
56 | ## Concept Map
57 |
58 | ```
59 | Sleepless Agent
60 | ├── User Input (Slack/CLI)
61 | │ └── Task Creation
62 | │ └── Queue Management
63 | │ └── Scheduling
64 | │ └── Execution
65 | │ ├── Workspace Isolation
66 | │ ├── Claude Code CLI
67 | │ └── Result Storage
68 | ├── Resource Management
69 | │ ├── Pro Plan Usage
70 | │ ├── System Resources
71 | │ └── Time Windows
72 | └── Automation
73 | ├── Git Integration
74 | ├── PR Creation
75 | └── Report Generation
76 | ```
77 |
78 | ## Learning Path
79 |
80 | ### For New Users
81 | 1. Start with [Architecture](architecture.md) for system overview
82 | 2. Read [Task Lifecycle](task-lifecycle.md) to understand task flow
83 | 3. Review [Pro Plan Management](pro-plan-management.md) for usage optimization
84 |
85 | ### For Developers
86 | 1. Deep dive into [Workspace Isolation](workspace-isolation.md)
87 | 2. Study [Scheduling](scheduling.md) algorithms
88 | 3. Explore extension points in [Architecture](architecture.md)
89 |
90 | ### For Operators
91 | 1. Focus on [Pro Plan Management](pro-plan-management.md)
92 | 2. Understand [Scheduling](scheduling.md) for optimization
93 | 3. Review monitoring in [Task Lifecycle](task-lifecycle.md)
94 |
95 | ## Quick Reference
96 |
97 | | Concept | Key Points |
98 | |---------|------------|
99 | | **Architecture** | Modular, event-driven, extensible |
100 | | **Task Lifecycle** | Created → Pending → Scheduled → Running → Complete |
101 | | **Workspace** | Isolated, parallel, secure |
102 | | **Scheduling** | Priority-based, resource-aware, fair |
103 | | **Usage** | Time-based thresholds, automatic pausing |
104 |
105 | ## Related Topics
106 |
107 | - [Installation Guide](../installation.md) - Set up the system
108 | - [Configuration Reference](../reference/configuration.md) - Detailed settings
109 | - [First Task Tutorial](../tutorials/first-task.md) - Hands-on experience
110 | - [FAQ](../faq.md) - Common questions
--------------------------------------------------------------------------------
/docs/guides/index.md:
--------------------------------------------------------------------------------
1 | # Guides
2 |
3 | Step-by-step guides for configuring and using Sleepless Agent effectively.
4 |
5 | ## Setup Guides
6 |
7 | ### 📱 [Slack Setup](slack-setup.md)
8 | Complete guide to setting up Slack integration.
9 |
10 | - Create and configure Slack app
11 | - Set up slash commands
12 | - Configure permissions
13 | - Test the integration
14 |
15 | ### 🔧 [Environment Setup](environment-setup.md)
16 | Configure environment variables and settings.
17 |
18 | - Environment variables
19 | - Configuration files
20 | - Security best practices
21 | - Environment-specific settings
22 |
23 | ### 🔗 [Git Integration](git-integration.md)
24 | Set up automated Git workflows.
25 |
26 | - Repository configuration
27 | - Commit automation
28 | - Pull request creation
29 | - Multi-repo management
30 |
31 | ## Advanced Guides
32 |
33 | ### 📁 Project Management
34 | *Coming soon* - Organize tasks into projects.
35 |
36 | - Project structure
37 | - Task grouping
38 | - Milestone tracking
39 | - Progress reporting
40 |
41 | ### 🎯 Custom Prompts
42 | *Coming soon* - Create custom task prompts.
43 |
44 | - Prompt engineering
45 | - Template system
46 | - Context injection
47 | - Performance optimization
48 |
49 | ### 🚀 Deployment
50 | *Coming soon* - Deploy to production.
51 |
52 | - System requirements
53 | - Deployment strategies
54 | - Monitoring setup
55 | - Scaling considerations
56 |
57 | ## Quick Setup Checklist
58 |
59 | Follow this order for initial setup:
60 |
61 | - [ ] Install Sleepless Agent
62 | - [ ] Configure environment variables
63 | - [ ] Set up Slack application
64 | - [ ] Configure Git integration
65 | - [ ] Test with first task
66 | - [ ] Set up monitoring
67 | - [ ] Configure projects
68 |
69 | ## Common Workflows
70 |
71 | ### Basic Setup
72 | ```bash
73 | # 1. Install
74 | pip install sleepless-agent
75 |
76 | # 2. Configure
77 | cp .env.example .env
78 | nano .env
79 |
80 | # 3. Start
81 | sle daemon
82 | ```
83 |
84 | ### Slack Integration
85 | 1. Create Slack app
86 | 2. Enable Socket Mode
87 | 3. Add slash commands
88 | 4. Install to workspace
89 | 5. Add tokens to `.env`
90 |
91 | ### Git Workflow
92 | 1. Configure Git user
93 | 2. Authenticate GitHub CLI
94 | 3. Set repository URL
95 | 4. Enable auto-commits
96 |
97 | ## Configuration Priority
98 |
99 | Understanding configuration precedence:
100 |
101 | 1. **Command-line arguments** (highest priority)
102 | 2. **Environment variables**
103 | 3. **`.env` file**
104 | 4. **`config.yaml`**
105 | 5. **Default values** (lowest priority)
106 |
107 | ## Best Practices
108 |
109 | ### Security
110 | - Store secrets in environment variables
111 | - Use `.env` file with proper permissions
112 | - Never commit sensitive data
113 | - Rotate tokens regularly
114 |
115 | ### Performance
116 | - Adjust thresholds based on usage
117 | - Configure appropriate timeouts
118 | - Monitor resource usage
119 | - Clean up old workspaces
120 |
121 | ### Reliability
122 | - Set up proper monitoring
123 | - Configure error notifications
124 | - Regular backup of database
125 | - Test disaster recovery
126 |
127 | ## Troubleshooting
128 |
129 | Common setup issues:
130 |
131 | | Issue | Solution | Guide |
132 | |-------|----------|-------|
133 | | Slack bot not responding | Check Socket Mode | [Slack Setup](slack-setup.md#troubleshooting) |
134 | | Tasks not executing | Verify Claude CLI | [Environment Setup](environment-setup.md#validation) |
135 | | Git commits failing | Check authentication | [Git Integration](git-integration.md#troubleshooting) |
136 |
137 | ## Getting Help
138 |
139 | - Check the [FAQ](../faq.md) first
140 | - Review [Troubleshooting](../troubleshooting.md)
141 | - Join [Discord](https://discord.gg/74my3Wkn)
142 | - Open an [issue](https://github.com/context-machine-lab/sleepless-agent/issues)
--------------------------------------------------------------------------------
/docs/quickstart.md:
--------------------------------------------------------------------------------
1 | # Quickstart Guide
2 |
3 | Get Sleepless Agent running in 5 minutes! This guide covers the minimal setup needed to start processing tasks.
4 |
5 | ## Prerequisites
6 |
7 | Before starting, ensure you have:
8 |
9 | - ✅ Python 3.11+ installed
10 | - ✅ Node.js 16+ installed
11 | - ✅ Slack workspace access
12 | - ✅ Claude Code CLI installed
13 |
14 | ## Step 1: Install Claude Code CLI
15 |
16 | ```bash
17 | npm install -g @anthropic-ai/claude-code
18 | ```
19 |
20 | Verify installation:
21 | ```bash
22 | claude --version
23 | ```
24 |
25 | ## Step 2: Install Sleepless Agent
26 |
27 | ```bash
28 | pip install sleepless-agent
29 | ```
30 |
31 | Or from source:
32 | ```bash
33 | git clone https://github.com/context-machine-lab/sleepless-agent
34 | cd sleepless-agent
35 | pip install -e .
36 | ```
37 |
38 | ## Step 3: Quick Slack Setup
39 |
40 | 1. Visit [https://api.slack.com/apps](https://api.slack.com/apps)
41 | 2. Click "Create New App" → "From scratch"
42 | 3. Name it "Sleepless Agent" and select your workspace
43 |
44 | ### Enable Socket Mode
45 | - Go to Settings → Socket Mode → Enable
46 | - Create an app-level token (name: "sleepless-token")
47 | - Save the `xapp-...` token
48 |
49 | ### Add Slash Commands
50 | Go to Features → Slash Commands and create:
51 | - `/think` - Submit tasks
52 | - `/check` - Check status
53 | - `/usage` - Show Claude Code Pro plan usage
54 |
55 | ### Set Bot Permissions
56 | Features → OAuth & Permissions → Bot Token Scopes:
57 | - `chat:write`
58 | - `commands`
59 |
60 | ### Install to Workspace
61 | - Click "Install to Workspace"
62 | - Save the `xoxb-...` bot token
63 |
64 | ## Step 4: Configure Environment
65 |
66 | Create a `.env` file:
67 |
68 | ```bash
69 | # Required Slack tokens
70 | SLACK_BOT_TOKEN=xoxb-your-bot-token-here
71 | SLACK_APP_TOKEN=xapp-your-app-token-here
72 |
73 | # Optional: Custom workspace location
74 | AGENT_WORKSPACE=./workspace
75 | ```
76 |
77 | ## Step 5: Start the Agent
78 |
79 | ```bash
80 | sle daemon
81 | ```
82 |
83 | You should see:
84 | ```
85 | 2025-10-24 23:30:12 | INFO | Sleepless Agent starting...
86 | 2025-10-24 23:30:12 | INFO | Slack bot started and listening for events
87 | ```
88 |
89 | ## Step 6: Test Your Setup
90 |
91 | In Slack, try these commands:
92 |
93 | ```
94 | /think Research Python async patterns
95 | /check
96 | ```
97 |
98 | The agent should acknowledge your task and show the queue status.
99 |
100 | ## What's Next?
101 |
102 | ### Essential Configuration
103 |
104 | 1. **Set up Git integration** for automated commits:
105 | ```bash
106 | git config --global user.name "Sleepless Agent"
107 | git config --global user.email "agent@sleepless.local"
108 | ```
109 |
110 | 2. **Configure Pro plan thresholds** in `config.yaml`:
111 | ```yaml
112 | claude_code:
113 | threshold_day: 20.0 # Pause at 20% during day
114 | threshold_night: 80.0 # Pause at 80% at night
115 | ```
116 |
117 | 3. **Set working hours** for optimal usage:
118 | ```yaml
119 | claude_code:
120 | night_start_hour: 20 # 8 PM
121 | night_end_hour: 8 # 8 AM
122 | ```
123 |
124 | ### Recommended Next Steps
125 |
126 | - 📖 Read the [Architecture Overview](concepts/architecture.md)
127 | - 🔧 Complete [Slack Setup](guides/slack-setup.md) for all features
128 | - 🎯 Try the [First Task Tutorial](tutorials/first-task.md)
129 | - 📊 Learn about [Task Management](guides/project-management.md)
130 |
131 | ## Common Issues
132 |
133 | ### Agent not responding in Slack?
134 | - Verify Socket Mode is enabled
135 | - Check both tokens are correct in `.env`
136 | - Ensure the bot is in your channel
137 |
138 | ### Tasks not executing?
139 | - Run `claude --version` to verify CLI installation
- Run `sle check` to review current usage limits
141 | - Review logs: `tail -f workspace/data/agent.log`
142 |
143 | ### Usage threshold reached?
144 | - Agent pauses at configured thresholds
145 | - Wait for 5-hour window reset
146 | - Adjust thresholds in `config.yaml` if needed
147 |
148 | ## Getting Help
149 |
150 | - 💬 [Discord Community](https://discord.gg/74my3Wkn)
151 | - 📚 [Full Documentation](index.md)
152 | - 🐛 [Report Issues](https://github.com/context-machine-lab/sleepless-agent/issues)
153 |
154 | ---
155 |
156 | 🎉 **Congratulations!** You now have a 24/7 AI agent working for you. Check out the [tutorials](tutorials/first-task.md) to learn more advanced features.
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Sleepless Agent Documentation
2 |
3 | Welcome to the Sleepless Agent documentation! This guide will help you understand, configure, and use the 24/7 AI agent that works while you sleep.
4 |
5 | ## What is Sleepless Agent?
6 |
7 | Sleepless Agent is an autonomous AI system that transforms your Claude Code Pro subscription into a 24/7 AgentOS. It processes tasks via Slack commands, manages isolated workspaces, and optimizes Claude usage across day and night cycles.
8 |
9 | ### Key Features
10 |
11 | - **🤖 Continuous Operation**: Runs as a daemon, always ready for new tasks
12 | - **💬 Slack Integration**: Submit and manage tasks through Slack commands
13 | - **🎯 Hybrid Autonomy**: Auto-processes random thoughts, requires review for serious tasks
14 | - **⚡ Smart Scheduling**: Optimizes execution based on priorities and usage limits
15 | - **📊 Persistent Storage**: SQLite-backed task queue with full history
16 | - **🏗️ Workspace Isolation**: Each task runs in its own isolated environment
17 | - **📈 Usage Optimization**: Intelligent Pro plan usage management
18 |
19 | ## Documentation Overview
20 |
21 | ### 📚 Getting Started
22 | - [**Quickstart**](quickstart.md) - Get running in 5 minutes
23 | - [**Installation**](installation.md) - Detailed setup instructions
24 | - [**FAQ**](faq.md) - Frequently asked questions
25 |
26 | ### 🧠 Core Concepts
27 | - [**Architecture**](concepts/architecture.md) - System design and components
28 | - [**Task Lifecycle**](concepts/task-lifecycle.md) - How tasks flow through the system
29 | - [**Workspace Isolation**](concepts/workspace-isolation.md) - Understanding isolated environments
30 | - [**Scheduling**](concepts/scheduling.md) - Task prioritization and execution
31 | - [**Pro Plan Management**](concepts/pro-plan-management.md) - Usage optimization strategies
32 |
33 | ### 📖 Guides
34 | - [**Slack Setup**](guides/slack-setup.md) - Configure your Slack application
35 | - [**Environment Setup**](guides/environment-setup.md) - Configure environment variables
36 | - [**Git Integration**](guides/git-integration.md) - Automated commits and PRs
37 | - [**Project Management**](guides/project-management.md) - Organizing tasks into projects
38 | - [**Deployment**](guides/deployment.md) - Production deployment strategies
39 |
40 | ### 🎓 Tutorials
41 | - [**First Task**](tutorials/first-task.md) - Create your first automated task
42 | - [**Slack Workflows**](tutorials/slack-workflows.md) - Build effective command workflows
43 | - [**Monitoring Tasks**](tutorials/monitoring-tasks.md) - Track execution and results
44 | - [**Daily Reports**](tutorials/daily-reports.md) - Understanding generated reports
45 | - [**Workspace Management**](tutorials/workspace-management.md) - Managing task workspaces
46 |
47 | ### 📋 Reference
48 | - [**CLI Commands**](reference/api/cli-commands.md) - Complete command reference
49 | - [**Slack Commands**](reference/api/slack-commands.md) - Slash command reference
50 | - [**Configuration**](reference/configuration.md) - All configuration options
51 | - [**Database Schema**](reference/database-schema.md) - SQLite schema details
52 | - [**Environment Variables**](reference/environment-variables.md) - Variable reference
53 |
54 | ### 💡 Examples
55 | - [**Basic Usage**](examples/basic-usage.md) - Simple usage patterns
56 | - [**Slack Integration**](examples/slack-integration.md) - Slack workflow examples
57 | - [**Advanced Workflows**](examples/advanced-workflows.md) - Complex task patterns
58 | - [**Custom Executors**](examples/custom-executors.md) - Extending functionality
59 |
60 | ## Quick Links
61 |
62 | - [GitHub Repository](https://github.com/context-machine-lab/sleepless-agent)
63 | - [Discord Community](https://discord.gg/74my3Wkn)
64 | - [Issue Tracker](https://github.com/context-machine-lab/sleepless-agent/issues)
65 | - [Changelog](changelog.md)
66 |
67 | ## System Requirements
68 |
69 | - Python 3.11+
70 | - Claude Code CLI (`@anthropic-ai/claude-code`)
71 | - Slack workspace with admin access
72 | - SQLite 3
73 | - Git (optional, for automation)
74 |
75 | ## Getting Help
76 |
77 | - Check the [FAQ](faq.md) for common questions
78 | - Review [Troubleshooting](troubleshooting.md) for solutions
79 | - Join our [Discord](https://discord.gg/74my3Wkn) for community support
80 | - Open an [issue](https://github.com/context-machine-lab/sleepless-agent/issues) for bugs
81 |
82 | ## Contributing
83 |
84 | We welcome contributions! See our [Contributing Guide](https://github.com/context-machine-lab/sleepless-agent/blob/main/CONTRIBUTING.md) for details.
85 |
86 | ## License
87 |
88 | Sleepless Agent is released under the [MIT License](https://github.com/context-machine-lab/sleepless-agent/blob/main/LICENSE).
--------------------------------------------------------------------------------
/src/sleepless_agent/tasks/refinement.py:
--------------------------------------------------------------------------------
1 | """Utilities for generating refinement follow-up tasks."""
2 |
3 | from __future__ import annotations
4 |
5 | import json
6 | from datetime import datetime, timedelta, timezone
7 | from typing import Optional, Sequence
8 |
9 | from sleepless_agent.monitoring.logging import get_logger
10 | logger = get_logger(__name__)
11 |
12 | from sqlalchemy.orm import Session
13 |
14 | from sleepless_agent.core.models import GenerationHistory, Task, TaskPriority, TaskStatus
15 | from sleepless_agent.core.queue import TaskQueue
16 |
17 |
18 | REFINEMENT_CONTEXT_KEY = "refinement_parent_task_id"
19 |
20 |
21 | def _normalize_text_list(items: Sequence[str]) -> list[str]:
22 | """Clean and normalize bullet-style strings."""
23 | normalized: list[str] = []
24 | for item in items or []:
25 | text = item.strip()
26 | text = text.strip("- *[]•✓❌")
27 | if text:
28 | normalized.append(text)
29 | return normalized
30 |
31 |
def build_refinement_description(
    *,
    source_task: Task,
    project_name: Optional[str],
    recommendations: Sequence[str] | None,
    outstanding_items: Sequence[str] | None,
) -> str:
    """Create a human-readable refinement task description.

    Prefers the first usable recommendation, then the first outstanding
    item, and finally falls back to the (truncated) source task description.
    """

    def _first_clean(entries: Sequence[str] | None) -> Optional[str]:
        # Same normalization as _normalize_text_list: trim whitespace, then
        # strip bullet decorations; skip entries that end up empty.
        for entry in entries or []:
            cleaned = entry.strip().strip("- *[]•✓❌")
            if cleaned:
                return cleaned
        return None

    label = project_name or "project"
    focus = (
        _first_clean(recommendations)
        or _first_clean(outstanding_items)
        or source_task.description[:140]
    )
    return f"Refine {label}: {focus}"
52 |
53 |
def ensure_refinement_task(
    *,
    task_queue: TaskQueue,
    session: Session,
    source_task: Task,
    project_name: Optional[str],
    recommendations: Sequence[str] | None = None,
    outstanding_items: Sequence[str] | None = None,
) -> Optional[Task]:
    """Create a refinement task if one does not already exist for the source task.

    Scans pending/in-progress tasks whose JSON ``context`` already references
    ``source_task`` (via ``REFINEMENT_CONTEXT_KEY``); if one is found, returns
    ``None``. Otherwise enqueues a new SERIOUS-priority follow-up task and
    records a ``GenerationHistory`` row, committing the session.

    Args:
        task_queue: Queue used to enqueue the new refinement task.
        session: Open SQLAlchemy session; committed on success.
        source_task: Task whose output should be refined.
        project_name: Optional project label used in the description.
        recommendations: Reviewer recommendations; the first seeds the description.
        outstanding_items: Unfinished items; used when no recommendations exist.

    Returns:
        The newly created task, or ``None`` when a refinement already exists.
    """
    parent_id = source_task.id

    # NOTE(review): this loads every pending/in-progress task that has a
    # context and inspects each JSON blob in Python — fine for small queues.
    existing = (
        session.query(Task)
        .filter(
            Task.status.in_([TaskStatus.PENDING, TaskStatus.IN_PROGRESS]),
            Task.context.isnot(None),
        )
        .all()
    )

    for candidate in existing:
        try:
            context = json.loads(candidate.context)
        except (TypeError, json.JSONDecodeError):
            # Malformed or non-JSON context — skip rather than fail the scan.
            continue
        if context.get(REFINEMENT_CONTEXT_KEY) == parent_id:
            logger.debug(f"Refinement task already exists for task {parent_id}")
            return None

    description = build_refinement_description(
        source_task=source_task,
        project_name=project_name,
        recommendations=recommendations,
        outstanding_items=outstanding_items,
    )

    # The context links the new task back to its parent, which is what the
    # duplicate scan above keys on next time around.
    context = {
        "generated_by": "refinement",
        REFINEMENT_CONTEXT_KEY: parent_id,
        # Stored as naive UTC, matching the naive datetimes used elsewhere.
        "generated_at": datetime.now(timezone.utc).replace(tzinfo=None).isoformat(),
    }

    refinement_task = task_queue.add_task(
        description=description,
        priority=TaskPriority.SERIOUS,
        context=context,
        project_id=source_task.project_id,
        project_name=project_name,
    )

    history = GenerationHistory(
        task_id=refinement_task.id,
        source="refinement",
        usage_percent_at_generation=0,
        source_metadata=json.dumps({"parent_task_id": parent_id}),
    )
    session.add(history)
    session.commit()
    logger.info(
        f"Generated refinement task #{refinement_task.id} for task #{parent_id}: {description[:80]}"
    )
    return refinement_task
117 |
118 |
def find_recent_completed_tasks(session: Session, hours: int = 24) -> list[Task]:
    """Return tasks completed within the last ``hours`` hours, newest first."""
    # Naive UTC cutoff, matching the naive datetimes stored on tasks.
    window_start = datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(hours=hours)
    query = (
        session.query(Task)
        .filter(Task.status == TaskStatus.COMPLETED)
        .filter(Task.completed_at.isnot(None))
        .filter(Task.completed_at >= window_start)
        .order_by(Task.completed_at.desc())
    )
    return query.all()
133 |
--------------------------------------------------------------------------------
/src/sleepless_agent/storage/results.py:
--------------------------------------------------------------------------------
1 | """Result storage and git integration."""
2 |
3 | from __future__ import annotations
4 |
5 | import json
6 | from datetime import datetime
7 | from pathlib import Path
8 | from typing import Optional
9 |
10 | from sqlalchemy.orm import Session
11 |
12 | from sleepless_agent.monitoring.logging import get_logger
13 | from sleepless_agent.storage.sqlite import SQLiteStore
14 | from sleepless_agent.core.models import Result
15 |
16 | logger = get_logger(__name__)
17 |
18 |
class ResultManager(SQLiteStore):
    """Manages task results and storage.

    Each saved result lives in two places: a ``Result`` row in SQLite
    (written through the inherited ``_run_write``/``_run_read`` session
    helpers) and a mirrored JSON file under ``results_path`` for inspection
    without a database client.
    """

    def __init__(self, db_path: str, results_path: str):
        """Open the SQLite store and ensure the results directory exists.

        Args:
            db_path: Path to the SQLite database file.
            results_path: Directory that will hold per-result JSON files.
        """
        super().__init__(db_path)
        self.results_path = Path(results_path)
        # Create eagerly so later file writes cannot fail on a missing parent.
        self.results_path.mkdir(parents=True, exist_ok=True)

    def _write_result_file(self, result: Result) -> Path:
        """Persist result data to JSON file and return its path.

        The filename embeds both the task id and the result id, so multiple
        results for one task never collide. Any serialization or I/O failure
        is logged and re-raised.
        """
        result_file = self.results_path / f"task_{result.task_id}_{result.id}.json"
        try:
            payload = {
                "task_id": result.task_id,
                "result_id": result.id,
                # created_at may still be unset before the row is committed.
                "created_at": result.created_at.isoformat() if result.created_at else None,
                "output": result.output,
                # These columns hold JSON-encoded lists; decode them so the
                # mirror file contains real arrays instead of strings.
                "files_modified": json.loads(result.files_modified) if result.files_modified else None,
                "commands_executed": json.loads(result.commands_executed) if result.commands_executed else None,
                "processing_time_seconds": result.processing_time_seconds,
                "git_commit_sha": result.git_commit_sha,
                "git_pr_url": result.git_pr_url,
                "git_branch": result.git_branch,
                "workspace_path": result.workspace_path,
            }
            result_file.write_text(json.dumps(payload, indent=2))
        except Exception as exc:
            logger.error(f"Failed to write result file {result_file}: {exc}")
            raise
        return result_file

    def save_result(
        self,
        task_id: int,
        output: str,
        files_modified: Optional[list] = None,
        commands_executed: Optional[list] = None,
        processing_time_seconds: Optional[int] = None,
        git_commit_sha: Optional[str] = None,
        git_pr_url: Optional[str] = None,
        git_branch: Optional[str] = None,
        workspace_path: Optional[str] = None,
    ) -> Result:
        """Save task result to database and file.

        Args:
            task_id: Task the result belongs to.
            output: Raw output text produced by the task.
            files_modified: Optional list of touched file paths. Stored
                JSON-encoded; an empty list is stored as NULL.
            commands_executed: Optional list of executed commands (same
                encoding rule as ``files_modified``).
            processing_time_seconds: Wall-clock duration, if known.
            git_commit_sha: Optional commit SHA produced by the task.
            git_pr_url: Optional pull-request URL.
            git_branch: Optional branch name.
            workspace_path: Optional path of the task's workspace.

        Returns:
            The persisted ``Result`` row.

        Raises:
            Exception: Re-raised after logging if the write fails.
        """

        def _op(session: Session) -> Result:
            result = Result(
                task_id=task_id,
                output=output,
                files_modified=json.dumps(files_modified) if files_modified else None,
                commands_executed=json.dumps(commands_executed) if commands_executed else None,
                processing_time_seconds=processing_time_seconds,
                git_commit_sha=git_commit_sha,
                git_pr_url=git_pr_url,
                git_branch=git_branch,
                workspace_path=workspace_path,
            )
            session.add(result)
            # Flush (not commit) so result.id is assigned before the JSON
            # mirror file is named after it.
            session.flush()
            result_file = self._write_result_file(result)
            logger.debug(f"Result saved for task {task_id}: {result_file}")
            return result

        try:
            return self._run_write(_op)
        except Exception as exc:
            logger.error(f"Failed to save result: {exc}")
            raise

    def get_result(self, result_id: int) -> Optional[Result]:
        """Get result by ID, or None when no such row exists."""

        def _op(session: Session) -> Optional[Result]:
            return session.query(Result).filter(Result.id == result_id).first()

        return self._run_read(_op)

    def get_task_results(self, task_id: int) -> list[Result]:
        """Get all results for a task (empty list when there are none)."""

        def _op(session: Session) -> list[Result]:
            return session.query(Result).filter(Result.task_id == task_id).all()

        return self._run_read(_op)

    def update_result_commit_info(
        self,
        result_id: int,
        git_commit_sha: Optional[str],
        git_pr_url: Optional[str] = None,
        git_branch: Optional[str] = None,
    ) -> Optional[Path]:
        """Update git commit information for a result record.

        Returns the path of the result's JSON mirror file so the caller can
        refresh it — note this method does NOT rewrite the file itself — or
        ``None`` when the result row does not exist.
        """

        def _op(session: Session) -> Optional[Path]:
            result = session.query(Result).filter(Result.id == result_id).first()
            if not result:
                logger.warning(f"Result {result_id} not found for commit update")
                return None

            # All three fields are overwritten, including with None values.
            result.git_commit_sha = git_commit_sha
            result.git_pr_url = git_pr_url
            result.git_branch = git_branch
            return self.results_path / f"task_{result.task_id}_{result.id}.json"

        updated_path = self._run_write(_op)
        return updated_path
126 |
--------------------------------------------------------------------------------
/src/sleepless_agent/core/timeout_manager.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | from datetime import datetime, timezone
4 | from typing import Optional, TYPE_CHECKING
5 |
6 | from sleepless_agent.monitoring.logging import get_logger
7 |
8 | from sleepless_agent.core.queue import TaskQueue
9 | from sleepless_agent.monitoring.monitor import HealthMonitor, PerformanceLogger
10 | from sleepless_agent.monitoring.report_generator import ReportGenerator, TaskMetrics
11 | from sleepless_agent.core.executor import ClaudeCodeExecutor
12 |
13 | if TYPE_CHECKING:
14 | from sleepless_agent.interfaces.bot import SlackBot
15 |
16 | logger = get_logger(__name__)
17 |
18 |
class TaskTimeoutManager:
    """Handle task timeout enforcement and related reporting.

    ``enforce`` is the entry point: it asks the queue for tasks whose run
    time exceeded the configured limit, cleans up their workspaces, records
    metrics and report entries, and notifies the task owner via Slack when
    possible. Every side channel (metrics, reports, Slack, live status) is
    best-effort: failures are logged at debug level and never abort the
    enforcement pass.
    """

    def __init__(
        self,
        *,
        config,
        task_queue: TaskQueue,
        claude: ClaudeCodeExecutor,
        monitor: HealthMonitor,
        perf_logger: PerformanceLogger,
        report_generator: ReportGenerator,
        bot: Optional["SlackBot"],
        live_status_tracker,
    ):
        """Store collaborators; no work happens until ``enforce`` is called.

        ``bot`` and ``live_status_tracker`` may be None/falsy, in which case
        the corresponding notifications are skipped.
        """
        self.config = config
        self.task_queue = task_queue
        self.claude = claude
        self.monitor = monitor
        self.perf_logger = perf_logger
        self.report_generator = report_generator
        self.bot = bot
        self.live_status_tracker = live_status_tracker

    def enforce(self) -> None:
        """Time out overdue tasks and fan out cleanup, metrics, and notices."""
        timeout_seconds = self.config.agent.task_timeout_seconds
        # A non-positive timeout disables enforcement entirely.
        if timeout_seconds <= 0:
            return

        timed_out_tasks = self.task_queue.timeout_expired_tasks(timeout_seconds)
        if not timed_out_tasks:
            return

        # Naive UTC timestamp to match the naive datetimes stored on tasks.
        now = datetime.now(timezone.utc).replace(tzinfo=None)
        for task in timed_out_tasks:
            elapsed_seconds = self._compute_elapsed(task, now, timeout_seconds)
            timeout_message = task.error_message or f"Timed out after {elapsed_seconds // 60} minutes."

            cleanup_note = "workspace left in place"
            try:
                # force=False: let the executor apply its own retention rules.
                cleaned = self.claude.cleanup_workspace(task.id, force=False)
                if cleaned:
                    cleanup_note = "workspace cleaned"
            except Exception as exc:
                cleanup_note = "workspace cleanup failed"
                logger.debug(f"Failed to cleanup workspace for task {task.id}: {exc}")

            logger.warning(f"Task {task.id} timed out after {elapsed_seconds}s ({cleanup_note})")

            self._log_timeout_metrics(task, elapsed_seconds, timeout_message)
            self._notify_timeout(task, elapsed_seconds, timeout_message)

    def _compute_elapsed(self, task, now: datetime, timeout_seconds: int) -> int:
        """Return elapsed seconds, clamped to at least ``timeout_seconds``.

        Falls back to ``now`` for either missing endpoint, and never reports
        less than the timeout — a timed-out task by definition ran at least
        that long.
        """
        started_at = task.started_at or now
        completed_at = task.completed_at or now
        elapsed_seconds = int((completed_at - started_at).total_seconds())
        return max(elapsed_seconds, timeout_seconds)

    def _log_timeout_metrics(self, task, elapsed_seconds: int, timeout_message: str) -> None:
        """Record the timeout in health, performance, and report channels.

        Each sink is wrapped independently so one failing sink cannot block
        the others.
        """
        try:
            self.monitor.record_task_completion(elapsed_seconds, success=False)
        except Exception as exc:
            logger.debug(f"Failed to record timeout in health monitor for task {task.id}: {exc}")

        try:
            self.perf_logger.log_task_execution(
                task_id=task.id,
                description=task.description,
                priority=task.priority.value if task.priority else "unknown",
                duration_seconds=elapsed_seconds,
                success=False,
            )
        except Exception as exc:
            logger.debug(f"Failed to log timeout metrics for task {task.id}: {exc}")

        try:
            task_metrics = TaskMetrics(
                task_id=task.id,
                description=task.description,
                priority=task.priority.value if task.priority else "unknown",
                status="failed",
                duration_seconds=elapsed_seconds,
                # No file/command counts are available for a timed-out run.
                files_modified=0,
                commands_executed=0,
                error_message=timeout_message,
            )
            self.report_generator.append_task_completion(task_metrics, project_id=task.project_id)
        except Exception as exc:
            logger.debug(f"Failed to append timeout to report for task {task.id}: {exc}")

    def _notify_timeout(self, task, elapsed_seconds: int, timeout_message: str) -> None:
        """Tell the task owner via Slack and clear any live-status entry.

        NOTE(review): ``timeout_message`` is currently unused here; the Slack
        text is built from the elapsed time instead.
        """
        # Only notify when the task has an owner and a bot is wired up.
        if task.assigned_to and self.bot:
            try:
                minutes = max(1, elapsed_seconds // 60)
                self.bot.send_message(
                    task.assigned_to,
                    f"⏱️ Task #{task.id} timed out after {minutes} minute(s). "
                    "It has been marked as failed.",
                )
            except Exception as exc:
                logger.debug(f"Failed to send timeout notification for task {task.id}: {exc}")

        if self.live_status_tracker:
            try:
                self.live_status_tracker.clear(task.id)
            except Exception as exc:
                logger.debug(f"Failed to clear live status for timed-out task {task.id}: {exc}")
126 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | __pycache__/
2 | *.egg-info/
3 | dist/
4 | build/
5 | venv/
6 | /venv/
7 |
8 | # Byte-compiled / optimized / DLL files
9 | __pycache__/
10 | *.py[codz]
11 | *$py.class
12 |
13 | # C extensions
14 | *.so
15 |
16 | # Distribution / packaging
17 | .Python
18 | build/
19 | develop-eggs/
20 | dist/
21 | downloads/
22 | eggs/
23 | .eggs/
24 | lib/
25 | lib64/
26 | parts/
27 | sdist/
28 | var/
29 | wheels/
30 | share/python-wheels/
31 | *.egg-info/
32 | .installed.cfg
33 | *.egg
34 | MANIFEST
35 |
36 | # PyInstaller
37 | # Usually these files are written by a python script from a template
38 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
39 | *.manifest
40 | *.spec
41 |
42 | # Installer logs
43 | pip-log.txt
44 | pip-delete-this-directory.txt
45 |
46 | # Unit test / coverage reports
47 | htmlcov/
48 | .tox/
49 | .nox/
50 | .coverage
51 | .coverage.*
52 | .cache
53 | nosetests.xml
54 | coverage.xml
55 | *.cover
56 | *.py.cover
57 | .hypothesis/
58 | .pytest_cache/
59 | cover/
60 |
61 | # Translations
62 | *.mo
63 | *.pot
64 |
65 | # Django stuff:
66 | *.log
67 | local_settings.py
68 | db.sqlite3
69 | db.sqlite3-journal
70 |
71 | # Flask stuff:
72 | instance/
73 | .webassets-cache
74 |
75 | # Scrapy stuff:
76 | .scrapy
77 |
78 | # Sphinx documentation
79 | docs/_build/
80 |
81 | # PyBuilder
82 | .pybuilder/
83 | target/
84 |
85 | # Jupyter Notebook
86 | .ipynb_checkpoints
87 |
88 | # IPython
89 | profile_default/
90 | ipython_config.py
91 |
92 | # pyenv
93 | # For a library or package, you might want to ignore these files since the code is
94 | # intended to run in multiple environments; otherwise, check them in:
95 | # .python-version
96 |
97 | # pipenv
98 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
99 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
100 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
101 | # install all needed dependencies.
102 | # Pipfile.lock
103 |
104 | # UV
105 | # Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
106 | # This is especially recommended for binary packages to ensure reproducibility, and is more
107 | # commonly ignored for libraries.
108 | # uv.lock
109 |
110 | # poetry
111 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
112 | # This is especially recommended for binary packages to ensure reproducibility, and is more
113 | # commonly ignored for libraries.
114 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
115 | # poetry.lock
116 | # poetry.toml
117 |
118 | # pdm
119 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
120 | # pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
121 | # https://pdm-project.org/en/latest/usage/project/#working-with-version-control
122 | # pdm.lock
123 | # pdm.toml
124 | .pdm-python
125 | .pdm-build/
126 |
127 | # pixi
128 | # Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
129 | # pixi.lock
130 | # Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
131 | # in the .venv directory. It is recommended not to include this directory in version control.
132 | .pixi
133 |
134 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
135 | __pypackages__/
136 |
137 | # Celery stuff
138 | celerybeat-schedule
139 | celerybeat.pid
140 |
141 | # Redis
142 | *.rdb
143 | *.aof
144 | *.pid
145 |
146 | # RabbitMQ
147 | mnesia/
148 | rabbitmq/
149 | rabbitmq-data/
150 |
151 | # ActiveMQ
152 | activemq-data/
153 |
154 | # SageMath parsed files
155 | *.sage.py
156 |
157 | # Environments
158 | .env
159 | .envrc
160 | .venv
161 | env/
162 | venv/
163 | ENV/
164 | env.bak/
165 | venv.bak/
166 |
167 | # Spyder project settings
168 | .spyderproject
169 | .spyproject
170 |
171 | # Rope project settings
172 | .ropeproject
173 |
174 | # mkdocs documentation
175 | /site
176 |
177 | # mypy
178 | .mypy_cache/
179 | .dmypy.json
180 | dmypy.json
181 |
182 | # Pyre type checker
183 | .pyre/
184 |
185 | # pytype static type analyzer
186 | .pytype/
187 |
188 | # Cython debug symbols
189 | cython_debug/
190 |
191 | # PyCharm
192 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
193 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
194 | # and can be added to the global gitignore or merged into this file. For a more nuclear
195 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
196 | # .idea/
197 |
198 | # Abstra
199 | # Abstra is an AI-powered process automation framework.
200 | # Ignore directories containing user credentials, local state, and settings.
201 | # Learn more at https://abstra.io/docs
202 | .abstra/
203 |
204 | # Visual Studio Code
205 | # Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
206 | # that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
207 | # and can be added to the global gitignore or merged into this file. However, if you prefer,
208 | # you could uncomment the following to ignore the entire vscode folder
209 | # .vscode/
210 |
211 | # Ruff stuff:
212 | .ruff_cache/
213 |
214 | # PyPI configuration file
215 | .pypirc
216 |
217 | # Marimo
218 | marimo/_static/
219 | marimo/_lsp/
220 | __marimo__/
221 |
222 | # Streamlit
223 | .streamlit/secrets.toml
224 |
225 | workspace/
226 | tests/
--------------------------------------------------------------------------------
/src/sleepless_agent/utils/metrics_aggregator.py:
--------------------------------------------------------------------------------
1 | """Metrics aggregation utility for handling multi-phase execution metrics."""
2 |
3 | from __future__ import annotations
4 |
5 | from typing import Dict, Optional, Any
6 | from dataclasses import dataclass, field
7 |
8 | from sleepless_agent.monitoring.logging import get_logger
9 |
10 | logger = get_logger(__name__)
11 |
12 |
@dataclass
class PhaseMetrics:
    """Raw metrics captured for one execution phase (planner/worker/evaluator)."""

    # Every field stays None until the phase actually reports a value.
    cost_usd: Optional[float] = None
    duration_ms: Optional[int] = None
    turns: Optional[int] = None

    def to_dict(self, prefix: str) -> Dict[str, Optional[Any]]:
        """Return the metrics as a dictionary with prefixed keys.

        Args:
            prefix: Phase label such as "planner", "worker", or "evaluator".

        Returns:
            Mapping with keys like "planner_cost_usd"; unreported metrics
            keep their None values.
        """
        pairs = (
            ("cost_usd", self.cost_usd),
            ("duration_ms", self.duration_ms),
            ("turns", self.turns),
        )
        return {f"{prefix}_{suffix}": value for suffix, value in pairs}

    def update_from_result(self, cost_usd: Optional[float] = None,
                           duration_ms: Optional[int] = None,
                           turns: Optional[int] = None) -> None:
        """Overwrite stored metrics with any non-None values from a result.

        Args:
            cost_usd: Cost in USD, if reported.
            duration_ms: Duration in milliseconds, if reported.
            turns: Number of API turns, if reported.
        """
        updates = (("cost_usd", cost_usd), ("duration_ms", duration_ms), ("turns", turns))
        for name, value in updates:
            if value is not None:
                setattr(self, name, value)
51 |
52 |
@dataclass
class CombinedMetrics:
    """Aggregate totals accumulated across all execution phases."""

    total_cost_usd: float = 0.0
    duration_api_ms: int = 0
    num_turns: int = 0

    def add_phase(self, phase_metrics: PhaseMetrics) -> None:
        """Fold one phase's reported metrics into the running totals.

        Args:
            phase_metrics: Metrics from a single phase; None fields are
                treated as "not reported" and skipped.
        """
        cost = phase_metrics.cost_usd
        duration = phase_metrics.duration_ms
        turns = phase_metrics.turns
        if cost is not None:
            self.total_cost_usd += cost
        if duration is not None:
            self.duration_api_ms += duration
        if turns is not None:
            self.num_turns += turns

    def to_dict(self) -> Dict[str, Any]:
        """Return the combined totals as a plain dictionary."""
        return dict(
            total_cost_usd=self.total_cost_usd,
            duration_api_ms=self.duration_api_ms,
            num_turns=self.num_turns,
        )
80 |
81 |
class MetricsAggregator:
    """Collects per-phase execution metrics and rolls them up into totals."""

    def __init__(self):
        """Start with empty metrics for every phase and zeroed totals."""
        self.planner = PhaseMetrics()
        self.worker = PhaseMetrics()
        self.evaluator = PhaseMetrics()
        self.combined = CombinedMetrics()

    def update_planner(self, cost_usd: Optional[float] = None,
                       duration_ms: Optional[int] = None,
                       turns: Optional[int] = None) -> None:
        """Update planner phase metrics."""
        self.get_phase_metrics("planner").update_from_result(cost_usd, duration_ms, turns)

    def update_worker(self, cost_usd: Optional[float] = None,
                      duration_ms: Optional[int] = None,
                      turns: Optional[int] = None) -> None:
        """Update worker phase metrics."""
        self.get_phase_metrics("worker").update_from_result(cost_usd, duration_ms, turns)

    def update_evaluator(self, cost_usd: Optional[float] = None,
                         duration_ms: Optional[int] = None,
                         turns: Optional[int] = None) -> None:
        """Update evaluator phase metrics."""
        self.get_phase_metrics("evaluator").update_from_result(cost_usd, duration_ms, turns)

    def calculate_combined(self) -> None:
        """Recompute the combined totals from scratch across all phases."""
        totals = CombinedMetrics()
        for phase in (self.planner, self.worker, self.evaluator):
            totals.add_phase(phase)
        self.combined = totals

    def get_all_metrics(self) -> Dict[str, Any]:
        """Return every phase's metrics plus the combined totals.

        Returns:
            Flat dictionary with prefixed per-phase keys (e.g.
            "planner_cost_usd") and unprefixed combined keys.
        """
        self.calculate_combined()

        merged: Dict[str, Any] = {}
        for name in ("planner", "worker", "evaluator"):
            merged.update(self.get_phase_metrics(name).to_dict(name))
        # Combined totals are added last, without a prefix.
        merged.update(self.combined.to_dict())
        return merged

    def get_phase_metrics(self, phase: str) -> PhaseMetrics:
        """Look up the live metrics object for one phase.

        Args:
            phase: Phase name ("planner", "worker", or "evaluator").

        Returns:
            PhaseMetrics for the specified phase.

        Raises:
            ValueError: If the phase name is not recognised.
        """
        known = {
            "planner": self.planner,
            "worker": self.worker,
            "evaluator": self.evaluator,
        }
        if phase not in known:
            raise ValueError(f"Invalid phase: {phase}. Must be one of: {list(known.keys())}")
        return known[phase]
--------------------------------------------------------------------------------
/src/sleepless_agent/utils/config.py:
--------------------------------------------------------------------------------
1 | """Configuration management backed by YAML."""
2 |
3 | from __future__ import annotations
4 |
5 | import os
6 | from functools import lru_cache
7 | from importlib import resources
8 | from pathlib import Path
9 | from typing import Any, Iterable
10 |
11 | import yaml
12 | from dotenv import load_dotenv
13 |
14 | load_dotenv()
15 |
16 | CONFIG_ENV_VAR = "SLEEPLESS_AGENT_CONFIG_FILE"
17 | DEFAULT_CONFIG_NAME = "config.yaml"
18 | ENV_PREFIX = "SLEEPLESS_AGENT__"
19 | PACKAGE_ROOT = "sleepless_agent"
20 |
21 | # Mapping for environment variables that previously relied on Pydantic aliases.
22 | ENVIRONMENT_ALIASES: dict[str, tuple[str, ...]] = {
23 | "SLACK_BOT_TOKEN": ("slack", "bot_token"),
24 | "SLACK_APP_TOKEN": ("slack", "app_token"),
25 | }
26 |
27 | PATH_FIELD_KEYS: set[tuple[str, ...]] = {
28 | ("agent", "workspace_root"),
29 | ("agent", "shared_workspace"),
30 | ("agent", "db_path"),
31 | ("agent", "results_path"),
32 | }
33 |
34 |
class ConfigNode(dict):
    """dict subclass with attribute-style access; nested mappings stay wrapped.

    Any dict stored into a node (at construction, item assignment, attribute
    assignment, or update) is recursively converted to a ConfigNode, and
    dicts inside lists are wrapped element-wise.
    """

    def __init__(self, data: dict[str, Any]) -> None:
        super().__init__()
        for key, value in data.items():
            # Routed through __setitem__ so every value is wrapped.
            self[key] = value

    def __getattr__(self, name: str) -> Any:
        try:
            return self[name]
        except KeyError as exc:  # pragma: no cover - mirrors attr behaviour
            raise AttributeError(name) from exc

    def __setattr__(self, key: str, value: Any) -> None:
        self[key] = value

    def __setitem__(self, key: str, value: Any) -> None:
        super().__setitem__(key, self._wrap(value))

    def update(self, *args: Any, **kwargs: Any) -> None:  # type: ignore[override]
        incoming = dict(*args, **kwargs)
        for key in incoming:
            self[key] = incoming[key]

    @staticmethod
    def _wrap(value: Any) -> Any:
        if isinstance(value, list):
            return [ConfigNode._wrap(item) for item in value]
        if isinstance(value, ConfigNode):
            return value
        if isinstance(value, dict):
            return ConfigNode(value)
        return value
66 |
67 |
class Config(ConfigNode):
    """Top-level configuration object.

    Behaviourally identical to ConfigNode; kept as a distinct name for
    backward compatibility with callers that import Config.
    """

    pass
72 |
73 |
def _default_config_source(path_override: str | Path | None = None) -> dict[str, Any]:
    """Load the base configuration mapping from YAML.

    Args:
        path_override: Explicit config file path; when omitted, the packaged
            default config.yaml bundled with the package is read instead.

    Returns:
        The parsed YAML mapping, or an empty dict for an empty file.
    """
    if path_override:
        handle = Path(path_override).expanduser().resolve().open("r", encoding="utf-8")
    else:
        handle = resources.files(PACKAGE_ROOT).joinpath(DEFAULT_CONFIG_NAME).open("r", encoding="utf-8")
    with handle:
        return yaml.safe_load(handle) or {}
83 |
84 |
def _coerce_env_value(raw_value: str) -> Any:
    """Parse an environment-variable string as YAML, keeping the raw string on failure.

    This turns values like "true", "3", or "[a, b]" into their typed
    equivalents while leaving unparseable strings untouched.
    """
    try:
        return yaml.safe_load(raw_value)
    except yaml.YAMLError:
        return raw_value
91 |
92 |
93 | def _apply_override(tree: dict[str, Any], path: Iterable[str], value: Any) -> None:
94 | current = tree
95 | segments = list(path)
96 | for segment in segments[:-1]:
97 | current = current.setdefault(segment, {}) # type: ignore[assignment]
98 | current[segments[-1]] = value
99 |
100 |
def _normalise_env_key(key: str) -> tuple[str, ...] | None:
    """Translate an environment-variable name into a config key path.

    Recognised inputs are explicit aliases (ENVIRONMENT_ALIASES), names with
    the SLEEPLESS_AGENT__ prefix, and names containing a "__" separator.

    Returns:
        Tuple of lower-cased path segments, or None when the variable does
        not target the configuration.
    """
    alias = ENVIRONMENT_ALIASES.get(key)
    if alias is not None:
        return alias

    if key.startswith(ENV_PREFIX):
        candidate = key[len(ENV_PREFIX):]
    elif "__" in key:
        candidate = key
    else:
        return None

    segments = [part for part in candidate.split("__") if part]
    return tuple(part.lower() for part in segments) if segments else None
115 |
116 |
def _load_env_overrides() -> dict[str, Any]:
    """Collect configuration overrides from the process environment.

    Returns:
        Nested dict mirroring the config structure, holding only the values
        overridden through recognised environment variables.
    """
    overrides: dict[str, Any] = {}
    for env_key, raw_value in os.environ.items():
        target = _normalise_env_key(env_key)
        if target:
            _apply_override(overrides, target, _coerce_env_value(raw_value))
    return overrides
125 |
126 |
127 | def _deep_merge(base: dict[str, Any], overrides: dict[str, Any]) -> dict[str, Any]:
128 | merged = dict(base)
129 | for key, value in overrides.items():
130 | if (
131 | key in merged
132 | and isinstance(merged[key], dict)
133 | and isinstance(value, dict)
134 | ):
135 | merged[key] = _deep_merge(merged[key], value)
136 | else:
137 | merged[key] = value
138 | return merged
139 |
140 |
def _coerce_special_types(data: Any, path: tuple[str, ...] = ()) -> Any:
    """Recursively convert registered path-valued config strings into Paths.

    Args:
        data: Arbitrary config subtree.
        path: Key path from the config root to *data*; used to recognise
            fields listed in PATH_FIELD_KEYS.

    Returns:
        The subtree with registered path fields converted to expanded Paths;
        everything else is returned unchanged.
    """
    if isinstance(data, str):
        return Path(data).expanduser() if path in PATH_FIELD_KEYS else data
    if isinstance(data, dict):
        return {key: _coerce_special_types(value, path + (key,)) for key, value in data.items()}
    if isinstance(data, list):
        # Lists do not extend the key path; each element keeps the parent's.
        return [_coerce_special_types(item, path) for item in data]
    return data
152 |
153 |
def _resolve_config_data(config_path: str | Path | None = None) -> Config:
    """Build a Config from the base YAML plus environment overrides.

    Args:
        config_path: Optional explicit config file; falls back to the
            SLEEPLESS_AGENT_CONFIG_FILE environment variable, then the
            packaged default.

    Returns:
        Fully merged and type-coerced Config object.
    """
    override = config_path or os.environ.get(CONFIG_ENV_VAR)
    merged = _deep_merge(_default_config_source(override), _load_env_overrides())
    return Config(_coerce_special_types(merged))
161 |
162 |
@lru_cache(maxsize=4)
def get_config(config_path: str | Path | None = None) -> Config:
    """Load configuration, applying environment overrides.

    Results are cached per distinct ``config_path`` (up to 4 entries), so
    repeated calls with the same argument return the same Config object.
    """
    return _resolve_config_data(config_path)
167 |
168 |
169 | __all__ = ["Config", "ConfigNode", "get_config"]
170 |
--------------------------------------------------------------------------------
/docs/changelog.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to Sleepless Agent will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7 |
8 | ## [Unreleased]
9 |
10 | ### Added
11 | - Comprehensive documentation with MkDocs support
12 | - Project-level task management
13 | - Trash/restore functionality for cancelled tasks
14 | - Real-time usage monitoring dashboard
15 | - Multi-agent workflow support (planner, worker, evaluator)
16 | - Isolated workspace management for parallel execution
17 | - Advanced scheduling with time-based thresholds
18 |
19 | ### Changed
20 | - Improved logging with Rich console output
21 | - Enhanced Pro plan usage management
22 | - Optimized task queue processing
23 | - Better error handling and recovery
24 |
25 | ### Fixed
26 | - AttributeError in 'sle check' command
27 | - Task status synchronization issues
28 | - Database locking under high load
29 | - Memory leaks in long-running daemon
30 |
31 | ## [0.1.0] - 2024-10-24
32 |
33 | ### Added
34 | - Initial public release
35 | - Core daemon functionality
36 | - Slack bot integration with slash commands
37 | - Claude Code CLI integration via Python Agent SDK
38 | - SQLite-based task persistence
39 | - Git automation (commits and PRs)
40 | - Basic task scheduling
41 | - Pro plan usage tracking
42 | - Daily report generation
43 | - CLI interface (`sle` command)
44 | - Environment-based configuration
45 | - Workspace isolation for tasks
46 | - Basic monitoring and metrics
47 |
48 | ### Features
49 | - `/think` command for task submission
50 | - `/check` command for status monitoring
51 | - `/cancel` command for task cancellation
52 | - `/report` command for viewing reports
53 | - Automatic thought processing
54 | - Project-based serious task handling
55 | - Time-based usage thresholds (day/night)
56 | - Automatic pause at usage limits
57 |
58 | ### Documentation
59 | - README with quickstart guide
60 | - Basic installation instructions
61 | - Slack setup guide
62 | - Command reference
63 |
64 | ## [0.0.9] - 2024-10-20 (Pre-release)
65 |
66 | ### Added
67 | - Beta testing release
68 | - Core task execution engine
69 | - Basic Slack integration
70 | - Initial Claude Code wrapper
71 |
72 | ### Changed
73 | - Switched from direct API to Claude Code CLI
74 | - Refactored task queue implementation
75 |
76 | ### Fixed
77 | - Connection timeout issues
78 | - Task state management bugs
79 |
80 | ## [0.0.5] - 2024-10-15 (Alpha)
81 |
82 | ### Added
83 | - Proof of concept implementation
84 | - Basic daemon structure
85 | - Simple task queue
86 | - Direct Anthropic API integration
87 |
88 | ### Known Issues
89 | - Limited error handling
90 | - No workspace isolation
91 | - Single task execution only
92 | - No usage management
93 |
94 | ## [0.0.1] - 2024-10-10 (Prototype)
95 |
96 | ### Added
97 | - Initial prototype
98 | - Basic Slack bot
99 | - Simple task processor
100 | - File-based storage
101 |
102 | ---
103 |
104 | ## Version History Summary
105 |
106 | | Version | Release Date | Status | Key Features |
107 | |---------|-------------|---------|--------------|
108 | | 0.1.0 | 2024-10-24 | Stable | Full release with all core features |
109 | | 0.0.9 | 2024-10-20 | Beta | Claude Code CLI integration |
110 | | 0.0.5 | 2024-10-15 | Alpha | Basic daemon and queue |
111 | | 0.0.1 | 2024-10-10 | Prototype | Initial concept |
112 |
113 | ## Upgrade Guide
114 |
115 | ### From 0.0.x to 0.1.0
116 |
117 | 1. **Database Migration**:
118 | ```bash
119 | # Backup old database
120 | cp workspace/tasks.db workspace/tasks.db.backup
121 |
122 | # Run migration
123 | sle migrate
124 | ```
125 |
126 | 2. **Configuration Changes**:
127 | - Rename `ANTHROPIC_API_KEY` to use Claude Code CLI
128 | - Update `config.yaml` with new structure
129 | - Add workspace configuration
130 |
131 | 3. **Slack App Updates**:
132 | - Add new slash commands (`/trash`)
133 | - Update bot permissions
134 | - Enable Socket Mode
135 |
136 | ### Breaking Changes in 0.1.0
137 |
138 | - Removed direct Anthropic API support
139 | - Changed database schema (migration required)
140 | - New configuration file format
141 | - Updated environment variable names
142 |
143 | ## Roadmap
144 |
145 | ### Version 0.2.0 (Planned)
146 | - [ ] Web dashboard for monitoring
147 | - [ ] Advanced scheduling algorithms
148 | - [ ] Multi-workspace support
149 | - [ ] Plugin system for custom executors
150 | - [ ] Enhanced Git workflows
151 | - [ ] Team collaboration features
152 |
153 | ### Version 0.3.0 (Future)
154 | - [ ] Kubernetes deployment support
155 | - [ ] Distributed task execution
156 | - [ ] Advanced analytics and reporting
157 | - [ ] Integration with CI/CD pipelines
158 | - [ ] Custom model support
159 | - [ ] Enterprise features
160 |
161 | ## Security Updates
162 |
163 | ### Security Policy
164 |
165 | We take security seriously. Please report vulnerabilities to security@sleepless-agent.dev
166 |
167 | ### Security Fixes
168 |
169 | - **0.1.0**: Fixed token exposure in logs
170 | - **0.0.9**: Sanitized user input in Slack commands
171 | - **0.0.5**: Added workspace isolation
172 |
173 | ## Deprecations
174 |
175 | ### Deprecated in 0.1.0
176 | - Direct Anthropic API calls (use Claude Code CLI)
177 | - `ANTHROPIC_API_KEY` environment variable
178 | - Legacy task queue format
179 |
180 | ### Removal Timeline
181 | - 0.2.0: Remove deprecated API methods
182 | - 0.3.0: Remove legacy configuration support
183 |
184 | ## Contributing
185 |
186 | See [CONTRIBUTING.md](https://github.com/context-machine-lab/sleepless-agent/blob/main/CONTRIBUTING.md) for how to contribute to the project.
187 |
188 | ## License
189 |
190 | This project is licensed under the MIT License - see the [LICENSE](https://github.com/context-machine-lab/sleepless-agent/blob/main/LICENSE) file for details.
191 |
192 | ---
193 |
194 | [Unreleased]: https://github.com/context-machine-lab/sleepless-agent/compare/v0.1.0...HEAD
195 | [0.1.0]: https://github.com/context-machine-lab/sleepless-agent/releases/tag/v0.1.0
196 | [0.0.9]: https://github.com/context-machine-lab/sleepless-agent/releases/tag/v0.0.9
197 | [0.0.5]: https://github.com/context-machine-lab/sleepless-agent/releases/tag/v0.0.5
198 | [0.0.1]: https://github.com/context-machine-lab/sleepless-agent/releases/tag/v0.0.1
--------------------------------------------------------------------------------
/src/sleepless_agent/config.yaml:
--------------------------------------------------------------------------------
1 | claude_code:
2 | binary_path: claude
3 | model: claude-sonnet-4-5-20250929
4 | night_start_hour: 1
5 | night_end_hour: 9
6 | threshold_day: 20.0
7 | threshold_night: 80.0
8 | usage_command: claude /usage
9 |
10 | git:
11 | enabled: false # Set to true to enable git commits and branching
12 | use_remote_repo: true
13 | remote_repo_url: git@github.com:TimeLovercc/test5.git # !!!CHANGE THIS!!!
14 | auto_create_repo: true
15 |
16 | agent:
17 | workspace_root: ./workspace
18 | task_timeout_seconds: 1800
19 |
20 | multi_agent_workflow:
21 | planner:
22 | enabled: true
23 | max_turns: 10
24 | worker:
25 | enabled: true
26 | max_turns: 30
27 | evaluator:
28 | enabled: true
29 | max_turns: 10
30 |
31 | auto_generation:
32 | enabled: true
33 | prompts:
34 | - name: refine_focused
35 | prompt: |-
36 | The workspace has multiple ongoing projects and tasks that need attention.
37 |
38 | ## Current State
39 | - Active tasks: {task_count} ({pending_count} pending, {in_progress_count} in progress)
40 | - Many tasks are already in progress or pending
41 |
42 | ## Available Tasks to Refine
43 | {available_tasks}
44 |
45 | ## Recent Work & Context
46 | {recent_work}
47 |
48 | ## Workspace Constraints
49 | - Tasks execute in isolated directories: workspace/tasks//
50 | - Each task only has access to its own workspace folder and workspace/shared/
51 | - Do NOT reference workspace/data/ or other system directories in task descriptions
52 | - REFINE tasks reuse existing task workspaces (no new folder created)
53 |
54 | ## Task Generation
55 | Generate ONE REFINE task to continue or improve existing work:
56 | - Complete partial/incomplete tasks mentioned above
57 | - Follow up on outstanding items and recommendations
58 | - Enhance or improve existing projects in the workspace
59 | - Fix issues or improve quality of current work
60 | - Add missing components to existing implementations
61 | - Expand documentation or analysis from previous tasks
62 |
63 | IMPORTANT Format Requirements:
64 | - To refine a specific task: Use [REFINE:#] followed by description
65 | Example: [REFINE:#2] Improve error handling and add edge case tests
66 | - For general refinements: Use [REFINE] followed by description
67 | - Your response MUST be 1-2 sentences max
68 |
69 | Focus on completing or improving what already exists rather than starting new projects.
70 | weight: 0.45
71 | - name: balanced
72 | prompt: |-
73 | Review the workspace state and generate a valuable task.
74 |
75 | ## Current State
76 | - Active tasks: {task_count} ({pending_count} pending, {in_progress_count} in progress)
77 |
78 | ## Available Tasks to Refine
79 | {available_tasks}
80 |
81 | ## Recent Work & Context
82 | {recent_work}
83 |
84 | ## Workspace Constraints
85 | - Tasks execute in isolated directories: workspace/tasks//
86 | - Each task only has access to its own workspace folder and workspace/shared/
87 | - Do NOT reference workspace/data/ or other system directories in task descriptions
88 | - REFINE tasks reuse existing task workspaces (no new folder created)
89 |
90 | ## Task Generation
91 | Generate ONE valuable task (NEW or REFINE):
92 | - For REFINE: improve existing work, complete partial tasks, enhance current projects
93 | - For NEW: create something useful, interesting, or educational
94 |
95 | Task categories to consider:
96 | - Software development (applications, scripts, tools, APIs)
97 | - Data analysis and visualization projects
98 | - Research and documentation (technical guides, comparisons, best practices)
99 | - Creative writing (stories, tutorials, technical articles)
100 | - System design and architecture documents
101 | - Educational content and examples
102 | - Automation and productivity improvements
103 | - Analysis and evaluation reports
104 |
105 | IMPORTANT Format Requirements:
106 | - To refine a specific task: Use [REFINE:#] followed by description
107 | Example: [REFINE:#1] Add comprehensive testing suite with edge cases
108 | - For general refinements: Use [REFINE] followed by description
109 | - For new tasks: Use [NEW] followed by description
110 | - Your response MUST be 1-2 sentences max
111 | weight: 0.35
112 | - name: new_friendly
113 | prompt: |-
114 | Generate an interesting and valuable task for the workspace.
115 |
116 | ## Current State
117 | - Active tasks: {task_count} ({pending_count} pending, {in_progress_count} in progress)
118 | - Few tasks in queue - good time for new projects!
119 |
120 | ## Available Tasks to Refine
121 | {available_tasks}
122 |
123 | ## Workspace Constraints
124 | - Tasks execute in isolated directories: workspace/tasks//
125 | - Each task only has access to its own workspace folder and workspace/shared/
126 | - Do NOT reference workspace/data/ or other system directories in task descriptions
127 | - REFINE tasks reuse existing task workspaces (no new folder created)
128 |
129 | ## Task Generation
130 | Generate ONE innovative task that creates value.
131 |
132 | Areas to explore:
133 | - Build practical tools and utilities
134 | - Create educational content and tutorials
135 | - Develop software applications or scripts
136 | - Write comprehensive documentation or guides
137 | - Design systems and architectures
138 | - Analyze and compare technologies or approaches
139 | - Generate creative content (technical writing, examples)
140 | - Research and summarize complex topics
141 | - Create data visualizations or analysis
142 | - Develop proof-of-concepts or experiments
143 |
144 | Can be NEW (fresh project) or REFINE (improve existing work) - choose what would be most valuable.
145 |
146 | IMPORTANT Format Requirements:
147 | - To refine a specific task: Use [REFINE:#] followed by description
148 | Example: [REFINE:#3] Expand analysis with performance benchmarks
149 | - For general refinements: Use [REFINE] followed by description
150 | - For new tasks: Use [NEW] followed by description
151 | - Your response MUST be 1-2 sentences max
152 | weight: 0.20
--------------------------------------------------------------------------------
/src/sleepless_agent/storage/workspace.py:
--------------------------------------------------------------------------------
1 | """Interactive workspace setup utilities."""
2 |
3 | from __future__ import annotations
4 |
5 | import json
6 | from dataclasses import dataclass
7 | import subprocess
8 | from pathlib import Path
9 | from typing import Optional
10 |
11 | from sleepless_agent.monitoring.logging import get_logger
12 | logger = get_logger(__name__)
13 |
14 |
@dataclass
class WorkspaceConfigResult:
    """Resolved workspace settings returned by WorkspaceSetup.run()."""
    workspace_root: Path  # resolved (absolute, user-expanded) workspace directory
    use_remote_repo: bool  # whether a remote GitHub repo should be used for tracking
    remote_repo_url: Optional[str]  # remote URL; may be None even when use_remote_repo is True
20 |
21 |
22 | class WorkspaceSetup:
23 | """Handle first-run setup for workspace configuration."""
24 |
    def __init__(self, agent_config, git_config=None):
        """Capture configuration sources and derive setup defaults.

        Args:
            agent_config: Agent section of the config; must expose a
                ``workspace_root`` Path.
            git_config: Optional git section from config.yaml; when present it
                takes precedence over the saved JSON setup state in run().
        """
        self.agent_config = agent_config
        self.git_config = git_config  # git config from config.yaml
        # Per-user marker file remembering answers from the first-run prompts.
        self.state_path = Path.home() / ".sleepless_agent_setup.json"
        self.default_workspace = agent_config.workspace_root.expanduser().resolve()
        self.repo_root = Path.cwd()
        self.default_remote_url = self._detect_default_remote_url()
32 |
33 | def run(self) -> WorkspaceConfigResult:
34 | """Load configuration from config.yaml first, then fall back to JSON file or prompts."""
35 | # First priority: config.yaml git section
36 | if self.git_config:
37 | use_remote_repo = bool(self.git_config.get("use_remote_repo", False))
38 | remote_repo_url = self.git_config.get("remote_repo_url")
39 | workspace_root = self.default_workspace
40 |
41 | logger.info("Using git configuration from config.yaml")
42 | else:
43 | # Fall back to JSON file or prompts
44 | data = self._load_state()
45 | if not data:
46 | data = self._prompt_user()
47 | self._save_state(data)
48 |
49 | workspace_root = Path(data.get("workspace_root", self.default_workspace)).expanduser().resolve()
50 | use_remote_repo = bool(data.get("use_remote_repo", False))
51 | remote_repo_url = data.get("remote_repo_url")
52 |
53 | self._apply_workspace_root(workspace_root)
54 |
55 | return WorkspaceConfigResult(
56 | workspace_root=workspace_root,
57 | use_remote_repo=use_remote_repo,
58 | remote_repo_url=remote_repo_url,
59 | )
60 |
61 | # ------------------------------------------------------------------
62 | # Internal helpers
63 | # ------------------------------------------------------------------
64 | def _load_state(self) -> dict:
65 | if not self.state_path.exists():
66 | return {}
67 | try:
68 | return json.loads(self.state_path.read_text())
69 | except Exception as exc:
70 | logger.warning(f"Failed to parse setup file {self.state_path}: {exc}")
71 | return {}
72 |
73 | def _save_state(self, data: dict):
74 | try:
75 | self.state_path.write_text(json.dumps(data, indent=2))
76 | logger.info(f"Saved setup configuration to {self.state_path}")
77 | except Exception as exc:
78 | logger.warning(f"Failed to write setup file {self.state_path}: {exc}")
79 |
80 | def _prompt_user(self) -> dict:
81 | print("\nWelcome to Sleepless Agent! Let's finish the initial setup.")
82 | workspace_input = input(f"Workspace root [{self.default_workspace}]: ").strip()
83 | workspace_root = (
84 | Path(workspace_input).expanduser().resolve() if workspace_input else self.default_workspace
85 | )
86 |
87 | use_remote_input = input("Use remote GitHub repo to track? [Y/n]: ").strip().lower()
88 | use_remote_repo = use_remote_input not in {"n", "no"}
89 |
90 | remote_repo_url = None
91 | if use_remote_repo:
92 | default_remote = self.default_remote_url or self._fallback_remote_url()
93 | remote_repo_input = input(f"Remote repository URL [{default_remote}]: ").strip()
94 | remote_repo_url = remote_repo_input or default_remote
95 |
96 | return {
97 | "workspace_root": str(workspace_root),
98 | "use_remote_repo": use_remote_repo,
99 | "remote_repo_url": remote_repo_url,
100 | }
101 |
102 | def _apply_workspace_root(self, workspace_root: Path):
103 | """Update config paths to reflect new workspace root."""
104 | data_dir = workspace_root / "data"
105 | self.agent_config.workspace_root = workspace_root
106 | self.agent_config.shared_workspace = workspace_root / "shared"
107 | self.agent_config.db_path = data_dir / "tasks.db"
108 | self.agent_config.results_path = data_dir / "results"
109 |
110 | # ------------------------------------------------------------------
111 | # Git helpers
112 | # ------------------------------------------------------------------
113 | def _detect_default_remote_url(self) -> Optional[str]:
114 | """Attempt to infer a sane default remote URL."""
115 | origin_url = self._run_git_command(["git", "remote", "get-url", "origin"])
116 | if origin_url:
117 | return origin_url.strip()
118 |
119 | username = self._get_git_identity()
120 | if not username:
121 | return None
122 |
123 | repo_name = self.repo_root.name or "sleepless-agent"
124 | user_slug = username.replace(" ", "-")
125 | return f"git@github.com:{user_slug}/{repo_name}.git"
126 |
127 | def _get_git_identity(self) -> Optional[str]:
128 | """Get git user identity, preferring user.name over user.email."""
129 | name = self._run_git_command(["git", "config", "--get", "user.name"])
130 | if name:
131 | return name.strip()
132 |
133 | email = self._run_git_command(["git", "config", "--get", "user.email"])
134 | if email:
135 | return email.strip().split("@")[0]
136 |
137 | return None
138 |
139 | def _run_git_command(self, cmd: list[str]) -> Optional[str]:
140 | """Execute git command in repository root and return stdout."""
141 | try:
142 | result = subprocess.run(
143 | cmd,
144 | cwd=self.repo_root,
145 | capture_output=True,
146 | text=True,
147 | timeout=5,
148 | )
149 | if result.returncode == 0:
150 | return result.stdout.strip()
151 | except Exception as exc: # pragma: no cover - best effort
152 | logger.debug(f"Failed to run {' '.join(cmd)}: {exc}")
153 | return None
154 |
155 | def _fallback_remote_url(self) -> str:
156 | repo_name = self.repo_root.name or "sleepless-agent"
157 | return f"git@github.com:username/{repo_name}.git"
--------------------------------------------------------------------------------
/docs/README.md:
--------------------------------------------------------------------------------
1 | # Sleepless Agent Documentation
2 |
3 | This documentation provides comprehensive guides, references, and tutorials for the Sleepless Agent - a 24/7 AI automation system powered by Claude Code.
4 |
5 | ## Documentation Structure
6 |
7 | ```
8 | docs/
9 | ├── index.md # Main landing page
10 | ├── quickstart.md # 5-minute getting started
11 | ├── installation.md # Detailed installation guide
12 | ├── faq.md # Frequently asked questions
13 | ├── troubleshooting.md # Common issues and solutions
14 | ├── changelog.md # Version history
15 | ├── mkdocs.yml # MkDocs configuration
16 | │
17 | ├── concepts/ # Core concepts and architecture
18 | │ ├── index.md # Concepts overview
19 | │ ├── architecture.md # System architecture
20 | │ ├── task-lifecycle.md # How tasks flow through system
21 | │ ├── workspace-isolation.md # Isolation and security model
22 | │ ├── scheduling.md # Task scheduling algorithms
23 | │ └── pro-plan-management.md # Claude Pro usage optimization
24 | │
25 | ├── guides/ # Step-by-step guides
26 | │ ├── index.md # Guides overview
27 | │ ├── slack-setup.md # Complete Slack configuration
28 | │ ├── environment-setup.md # Environment variables and config
29 | │ └── git-integration.md # Git automation setup
30 | │
31 | ├── reference/ # Technical reference
32 | │ └── api/
33 | │ └── cli-commands.md # CLI command reference
34 | │
35 | └── assets/ # Images and diagrams
36 | ```
37 |
38 | ## Documentation Highlights
39 |
40 | ### 🚀 Quick Start
41 | - **[Quickstart Guide](quickstart.md)** - Get running in 5 minutes
42 | - **[Installation](installation.md)** - Platform-specific setup instructions
43 | - **[FAQ](faq.md)** - Answers to common questions
44 |
45 | ### 🧠 Core Concepts
46 | - **[Architecture](concepts/architecture.md)** - Understand system design
47 | - **[Task Lifecycle](concepts/task-lifecycle.md)** - Task execution flow
48 | - **[Workspace Isolation](concepts/workspace-isolation.md)** - Security model
49 | - **[Pro Plan Management](concepts/pro-plan-management.md)** - Usage optimization
50 |
51 | ### 📖 Configuration Guides
52 | - **[Slack Setup](guides/slack-setup.md)** - Complete Slack integration
53 | - **[Environment Setup](guides/environment-setup.md)** - Configuration management
54 | - **[Git Integration](guides/git-integration.md)** - Automated version control
55 |
56 | ### 📋 Reference
57 | - **[CLI Commands](reference/api/cli-commands.md)** - Complete command reference
58 | - **[Troubleshooting](troubleshooting.md)** - Solve common problems
59 |
60 | ## Key Features Documented
61 |
62 | ### System Architecture
63 | - Modular, event-driven design
64 | - Parallel task execution
65 | - Isolated workspace management
66 | - Resource optimization
67 |
68 | ### Slack Integration
69 | - Step-by-step app creation
70 | - Socket Mode configuration
71 | - Slash command setup
72 | - Permission management
73 |
74 | ### Git Automation
75 | - Automatic commits for tasks
76 | - Pull request creation
77 | - Multi-repository support
78 | - Security best practices
79 |
80 | ### Pro Plan Management
81 | - Intelligent usage tracking
82 | - Time-based thresholds (day/night)
83 | - Automatic pausing at limits
84 | - Usage optimization strategies
85 |
86 | ### Task Management
87 | - Priority-based scheduling
88 | - Project organization
89 | - Dependency handling
90 | - Result storage
91 |
92 | ## Building the Documentation
93 |
94 | ### Prerequisites
95 |
96 | ```bash
97 | pip install mkdocs mkdocs-material
98 | ```
99 |
100 | ### Local Development
101 |
102 | ```bash
103 | # Serve documentation locally, from the repository root
104 | # (mkdocs.yml lives at the repo root, not inside docs/)
105 | mkdocs serve
106 |
107 | # View at http://localhost:8000
108 | ```
109 |
110 | ### Build Static Site
111 |
112 | ```bash
113 | # Build documentation
114 | mkdocs build
115 |
116 | # Output in site/ directory
117 | ```
118 |
119 | ### Deploy to GitHub Pages
120 |
121 | ```bash
122 | # Deploy to gh-pages branch
123 | mkdocs gh-deploy
124 | ```
125 |
126 | ## Documentation Standards
127 |
128 | ### Writing Style
129 | - Clear, concise language
130 | - Step-by-step instructions
131 | - Code examples for every concept
132 | - Visual diagrams where helpful
133 |
134 | ### Structure
135 | - Progressive disclosure (simple → complex)
136 | - Consistent formatting
137 | - Cross-references between related topics
138 | - Complete examples
139 |
140 | ### Content Types
141 | - **Concepts** - Explain how things work
142 | - **Guides** - Show how to do things
143 | - **Tutorials** - Learn by doing
144 | - **Reference** - Complete specifications
145 |
146 | ## Contributing to Documentation
147 |
148 | ### Adding New Content
149 |
150 | 1. Choose appropriate section (concepts/guides/tutorials/reference)
151 | 2. Follow existing naming conventions
152 | 3. Update navigation in mkdocs.yml
153 | 4. Include code examples
154 | 5. Add cross-references
155 |
156 | ### Style Guide
157 |
158 | - Use ATX-style headers (`#`, not underlines)
159 | - Include code language in fenced blocks
160 | - Use tables for structured data
161 | - Add admonitions for important notes
162 |
163 | ### Example Structure
164 |
165 | ```markdown
166 | # Page Title
167 |
168 | Brief introduction paragraph.
169 |
170 | ## Overview
171 |
172 | High-level explanation.
173 |
174 | ## Details
175 |
176 | ### Subsection
177 |
178 | Detailed content with examples:
179 |
180 | \`\`\`python
181 | # Code example
182 | def example():
183 | return "example"
184 | \`\`\`
185 |
186 | ## Best Practices
187 |
188 | - Bullet points for lists
189 | - **Bold** for emphasis
190 | - `code` for inline code
191 |
192 | ## See Also
193 |
194 | - [Related Topic](link.md)
195 | ```
196 |
197 | ## Documentation Coverage
198 |
199 | ### ✅ Completed
200 | - Core documentation structure
201 | - All concept documents (5/5)
202 | - Essential guides (3/5+)
203 | - Root documentation files
204 | - MkDocs configuration
205 | - CLI commands reference
206 |
207 | ### 🚧 Planned Additions
208 | - Remaining guides (project management, custom prompts, deployment)
209 | - Tutorial documents (first task, workflows, monitoring, reports)
210 | - API reference (Slack commands, Python API)
211 | - Configuration reference
212 | - Database schema reference
213 | - Example code and workflows
214 |
215 | ## Quick Links
216 |
217 | - [Main Documentation](index.md)
218 | - [Quickstart](quickstart.md)
219 | - [Slack Setup](guides/slack-setup.md)
220 | - [Architecture](concepts/architecture.md)
221 | - [CLI Reference](reference/api/cli-commands.md)
222 |
223 | ## Support
224 |
225 | - **Documentation Issues**: Open an issue with the `documentation` label
226 | - **Discord**: Join our community for help
227 | - **Contributing**: See CONTRIBUTING.md for guidelines
228 |
229 | ---
230 |
231 | *This documentation follows the style and structure of professional open-source projects like ContextAgent, providing comprehensive coverage of all aspects of the Sleepless Agent system.*
--------------------------------------------------------------------------------
/src/sleepless_agent/utils/live_status.py:
--------------------------------------------------------------------------------
1 | """Lightweight tracker for real-time task execution status."""
2 |
3 | from __future__ import annotations
4 |
5 | import json
6 | from dataclasses import dataclass, field
7 | from datetime import datetime, timedelta, timezone
8 | from pathlib import Path
9 | from threading import Lock
10 | from typing import Any, Dict, Iterable, List, Optional
11 |
12 | from sleepless_agent.monitoring.logging import get_logger
13 | logger = get_logger(__name__)
14 |
15 |
16 | def _utc_now_iso() -> str:
17 | return datetime.now(timezone.utc).replace(tzinfo=None).isoformat()
18 |
19 |
20 | def _truncate(text: str, max_length: int = 240) -> str:
21 | if len(text) <= max_length:
22 | return text
23 | return text[: max_length - 1] + "…"
24 |
25 |
26 | @dataclass
27 | class LiveStatusEntry:
28 | """Represents the live status for a single task."""
29 |
30 | task_id: int
31 | description: str = ""
32 | project_name: Optional[str] = None
33 | phase: str = "initializing"
34 | prompt_preview: str = ""
35 | answer_preview: str = ""
36 | status: str = "running"
37 | updated_at: str = field(default_factory=_utc_now_iso)
38 |
39 | def to_dict(self) -> Dict[str, Any]:
40 | return {
41 | "task_id": self.task_id,
42 | "description": self.description,
43 | "project_name": self.project_name,
44 | "phase": self.phase,
45 | "prompt_preview": self.prompt_preview,
46 | "answer_preview": self.answer_preview,
47 | "status": self.status,
48 | "updated_at": self.updated_at,
49 | }
50 |
51 | @classmethod
52 | def from_dict(cls, payload: Dict[str, Any]) -> "LiveStatusEntry":
53 | task_id_raw = payload.get("task_id")
54 | if task_id_raw is None:
55 | raise ValueError("task_id is required in payload")
56 | return cls(
57 | task_id=int(task_id_raw),
58 | description=str(payload.get("description", "")),
59 | project_name=payload.get("project_name"),
60 | phase=str(payload.get("phase", "initializing")),
61 | prompt_preview=str(payload.get("prompt_preview", "")),
62 | answer_preview=str(payload.get("answer_preview", "")),
63 | status=str(payload.get("status", "running")),
64 | updated_at=str(payload.get("updated_at", _utc_now_iso())),
65 | )
66 |
67 |
68 | class LiveStatusTracker:
69 | """Persist and retrieve live task execution updates."""
70 |
71 | def __init__(self, storage_path: Path | str):
72 | self.storage_path = Path(storage_path)
73 | self.storage_path.parent.mkdir(parents=True, exist_ok=True)
74 | self._lock = Lock()
75 |
76 | # Public API -----------------------------------------------------------------
77 | def update(self, entry: LiveStatusEntry | Dict[str, Any]) -> None:
78 | """Upsert the status for a task."""
79 | payload = entry.to_dict() if isinstance(entry, LiveStatusEntry) else dict(entry)
80 | task_id = payload.get("task_id")
81 | if task_id is None:
82 | raise ValueError("LiveStatusTracker.update requires task_id")
83 |
84 | payload["task_id"] = int(task_id)
85 | payload.setdefault("updated_at", _utc_now_iso())
86 |
87 | payload["description"] = _truncate(payload.get("description", ""))
88 | payload["prompt_preview"] = _truncate(payload.get("prompt_preview", ""))
89 | payload["answer_preview"] = _truncate(payload.get("answer_preview", ""))
90 |
91 | with self._lock:
92 | data = self._read_all()
93 | data[str(payload["task_id"])] = payload
94 | self._atomic_write(data)
95 |
96 | def clear(self, task_id: int) -> None:
97 | """Remove a task from tracking."""
98 | with self._lock:
99 | data = self._read_all()
100 | if str(task_id) in data:
101 | del data[str(task_id)]
102 | self._atomic_write(data)
103 |
104 | def clear_all(self) -> None:
105 | """Remove all tracked entries."""
106 | with self._lock:
107 | if self.storage_path.exists():
108 | try:
109 | self.storage_path.unlink()
110 | except OSError as exc:
111 | logger.debug(f"Failed to remove live status file: {exc}")
112 |
113 | def prune_older_than(self, max_age: timedelta) -> None:
114 | """Drop entries older than the provided age."""
115 | cutoff = datetime.now(timezone.utc).replace(tzinfo=None) - max_age
116 | with self._lock:
117 | data = self._read_all()
118 | changed = False
119 | for key, value in list(data.items()):
120 | updated_at = value.get("updated_at")
121 | stamp: Optional[datetime] = None
122 | if updated_at is not None:
123 | try:
124 | stamp = datetime.fromisoformat(str(updated_at))
125 | except Exception:
126 | stamp = None
127 | if not stamp or stamp < cutoff:
128 | del data[key]
129 | changed = True
130 | if changed:
131 | self._atomic_write(data)
132 |
133 | def entries(self) -> List[LiveStatusEntry]:
134 | """Return all entries sorted by most recent update."""
135 | with self._lock:
136 | data = self._read_all()
137 |
138 | result = [LiveStatusEntry.from_dict({"task_id": key, **value}) for key, value in data.items()]
139 | result.sort(key=lambda entry: entry.updated_at, reverse=True)
140 | return result
141 |
142 | # Internal helpers -----------------------------------------------------------
143 | def _read_all(self) -> Dict[str, Dict[str, Any]]:
144 | if not self.storage_path.exists():
145 | return {}
146 | try:
147 | with self.storage_path.open("r", encoding="utf-8") as fh:
148 | payload = json.load(fh)
149 | if isinstance(payload, dict):
150 | return payload
151 | except json.JSONDecodeError as exc:
152 | logger.warning(f"Corrupted live status file {self.storage_path}: {exc}")
153 | except OSError as exc:
154 | logger.debug(f"Failed to read live status file {self.storage_path}: {exc}")
155 | return {}
156 |
157 | def _atomic_write(self, data: Dict[str, Any]) -> None:
158 | tmp_path = self.storage_path.with_suffix(".tmp")
159 | try:
160 | with tmp_path.open("w", encoding="utf-8") as fh:
161 | json.dump(data, fh, ensure_ascii=False, indent=2)
162 | tmp_path.replace(self.storage_path)
163 | except OSError as exc:
164 | logger.error(f"Failed to persist live status file {self.storage_path}: {exc}")
165 | try:
166 | if tmp_path.exists():
167 | tmp_path.unlink()
168 | except OSError:
169 | pass
--------------------------------------------------------------------------------
/src/sleepless_agent/storage/db_helpers.py:
--------------------------------------------------------------------------------
1 | """Database helper utilities for reducing boilerplate in database operations."""
2 |
3 | from __future__ import annotations
4 |
5 | from typing import Optional, Type, TypeVar, Any, List
6 | from sqlalchemy.orm import Session, Query
7 | from sqlalchemy.sql.expression import BinaryExpression
8 |
9 | from sleepless_agent.storage.sqlite import SQLiteStore
10 | from sleepless_agent.monitoring.logging import get_logger
11 |
12 | logger = get_logger(__name__)
13 |
14 | T = TypeVar("T")
15 |
16 |
17 | class DatabaseHelper(SQLiteStore):
18 | """Enhanced database helper with common query patterns."""
19 |
20 | def query_one(
21 | self,
22 | model: Type[T],
23 | *filters: BinaryExpression,
24 | ) -> Optional[T]:
25 | """Query for a single record.
26 |
27 | Args:
28 | model: The SQLAlchemy model class
29 | *filters: Filter expressions (e.g., Model.id == 1)
30 |
31 | Returns:
32 | Single record or None
33 | """
34 | def _op(session: Session) -> Optional[T]:
35 | query = session.query(model)
36 | for f in filters:
37 | query = query.filter(f)
38 | return query.first()
39 |
40 | return self._run_read(_op)
41 |
42 | def query_all(
43 | self,
44 | model: Type[T],
45 | *filters: BinaryExpression,
46 | order_by: Optional[Any] = None,
47 | limit: Optional[int] = None,
48 | ) -> List[T]:
49 | """Query for multiple records.
50 |
51 | Args:
52 | model: The SQLAlchemy model class
53 | *filters: Filter expressions
54 | order_by: Order by clause
55 | limit: Maximum number of records
56 |
57 | Returns:
58 | List of records
59 | """
60 | def _op(session: Session) -> List[T]:
61 | query = session.query(model)
62 | for f in filters:
63 | query = query.filter(f)
64 | if order_by is not None:
65 | query = query.order_by(order_by)
66 | if limit is not None:
67 | query = query.limit(limit)
68 | return query.all()
69 |
70 | return self._run_read(_op)
71 |
72 | def count(
73 | self,
74 | model: Type[T],
75 | *filters: BinaryExpression,
76 | ) -> int:
77 | """Count records matching filters.
78 |
79 | Args:
80 | model: The SQLAlchemy model class
81 | *filters: Filter expressions
82 |
83 | Returns:
84 | Number of matching records
85 | """
86 | def _op(session: Session) -> int:
87 | query = session.query(model)
88 | for f in filters:
89 | query = query.filter(f)
90 | return query.count()
91 |
92 | return self._run_read(_op)
93 |
94 | def update_one(
95 | self,
96 | model: Type[T],
97 | record_id: Any,
98 | **updates: Any,
99 | ) -> Optional[T]:
100 | """Update a single record by ID.
101 |
102 | Args:
103 | model: The SQLAlchemy model class
104 | record_id: Primary key value
105 | **updates: Fields to update
106 |
107 | Returns:
108 | Updated record or None if not found
109 | """
110 | def _op(session: Session) -> Optional[T]:
111 | record = session.query(model).filter(model.id == record_id).first()
112 | if record:
113 | for key, value in updates.items():
114 | setattr(record, key, value)
115 | session.flush()
116 | return record
117 |
118 | return self._run_write(_op)
119 |
120 | def update_where(
121 | self,
122 | model: Type[T],
123 | filters: List[BinaryExpression],
124 | **updates: Any,
125 | ) -> int:
126 | """Update multiple records matching filters.
127 |
128 | Args:
129 | model: The SQLAlchemy model class
130 | filters: List of filter expressions
131 | **updates: Fields to update
132 |
133 | Returns:
134 | Number of updated records
135 | """
136 | def _op(session: Session) -> int:
137 | query = session.query(model)
138 | for f in filters:
139 | query = query.filter(f)
140 | count = query.update(updates, synchronize_session=False)
141 | return count
142 |
143 | return self._run_write(_op)
144 |
145 | def create(
146 | self,
147 | model: Type[T],
148 | **fields: Any,
149 | ) -> T:
150 | """Create a new record.
151 |
152 | Args:
153 | model: The SQLAlchemy model class
154 | **fields: Field values for the new record
155 |
156 | Returns:
157 | Created record
158 | """
159 | def _op(session: Session) -> T:
160 | record = model(**fields)
161 | session.add(record)
162 | session.flush()
163 | return record
164 |
165 | return self._run_write(_op)
166 |
167 | def delete_one(
168 | self,
169 | model: Type[T],
170 | record_id: Any,
171 | ) -> bool:
172 | """Delete a single record by ID.
173 |
174 | Args:
175 | model: The SQLAlchemy model class
176 | record_id: Primary key value
177 |
178 | Returns:
179 | True if record was deleted, False if not found
180 | """
181 | def _op(session: Session) -> bool:
182 | record = session.query(model).filter(model.id == record_id).first()
183 | if record:
184 | session.delete(record)
185 | return True
186 | return False
187 |
188 | return self._run_write(_op)
189 |
190 | def delete_where(
191 | self,
192 | model: Type[T],
193 | *filters: BinaryExpression,
194 | ) -> int:
195 | """Delete records matching filters.
196 |
197 | Args:
198 | model: The SQLAlchemy model class
199 | *filters: Filter expressions
200 |
201 | Returns:
202 | Number of deleted records
203 | """
204 | def _op(session: Session) -> int:
205 | query = session.query(model)
206 | for f in filters:
207 | query = query.filter(f)
208 | count = query.count()
209 | query.delete(synchronize_session=False)
210 | return count
211 |
212 | return self._run_write(_op)
213 |
214 | def execute_custom_read(
215 | self,
216 | query_builder: Query,
217 | ) -> Any:
218 | """Execute a custom read query.
219 |
220 | Args:
221 | query_builder: SQLAlchemy Query object
222 |
223 | Returns:
224 | Query result
225 | """
226 | def _op(session: Session) -> Any:
227 | # Bind query to session if needed
228 | if hasattr(query_builder, 'with_session'):
229 | query_builder = query_builder.with_session(session)
230 | return query_builder.all()
231 |
232 | return self._run_read(_op)
--------------------------------------------------------------------------------
/src/sleepless_agent/utils/directory_manager.py:
--------------------------------------------------------------------------------
1 | """Directory management utility for consistent directory operations."""
2 |
3 | from __future__ import annotations
4 |
5 | from pathlib import Path
6 | from typing import List, Optional, Union
7 |
8 | from sleepless_agent.monitoring.logging import get_logger
9 |
10 | logger = get_logger(__name__)
11 |
12 |
13 | class DirectoryManager:
14 | """Centralized directory management with consistent error handling."""
15 |
16 | @staticmethod
17 | def ensure_exists(path: Union[str, Path], *, log_creation: bool = False) -> Path:
18 | """Ensure a directory exists, creating it if necessary.
19 |
20 | Args:
21 | path: Directory path (string or Path object)
22 | log_creation: Whether to log directory creation
23 |
24 | Returns:
25 | Path object for the directory
26 |
27 | Raises:
28 | PermissionError: If unable to create directory due to permissions
29 | OSError: If unable to create directory for other reasons
30 | """
31 | dir_path = Path(path) if isinstance(path, str) else path
32 |
33 | try:
34 | existed = dir_path.exists()
35 | dir_path.mkdir(parents=True, exist_ok=True)
36 |
37 | if log_creation and not existed:
38 | logger.debug(f"Created directory: {dir_path}")
39 |
40 | return dir_path
41 |
42 | except PermissionError as exc:
43 | logger.error(f"Permission denied creating directory: {dir_path}")
44 | raise
45 | except OSError as exc:
46 | logger.error(f"Failed to create directory {dir_path}: {exc}")
47 | raise
48 |
49 | @staticmethod
50 | def ensure_multiple(paths: List[Union[str, Path]], *, log_creation: bool = False) -> List[Path]:
51 | """Ensure multiple directories exist.
52 |
53 | Args:
54 | paths: List of directory paths
55 | log_creation: Whether to log directory creation
56 |
57 | Returns:
58 | List of Path objects for the directories
59 |
60 | Raises:
61 | PermissionError: If unable to create any directory due to permissions
62 | OSError: If unable to create any directory for other reasons
63 | """
64 | result_paths = []
65 | for path in paths:
66 | result_paths.append(
67 | DirectoryManager.ensure_exists(path, log_creation=log_creation)
68 | )
69 | return result_paths
70 |
71 | @staticmethod
72 | def ensure_parent_exists(file_path: Union[str, Path], *, log_creation: bool = False) -> Path:
73 | """Ensure the parent directory of a file exists.
74 |
75 | Args:
76 | file_path: Path to a file
77 | log_creation: Whether to log directory creation
78 |
79 | Returns:
80 | Path object for the parent directory
81 |
82 | Raises:
83 | PermissionError: If unable to create parent directory due to permissions
84 | OSError: If unable to create parent directory for other reasons
85 | """
86 | file_path = Path(file_path) if isinstance(file_path, str) else file_path
87 | parent_dir = file_path.parent
88 |
89 | return DirectoryManager.ensure_exists(parent_dir, log_creation=log_creation)
90 |
91 | @staticmethod
92 | def cleanup_empty(path: Union[str, Path]) -> bool:
93 | """Remove a directory if it's empty.
94 |
95 | Args:
96 | path: Directory path
97 |
98 | Returns:
99 | True if directory was removed, False otherwise
100 | """
101 | dir_path = Path(path) if isinstance(path, str) else path
102 |
103 | try:
104 | if dir_path.exists() and dir_path.is_dir():
105 | # Check if directory is empty
106 | if not any(dir_path.iterdir()):
107 | dir_path.rmdir()
108 | logger.debug(f"Removed empty directory: {dir_path}")
109 | return True
110 | return False
111 | except OSError as exc:
112 | logger.warning(f"Failed to remove directory {dir_path}: {exc}")
113 | return False
114 |
115 | @staticmethod
116 | def safe_create_file(file_path: Union[str, Path], content: str = "",
117 | *, overwrite: bool = False) -> Path:
118 | """Safely create a file, ensuring parent directory exists.
119 |
120 | Args:
121 | file_path: Path to the file
122 | content: Initial content for the file
123 | overwrite: Whether to overwrite existing file
124 |
125 | Returns:
126 | Path object for the created file
127 |
128 | Raises:
129 | FileExistsError: If file exists and overwrite is False
130 | PermissionError: If unable to create file due to permissions
131 | OSError: If unable to create file for other reasons
132 | """
133 | file_path = Path(file_path) if isinstance(file_path, str) else file_path
134 |
135 | # Ensure parent directory exists
136 | DirectoryManager.ensure_parent_exists(file_path)
137 |
138 | # Check if file exists
139 | if file_path.exists() and not overwrite:
140 | raise FileExistsError(f"File already exists: {file_path}")
141 |
142 | try:
143 | file_path.write_text(content)
144 | logger.debug(f"Created file: {file_path}")
145 | return file_path
146 | except PermissionError:
147 | logger.error(f"Permission denied creating file: {file_path}")
148 | raise
149 | except OSError as exc:
150 | logger.error(f"Failed to create file {file_path}: {exc}")
151 | raise
152 |
153 | @staticmethod
154 | def get_size(path: Union[str, Path], *, human_readable: bool = False) -> Union[int, str]:
155 | """Get the total size of a directory or file.
156 |
157 | Args:
158 | path: Path to directory or file
159 | human_readable: Return size in human-readable format
160 |
161 | Returns:
162 | Size in bytes (int) or human-readable string
163 | """
164 | path = Path(path) if isinstance(path, str) else path
165 |
166 | if not path.exists():
167 | return "0 B" if human_readable else 0
168 |
169 | total_size = 0
170 |
171 | if path.is_file():
172 | total_size = path.stat().st_size
173 | elif path.is_dir():
174 | for item in path.rglob("*"):
175 | if item.is_file():
176 | total_size += item.stat().st_size
177 |
178 | if human_readable:
179 | return DirectoryManager._format_size(total_size)
180 | return total_size
181 |
182 | @staticmethod
183 | def _format_size(size_bytes: int) -> str:
184 | """Format size in bytes to human-readable format.
185 |
186 | Args:
187 | size_bytes: Size in bytes
188 |
189 | Returns:
190 | Human-readable size string
191 | """
192 | for unit in ["B", "KB", "MB", "GB", "TB"]:
193 | if size_bytes < 1024.0:
194 | return f"{size_bytes:.2f} {unit}"
195 | size_bytes /= 1024.0
196 | return f"{size_bytes:.2f} PB"
--------------------------------------------------------------------------------
/mkdocs.yml:
--------------------------------------------------------------------------------
1 | # MkDocs Configuration for Sleepless Agent
2 |
3 | # Project information
4 | site_name: Sleepless Agent Documentation
5 | site_description: 24/7 AI agent that works while you sleep
6 | site_author: Sleepless Agent Team
7 | site_url: https://docs.sleepless-agent.dev
8 | repo_url: https://github.com/context-machine-lab/sleepless-agent
9 | repo_name: sleepless-agent
10 | edit_uri: edit/main/docs/
11 |
12 | # Copyright
13 | copyright: Copyright © 2024 Sleepless Agent Team
14 |
15 | # Theme configuration
16 | theme:
17 | name: material
18 | language: en
19 |
20 | # Color scheme
21 | palette:
22 | - media: "(prefers-color-scheme: light)"
23 | scheme: default
24 | primary: purple
25 | accent: deep purple
26 | toggle:
27 | icon: material/brightness-7
28 | name: Switch to dark mode
29 | - media: "(prefers-color-scheme: dark)"
30 | scheme: slate
31 | primary: purple
32 | accent: deep purple
33 | toggle:
34 | icon: material/brightness-4
35 | name: Switch to light mode
36 |
37 | # Font
38 | font:
39 | text: Inter
40 | code: JetBrains Mono
41 |
42 | # Features
43 | features:
44 | - announce.dismiss
45 | - content.action.edit
46 | - content.action.view
47 | - content.code.annotate
48 | - content.code.copy
49 | - content.code.select
50 | - content.tabs.link
51 | - content.tooltips
52 | - header.autohide
53 | - navigation.expand
54 | - navigation.footer
55 | - navigation.indexes
56 | - navigation.instant
57 | - navigation.instant.prefetch
58 | - navigation.instant.progress
59 | - navigation.path
60 | - navigation.prune
61 | - navigation.sections
62 | - navigation.tabs
63 | - navigation.tabs.sticky
64 | - navigation.top
65 | - navigation.tracking
66 | - search.highlight
67 | - search.share
68 | - search.suggest
69 | - toc.follow
70 | - toc.integrate
71 |
72 | # Icons
73 | icon:
74 | repo: fontawesome/brands/github
75 | logo: material/robot-industrial
76 |
77 | # Navigation
78 | nav:
79 | - Home:
80 | - index.md
81 | - Quickstart: quickstart.md
82 | - Installation: installation.md
83 | - FAQ: faq.md
84 | - Troubleshooting: troubleshooting.md
85 | - Changelog: changelog.md
86 |
87 | - Concepts:
88 | - concepts/index.md
89 | - Architecture: concepts/architecture.md
90 | - Task Lifecycle: concepts/task-lifecycle.md
91 | - Workspace Isolation: concepts/workspace-isolation.md
92 | - Scheduling: concepts/scheduling.md
93 | - Pro Plan Management: concepts/pro-plan-management.md
94 |
95 | - Guides:
96 | - guides/index.md
97 | - Slack Setup: guides/slack-setup.md
98 | - Environment Setup: guides/environment-setup.md
99 | - Git Integration: guides/git-integration.md
100 | - Project Management: guides/project-management.md
101 | - Custom Prompts: guides/custom-prompts.md
102 | - Deployment: guides/deployment.md
103 |
104 | - Tutorials:
105 | - tutorials/index.md
106 | - First Task: tutorials/first-task.md
107 | - Slack Workflows: tutorials/slack-workflows.md
108 | - Monitoring Tasks: tutorials/monitoring-tasks.md
109 | - Daily Reports: tutorials/daily-reports.md
110 | - Workspace Management: tutorials/workspace-management.md
111 |
112 | - Reference:
113 | - reference/index.md
114 | - API:
115 | - CLI Commands: reference/api/cli-commands.md
116 | - Slack Commands: reference/api/slack-commands.md
117 | - Python API: reference/api/python-api.md
118 | - Configuration: reference/configuration.md
119 | - Database Schema: reference/database-schema.md
120 | - Environment Variables: reference/environment-variables.md
121 | - Error Codes: reference/error-codes.md
122 |
123 | - Examples:
124 | - examples/index.md
125 | - Basic Usage: examples/basic-usage.md
126 | - Slack Integration: examples/slack-integration.md
127 | - Advanced Workflows: examples/advanced-workflows.md
128 | - Custom Executors: examples/custom-executors.md
129 | - Monitoring Scripts: examples/monitoring-scripts.md
130 |
131 | # Plugins
132 | plugins:
133 | - search:
134 | separator: '[\s\-\_\.]+'
135 | lang:
136 | - en
137 | - minify:
138 | minify_html: true
139 | minify_js: true
140 | minify_css: true
141 | htmlmin_opts:
142 | remove_comments: true
143 | - git-revision-date-localized:
144 | enable_creation_date: true
145 | type: iso_datetime
146 |
147 | # Markdown extensions
148 | markdown_extensions:
149 | # Python Markdown
150 | - abbr
151 | - admonition
152 | - attr_list
153 | - def_list
154 | - footnotes
155 | - md_in_html
156 | - toc:
157 | permalink: true
158 | toc_depth: 3
159 | - tables
160 | - meta
161 |
162 | # Python Markdown Extensions
163 | - pymdownx.arithmatex:
164 | generic: true
165 | - pymdownx.betterem:
166 | smart_enable: all
167 | - pymdownx.caret
168 | - pymdownx.critic
169 | - pymdownx.details
170 | - pymdownx.emoji:
171 | emoji_index: !!python/name:material.extensions.emoji.twemoji
172 | emoji_generator: !!python/name:material.extensions.emoji.to_svg
173 | - pymdownx.highlight:
174 | anchor_linenums: true
175 | line_spans: __span
176 | pygments_lang_class: true
177 | - pymdownx.inlinehilite
178 | - pymdownx.keys
179 | - pymdownx.magiclink:
180 | normalize_issue_symbols: true
181 | repo_url_shorthand: true
182 | user: context-machine-lab
183 | repo: sleepless-agent
184 | - pymdownx.mark
185 | - pymdownx.smartsymbols
186 | - pymdownx.snippets
187 | - pymdownx.superfences:
188 | custom_fences:
189 | - name: mermaid
190 | class: mermaid
191 | format: !!python/name:pymdownx.superfences.fence_code_format
192 | - pymdownx.tabbed:
193 | alternate_style: true
194 | combine_header_slug: true
195 | - pymdownx.tasklist:
196 | custom_checkbox: true
197 | - pymdownx.tilde
198 |
199 | # Extra CSS (can add custom styles later)
200 | # extra_css:
201 | # - stylesheets/extra.css
202 |
203 | # Extra JavaScript (can add scripts later)
204 | # extra_javascript:
205 | # - javascripts/extra.js
206 |
207 | # Extra configuration
208 | extra:
209 | # Social links
210 | social:
211 | - icon: fontawesome/brands/github
212 | link: https://github.com/context-machine-lab/sleepless-agent
213 | name: GitHub
214 | - icon: fontawesome/brands/discord
215 | link: https://discord.gg/74my3Wkn
216 | name: Discord
217 | - icon: fontawesome/brands/python
218 | link: https://pypi.org/project/sleepless-agent/
219 | name: PyPI
220 |
221 | # Version
222 | version:
223 | provider: mike
224 | default: stable
225 |
226 | # Analytics
227 | analytics:
228 | provider: google
229 | property: G-XXXXXXXXXX
230 | feedback:
231 | title: Was this page helpful?
232 | ratings:
233 | - icon: material/thumb-up-outline
234 | name: This page was helpful
235 | data: 1
236 | note: >-
237 | Thanks for your feedback!
238 | - icon: material/thumb-down-outline
239 | name: This page could be improved
240 | data: 0
241 | note: >-
242 | Thanks for your feedback! Help us improve by
243 | opening an issue.
244 |
245 | # Consent
246 | # consent:
247 | # title: Cookie consent
248 | # description: >-
249 | # We use cookies to recognize your repeated visits and preferences, as well
250 | # as to measure the effectiveness of our documentation and whether users
251 | # find what they're searching for. With your consent, you're helping us to
252 | # make our documentation better.
253 | # actions:
254 | # - accept
255 | # - reject
256 | # - manage
257 |
258 | # Status
259 | status:
260 | new: Recently added
261 | deprecated: Deprecated
--------------------------------------------------------------------------------
/src/sleepless_agent/core/models.py:
--------------------------------------------------------------------------------
1 | """SQLAlchemy models for task queue and results"""
2 |
3 | from datetime import datetime
4 | from enum import Enum
5 | from typing import Optional
6 |
7 | from sqlalchemy import Column, DateTime, Enum as SQLEnum, Index, Integer, String, Text, create_engine
8 | from sqlalchemy.engine import Engine
9 | from sqlalchemy.ext.declarative import declarative_base
10 | from sqlalchemy.orm import Session
10 |
11 | Base = declarative_base()
12 |
13 |
14 | class TaskPriority(str, Enum):
15 | """Task priority levels"""
16 | THOUGHT = "thought" # Low priority, experimental
17 | SERIOUS = "serious" # High priority, needs completion
18 | GENERATED = "generated" # Auto-generated backlog filler
19 |
20 |
21 | class TaskStatus(str, Enum):
22 | """Task status states"""
23 | PENDING = "pending"
24 | IN_PROGRESS = "in_progress"
25 | COMPLETED = "completed"
26 | FAILED = "failed"
27 | CANCELLED = "cancelled"
28 |
29 |
30 | class TaskType(str, Enum):
31 | """Task type: NEW (build from scratch) vs REFINE (improve existing code)"""
32 | NEW = "new" # Create new functionality in empty workspace
33 | REFINE = "refine" # Improve existing code (workspace pre-populated with source)
34 |
35 |
36 | class Task(Base):
37 | """Task queue model"""
38 | __tablename__ = "tasks"
39 |
40 | id = Column(Integer, primary_key=True, autoincrement=True)
41 | description = Column(Text, nullable=False)
42 | priority = Column(
43 | SQLEnum(
44 | TaskPriority,
45 | native_enum=False,
46 | validate_strings=True,
47 | create_constraint=False,
48 | ),
49 | default=TaskPriority.THOUGHT,
50 | nullable=False,
51 | )
52 | task_type = Column(
53 | SQLEnum(
54 | TaskType,
55 | native_enum=False,
56 | validate_strings=True,
57 | create_constraint=False,
58 | ),
59 | default=TaskType.NEW,
60 | nullable=True, # Nullable for backwards compatibility
61 | )
62 | status = Column(SQLEnum(TaskStatus), default=TaskStatus.PENDING, nullable=False)
63 |
64 | # Timing
65 | created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
66 | started_at = Column(DateTime, nullable=True)
67 | completed_at = Column(DateTime, nullable=True)
68 | deleted_at = Column(DateTime, nullable=True)
69 |
70 | # Execution details
71 | attempt_count = Column(Integer, default=0, nullable=False)
72 | error_message = Column(Text, nullable=True)
73 | result_id = Column(Integer, nullable=True) # Reference to Result
74 |
75 | # Metadata
76 | context = Column(Text, nullable=True) # JSON with additional context
77 | assigned_to = Column(String(255), nullable=True) # Slack user ID
78 | slack_thread_ts = Column(String(255), nullable=True) # Slack thread timestamp for updates
79 |
80 | # Project grouping - tasks with same project_id share workspace and context
81 | project_id = Column(String(255), nullable=True) # Project identifier for context sharing
82 | project_name = Column(String(255), nullable=True) # Human-readable project name
83 |
84 | def __repr__(self):
85 | return f""
86 |
87 | # Define indexes for query optimization
88 | __table_args__ = (
89 | # Single-column indexes for frequently filtered columns
90 | Index('ix_task_status', 'status'),
91 | Index('ix_task_project_id', 'project_id'),
92 | Index('ix_task_created_at', 'created_at'),
93 | Index('ix_task_type', 'task_type'),
94 |
95 | # Composite indexes for common query patterns
96 | # Optimizes get_projects() and project-specific queries
97 | Index('ix_task_project_status', 'project_id', 'status'),
98 |
99 | # Optimizes get_pending_tasks() with status filter + created_at ordering
100 | Index('ix_task_status_created', 'status', 'created_at'),
101 |
102 | # Optimizes filtering by task type and status
103 | Index('ix_task_type_status', 'task_type', 'status'),
104 | )
105 |
106 |
107 | class Result(Base):
108 | """Stores results from completed tasks"""
109 | __tablename__ = "results"
110 |
111 | id = Column(Integer, primary_key=True, autoincrement=True)
112 | task_id = Column(Integer, nullable=False)
113 |
114 | # Results
115 | output = Column(Text, nullable=True) # Main output/response
116 | files_modified = Column(Text, nullable=True) # JSON list of modified files
117 | commands_executed = Column(Text, nullable=True) # JSON list of executed commands
118 |
119 | # Git integration
120 | git_commit_sha = Column(String(40), nullable=True)
121 | git_pr_url = Column(String(512), nullable=True)
122 | git_branch = Column(String(255), nullable=True)
123 |
124 | # Workspace
125 | workspace_path = Column(String(512), nullable=True) # Path to isolated task workspace
126 |
127 | # Metadata
128 | processing_time_seconds = Column(Integer, nullable=True)
129 | created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
130 |
131 | def __repr__(self):
132 | return f""
133 |
134 |
135 | class UsageMetric(Base):
136 | """Track API usage and costs for budget management"""
137 | __tablename__ = "usage_metrics"
138 |
139 | id = Column(Integer, primary_key=True, autoincrement=True)
140 | task_id = Column(Integer, nullable=False)
141 |
142 | # API usage details
143 | total_cost_usd = Column(Text, nullable=True) # Stored as text to preserve precision
144 | duration_ms = Column(Integer, nullable=True) # Total duration in milliseconds
145 | duration_api_ms = Column(Integer, nullable=True) # API call duration
146 | num_turns = Column(Integer, nullable=True) # Number of conversation turns
147 |
148 | # Timing
149 | created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
150 |
151 | # Project tracking
152 | project_id = Column(String(255), nullable=True) # Link to project for aggregation
153 |
154 | def __repr__(self):
155 | return f""
156 |
157 |
158 | class GenerationHistory(Base):
159 | """Track auto-generated tasks and their originating prompt archetype."""
160 | __tablename__ = "generation_history"
161 |
162 | id = Column(Integer, primary_key=True, autoincrement=True)
163 | task_id = Column(Integer, nullable=False) # Reference to created Task
164 | source = Column(String(50), nullable=False) # Prompt name or generation source label
165 | usage_percent_at_generation = Column(Integer, nullable=False) # Budget usage % when generated
166 | source_metadata = Column(Text, nullable=True) # JSON with source-specific info
167 | created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
168 |
169 | def __repr__(self):
170 | return f""
171 |
172 |
173 | class TaskPool(Base):
174 | """Predefined pool of tasks for auto-generation"""
175 | __tablename__ = "task_pool"
176 |
177 | id = Column(Integer, primary_key=True, autoincrement=True)
178 | description = Column(Text, nullable=False)
179 | priority = Column(
180 | SQLEnum(
181 | TaskPriority,
182 | native_enum=False,
183 | validate_strings=True,
184 | create_constraint=False,
185 | ),
186 | default=TaskPriority.THOUGHT,
187 | nullable=False,
188 | )
189 | category = Column(String(100), nullable=True)
190 | used = Column(Integer, default=0, nullable=False)
191 | project_id = Column(String(255), nullable=True)
192 | created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
193 |
194 | def __repr__(self):
195 | return f""
196 |
197 |
198 | def init_db(db_path: str) -> Session:
199 | """Initialize database and return session"""
200 | engine = create_engine(f"sqlite:///{db_path}", echo=False, future=True)
201 | Base.metadata.create_all(engine)
202 | return engine
203 |
--------------------------------------------------------------------------------
/docs/installation.md:
--------------------------------------------------------------------------------
1 | # Installation Guide
2 |
3 | This guide covers all installation methods and platform-specific setup instructions for Sleepless Agent.
4 |
5 | ## System Requirements
6 |
7 | ### Minimum Requirements
8 |
9 | - **Python**: 3.11 or higher
10 | - **Node.js**: 16.0 or higher (for Claude Code CLI)
11 | - **Memory**: 2GB RAM minimum, 4GB recommended
12 | - **Storage**: 1GB free space for workspace
13 | - **OS**: Linux, macOS, or Windows (with WSL2)
14 |
15 | ### Required Software
16 |
17 | - **Claude Code CLI**: The AI execution engine
18 | - **Git**: For version control (optional but recommended)
19 | - **SQLite3**: Usually pre-installed with Python
20 | - **Slack Account**: With workspace admin access
21 |
22 | ## Installation Methods
23 |
24 | ### Method 1: Install from PyPI (Recommended)
25 |
26 | ```bash
27 | # Install the latest stable version
28 | pip install sleepless-agent
29 |
30 | # Verify installation
31 | sle --version
32 | ```
33 |
34 | ### Method 2: Install from Source
35 |
36 | ```bash
37 | # Clone the repository
38 | git clone https://github.com/context-machine-lab/sleepless-agent
39 | cd sleepless-agent
40 |
41 | # Create virtual environment (recommended)
42 | python -m venv venv
43 | source venv/bin/activate # On Windows: venv\Scripts\activate
44 |
45 | # Install in development mode
46 | pip install -e .
47 |
48 | # Verify installation
49 | sle --version
50 | ```
51 |
52 | ### Method 3: Docker Installation
53 |
54 | ```bash
55 | # Pull the Docker image
56 | docker pull sleeplessagent/sleepless-agent:latest
57 |
58 | # Run with environment variables
59 | docker run -d \
60 | --name sleepless-agent \
61 | -e SLACK_BOT_TOKEN=xoxb-... \
62 | -e SLACK_APP_TOKEN=xapp-... \
63 | -v $(pwd)/workspace:/workspace \
64 | sleeplessagent/sleepless-agent
65 | ```
66 |
67 | ## Platform-Specific Setup
68 |
69 | ### macOS
70 |
71 | ```bash
72 | # Install Python 3.11+ via Homebrew
73 | brew install python@3.11
74 |
75 | # Install Node.js for Claude Code CLI
76 | brew install node
77 |
78 | # Install Claude Code CLI
79 | npm install -g @anthropic-ai/claude-code
80 |
81 | # Install Sleepless Agent
82 | pip install sleepless-agent
83 | ```
84 |
85 | ### Ubuntu/Debian
86 |
87 | ```bash
88 | # Update package list
89 | sudo apt update
90 |
91 | # Install Python 3.11
92 | sudo apt install python3.11 python3.11-venv python3-pip
93 |
94 | # Install Node.js
95 | curl -fsSL https://deb.nodesource.com/setup_lts.x | sudo -E bash -
96 | sudo apt install nodejs
97 |
98 | # Install Claude Code CLI
99 | sudo npm install -g @anthropic-ai/claude-code
100 |
101 | # Install Sleepless Agent
102 | pip install sleepless-agent
103 | ```
104 |
105 | ### Windows (WSL2)
106 |
107 | ```bash
108 | # Inside WSL2 Ubuntu
109 | # Follow Ubuntu instructions above
110 |
111 | # Or use native Windows with Python
112 | # Install Python from python.org
113 | # Install Node.js from nodejs.org
114 |
115 | # In PowerShell (as Administrator)
116 | npm install -g @anthropic-ai/claude-code
117 | pip install sleepless-agent
118 | ```
119 |
120 | ## Claude Code CLI Setup
121 |
122 | ### Install Claude Code CLI
123 |
124 | ```bash
125 | # Install globally
126 | npm install -g @anthropic-ai/claude-code
127 |
128 | # Verify installation
129 | claude --version
130 | ```
131 |
132 | ### Authenticate Claude Code
133 |
134 | ```bash
135 | # Login to Claude Code (opens browser)
136 | claude login
137 |
138 | # Verify authentication
139 | claude /usage
140 | ```
141 |
142 | ## Environment Configuration
143 |
144 | ### Create Environment File
145 |
146 | ```bash
147 | # Copy example configuration
148 | cp .env.example .env
149 |
150 | # Edit with your tokens
151 | nano .env # or your preferred editor
152 | ```
153 |
154 | ### Required Environment Variables
155 |
156 | ```bash
157 | # Slack Configuration (Required)
158 | SLACK_BOT_TOKEN=xoxb-your-bot-token
159 | SLACK_APP_TOKEN=xapp-your-app-token
160 |
161 | # Agent Configuration (Optional)
162 | AGENT_WORKSPACE=./workspace
163 | AGENT_DB_PATH=./workspace/data/tasks.db
164 | AGENT_RESULTS_PATH=./workspace/data/results
165 |
166 | # Git Configuration (Optional)
167 | GIT_USER_NAME=Sleepless Agent
168 | GIT_USER_EMAIL=agent@sleepless.local
169 |
170 | # Logging Configuration (Optional)
171 | SLEEPLESS_LOG_LEVEL=INFO
172 | SLEEPLESS_LOG_DIR=workspace/.logs
173 | ```
174 |
175 | ## Configuration File Setup
176 |
177 | ### Default Configuration
178 |
179 | The agent uses `config.yaml` for runtime settings:
180 |
181 | ```bash
182 | # Create or modify config.yaml
183 | cp src/sleepless_agent/config.yaml ./config.yaml
184 | ```
185 |
186 | ### Key Configuration Options
187 |
188 | ```yaml
189 | claude_code:
190 | model: claude-sonnet-4-5-20250929
191 | night_start_hour: 20 # 8 PM
192 | night_end_hour: 8 # 8 AM
193 | threshold_day: 20.0 # Day usage limit
194 | threshold_night: 80.0 # Night usage limit
195 |
196 | agent:
197 | workspace_root: ./workspace
198 | task_timeout_seconds: 1800
199 |
200 | git:
201 | use_remote_repo: true
202 | remote_repo_url: git@github.com:yourusername/yourrepo.git
203 | auto_create_repo: true
204 | ```
205 |
206 | ## Post-Installation Setup
207 |
208 | ### 1. Initialize Workspace
209 |
210 | ```bash
211 | # Create workspace structure
212 | sle init
213 |
214 | # This creates:
215 | # workspace/
216 | # ├── data/
217 | # │ ├── tasks.db
218 | # │ ├── results/
219 | # │ └── reports/
220 | # ├── tasks/
221 | # └── projects/
222 | ```
223 |
224 | ### 2. Test Installation
225 |
226 | ```bash
227 | # Check system status
228 | sle check
229 |
230 | # Run a test task
231 | sle think "Test task - verify installation"
232 |
233 | # Check daemon status
234 | sle daemon --test
235 | ```
236 |
237 | ### 3. Set Up Git (Optional)
238 |
239 | ```bash
240 | # Configure Git user
241 | git config --global user.name "Sleepless Agent"
242 | git config --global user.email "agent@sleepless.local"
243 |
244 | # Authenticate GitHub CLI (for PR creation)
245 | gh auth login
246 | ```
247 |
248 | ### 4. Configure Slack App
249 |
250 | Follow the [Slack Setup Guide](guides/slack-setup.md) to:
251 | - Create your Slack application
252 | - Configure slash commands
253 | - Set up permissions
254 | - Install to workspace
255 |
256 | ## Upgrading
257 |
258 | ### Upgrade from PyPI
259 |
260 | ```bash
261 | # Upgrade to latest version
262 | pip install --upgrade sleepless-agent
263 |
264 | # Check new version
265 | sle --version
266 | ```
267 |
268 | ### Upgrade from Source
269 |
270 | ```bash
271 | # Pull latest changes
272 | cd sleepless-agent
273 | git pull origin main
274 |
275 | # Reinstall
276 | pip install -e . --upgrade
277 | ```
278 |
279 | ## Troubleshooting Installation
280 |
281 | ### Python Version Issues
282 |
283 | ```bash
284 | # Check Python version
285 | python --version
286 |
287 | # If version < 3.11, install pyenv
288 | curl https://pyenv.run | bash
289 | pyenv install 3.11.0
290 | pyenv global 3.11.0
291 | ```
292 |
293 | ### Node.js/npm Issues
294 |
295 | ```bash
296 | # Check Node version
297 | node --version
298 | npm --version
299 |
300 | # If missing or old, use nvm
301 | curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.0/install.sh | bash
302 | nvm install --lts
303 | nvm use --lts
304 | ```
305 |
306 | ### Permission Issues
307 |
308 | ```bash
309 | # Fix workspace permissions
310 | chmod -R 755 workspace/
311 |
312 | # Fix global npm permissions
313 | mkdir ~/.npm-global
314 | npm config set prefix '~/.npm-global'
315 | echo 'export PATH=~/.npm-global/bin:$PATH' >> ~/.bashrc
316 | source ~/.bashrc
317 | ```
318 |
319 | ### Database Issues
320 |
321 | ```bash
322 | # Reset database if corrupted
323 | rm workspace/data/tasks.db
324 | sle init # Recreates database
325 | ```
326 |
327 | ## Verification Checklist
328 |
329 | After installation, verify:
330 |
331 | - [ ] `sle --version` shows correct version
332 | - [ ] `claude --version` shows Claude Code CLI version
333 | - [ ] `sle check` runs without errors
334 | - [ ] Workspace directory exists and is writable
335 | - [ ] `.env` file contains valid Slack tokens
336 | - [ ] Slack app responds to test command
337 |
338 | ## Next Steps
339 |
340 | - 📱 Complete [Slack Setup](guides/slack-setup.md)
341 | - 🚀 Follow the [Quickstart Guide](quickstart.md)
342 | - 🎓 Try your [First Task](tutorials/first-task.md)
343 | - ⚙️ Configure [Environment Variables](reference/environment-variables.md)
344 |
345 | ## Getting Help
346 |
347 | If you encounter issues:
348 |
349 | 1. Check [Troubleshooting Guide](troubleshooting.md)
350 | 2. Search [existing issues](https://github.com/context-machine-lab/sleepless-agent/issues)
351 | 3. Join our [Discord](https://discord.gg/74my3Wkn)
352 | 4. Open a [new issue](https://github.com/context-machine-lab/sleepless-agent/issues/new)
--------------------------------------------------------------------------------
/docs/faq.md:
--------------------------------------------------------------------------------
1 | # Frequently Asked Questions
2 |
3 | ## General Questions
4 |
5 | ### What is Sleepless Agent?
6 |
7 | Sleepless Agent is a 24/7 AI automation system that uses your Claude Code Pro subscription to process tasks autonomously. It runs as a daemon, accepts tasks via Slack, and manages isolated workspaces for parallel execution.
8 |
9 | ### Why was Sleepless Agent created?
10 |
11 | Most Claude Code Pro subscriptions are underutilized, especially during night hours. Sleepless Agent maximizes your subscription value by processing tasks 24/7, turning idle compute time into productive work.
12 |
13 | ### How is this different from other AI agents?
14 |
15 | - **24/7 Operation**: Runs continuously as a daemon
16 | - **Slack-Native**: Direct integration with your team's workflow
17 | - **Workspace Isolation**: Each task runs in its own environment
18 | - **Usage Optimization**: Intelligent Pro plan management
19 | - **Git Integration**: Automated commits and PRs
20 |
21 | ## Setup & Installation
22 |
23 | ### What are the system requirements?
24 |
25 | - Python 3.11 or higher
26 | - Node.js 16+ (for Claude Code CLI)
27 | - 2GB RAM minimum
28 | - 1GB free disk space
29 | - Linux, macOS, or Windows with WSL2
30 |
31 | ### Do I need a Claude API key?
32 |
33 | No! Sleepless Agent uses Claude Code CLI, which handles authentication through your Claude Code Pro subscription. You just need to run `claude login` once.
34 |
35 | ### Can I use this without Slack?
36 |
37 | Yes! The CLI interface (`sle`) provides full functionality without Slack. Slack integration is optional but recommended for team collaboration.
38 |
39 | ### How do I update Sleepless Agent?
40 |
41 | ```bash
42 | # From PyPI
43 | pip install --upgrade sleepless-agent
44 |
45 | # From source
46 | cd sleepless-agent
47 | git pull
48 | pip install -e . --upgrade
49 | ```
50 |
51 | ## Usage & Features
52 |
53 | ### What's the difference between "random thoughts" and "serious tasks"?
54 |
55 | - **Random Thoughts** (`/think`): Auto-committed to `thought-ideas` branch, no review needed
56 | - **Serious Tasks** (`/think -p project`): Creates feature branches and PRs, requires review
57 |
58 | ### How does workspace isolation work?
59 |
60 | Each task runs in its own directory (`workspace/tasks/task_<id>/`), preventing conflicts and allowing true parallel execution. Tasks can only access their workspace and shared resources.
61 |
62 | ### Can multiple tasks run simultaneously?
63 |
64 | Yes! Tasks execute in parallel within isolated workspaces. The scheduler manages concurrency based on system resources and Claude usage limits.
65 |
66 | ### How are tasks prioritized?
67 |
68 | Tasks are scheduled based on:
69 | 1. Priority level (serious > random)
70 | 2. Project association
71 | 3. Creation time (FIFO within priority)
72 | 4. Available Claude usage quota
73 |
74 | ## Claude Code & Usage
75 |
76 | ### How does Pro plan usage management work?
77 |
78 | The agent monitors usage via `claude /usage` and automatically pauses at configurable thresholds:
79 | - **Daytime (8 AM - 8 PM)**: Pauses at 20% to preserve manual usage
80 | - **Nighttime (8 PM - 8 AM)**: Pauses at 80% for maximum automation
81 |
82 | ### What happens when usage limits are reached?
83 |
84 | 1. New task generation pauses
85 | 2. Running tasks complete normally
86 | 3. Pending tasks wait in queue
87 | 4. Agent resumes after 5-hour window reset
88 |
89 | ### Can I adjust usage thresholds?
90 |
91 | Yes, in `config.yaml`:
92 | ```yaml
93 | claude_code:
94 | threshold_day: 20.0 # Daytime limit
95 | threshold_night: 80.0 # Nighttime limit
96 | ```
97 |
98 | ### How do I check current usage?
99 |
100 | - Via CLI: `sle check`
101 | - Via Slack: `/check`
102 | - In logs: Look for "Pro Usage" entries
103 |
104 | ## Slack Integration
105 |
106 | ### Which Slack permissions are required?
107 |
108 | Minimum required scopes:
109 | - `chat:write` - Send messages
110 | - `commands` - Receive slash commands
111 | - `app_mentions:read` - Respond to mentions
112 |
113 | ### Can I use custom slash commands?
114 |
115 | Yes! You can add custom commands in your Slack app configuration and handle them in the bot code.
116 |
117 | ### Why isn't the bot responding?
118 |
119 | Check:
120 | 1. Socket Mode is enabled in Slack app settings
121 | 2. Both tokens (`xoxb-` and `xapp-`) are correct in `.env`
122 | 3. Bot is invited to the channel
123 | 4. Agent daemon is running (`sle daemon`)
124 |
125 | ### How do I use the bot in private channels?
126 |
127 | Invite the bot to the private channel using `/invite @sleepless-agent`
128 |
129 | ## Task Management
130 |
131 | ### How do I cancel a running task?
132 |
133 | ```bash
134 | # CLI
135 | sle cancel <task-id>
136 |
137 | # Slack
138 | /cancel <task-id>
139 | ```
140 |
141 | ### Can I restore cancelled tasks?
142 |
143 | Yes, cancelled tasks go to trash and can be restored:
144 | ```bash
145 | # CLI
146 | sle trash restore <task-id>
147 |
148 | # Slack
149 | /trash restore <task-id>
150 | ```
151 |
152 | ### How long are task results kept?
153 |
154 | Task results are stored indefinitely in `workspace/data/results/`. You can manually clean old results if needed.
155 |
156 | ### How do I see task history?
157 |
158 | ```bash
159 | # Today's tasks
160 | sle report
161 |
162 | # Specific task
163 | sle report <task-id>
164 |
165 | # All reports
166 | sle report --list
167 | ```
168 |
169 | ## Git Integration
170 |
171 | ### Is Git integration required?
172 |
173 | No, Git integration is optional. Without it, tasks still execute but won't create commits or PRs.
174 |
175 | ### How do I set up GitHub authentication?
176 |
177 | ```bash
178 | # Install GitHub CLI
179 | brew install gh # or appropriate for your OS
180 |
181 | # Authenticate
182 | gh auth login
183 | ```
184 |
185 | ### Can I use a different Git provider?
186 |
187 | Yes, the system works with any Git provider. Configure the remote URL in `config.yaml`.
188 |
189 | ### How are commits structured?
190 |
191 | - Random thoughts: Committed to `thought-ideas` branch
192 | - Serious tasks: Create feature branches like `feature/task-description`
193 | - Commit messages include task metadata
194 |
195 | ## Troubleshooting
196 |
197 | ### Tasks aren't executing
198 |
199 | 1. Check Claude Code CLI: `claude --version`
200 | 2. Verify login: `claude /usage`
201 | 3. Check usage limits: `sle check`
202 | 4. Review logs: `tail -f workspace/data/agent.log`
203 |
204 | ### Database is locked
205 |
206 | ```bash
207 | # Stop the daemon
208 | pkill -f "sle daemon"
209 |
210 | # Reset database
211 | rm workspace/data/tasks.db
212 | sle init
213 | ```
214 |
215 | ### High memory usage
216 |
217 | - Reduce concurrent tasks in config
218 | - Clear old task workspaces
219 | - Check for memory leaks in custom executors
220 |
221 | ### Agent crashes frequently
222 |
223 | 1. Check system resources
224 | 2. Review error logs
225 | 3. Increase task timeout
226 | 4. Disable problematic features
227 |
228 | ## Advanced Usage
229 |
230 | ### Can I create custom executors?
231 |
232 | Yes! Extend the `BaseExecutor` class and register it in the configuration.
233 |
234 | ### How do I integrate with other tools?
235 |
236 | - Use the Python API for programmatic access
237 | - Create custom Slack commands
238 | - Extend the task processor
239 | - Add webhook notifications
240 |
241 | ### Can I run multiple agents?
242 |
243 | Yes, with separate:
244 | - Workspace directories
245 | - Database files
246 | - Slack apps
247 | - Configuration files
248 |
249 | ### Is there an API?
250 |
251 | Yes, the Python package exposes a full API. See [API Reference](reference/api/python-api.md).
252 |
253 | ## Security & Privacy
254 |
255 | ### Is my code secure?
256 |
257 | - Tasks run in isolated workspaces
258 | - No external data transmission (except Git if configured)
259 | - All processing happens locally
260 | - Slack tokens are never logged
261 |
262 | ### Can I use this with sensitive projects?
263 |
264 | Yes, with precautions:
265 | - Run on secure infrastructure
266 | - Use private Git repositories
267 | - Configure strict access controls
268 | - Review generated code before deployment
269 |
270 | ### What data is collected?
271 |
272 | None. Sleepless Agent:
273 | - Runs entirely locally
274 | - No telemetry or analytics
275 | - No external API calls (except Claude Code and Slack)
276 | - All data stays in your workspace
277 |
278 | ## Getting Help
279 |
280 | ### Where can I get support?
281 |
282 | 1. [Documentation](index.md)
283 | 2. [Discord Community](https://discord.gg/74my3Wkn)
284 | 3. [GitHub Issues](https://github.com/context-machine-lab/sleepless-agent/issues)
285 | 4. [Troubleshooting Guide](troubleshooting.md)
286 |
287 | ### How do I report bugs?
288 |
289 | Open an issue on [GitHub](https://github.com/context-machine-lab/sleepless-agent/issues) with:
290 | - System information
291 | - Error messages
292 | - Steps to reproduce
293 | - Log excerpts
294 |
295 | ### Can I contribute?
296 |
297 | Yes! We welcome contributions. See [Contributing Guide](https://github.com/context-machine-lab/sleepless-agent/blob/main/CONTRIBUTING.md).
298 |
299 | ### Is commercial use allowed?
300 |
301 | Yes, Sleepless Agent is MIT licensed. See [LICENSE](https://github.com/context-machine-lab/sleepless-agent/blob/main/LICENSE) for details.
--------------------------------------------------------------------------------
/src/sleepless_agent/monitoring/logging.py:
--------------------------------------------------------------------------------
1 | """Structured logging setup for Sleepless Agent.
2 |
3 | This module configures a Rich-powered console logger alongside a JSONL file
4 | sink and exposes helpers for creating context-aware structlog loggers.
5 | """
6 |
7 | from __future__ import annotations
8 |
9 | import logging
10 | import os
11 | import sys
12 | import time
13 | from datetime import datetime
14 | from pathlib import Path
15 | from typing import Any, Dict, Iterable, Optional
16 |
17 | import structlog
18 | try:
19 | from structlog.contextvars import merge_contextvars
20 | except ModuleNotFoundError: # pragma: no cover - fallback for older structlog
21 | def merge_contextvars(
22 | logger: Any,
23 | name: str,
24 | event_dict: Dict[str, Any],
25 | ) -> Dict[str, Any]:
26 | return event_dict
27 | from rich.console import Console
28 |
29 | __all__ = [
30 | "configure_logging",
31 | "get_logger",
32 | "logger",
33 | ]
34 |
35 | _CONFIGURED = False
36 | _CONSOLE = Console(soft_wrap=True, stderr=True)
37 | _DEFAULT_LOG_DIR = Path(os.getenv("SLEEPLESS_LOG_DIR", "workspace/.logs"))
38 |
39 | _LEVEL_STYLES: Dict[str, str] = {
40 | "CRITICAL": "bold white on red",
41 | "ERROR": "bold red",
42 | "WARNING": "bold yellow",
43 | "SUCCESS": "bold green",
44 | "INFO": "bold blue",
45 | "DEBUG": "dim cyan",
46 | "NOTSET": "dim",
47 | }
48 |
49 | _LEVEL_ICONS: Dict[str, str] = {
50 | "CRITICAL": "✗",
51 | "ERROR": "✗",
52 | "WARNING": "⚠",
53 | "SUCCESS": "✓",
54 | "INFO": "ℹ",
55 | "DEBUG": "⚙",
56 | "NOTSET": "·",
57 | }
58 |
59 |
60 | class DedupFilter(logging.Filter):
61 | """Collapse duplicate log lines emitted within a cooldown window."""
62 |
63 | def __init__(self, cooldown_seconds: float = 1.0) -> None:
64 | super().__init__()
65 | self.cooldown_seconds = cooldown_seconds
66 | self._history: Dict[tuple[str, int, str], float] = {}
67 |
68 | def filter(self, record: logging.LogRecord) -> bool: # pragma: no cover - simple heuristic
69 | now = time.monotonic()
70 | event = getattr(record, "event", None) or getattr(record, "msg", "")
71 | key = (record.name, record.levelno, str(event))
72 | last = self._history.get(key)
73 | if last is not None and (now - last) <= self.cooldown_seconds:
74 | return False
75 | self._history[key] = now
76 | return True
77 |
78 |
class ThirdPartyFilter(logging.Filter):
    """Suppress sub-WARNING records from libraries outside sleepless_agent."""

    def filter(self, record: logging.LogRecord) -> bool:  # pragma: no cover - noise reduction
        # Our own package logs pass through at every level; everything
        # else must be WARNING or higher.
        is_ours = record.name.startswith("sleepless_agent")
        return is_ours or record.levelno >= logging.WARNING
92 |
93 |
class RichConsoleHandler(logging.Handler):
    """logging.Handler that prints formatted records via the shared Rich console."""

    def __init__(self) -> None:
        super().__init__()
        # Every handler instance shares the module-level stderr console.
        self.console = _CONSOLE

    def emit(self, record: logging.LogRecord) -> None:  # pragma: no cover - thin wrapper
        try:
            line = self.format(record)
            self.console.print(line, markup=True, highlight=False, overflow="ignore")
        except Exception:  # pragma: no cover - safety net
            # Never raise out of a logging call; delegate to stdlib handling.
            self.handleError(record)
107 |
108 |
class EventDelta:
    """Structlog processor stamping each event with the milliseconds
    elapsed since the previous event from the same logger name."""

    def __init__(self) -> None:
        self._last_seen: Dict[str, float] = {}

    def __call__(
        self,
        logger: Any,
        name: str,
        event_dict: Dict[str, Any],
    ) -> Dict[str, Any]:
        now = time.monotonic()
        # First event for a name defaults to "now", yielding delta 0.
        previous = self._last_seen.get(name, now)
        self._last_seen[name] = now
        event_dict["delta_ms"] = int((now - previous) * 1000)
        return event_dict
126 |
127 |
def _level_markup(level: str) -> str:
    """Return the Rich markup tag (icon + padded 4-char level) for *level*."""
    style = _LEVEL_STYLES.get(level, "white")
    icon = _LEVEL_ICONS.get(level, "·")
    # Truncate to four characters for column alignment (WARNING -> WARN).
    abbrev = level[:4]
    return f"[{style}]{icon} {abbrev:<4}[/]"
134 |
135 |
136 | def _format_delta(delta_ms: Optional[int]) -> str:
137 | if delta_ms is None:
138 | return "+000ms"
139 | if delta_ms >= 1000:
140 | return f"+{delta_ms / 1000:.1f}s"
141 | return f"+{delta_ms:03d}ms"
142 |
143 |
144 | def _format_pairs(pairs: Iterable[tuple[str, Any]]) -> str:
145 | formatted = []
146 | for key, value in pairs:
147 | if isinstance(value, (dict, list, tuple)):
148 | formatted.append(f"{key}={value!r}")
149 | elif isinstance(value, str) and " " in value:
150 | formatted.append(f"{key}=\"{value}\"")
151 | else:
152 | formatted.append(f"{key}={value}")
153 | return " ".join(formatted)
154 |
155 |
def _console_renderer(
    logger: logging.Logger,
    name: str,
    event_dict: Dict[str, Any],
) -> str:
    """Render a structlog event dict as a single Rich-markup console line.

    Consumes the well-known keys (timestamp, level, delta_ms, logger,
    event); any remaining pairs are appended dimmed at the end.
    """
    timestamp = event_dict.pop("timestamp", None)
    level = event_dict.pop("level", "INFO").upper()
    delta_ms = event_dict.pop("delta_ms", None)
    component = event_dict.pop("logger", name)
    event = event_dict.pop("event", "")

    # Normalise the timestamp to HH:MM:SS regardless of its input form.
    if isinstance(timestamp, datetime):
        clock = timestamp.strftime("%H:%M:%S")
    elif isinstance(timestamp, str):
        clock = timestamp.split("T")[-1]
        if "." in clock:
            clock = clock.rsplit(".", 1)[0]
    else:
        clock = datetime.now().strftime("%H:%M:%S")

    # Drop the package prefix so component names stay short.
    if component.startswith("sleepless_agent."):
        component = component.replace("sleepless_agent.", "")

    extras = _format_pairs(sorted(event_dict.items()))

    columns = [
        f"[dim white]{clock}[/]",
        _level_markup(level),
        f"[bold magenta]{component}[/]",
        f"[dim cyan]{_format_delta(delta_ms)}[/]",
    ]
    line = " | ".join(columns) + f" | [white]{event}[/]"
    if extras:
        line = f"{line} [dim]{extras}[/]"
    return line
195 |
196 |
def _json_renderer(
    logger: logging.Logger,
    name: str,
    event_dict: Dict[str, Any],
) -> str:
    """Serialize the event dict to JSON, preserving key insertion order."""
    render = structlog.processors.JSONRenderer(sort_keys=False)
    return render(logger, name, event_dict)
203 |
204 |
def _common_processors() -> list[Any]:
    """Return the shared processor chain for foreign (non-structlog) loggers.

    Note: wrap_for_formatter is deliberately absent — it belongs only in
    the main structlog.configure() chain, never in foreign_pre_chain.
    """
    chain: list[Any] = [
        merge_contextvars,
        structlog.stdlib.add_logger_name,
        structlog.processors.add_log_level,
        structlog.processors.TimeStamper(fmt="iso", key="timestamp"),
        EventDelta(),
        structlog.processors.StackInfoRenderer(),
        structlog.processors.format_exc_info,
    ]
    return chain
220 |
221 |
def configure_logging(
    level: Optional[str] = None,
    log_dir: Optional[Path | str] = None,
) -> None:
    """Configure console + file logging once per process.

    Subsequent calls are no-ops: the first caller wins and later
    arguments are ignored.
    """
    global _CONFIGURED
    if _CONFIGURED:
        return

    level_name = (level or os.getenv("SLEEPLESS_LOG_LEVEL", "INFO")).upper()
    target_dir = Path(log_dir or _DEFAULT_LOG_DIR).expanduser().resolve()
    target_dir.mkdir(parents=True, exist_ok=True)

    # Console sink: Rich-rendered, deduplicated, third-party noise filtered.
    console = RichConsoleHandler()
    console.setLevel(level_name)
    console.addFilter(ThirdPartyFilter())
    console.addFilter(DedupFilter(cooldown_seconds=1.0))
    console.setFormatter(
        structlog.stdlib.ProcessorFormatter(
            processor=_console_renderer,
            foreign_pre_chain=_common_processors(),
        )
    )

    # File sink: one JSONL file per calendar day (YYYYMMDD.log).
    daily_path = target_dir / f"{datetime.now():%Y%m%d}.log"
    file_sink = logging.FileHandler(daily_path, encoding="utf-8")
    file_sink.setLevel(level_name)
    file_sink.setFormatter(
        structlog.stdlib.ProcessorFormatter(
            processor=_json_renderer,
            foreign_pre_chain=_common_processors(),
        )
    )

    # force=True replaces any handlers installed by earlier basicConfig calls.
    logging.basicConfig(
        level=getattr(logging, level_name, logging.INFO),
        handlers=[console, file_sink],
        force=True,
    )

    structlog.configure(
        processors=_common_processors() + [
            structlog.stdlib.ProcessorFormatter.wrap_for_formatter,
        ],
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True,
    )

    _CONFIGURED = True
272 |
273 |
def get_logger(name: Optional[str] = None, **context: Any) -> structlog.stdlib.BoundLogger:
    """Return a structlog logger for *name*, bound to any keyword context."""
    configure_logging()
    bound = structlog.get_logger(name or "sleepless")
    return bound.bind(**context) if context else bound
281 |
282 |
# Provide a default logger for modules that import `logger` directly.
# NOTE: evaluating this at import time triggers configure_logging(), which
# creates the log directory and installs handlers — an intentional
# module-level side effect.
logger = get_logger()
285 |
--------------------------------------------------------------------------------
/src/sleepless_agent/monitoring/monitor.py:
--------------------------------------------------------------------------------
1 | """System monitoring and health checks"""
2 |
3 | import json
4 | import psutil
5 | from datetime import datetime, timedelta, timezone
6 | from pathlib import Path
7 | from typing import Optional
8 |
9 | from sleepless_agent.monitoring.logging import get_logger
10 | logger = get_logger(__name__)
11 |
12 |
class HealthMonitor:
    """Monitor agent health and performance.

    Combines point-in-time checks (CPU/memory via psutil, database file,
    results directory) with cumulative in-memory task statistics. All
    timestamps use the naive-UTC convention established in __init__.
    """

    def __init__(self, db_path: str, results_path: str):
        """Initialize health monitor.

        Args:
            db_path: Path to the SQLite database file.
            results_path: Directory containing result JSON files.
        """
        self.db_path = Path(db_path)
        self.results_path = Path(results_path)
        # Naive UTC "now"; the rest of the class compares against the same
        # naive-UTC convention.
        self.start_time = datetime.now(timezone.utc).replace(tzinfo=None)
        self.stats = {
            "tasks_completed": 0,
            "tasks_failed": 0,
            "total_processing_time": 0,
            "uptime_seconds": 0,
        }

    def check_health(self) -> dict:
        """Check overall system health.

        Returns:
            Dict with overall "status" ("healthy" / "degraded" /
            "unhealthy") plus per-subsystem detail dicts.
        """
        now = datetime.now(timezone.utc).replace(tzinfo=None)
        uptime = (now - self.start_time).total_seconds()

        health = {
            "status": "healthy",
            "timestamp": now.isoformat(),
            "uptime_seconds": uptime,
            "uptime_human": self._format_uptime(uptime),
            "system": self._check_system_resources(),
            "database": self._check_database(),
            "storage": self._check_storage(),
        }

        # _check_system_resources may return only {"error": ...} when psutil
        # fails; use .get() with safe defaults instead of raising KeyError
        # (log_health_report already uses .get() for the same reason).
        system = health["system"]
        if system.get("memory_percent", 0) > 90 or system.get("cpu_percent", 0) > 80:
            health["status"] = "degraded"

        if not health["database"].get("accessible") or not health["storage"].get("accessible"):
            health["status"] = "unhealthy"

        return health

    def _check_system_resources(self) -> dict:
        """Sample CPU and memory usage via psutil."""
        try:
            # interval=1 blocks for one second to produce a meaningful sample.
            cpu_percent = psutil.cpu_percent(interval=1)
            memory = psutil.virtual_memory()

            return {
                "cpu_percent": cpu_percent,
                "memory_percent": memory.percent,
                "memory_available_mb": memory.available / (1024 * 1024),
            }
        except Exception as e:
            logger.error(f"Failed to check system resources: {e}")
            return {"error": str(e)}

    def _check_database(self) -> dict:
        """Check that the database file exists and report size/staleness."""
        try:
            if not self.db_path.exists():
                return {"accessible": False, "size_mb": 0, "error": "Database file not found"}

            # Stat once and reuse: two separate stat() calls could race with
            # a concurrent write and report inconsistent size/mtime.
            stat = self.db_path.stat()
            size_mb = stat.st_size / (1024 * 1024)
            modified_ago = (
                datetime.now(timezone.utc).replace(tzinfo=None)
                - datetime.fromtimestamp(stat.st_mtime)
            ).total_seconds()

            return {
                "accessible": True,
                "size_mb": round(size_mb, 2),
                "modified_ago_seconds": int(modified_ago),
            }
        except Exception as e:
            logger.error(f"Failed to check database: {e}")
            return {"accessible": False, "error": str(e)}

    def _check_storage(self) -> dict:
        """Check the results directory and total size of stored JSON files."""
        try:
            if not self.results_path.exists():
                return {"accessible": False, "count": 0}

            files = list(self.results_path.glob("**/*.json"))
            total_size = sum(f.stat().st_size for f in files) / (1024 * 1024)

            return {
                "accessible": True,
                "count": len(files),
                "total_size_mb": round(total_size, 2),
            }
        except Exception as e:
            logger.error(f"Failed to check storage: {e}")
            return {"accessible": False, "error": str(e)}

    def record_task_completion(self, processing_time: int, success: bool):
        """Record one finished task in the in-memory counters.

        Args:
            processing_time: Duration in seconds; only accumulated for
                successful tasks (avg_processing_time is a success average).
            success: Whether the task completed successfully.
        """
        if success:
            self.stats["tasks_completed"] += 1
            self.stats["total_processing_time"] += processing_time
        else:
            self.stats["tasks_failed"] += 1

    def get_stats(self) -> dict:
        """Get cumulative performance statistics with derived rates."""
        uptime = (datetime.now(timezone.utc).replace(tzinfo=None) - self.start_time).total_seconds()
        self.stats["uptime_seconds"] = int(uptime)

        total_tasks = self.stats["tasks_completed"] + self.stats["tasks_failed"]
        avg_time = (
            self.stats["total_processing_time"] / self.stats["tasks_completed"]
            if self.stats["tasks_completed"] > 0
            else 0
        )

        return {
            **self.stats,
            "total_tasks": total_tasks,
            "success_rate": (
                self.stats["tasks_completed"] / total_tasks * 100 if total_tasks > 0 else 0
            ),
            "avg_processing_time": round(avg_time, 2),
            "uptime_human": self._format_uptime(uptime),
        }

    def _format_uptime(self, seconds: float) -> str:
        """Format a seconds count as e.g. "1d 2h 3m" ("< 1m" under a minute)."""
        days = int(seconds // 86400)
        hours = int((seconds % 86400) // 3600)
        minutes = int((seconds % 3600) // 60)

        parts = []
        if days > 0:
            parts.append(f"{days}d")
        if hours > 0:
            parts.append(f"{hours}h")
        if minutes > 0:
            parts.append(f"{minutes}m")

        return " ".join(parts) if parts else "< 1m"

    def log_health_report(self):
        """Emit a one-line DEBUG summary of health and task statistics."""
        health = self.check_health()
        stats = self.get_stats()

        logger.debug(
            f"Health: {health['status']} | "
            f"Uptime: {health['uptime_human']} | "
            f"Tasks: {stats['tasks_completed']} ✓ {stats['tasks_failed']} ✗ | "
            f"CPU: {health['system'].get('cpu_percent', 'N/A')}% | "
            f"Memory: {health['system'].get('memory_percent', 'N/A')}%"
        )

    def get_uptime(self) -> str:
        """Get the formatted uptime since monitor creation."""
        uptime = (datetime.now(timezone.utc).replace(tzinfo=None) - self.start_time).total_seconds()
        return self._format_uptime(uptime)
168 |
169 |
class PerformanceLogger:
    """Append per-task execution metrics to a JSONL file and summarize them."""

    def __init__(self, log_dir: str = "./workspace/data"):
        """Initialize performance logger.

        Args:
            log_dir: Directory for metrics.jsonl (created if missing).
        """
        self.log_dir = Path(log_dir)
        self.log_dir.mkdir(parents=True, exist_ok=True)
        self.metrics_file = self.log_dir / "metrics.jsonl"

    def log_task_execution(
        self,
        task_id: int,
        description: str,
        priority: str,
        duration_seconds: int,
        success: bool,
        files_modified: int = 0,
        commands_executed: int = 0,
    ):
        """Append one task execution record to the JSONL metrics file.

        Metrics are best-effort: failures are logged, never raised.
        """
        try:
            metric = {
                "timestamp": datetime.now(timezone.utc).replace(tzinfo=None).isoformat(),
                "task_id": task_id,
                # Truncate long descriptions to keep lines compact.
                "description": description[:60],
                "priority": priority,
                "duration_seconds": duration_seconds,
                "success": success,
                "files_modified": files_modified,
                "commands_executed": commands_executed,
            }

            with open(self.metrics_file, "a", encoding="utf-8") as f:
                f.write(json.dumps(metric) + "\n")

        except Exception as e:
            logger.error(f"Failed to log metrics: {e}")

    def get_recent_metrics(self, hours: int = 24) -> list:
        """Return metric records from the last *hours* hours.

        Malformed lines are skipped individually instead of aborting the
        whole read, so one corrupt line no longer truncates the results.
        """
        if not self.metrics_file.exists():
            return []

        cutoff_time = datetime.now(timezone.utc).replace(tzinfo=None) - timedelta(hours=hours)
        metrics = []

        try:
            with open(self.metrics_file, "r", encoding="utf-8") as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        continue
                    try:
                        metric = json.loads(line)
                        timestamp = datetime.fromisoformat(metric["timestamp"])
                    except (ValueError, KeyError, TypeError) as e:
                        # json.JSONDecodeError is a ValueError subclass;
                        # skip the bad line and keep reading.
                        logger.error(f"Skipping malformed metrics line: {e}")
                        continue
                    if timestamp >= cutoff_time:
                        metrics.append(metric)
        except OSError as e:
            logger.error(f"Failed to read metrics: {e}")

        return metrics

    def get_performance_summary(self, hours: int = 24) -> dict:
        """Aggregate recent metrics into counts, rates, and totals.

        Returns {"count": 0} when there are no recent metrics.
        """
        metrics = self.get_recent_metrics(hours)

        if not metrics:
            return {"count": 0}

        total_duration = sum(m["duration_seconds"] for m in metrics)
        successful = sum(1 for m in metrics if m["success"])
        files_modified = sum(m["files_modified"] for m in metrics)
        commands = sum(m["commands_executed"] for m in metrics)

        return {
            "count": len(metrics),
            "successful": successful,
            "failed": len(metrics) - successful,
            "success_rate": successful / len(metrics) * 100 if metrics else 0,
            "total_duration_seconds": total_duration,
            "avg_duration_seconds": total_duration / len(metrics) if metrics else 0,
            "total_files_modified": files_modified,
            "total_commands_executed": commands,
        }
--------------------------------------------------------------------------------
/docs/concepts/architecture.md:
--------------------------------------------------------------------------------
1 | # Architecture Overview
2 |
3 | Sleepless Agent is built as a modular, event-driven system that processes tasks autonomously while managing resources efficiently.
4 |
5 | ## System Architecture
6 |
7 | ```
8 | ┌─────────────────────────────────────────────────────────┐
9 | │ User Interface Layer │
10 | ├──────────────────────┬───────────────────────────────────┤
11 | │ Slack Bot (bot.py)│ CLI (cli.py) │
12 | └──────────────────────┴───────────────────────────────────┘
13 | │
14 | ▼
15 | ┌─────────────────────────────────────────────────────────┐
16 | │ Core Engine Layer │
17 | ├──────────────────────────────────────────────────────────┤
18 | │ Daemon (daemon.py) - Event Loop │
19 | │ ├── Scheduler (scheduler.py) │
20 | │ ├── Queue Manager (queue.py) │
21 | │ └── Task Runtime (task_runtime.py) │
22 | └─────────────────────────────────────────────────────────┘
23 | │
24 | ▼
25 | ┌─────────────────────────────────────────────────────────┐
26 | │ Execution Layer │
27 | ├──────────────────────────────────────────────────────────┤
28 | │ Executor (executor.py) │
29 | │ ├── Claude Code CLI Integration │
30 | │ ├── Workspace Manager │
31 | │ └── Timeout Manager │
32 | └─────────────────────────────────────────────────────────┘
33 | │
34 | ▼
35 | ┌─────────────────────────────────────────────────────────┐
36 | │ Storage Layer │
37 | ├──────────────────────────────────────────────────────────┤
38 | │ SQLite DB Git Repos File System │
39 | │ (tasks.db) (projects/) (results/) │
40 | └─────────────────────────────────────────────────────────┘
41 | ```
42 |
43 | ## Core Components
44 |
45 | ### 1. User Interface Layer
46 |
47 | #### Slack Bot (`interfaces/bot.py`)
48 | - Receives slash commands from Slack
49 | - Parses user input and creates tasks
50 | - Sends status updates and notifications
51 | - Manages real-time Socket Mode connection
52 |
53 | #### CLI Interface (`interfaces/cli.py`)
54 | - Provides local command-line access
55 | - Mirrors Slack commands functionality
56 | - Useful for debugging and automation
57 | - Direct database access for queries
58 |
59 | ### 2. Core Engine Layer
60 |
61 | #### Daemon (`core/daemon.py`)
62 | The heart of the system - an event loop that:
63 | - Continuously monitors the task queue
64 | - Dispatches tasks to executors
65 | - Manages system resources
66 | - Handles graceful shutdown
67 |
68 | ```python
69 | # Simplified daemon loop
70 | while running:
71 | task = queue.get_next_task()
72 | if task and can_execute(task):
73 | executor.run(task)
74 | await sleep(poll_interval)
75 | ```
76 |
77 | #### Scheduler (`scheduling/scheduler.py`)
78 | Intelligent task scheduling based on:
79 | - Task priority (serious vs random)
80 | - Claude usage quotas
81 | - Time of day (day/night thresholds)
82 | - System resources
83 | - Task dependencies
84 |
85 | #### Queue Manager (`core/queue.py`)
86 | - FIFO queue with priority support
87 | - Persistent storage in SQLite
88 | - Atomic operations for consistency
89 | - Status tracking and updates
90 |
91 | ### 3. Execution Layer
92 |
93 | #### Executor (`core/executor.py`)
94 | Manages task execution lifecycle:
95 | - Creates isolated workspaces
96 | - Invokes Claude Code CLI
97 | - Monitors execution progress
98 | - Handles timeouts and failures
99 | - Captures output and results
100 |
101 | #### Workspace Isolation
102 | Each task runs in its own directory:
103 | ```
104 | workspace/
105 | ├── tasks/
106 | │ ├── task_1/ # Isolated environment
107 | │ ├── task_2/ # Parallel execution
108 | │ └── task_3/ # No conflicts
109 | ├── projects/ # Serious task workspaces
110 | └── shared/ # Shared resources
111 | ```
112 |
113 | ### 4. Storage Layer
114 |
115 | #### SQLite Database (`storage/sqlite.py`)
116 | Primary data store for:
117 | - Task queue and metadata
118 | - Task history and results
119 | - System configuration
120 | - Performance metrics
121 |
122 | #### Git Integration (`storage/git.py`)
123 | Automated version control:
124 | - Commits for random thoughts
125 | - Feature branches for serious tasks
126 | - Pull request creation
127 | - Conflict resolution
128 |
129 | #### File System (`storage/results.py`)
130 | Persistent storage for:
131 | - Task outputs and logs
132 | - Generated reports
133 | - Workspace snapshots
134 | - Temporary files
135 |
136 | ## Data Flow
137 |
138 | ### Task Lifecycle
139 |
140 | 1. **Task Creation**
141 | ```
142 | User Input → Parser → Task Object → Database
143 | ```
144 |
145 | 2. **Task Scheduling**
146 | ```
147 | Queue → Scheduler → Priority Check → Execution Queue
148 | ```
149 |
150 | 3. **Task Execution**
151 | ```
152 | Executor → Workspace → Claude CLI → Output Capture
153 | ```
154 |
155 | 4. **Result Storage**
156 | ```
157 | Output → Results Manager → Database + Files → Git Commit
158 | ```
159 |
160 | ## Key Design Principles
161 |
162 | ### 1. Isolation
163 | - Each task runs in complete isolation
164 | - No shared state between tasks
165 | - Prevents conflicts and data corruption
166 | - Enables true parallel execution
167 |
168 | ### 2. Persistence
169 | - All state stored in database
170 | - Survives daemon restarts
171 | - Full audit trail
172 | - Recovery from failures
173 |
174 | ### 3. Modularity
175 | - Clear separation of concerns
176 | - Pluggable components
177 | - Easy to extend
178 | - Testable units
179 |
180 | ### 4. Resilience
181 | - Graceful error handling
182 | - Automatic retries
183 | - Timeout protection
184 | - State recovery
185 |
186 | ## Communication Patterns
187 |
188 | ### Event-Driven Architecture
189 | ```
190 | Event Producer → Event Queue → Event Consumer
191 | ↓ ↓ ↓
192 | Slack Command Task Created Task Executed
193 | ```
194 |
195 | ### Message Flow
196 | 1. **Synchronous**: CLI commands with immediate response
197 | 2. **Asynchronous**: Slack commands with background processing
198 | 3. **Polling**: Daemon checking queue for new tasks
199 | 4. **Webhooks**: Git notifications and status updates
200 |
201 | ## Resource Management
202 |
203 | ### Claude Usage Optimization
204 | ```python
205 | def can_execute_task(task):
206 | usage = get_claude_usage()
207 | threshold = get_threshold_for_time()
208 | return usage < threshold
209 | ```
210 |
211 | ### Memory Management
212 | - Isolated workspaces prevent memory leaks
213 | - Automatic cleanup of old workspaces
214 | - Streaming output for large results
215 | - Database connection pooling
216 |
217 | ### Concurrent Execution
218 | - Configurable max parallel tasks
219 | - Resource-based scheduling
220 | - Priority queue management
221 | - Deadlock prevention
222 |
223 | ## Security Considerations
224 |
225 | ### Token Management
226 | - Secrets never logged
227 | - Environment variable isolation
228 | - Secure storage in `.env`
229 | - No hardcoded credentials
230 |
231 | ### Workspace Isolation
232 | - Each task has restricted filesystem access
233 | - No access to system directories
234 | - Clean environment for each execution
235 | - Sanitized user input
236 |
237 | ### Git Security
238 | - SSH key authentication
239 | - Private repository support
240 | - Secret scanning before commits
241 | - Protected branch policies
242 |
243 | ## Monitoring & Observability
244 |
245 | ### Logging System
246 | ```
247 | Application → Logger → Formatter → Output
248 | ↓ ↓ ↓
249 | Structured Rich Files
250 | JSON Console & Stdout
251 | ```
252 |
253 | ### Metrics Collection
254 | - Task execution times
255 | - Success/failure rates
256 | - Claude usage statistics
257 | - Queue depth and latency
258 |
259 | ### Health Checks
260 | - Daemon status monitoring
261 | - Database connectivity
262 | - Claude CLI availability
263 | - Disk space and memory
264 |
265 | ## Extension Points
266 |
267 | ### Custom Executors
268 | ```python
269 | class CustomExecutor(BaseExecutor):
270 | def execute(self, task):
271 | # Custom execution logic
272 | pass
273 | ```
274 |
275 | ### Plugin System
276 | - Hook into task lifecycle
277 | - Custom storage backends
278 | - Alternative AI providers
279 | - Notification integrations
280 |
281 | ### API Integration
282 | - RESTful API endpoints
283 | - WebSocket for real-time updates
284 | - GraphQL for complex queries
285 | - Webhook receivers
286 |
287 | ## Performance Characteristics
288 |
289 | ### Scalability
290 | - Handles 100+ tasks in queue
291 | - Parallel execution up to system limits
292 | - Efficient database queries
293 | - Lazy loading of resources
294 |
295 | ### Response Times
296 | - Task creation: < 100ms
297 | - Queue polling: 250ms interval
298 | - Execution start: < 1 second
299 | - Status updates: Real-time
300 |
301 | ### Resource Usage
302 | - Memory: ~200MB base + task overhead
303 | - CPU: < 5% idle, varies during execution
304 | - Disk: 1GB minimum, grows with tasks
305 | - Network: Minimal, only for Slack/Git
306 |
307 | ## Future Architecture Goals
308 |
309 | ### Planned Enhancements
310 | 1. Distributed task execution
311 | 2. Multi-agent coordination
312 | 3. Advanced caching layer
313 | 4. Stream processing pipeline
314 | 5. Machine learning optimization
315 |
316 | ### Scalability Roadmap
317 | - Horizontal scaling with multiple daemons
318 | - Task sharding across workers
319 | - Centralized queue management
320 | - Load balancing algorithms
321 |
322 | This architecture provides a robust foundation for 24/7 autonomous task processing while maintaining flexibility for future enhancements.
--------------------------------------------------------------------------------
/src/sleepless_agent/utils/readme_manager.py:
--------------------------------------------------------------------------------
1 | """README file management utilities for consistent README operations."""
2 |
3 | from __future__ import annotations
4 |
5 | import re
6 | from pathlib import Path
7 | from typing import Optional, List, Tuple, Dict, Any
8 | from datetime import datetime
9 |
10 | from sleepless_agent.monitoring.logging import get_logger
11 |
12 | logger = get_logger(__name__)
13 |
14 |
15 | class ReadmeManager:
16 | """Manages README file operations with consistent error handling."""
17 |
18 | def __init__(self, workspace: Path):
19 | """Initialize README manager.
20 |
21 | Args:
22 | workspace: Workspace directory path
23 | """
24 | self.workspace = Path(workspace)
25 | self.readme_path = self.workspace / "README.md"
26 |
27 | def ensure_exists(
28 | self,
29 | template: str,
30 | *,
31 | template_vars: Optional[Dict[str, Any]] = None,
32 | ) -> Path:
33 | """Ensure README exists with initial content.
34 |
35 | Args:
36 | template: Template string for README content
37 | template_vars: Variables to format into the template
38 |
39 | Returns:
40 | Path to the README file
41 | """
42 | if self.readme_path.exists():
43 | return self.readme_path
44 |
45 | try:
46 | content = template
47 | if template_vars:
48 | content = template.format(**template_vars)
49 |
50 | self.readme_path.write_text(content)
51 | logger.debug(f"Created README at: {self.readme_path}")
52 | return self.readme_path
53 |
54 | except Exception as e:
55 | logger.error(f"Failed to create README: {e}")
56 | # Create minimal README on error
57 | try:
58 | minimal_content = f"# Task Workspace\n\nCreated: {datetime.now().isoformat()}\n"
59 | self.readme_path.write_text(minimal_content)
60 | except:
61 | pass
62 | return self.readme_path
63 |
64 | def update_section(
65 | self,
66 | section_name: str,
67 | new_content: str,
68 | *,
69 | use_regex: bool = False,
70 | multiline: bool = False,
71 | ) -> bool:
72 | """Update a specific section in the README.
73 |
74 | Args:
75 | section_name: Name of the section to update (e.g., "## Status")
76 | new_content: New content for the section
77 | use_regex: Whether section_name is a regex pattern
78 | multiline: Whether to match content across multiple lines
79 |
80 | Returns:
81 | True if update was successful, False otherwise
82 | """
83 | if not self.readme_path.exists():
84 | logger.warning(f"README does not exist at: {self.readme_path}")
85 | return False
86 |
87 | try:
88 | content = self.readme_path.read_text()
89 |
90 | if use_regex:
91 | pattern = section_name
92 | else:
93 | # Escape special regex characters in section name
94 | escaped_name = re.escape(section_name)
95 | if multiline:
96 | # Match section and all content until next section or EOF
97 | pattern = rf"{escaped_name}.*?(?=^##\s|\Z)"
98 | else:
99 | # Match just the section header line
100 | pattern = rf"^{escaped_name}.*$"
101 |
102 | flags = re.MULTILINE
103 | if multiline:
104 | flags |= re.DOTALL
105 |
106 | # Check if section exists
107 | if re.search(pattern, content, flags):
108 | # Replace existing section
109 | updated_content = re.sub(pattern, new_content, content, flags=flags)
110 | else:
111 | # Append new section
112 | updated_content = content + "\n\n" + new_content
113 |
114 | self.readme_path.write_text(updated_content)
115 | logger.debug(f"Updated section '{section_name}' in README")
116 | return True
117 |
118 | except Exception as e:
119 | logger.error(f"Failed to update README section '{section_name}': {e}")
120 | return False
121 |
122 | def extract_section(
123 | self,
124 | section_name: str,
125 | *,
126 | use_regex: bool = False,
127 | include_header: bool = True,
128 | ) -> Optional[str]:
129 | """Extract content from a specific section.
130 |
131 | Args:
132 | section_name: Name of the section to extract
133 | use_regex: Whether section_name is a regex pattern
134 | include_header: Whether to include the section header in the result
135 |
136 | Returns:
137 | Section content or None if not found
138 | """
139 | if not self.readme_path.exists():
140 | return None
141 |
142 | try:
143 | content = self.readme_path.read_text()
144 |
145 | if use_regex:
146 | pattern = section_name
147 | else:
148 | escaped_name = re.escape(section_name)
149 | # Match section and content until next section or EOF
150 | pattern = rf"({escaped_name})(.*?)(?=^##\s|\Z)"
151 |
152 | match = re.search(pattern, content, re.MULTILINE | re.DOTALL)
153 | if match:
154 | if include_header:
155 | return match.group(0).strip()
156 | else:
157 | # Return just the content, not the header
158 | if len(match.groups()) > 1:
159 | return match.group(2).strip()
160 | else:
161 | # If pattern doesn't have groups, return everything after first line
162 | lines = match.group(0).strip().split('\n')
163 | return '\n'.join(lines[1:]) if len(lines) > 1 else ""
164 |
165 | return None
166 |
167 | except Exception as e:
168 | logger.error(f"Failed to extract README section '{section_name}': {e}")
169 | return None
170 |
def extract_list_items(
    self,
    section_name: str,
    *,
    item_pattern: str = r"^[-*]\s+(.+)$",
) -> List[str]:
    """Extract list items from a section.

    Args:
        section_name: Name of the section containing the list
        item_pattern: Regex pattern for matching list items

    Returns:
        List of extracted items (without bullets)
    """
    body = self.extract_section(section_name, include_header=False)
    if not body:
        return []

    # Match each stripped line against the item pattern; group 1 carries
    # the item text without its leading bullet.
    candidates = (re.match(item_pattern, raw.strip()) for raw in body.split('\n'))
    return [m.group(1).strip() for m in candidates if m]
197 |
def append_to_section(
    self,
    section_name: str,
    content_to_append: str,
) -> bool:
    """Append content to an existing section.

    Args:
        section_name: Name of the section to append to
        content_to_append: Content to append

    Returns:
        True if successful, False otherwise
    """
    current = self.extract_section(section_name, include_header=True)

    # Missing section: create it with the appended content as its body.
    if current is None:
        return self.update_section(
            section_name, f"{section_name}\n{content_to_append}"
        )

    # Existing section: rewrite it with the new content on a fresh line.
    merged = f"{current}\n{content_to_append}"
    return self.update_section(section_name, merged, multiline=True)
220 |
def add_timestamp_entry(
    self,
    section_name: str,
    entry: str,
    *,
    timestamp_format: str = "%Y-%m-%d %H:%M:%S",
) -> bool:
    """Add a timestamped entry to a section.

    Args:
        section_name: Section to add the entry to
        entry: Entry content
        timestamp_format: Format for the timestamp

    Returns:
        True if successful, False otherwise
    """
    # Prefix the entry with the current local time in square brackets.
    stamp = datetime.now().strftime(timestamp_format)
    return self.append_to_section(section_name, f"[{stamp}] {entry}")
241 |
def update_status(self, status: str) -> bool:
    """Update the status section of the README.

    Args:
        status: New status value

    Returns:
        True if successful, False otherwise
    """
    # Rebuild the whole section body: header, blank line, status line.
    section_body = "\n".join(["## Status", "", f"**Current Status:** {status}"])
    return self.update_section("## Status", section_body, multiline=True)
253 |
def get_content(self) -> Optional[str]:
    """Return the full README text.

    Returns:
        README content, or None when the file is missing or unreadable.
    """
    path = self.readme_path
    if not path.exists():
        return None
    try:
        text = path.read_text()
    except Exception as e:
        logger.error(f"Failed to read README: {e}")
        return None
    return text
268 |
def backup(self, suffix: Optional[str] = None) -> Optional[Path]:
    """Create a backup of the current README.

    Args:
        suffix: Optional suffix for the backup file; when omitted, a
            YYYYmmdd_HHMMSS timestamp is used instead.

    Returns:
        Path to the backup file or None if failed
    """
    if not self.readme_path.exists():
        return None

    try:
        if suffix:
            backup_name = f"README_{suffix}.md.bak"
        else:
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            backup_name = f"README_{timestamp}.md.bak"

        backup_path = self.workspace / backup_name
        # Copy bytes verbatim instead of read_text()/write_text(): the
        # text round-trip decodes with the platform default encoding and
        # can fail or mangle a README that isn't encoded that way.
        backup_path.write_bytes(self.readme_path.read_bytes())
        logger.debug(f"Created README backup at: {backup_path}")
        return backup_path

    except Exception as e:
        logger.error(f"Failed to create README backup: {e}")
        return None
--------------------------------------------------------------------------------
/docs/guides/slack-setup.md:
--------------------------------------------------------------------------------
1 | # Slack Setup Guide
2 |
3 | Complete step-by-step guide to set up Slack integration for Sleepless Agent.
4 |
5 | ## Prerequisites
6 |
7 | Before starting:
8 | - Admin access to your Slack workspace
9 | - Sleepless Agent installed locally
10 | - Understanding of [basic concepts](../concepts/architecture.md)
11 |
12 | ## Step 1: Create a New Slack App
13 |
14 | ### 1.1 Navigate to Slack API
15 |
16 | 1. Go to [https://api.slack.com/apps](https://api.slack.com/apps)
17 | 2. Click **"Create New App"**
18 | 3. Choose **"From scratch"**
19 |
20 | ### 1.2 Configure Basic Information
21 |
22 | ```
23 | App Name: Sleepless Agent
24 | Pick a workspace: [Your Workspace]
25 | ```
26 |
27 | Click **"Create App"**
28 |
29 | ### 1.3 App Configuration
30 |
31 | In the **Basic Information** page:
32 |
33 | 1. Add an app icon (optional but recommended)
34 | 2. Add a description:
35 | ```
36 | 24/7 AI agent that processes tasks autonomously using Claude Code
37 | ```
38 | 3. Set the background color: `#7C3AED` (purple)
39 |
40 | ## Step 2: Enable Socket Mode
41 |
42 | Socket Mode allows real-time communication without exposing a public endpoint.
43 |
44 | ### 2.1 Enable Socket Mode
45 |
46 | 1. Go to **Settings → Socket Mode**
47 | 2. Toggle **Enable Socket Mode** to ON
48 | 3. You'll be prompted to create an app-level token
49 |
50 | ### 2.2 Create App Token
51 |
52 | ```
53 | Token Name: sleepless-token
54 | Scope: connections:write
55 | ```
56 |
57 | Click **Generate**
58 |
59 | ⚠️ **Save this token!** It starts with `xapp-` and you'll need it for your `.env` file.
60 |
61 | ## Step 3: Configure Slash Commands
62 |
63 | ### 3.1 Navigate to Slash Commands
64 |
65 | Go to **Features → Slash Commands**
66 |
67 | ### 3.2 Create Commands
68 |
69 | Create each command by clicking **"Create New Command"**:
70 |
71 | #### /think Command
72 | ```
73 | Command: /think
74 | Request URL: [Leave empty - Socket Mode handles this]
75 | Short Description: Submit a task or thought
76 | Usage Hint: [description] [-p project_name]
77 | ```
78 |
79 | #### /check Command
80 | ```
81 | Command: /check
82 | Request URL: [Leave empty]
83 | Short Description: Check system status and queue
84 | Usage Hint: [no arguments]
85 | ```
86 |
87 | #### /report Command
88 | ```
89 | Command: /report
90 | Request URL: [Leave empty]
91 | Short Description: View task reports
92 | Usage Hint: [task_id | date | project_name | --list]
93 | ```
94 |
95 | #### /cancel Command
96 | ```
97 | Command: /cancel
98 | Request URL: [Leave empty]
99 | Short Description: Cancel a task or project
100 | Usage Hint: [task_id | project_name]
101 | ```
102 |
103 | #### /trash Command
104 | ```
105 | Command: /trash
106 | Request URL: [Leave empty]
107 | Short Description: Manage cancelled tasks
108 | Usage Hint: [list | restore task_id | empty]
109 | ```
110 |
111 | #### /usage Command
112 | ```
113 | Command: /usage
114 | Request URL: [Leave empty]
115 | Short Description: Show Claude Code Pro plan usage
116 | Usage Hint: [Leave empty]
117 | ```
118 |
119 | ## Step 4: Set OAuth Scopes
120 |
121 | ### 4.1 Navigate to OAuth & Permissions
122 |
123 | Go to **Features → OAuth & Permissions**
124 |
125 | ### 4.2 Bot Token Scopes
126 |
127 | Add these scopes:
128 |
129 | | Scope | Purpose |
130 | |-------|---------|
131 | | `chat:write` | Send messages to channels |
132 | | `chat:write.public` | Send messages to public channels without joining |
133 | | `commands` | Receive slash commands |
134 | | `app_mentions:read` | Respond to @mentions |
135 | | `channels:read` | List channels |
136 | | `groups:read` | Access private channels |
137 | | `im:read` | Read direct messages |
138 | | `im:write` | Send direct messages |
139 | | `users:read` | Get user information |
140 |
141 | ### 4.3 User Token Scopes (Optional)
142 |
143 | These are optional but useful:
144 |
145 | | Scope | Purpose |
146 | |-------|---------|
147 | | `files:write` | Upload files (for reports) |
148 | | `files:read` | Read uploaded files |
149 |
150 | ## Step 5: Event Subscriptions
151 |
152 | ### 5.1 Enable Events
153 |
154 | Go to **Features → Event Subscriptions**
155 |
156 | Toggle **Enable Events** to ON
157 |
158 | ### 5.2 Subscribe to Bot Events
159 |
160 | Add these bot events:
161 |
162 | | Event | Purpose |
163 | |-------|---------|
164 | | `app_mention` | Respond when bot is mentioned |
165 | | `message.channels` | Monitor channel messages (optional) |
166 | | `message.im` | Respond to direct messages |
167 |
168 | ### 5.3 Event URL
169 |
170 | Since we're using Socket Mode, leave the Request URL empty.
171 |
172 | ## Step 6: Install to Workspace
173 |
174 | ### 6.1 Install App
175 |
176 | 1. Go to **Settings → Install App**
177 | 2. Click **"Install to Workspace"**
178 | 3. Review permissions
179 | 4. Click **"Allow"**
180 |
181 | ### 6.2 Save Bot Token
182 |
183 | After installation, you'll see a **Bot User OAuth Token**.
184 |
185 | ⚠️ **Save this token!** It starts with `xoxb-` and you'll need it for your `.env` file.
186 |
187 | ## Step 7: Configure Sleepless Agent
188 |
189 | ### 7.1 Create Environment File
190 |
191 | ```bash
192 | # Create .env file
193 | cp .env.example .env
194 | ```
195 |
196 | ### 7.2 Add Slack Tokens
197 |
198 | Edit `.env`:
199 |
200 | ```bash
201 | # Slack Configuration (Required)
202 | SLACK_BOT_TOKEN=xoxb-your-bot-token-here
203 | SLACK_APP_TOKEN=xapp-your-app-token-here
204 |
205 | # Optional: Default channel for notifications
206 | SLACK_DEFAULT_CHANNEL=general
207 | ```
208 |
209 | ### 7.3 Verify Configuration
210 |
211 | ```bash
212 | # Test Slack connection
213 | sle test-slack
214 |
215 | # Should output:
216 | # ✓ Slack bot token valid
217 | # ✓ Slack app token valid
218 | # ✓ Socket Mode connected
219 | # ✓ Bot user: @sleepless-agent
220 | ```
221 |
222 | ## Step 8: Channel Setup
223 |
224 | ### 8.1 Add Bot to Channels
225 |
226 | For each channel where you want to use the bot:
227 |
228 | 1. Go to the channel in Slack
229 | 2. Type: `/invite @sleepless-agent`
230 | 3. The bot will join the channel
231 |
232 | ### 8.2 Set Channel Permissions
233 |
234 | For private channels:
235 | 1. Channel Details → Integrations
236 | 2. Add App → Sleepless Agent
237 | 3. Click Add
238 |
239 | ### 8.3 Configure Default Channels
240 |
241 | In `config.yaml`:
242 |
243 | ```yaml
244 | slack:
245 | default_channel: general
246 | error_channel: sleepless-errors
247 | report_channel: sleepless-reports
248 | notification_channels:
249 | - general
250 | - dev-team
251 | ```
252 |
253 | ## Step 9: Test the Integration
254 |
255 | ### 9.1 Start the Agent
256 |
257 | ```bash
258 | sle daemon
259 | ```
260 |
261 | You should see:
262 | ```
263 | INFO | Slack bot started and listening for events
264 | INFO | Sleepless Agent starting...
265 | ```
266 |
267 | ### 9.2 Test Commands in Slack
268 |
269 | Try these commands:
270 |
271 | ```
272 | /check
273 | # Should show system status
274 |
275 | /think Test task from Slack
276 | # Should acknowledge and queue task
277 |
278 | /report --list
279 | # Should list available reports
280 | ```
281 |
282 | ## Advanced Configuration
283 |
284 | ### Custom Emoji Reactions
285 |
286 | Add custom emoji for task status:
287 |
288 | ```yaml
289 | slack:
290 | reactions:
291 | pending: hourglass
292 | in_progress: gear
293 | completed: white_check_mark
294 | failed: x
295 | ```
296 |
297 | ### Thread Management
298 |
299 | Configure threading behavior:
300 |
301 | ```yaml
302 | slack:
303 | threading:
304 | enabled: true
305 | reply_in_thread: true
306 | broadcast_important: true
307 | ```
308 |
309 | ### Rate Limiting
310 |
311 | Prevent Slack rate limit issues:
312 |
313 | ```yaml
314 | slack:
315 | rate_limiting:
316 | max_messages_per_minute: 20
317 | retry_after: 60
318 | backoff_multiplier: 2
319 | ```
320 |
321 | ## Troubleshooting
322 |
323 | ### Bot Not Responding
324 |
325 | 1. **Check Socket Mode is enabled**
326 | ```bash
327 | # In Slack App settings
328 | Settings → Socket Mode → Should be ON
329 | ```
330 |
331 | 2. **Verify tokens**
332 | ```bash
333 | # Check .env file
334 | cat .env | grep SLACK
335 | ```
336 |
337 | 3. **Check bot status**
338 | ```bash
339 | # In terminal running daemon
340 | # Should show "listening for events"
341 | ```
342 |
343 | ### Commands Not Working
344 |
345 | 1. **Reinstall slash commands**
346 | - Delete and recreate each command
347 | - Reinstall app to workspace
348 |
349 | 2. **Check permissions**
350 | - Ensure all required scopes are added
351 | - Reinstall if scopes were changed
352 |
353 | 3. **Verify Socket Mode connection**
354 | ```bash
355 | sle test-slack --verbose
356 | ```
357 |
358 | ### Permission Errors
359 |
360 | 1. **Bot not in channel**
361 | ```
362 | /invite @sleepless-agent
363 | ```
364 |
365 | 2. **Missing scopes**
366 | - Add required scopes in OAuth & Permissions
367 | - Reinstall app after scope changes
368 |
369 | 3. **Private channel access**
370 | - Manually add app in channel settings
371 |
372 | ## Security Best Practices
373 |
374 | ### 1. Token Management
375 |
376 | - Never commit tokens to Git
377 | - Use environment variables only
378 | - Rotate tokens periodically
379 | - Restrict token access
380 |
381 | ### 2. Channel Restrictions
382 |
383 | ```yaml
384 | slack:
385 | allowed_channels:
386 | - general
387 | - dev-team
388 | blocked_channels:
389 | - sensitive-data
390 | require_mention: true # Only respond to @mentions
391 | ```
392 |
393 | ### 3. User Permissions
394 |
395 | ```yaml
396 | slack:
397 | authorized_users:
398 | - U0123456789 # User IDs
399 | - U9876543210
400 | admin_users:
401 | - U0123456789
402 | ```
403 |
404 | ### 4. Audit Logging
405 |
406 | ```yaml
407 | slack:
408 | audit:
409 | log_commands: true
410 | log_users: true
411 | retention_days: 90
412 | ```
413 |
414 | ## Monitoring Integration
415 |
416 | ### 1. Slack Metrics
417 |
418 | Track Slack-specific metrics:
419 |
420 | ```python
421 | def collect_slack_metrics():
422 | return {
423 | 'commands_received': count_commands(),
424 | 'response_time': avg_response_time(),
425 | 'active_channels': count_active_channels(),
426 | 'error_rate': calculate_error_rate()
427 | }
428 | ```
429 |
430 | ### 2. Health Checks
431 |
432 | ```yaml
433 | monitoring:
434 | slack_health:
435 | check_interval: 60 # seconds
436 | timeout: 10
437 | alert_on_failure: true
438 | ```
439 |
440 | ### 3. Error Notifications
441 |
442 | Configure error handling:
443 |
444 | ```yaml
445 | slack:
446 | errors:
447 | notify_channel: sleepless-errors
448 | include_stacktrace: false
449 | rate_limit: 5 # Max 5 error messages per hour
450 | ```
451 |
452 | ## Next Steps
453 |
454 | Now that Slack is configured:
455 |
456 | 1. [Configure environment variables](environment-setup.md)
457 | 2. [Set up Git integration](git-integration.md)
458 | 3. [Try your first task](../tutorials/first-task.md)
459 | 4. [Learn Slack workflows](../tutorials/slack-workflows.md)
460 |
461 | ## Additional Resources
462 |
463 | - [Slack API Documentation](https://api.slack.com)
464 | - [Socket Mode Guide](https://api.slack.com/apis/connections/socket)
465 | - [Slash Commands Reference](https://api.slack.com/interactivity/slash-commands)
466 | - [Bot Permissions](https://api.slack.com/scopes)
--------------------------------------------------------------------------------
/docs/concepts/task-lifecycle.md:
--------------------------------------------------------------------------------
1 | # Task Lifecycle
2 |
3 | Understanding how tasks move through the Sleepless Agent system is crucial for effective usage and troubleshooting.
4 |
5 | ## Task States
6 |
7 | Every task progresses through a defined set of states:
8 |
9 | ```
10 | [Created]
11 | ↓
12 | [Pending] ←──────┐
13 | ↓ │
14 | [Scheduled] │
15 | ↓ │
16 | [In Progress] │
17 | ↓ │
18 | ┌───┴───┐ │
19 | ↓ ↓ │
20 | [Completed] [Failed]─┘
21 | ↓ ↓
22 | [Archived] [Retried]
23 | ```
24 |
25 | ### State Definitions
26 |
27 | | State | Description | Duration |
28 | |-------|-------------|----------|
29 | | **Created** | Task just submitted, not yet queued | < 1 second |
30 | | **Pending** | In queue, waiting for execution | Variable |
31 | | **Scheduled** | Selected for execution, checking resources | < 5 seconds |
32 | | **In Progress** | Actively executing | Minutes to hours |
33 | | **Completed** | Successfully finished | Terminal state |
34 | | **Failed** | Execution failed | Can be retried |
35 | | **Archived** | Moved to long-term storage | Permanent |
36 |
37 | ## Task Creation
38 |
39 | ### 1. Input Sources
40 |
41 | Tasks can be created from multiple sources:
42 |
43 | #### Slack Command
44 | ```
45 | /think Research async patterns in Python
46 | ↓
47 | Parse Input
48 | ↓
49 | Create Task
50 | ↓
51 | Store in DB
52 | ```
53 |
54 | #### CLI Command
55 | ```bash
56 | sle think "Implement caching layer" -p backend
57 | ↓
58 | Parse Arguments
59 | ↓
60 | Create Task Object
61 | ↓
62 | Queue Task
63 | ```
64 |
65 | #### Auto-Generation
66 | ```python
67 | # System generates tasks based on schedule
68 | if is_idle_time() and has_capacity():
69 | task = generate_refinement_task()
70 | queue.add(task)
71 | ```
72 |
73 | ### 2. Task Properties
74 |
75 | Each task is created with:
76 |
77 | ```python
78 | task = {
79 | 'id': auto_increment,
80 | 'description': user_input,
81 | 'priority': 'serious' | 'random',
82 | 'project': project_name | null,
83 | 'created_at': timestamp,
84 | 'created_by': 'slack' | 'cli' | 'system',
85 | 'metadata': {
86 | 'estimated_time': seconds,
87 | 'retry_count': 0,
88 | 'parent_task': task_id | null
89 | }
90 | }
91 | ```
92 |
93 | ## Task Scheduling
94 |
95 | ### 1. Queue Management
96 |
97 | Tasks enter a priority queue:
98 |
99 | ```python
100 | def get_next_task():
101 | # Priority order:
102 | # 1. Serious tasks with projects
103 | # 2. Serious standalone tasks
104 | # 3. Random thoughts
105 | # 4. Auto-generated tasks
106 |
107 | return queue.pop_highest_priority()
108 | ```
109 |
110 | ### 2. Execution Eligibility
111 |
112 | Before execution, tasks must pass checks:
113 |
114 | ```python
115 | def can_execute(task):
116 | checks = [
117 | has_available_claude_usage(),
118 | within_time_threshold(),
119 | has_system_resources(),
120 | no_workspace_conflicts(),
121 | dependencies_completed()
122 | ]
123 | return all(checks)
124 | ```
125 |
126 | ### 3. Resource Allocation
127 |
128 | ```
129 | Task Selected
130 | ↓
131 | Check Claude Usage → Over Limit → Queue (wait)
132 | ↓
133 | Check Time Window → Wrong Time → Defer
134 | ↓
135 | Allocate Workspace → Conflict → Wait
136 | ↓
137 | Start Execution
138 | ```
139 |
140 | ## Task Execution
141 |
142 | ### 1. Workspace Setup
143 |
144 | Each task gets an isolated environment:
145 |
146 | ```bash
147 | workspace/tasks/task_42/
148 | ├── .env # Task-specific environment
149 | ├── context.json # Task metadata
150 | ├── input.txt # Task description
151 | ├── output/ # Generated files
152 | └── logs/ # Execution logs
153 | ```
154 |
155 | ### 2. Claude Code Invocation
156 |
157 | ```python
158 | def execute_task(task):
159 | # 1. Change to workspace
160 | os.chdir(task.workspace_path)
161 |
162 | # 2. Prepare prompt
163 | prompt = format_prompt(task)
164 |
165 | # 3. Invoke Claude Code CLI
166 | result = subprocess.run(
167 | ['claude'],
168 | input=prompt,
169 | capture_output=True
170 | )
171 |
172 | # 4. Process output
173 | return process_result(result)
174 | ```
175 |
176 | ### 3. Multi-Phase Execution
177 |
178 | Complex tasks may involve multiple phases:
179 |
180 | ```
181 | Planning Phase
182 | ↓
183 | Implementation Phase
184 | ↓
185 | Testing Phase
186 | ↓
187 | Review Phase
188 | ↓
189 | Completion
190 | ```
191 |
192 | ### 4. Progress Monitoring
193 |
194 | Real-time status updates:
195 |
196 | ```python
197 | # Status updates during execution
198 | task.update_status("Starting implementation")
199 | task.update_progress(25) # 25% complete
200 | task.log_output(chunk) # Stream output
201 | ```
202 |
203 | ## Result Handling
204 |
205 | ### 1. Output Capture
206 |
207 | All task outputs are captured:
208 |
209 | ```
210 | Claude Output → Parse Response → Extract Files → Store Results
211 | ↓ ↓ ↓
212 | Markdown Code Files Database Entry
213 | ```
214 |
215 | ### 2. Result Storage
216 |
217 | ```
218 | workspace/data/results/
219 | ├── 2024-10-24/
220 | │ ├── task_42_result.json
221 | │ ├── task_42_output.md
222 | │ └── task_42_files/
223 | │ ├── main.py
224 | │ └── test.py
225 | ```
226 |
227 | ### 3. Success Criteria
228 |
229 | Task marked as completed when:
230 | - Claude execution returns successfully
231 | - No critical errors in output
232 | - Required files generated
233 | - Tests pass (if applicable)
234 |
235 | ## Error Handling
236 |
237 | ### 1. Failure Types
238 |
239 | | Type | Description | Action |
240 | |------|-------------|--------|
241 | | **Timeout** | Execution exceeds limit | Kill process, mark failed |
242 | | **Claude Error** | CLI returns error | Log error, retry if transient |
243 | | **Resource Error** | Out of disk/memory | Clean up, defer task |
244 | | **Validation Error** | Output doesn't meet criteria | Mark failed, notify user |
245 |
246 | ### 2. Retry Logic
247 |
248 | ```python
249 | def handle_failure(task, error):
250 | if is_transient_error(error):
251 | if task.retry_count < MAX_RETRIES:
252 | task.retry_count += 1
253 | task.status = 'pending'
254 | task.scheduled_for = calculate_backoff(task.retry_count)
255 | else:
256 | task.status = 'failed'
257 | notify_permanent_failure(task)
258 | else:
259 | task.status = 'failed'
260 | log_error(task, error)
261 | ```
262 |
263 | ### 3. Recovery Mechanisms
264 |
265 | - **Automatic retry** for transient failures
266 | - **Exponential backoff** to prevent thundering herd
267 | - **Dead letter queue** for persistent failures
268 | - **Manual intervention** options via CLI/Slack
269 |
270 | ## Post-Execution
271 |
272 | ### 1. Git Integration
273 |
274 | Based on task type:
275 |
276 | #### Random Thoughts
277 | ```bash
278 | git checkout thought-ideas
279 | git add .
280 | git commit -m "Random thought: <description>"
281 | git push origin thought-ideas
282 | ```
283 |
284 | #### Serious Tasks
285 | ```bash
286 | git checkout -b feature/task-description
287 | git add .
288 | git commit -m "Implement: <task description>"
289 | git push origin feature/task-description
290 | gh pr create --title "<task title>" --body "<summary>"
291 | ```
292 |
293 | ### 2. Notifications
294 |
295 | ```python
296 | def notify_completion(task):
297 | channels = []
298 |
299 | if task.source == 'slack':
300 | channels.append(slack_notification)
301 |
302 | if task.priority == 'serious':
303 | channels.append(email_notification)
304 |
305 | for channel in channels:
306 | channel.send(task.get_summary())
307 | ```
308 |
309 | ### 3. Cleanup
310 |
311 | After successful completion:
312 | ```python
313 | def cleanup_task(task):
314 | if task.keep_workspace:
315 | archive_workspace(task)
316 | else:
317 | remove_workspace(task)
318 |
319 | update_metrics(task)
320 | generate_report(task)
321 | ```
322 |
323 | ## Task Dependencies
324 |
325 | ### 1. Dependency Graph
326 |
327 | Tasks can depend on others:
328 |
329 | ```
330 | Task A ─────┬──→ Task C
331 | │
332 | Task B ─────┘
333 | ```
334 |
335 | ### 2. Dependency Resolution
336 |
337 | ```python
338 | def can_start_task(task):
339 | if not task.dependencies:
340 | return True
341 |
342 | for dep_id in task.dependencies:
343 | dep = get_task(dep_id)
344 | if dep.status != 'completed':
345 | return False
346 |
347 | return True
348 | ```
349 |
350 | ## Monitoring & Observability
351 |
352 | ### 1. Task Metrics
353 |
354 | Tracked for each task:
355 | - Queue time (pending duration)
356 | - Execution time (in_progress duration)
357 | - Resource usage (CPU, memory, disk)
358 | - Success rate (per task type)
359 | - Retry attempts
360 |
361 | ### 2. Lifecycle Events
362 |
363 | All state transitions are logged:
364 |
365 | ```json
366 | {
367 | "timestamp": "2024-10-24T15:30:00Z",
368 | "task_id": 42,
369 | "event": "state_change",
370 | "from_state": "pending",
371 | "to_state": "in_progress",
372 | "metadata": {
373 | "queue_time": 120,
374 | "executor": "claude-code"
375 | }
376 | }
377 | ```
378 |
379 | ### 3. Performance Analysis
380 |
381 | ```sql
382 | -- Average execution time by task type
383 | SELECT
384 | priority,
385 | AVG(TIMESTAMPDIFF(SECOND, started_at, completed_at)) as avg_duration
386 | FROM tasks
387 | WHERE status = 'completed'
388 | GROUP BY priority;
389 | ```
390 |
391 | ## Advanced Lifecycle Features
392 |
393 | ### 1. Task Chaining
394 |
395 | Create follow-up tasks automatically:
396 |
397 | ```python
398 | @on_task_complete
399 | def chain_tasks(completed_task):
400 | if completed_task.has_chain():
401 | next_task = completed_task.get_next_in_chain()
402 | queue.add(next_task)
403 | ```
404 |
405 | ### 2. Conditional Execution
406 |
407 | ```python
408 | def should_execute(task):
409 | conditions = task.get_conditions()
410 | return all([
411 | evaluate_condition(c) for c in conditions
412 | ])
413 | ```
414 |
415 | ### 3. Task Templates
416 |
417 | Reusable task patterns:
418 |
419 | ```yaml
420 | templates:
421 | code_review:
422 | phases:
423 | - analyze
424 | - suggest_improvements
425 | - generate_report
426 | timeout: 1800
427 | retries: 2
428 | ```
429 |
430 | ## Best Practices
431 |
432 | ### 1. Task Sizing
433 | - Keep tasks focused and atomic
434 | - Break large tasks into subtasks
435 | - Estimate execution time accurately
436 |
437 | ### 2. Priority Management
438 | - Use projects for related serious tasks
439 | - Reserve serious priority for important work
440 | - Let random thoughts fill idle time
441 |
442 | ### 3. Error Recovery
443 | - Write idempotent tasks when possible
444 | - Include validation in task descriptions
445 | - Monitor retry patterns for issues
446 |
447 | ### 4. Resource Optimization
448 | - Schedule heavy tasks during low-usage periods
449 | - Clean up workspaces regularly
450 | - Archive old results periodically
451 |
452 | This complete lifecycle ensures reliable, efficient task processing while maintaining system stability and resource optimization.
--------------------------------------------------------------------------------