├── Dockerfile
├── api
│   └── api.py
├── appservice
│   ├── a2a.py
│   ├── acp.py
│   ├── agent_manager.py
│   ├── base.py
│   ├── mcp.py
│   └── router.py
├── config.yaml
├── docker-compose.yaml
├── docs
│   ├── ROADMAP.md
│   ├── docs.json
│   └── logo.png
├── main.py
├── poetry.lock
├── pyproject.toml
├── registration.yaml
├── registry
│   ├── models
│   │   ├── __init__.py
│   │   ├── agent.py
│   │   └── search.py
│   └── registry.py
├── scripts
│   └── inject-registration.sh
├── search
│   ├── __init__.py
│   ├── mcp
│   │   └── search_mcp.py
│   ├── models
│   │   ├── mcp_models.py
│   │   └── search_models.py
│   └── search.py
├── synapse
│   └── data
│       ├── homeserver.yaml
│       └── registration.yaml
└── tests
    ├── README.md
    ├── conftest.py
    ├── e2e
    │   ├── test_docker_setup.py
    │   ├── test_matrix_integration.py
    │   ├── test_mcp_functionality.py
    │   └── test_pgvector.py
    ├── integration
    │   ├── test_mcp_registration.py
    │   ├── test_registry_api.py
    │   └── test_search_api.py
    ├── requirements.txt
    ├── run_tests.py
    ├── test_config.yaml
    ├── unit
    │   ├── test_mcp.py
    │   ├── test_registry.py
    │   └── test_search.py
    └── utils.py
/Dockerfile:
--------------------------------------------------------------------------------
1 | # Use minimal Python base
2 | FROM python:3.11-slim
3 |
4 | # System-level dependencies
5 | RUN apt-get update && apt-get install -y \
6 | build-essential \
7 | libffi-dev \
8 | libpq-dev \
9 | git \
10 | && rm -rf /var/lib/apt/lists/*
11 |
12 | # Set environment
13 | ENV PYTHONDONTWRITEBYTECODE=1
14 | ENV PYTHONUNBUFFERED=1
15 | WORKDIR /app
16 |
17 | # Install Python dependencies
18 | COPY pyproject.toml ./
19 | RUN pip install --upgrade pip && pip install poetry ruamel.yaml
20 | RUN poetry config virtualenvs.create false
21 | RUN poetry lock
22 | RUN poetry install --no-root
23 |
24 | # Copy source code
25 | COPY . .
26 |
27 | # Expose AppService port (as configured in `config.yaml`)
28 | EXPOSE 29333
29 |
30 | # Default startup command
31 | CMD ["python", "main.py"]
32 |
--------------------------------------------------------------------------------
/api/api.py:
--------------------------------------------------------------------------------
1 | from fastapi import FastAPI, HTTPException, Depends, Request
2 | from fastapi.middleware.cors import CORSMiddleware
3 | from typing import List, Dict, Any, Optional
4 | import os
5 | import json
6 | import logging
7 | from fastapi.responses import JSONResponse
8 |
9 | # Import registry and search modules
10 | from AutonomousSphere.registry import registry
11 | from AutonomousSphere.search import router as search_router, startup_event
12 |
13 | # Configure logging
14 | logging.basicConfig(level=logging.INFO)
15 | logger = logging.getLogger(__name__)
16 |
17 | # Initialize FastAPI app
18 | app = FastAPI(
19 | title="AutonomousSphere API",
20 | description="API for communicating with the Autonomous Sphere Matrix AppService",
21 | version="0.1.0"
22 | )
23 |
24 | # Add CORS middleware
25 | app.add_middleware(
26 | CORSMiddleware,
27 | allow_origins=["*"], # Modify in production
28 | allow_credentials=True,
29 | allow_methods=["*"],
30 | allow_headers=["*"],
31 | )
32 |
33 | # Root endpoint
34 | @app.get("/")
35 | async def root():
36 | return {"message": "Welcome to AutonomousSphere API"}
37 |
38 | # Health check endpoint
39 | @app.get("/health")
40 | async def health_check():
41 | return {"status": "healthy"}
42 |
43 | # Include registry routes
44 | app.include_router(registry.router, prefix="/registry", tags=["registry"])
45 |
46 | # Include search routes
47 | app.include_router(search_router, prefix="/search", tags=["search"])
48 |
49 | # Register startup event
50 | @app.on_event("startup")
51 | async def on_startup():
52 | await startup_event()
53 |
54 | # Error handling
55 | @app.exception_handler(Exception)
56 | async def global_exception_handler(request: Request, exc: Exception):
57 | logger.error(f"Unhandled exception: {str(exc)}")
58 |     return JSONResponse(status_code=500, content={"error": "Internal server error", "detail": str(exc)})
59 |
60 | # Main function to run the app
61 | def start_api(host="0.0.0.0", port=8000):
62 | import uvicorn
63 | uvicorn.run(app, host=host, port=port)
64 |
65 | if __name__ == "__main__":
66 | start_api()
--------------------------------------------------------------------------------
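The routes above can be exercised against a locally running instance (started via `start_api()`, which serves on port 8000 by default). A minimal smoke check, assuming that local address:

```python
# Quick smoke check of the endpoints defined in api.py, assuming the API
# is running locally on its default port (local-development assumption).
import httpx

BASE_URL = "http://localhost:8000"

print(httpx.get(f"{BASE_URL}/").json())                 # {"message": "Welcome to AutonomousSphere API"}
print(httpx.get(f"{BASE_URL}/health").json())           # {"status": "healthy"}
print(httpx.get(f"{BASE_URL}/registry/health").json())  # status plus current agent count
```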
/appservice/a2a.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/appservice/acp.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/appservice/agent_manager.py:
--------------------------------------------------------------------------------
1 | from mautrix.util.async_db import Database
2 | from mautrix.bridge import BaseBridge
3 |
4 | class AgentManager:
5 | def __init__(self, bridge: BaseBridge):
6 | self.bridge = bridge
7 | self.intent_cache = {}
8 |
9 | def get_agent_user_id(self, agent_id: str) -> str:
10 | return f"@agent_{agent_id}:{self.bridge.config['homeserver.domain']}"
11 |
12 | def get_intent(self, agent_id: str):
13 | mxid = self.get_agent_user_id(agent_id)
14 | if mxid not in self.intent_cache:
15 | self.intent_cache[mxid] = self.bridge.get_intent(mxid)
16 | return self.intent_cache[mxid]
17 |
--------------------------------------------------------------------------------
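A hypothetical caller resolves an agent's intent through `AgentManager` and posts into a room. The agent ID and room ID below are placeholders, and `send_text` is the mautrix `IntentAPI` helper for plain-text messages:

```python
# Illustrative usage only: agent_id and room_id are placeholders.
from appservice.agent_manager import AgentManager

async def announce(bridge) -> None:
    manager = AgentManager(bridge)
    # Resolves to @agent_weather_bot:<homeserver.domain> and caches the intent
    intent = manager.get_intent("weather_bot")
    await intent.send_text("!example:yourdomain.com", "weather_bot is online")
```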
/appservice/base.py:
--------------------------------------------------------------------------------
1 | from mautrix.appservice import AppService
2 | from .agent_manager import AgentManager
3 | from .router import MessageRouter
4 |
5 | class AutonomousSphereBridge(AppService):
6 | async def start(self):
7 | self.agent_manager = AgentManager(self)
8 | self.router = MessageRouter(self, self.agent_manager)
9 |
10 | self.register_event_handler("m.room.message", self.router.handle_message)
11 |
12 | await super().start()
13 |
--------------------------------------------------------------------------------
/appservice/mcp.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/appservice/router.py:
--------------------------------------------------------------------------------
1 | from appservice.a2a import handle_a2a
2 | from appservice.acp import handle_acp
3 | from appservice.mcp import handle_mcp
4 |
5 | class MessageRouter:
6 | def __init__(self, bridge, agent_manager):
7 | self.bridge = bridge
8 | self.agent_manager = agent_manager
9 |
10 | async def handle_message(self, evt):
11 | content = evt.content.get("body", "")
12 | sender = evt.sender
13 |
14 | if content.startswith("a2a:"):
15 | await handle_a2a(self.bridge, evt, self.agent_manager)
16 | elif content.startswith("mcp:"):
17 | await handle_mcp(self.bridge, evt, self.agent_manager)
18 | elif content.startswith("acp:"):
19 | await handle_acp(self.bridge, evt, self.agent_manager)
20 | else:
21 | print(f"Ignoring: {content}")
22 |
--------------------------------------------------------------------------------
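`router.py` expects `a2a.py`, `acp.py`, and `mcp.py` to each expose a `handle_*` coroutine taking `(bridge, evt, agent_manager)`, but those modules are currently empty. A minimal placeholder for the A2A adapter, with the echo reply purely illustrative and assuming the appservice exposes its bot intent as `bridge.intent`:

```python
# appservice/a2a.py (sketch): matches the call signature used in router.py.
# The real adapter is a Phase 1 roadmap item; this only acknowledges the payload.
async def handle_a2a(bridge, evt, agent_manager) -> None:
    # Strip the "a2a:" routing prefix checked by MessageRouter
    payload = evt.content.get("body", "")[len("a2a:"):].strip()
    # A real adapter would look the target agent up in the registry and
    # forward the payload to its A2A endpoint; here we just echo it back.
    await bridge.intent.send_text(evt.room_id, f"[a2a] received: {payload}")
```

`handle_acp` and `handle_mcp` would follow the same signature.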
/config.yaml:
--------------------------------------------------------------------------------
1 | homeserver:
2 | address: "https://matrix.yourdomain.com"
3 | domain: "yourdomain.com"
4 |
5 | appservice:
6 | id: "autonomoussphere"
7 | bot_username: "_as_master"
8 | token: "YOUR_AS_TOKEN"
9 | homeserver_token: "YOUR_HS_TOKEN"
10 | database: "sqlite:///mautrix-as.db"
11 | port: 29333
12 | address: "0.0.0.0"
13 |
14 | logging:
15 | level: DEBUG
16 |
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | synapse:
3 | image: matrixdotorg/synapse:latest
4 | container_name: synapse
5 | restart: unless-stopped
6 | volumes:
7 | - ./synapse/data:/data
8 | - ./registration.yaml:/data/registration.yaml
9 | - ./scripts/inject-registration.sh:/start.sh
10 | entrypoint: ["/bin/bash", "/start.sh"]
11 | environment:
12 | - SYNAPSE_SERVER_NAME=localhost
13 | - SYNAPSE_REPORT_STATS=no
14 | ports:
15 | - "8008:8008"
16 | depends_on:
17 | - postgres
18 |
19 | postgres:
20 | image: pgvector/pgvector:pg14
21 | container_name: synapse_postgres
22 | restart: unless-stopped
23 | environment:
24 | POSTGRES_USER: synapse
25 | POSTGRES_PASSWORD: synapsepass
26 | POSTGRES_DB: synapse
27 | volumes:
28 | - postgres_data:/var/lib/postgresql/data
29 | - ./postgres/init-pgvector.sql:/docker-entrypoint-initdb.d/init-pgvector.sql
30 |
31 | autonomoussphere:
32 | build:
33 | context: .
34 | dockerfile: Dockerfile
35 | container_name: autonomoussphere
36 | restart: unless-stopped
37 | volumes:
38 | - ./config.yaml:/app/config.yaml
39 | - ./registration.yaml:/app/registration.yaml
40 | environment:
41 | - MAUTRIX_CONFIG=/app/config.yaml
42 | depends_on:
43 | - synapse
44 | ports:
45 | - "29333:29333" # AppService listening port
46 |
47 | volumes:
48 | postgres_data:
49 |
--------------------------------------------------------------------------------
/docs/ROADMAP.md:
--------------------------------------------------------------------------------
1 | # AutonomousSphere Roadmap (6‑Month)
2 |
3 | > **Goal:** Ship a production‑ready, multi‑protocol agent‑collaboration platform on Matrix in 6 months, with incremental releases each month.
4 |
5 | | Phase | Timeline | Core Deliverables |
6 | | ------------------------------ | --------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
7 | | **0 — Matrix Core & Registry** | Month 1 | \* Synapse/Dendrite homeserver \* Appservice skeleton \* Agent Registry + `!search` \* **Federation smoke‑test:** 2 homeservers + appservices exchange agent messages \* “Connectivity Alpha” demo |
8 | | **1 — Protocol Bridge** | Month 2 | \* Adapters for **A2A**, **MCP**, **ACP** \* Chat‑orchestration (interjections, race‑control) \* Registry upgrade → multi‑protocol metadata \* **Federation test** with cross‑server A2A agent chat \* “Multi‑Protocol Beta” release |
9 | | **2 — Task Queues** | Month 3 | \* Redis‑backed Kanban queues (room & agent scoped) \* Admin UI v0 (task board + agent list) \* Matrix notifications for task updates \* **Federation test:** tasks on server A worked by agent on server B \* “Task Alpha” rollout |
10 | | **3 — Custom Client** | Month 4 | \* Branded Matrix web client \* Built‑in agent sidebar & task board \* Private client beta |
11 | | **4 — Extra Protocols** | Mid‑Month 5 | \* **ANP** adapter + modular plugin system \* Docs for adding more protocols \* Dev preview |
12 | | **5 — UX & Collaboration** | Late M5 → Wk 22 | \* UI polish, comments, role‑permissions, file‑share \* Public beta / RC |
13 | | **6 — Security & Scale** | Month 6 | \* Security audit, E2EE, RBAC \* Load tests, Prometheus/Grafana \* GA readiness gate |
14 | | **7 — Deployment & Launch** | Wk 23‑24 | \* SaaS & Docker/Helm packages \* Docs, onboarding, launch comms |
15 | | **8 — Feedback Loop** | Post‑launch | \* Analytics, interviews, agile sprints \* Roadmap refresh every quarter |
16 |
17 | ---
18 |
19 | ### Milestone Flow
20 |
21 | 1. **Connectivity Alpha (M1)**
22 | 2. **Multi‑Protocol Beta (M2)**
23 | 3. **Task Alpha (M3)**
24 | 4. **Unified Client Beta (M4)**
25 | 5. **Interoperability Preview (M5)**
26 | 6. **RC + Security Green‑light (M6‑W22)**
27 | 7. **v1.0 GA (M6‑W24)**
28 |
29 | ---
30 |
31 | ### Quick References
32 |
33 | * **Repo folders**
34 | `/appservice` – bridge code & protocol adapters
35 | `/registry` – agent metadata service
36 | `/client` – custom Matrix client
37 | `/docs` – specs & guides
38 | * **Issue labels:** `phase:0` … `phase:8`
39 | * **Federation testing:** `docker‑compose.test‑federation.yml` spins up two homeservers + appservices.
40 |
41 | ---
42 |
43 | ## Phase Details
44 |
45 | - [AutonomousSphere Roadmap (6‑Month)](#autonomoussphere-roadmap-6month)
46 | - [Milestone Flow](#milestoneflow)
47 | - [Quick References](#quickreferences)
48 | - [Phase Details ](#phase-details--)
49 | - [Phase 0 — Matrix Core \& Registry](#phase0-matrixcore-registry)
50 | - [Phase 1 — Protocol Bridge](#phase1-protocol-bridge)
51 | - [Phase 2 — Task Queues](#phase2-task-queues)
52 | - [Phase 3 — Custom Client](#phase3-custom-client)
53 | - [Phase 4 — Extra Protocols](#phase4-extraprotocols)
54 | - [Phase 5 — UX \& Collab](#phase5-uxcollab)
55 | - [Phase 6 — Security \& Scale](#phase6-securityscale)
56 | - [Phase 7 — Deployment \& Launch](#phase7-deploymentlaunch)
57 | - [Phase 8 — Feedback Loop](#phase8-feedback-loop)
58 |
59 | ### Phase 0 — Matrix Core & Registry
60 |
61 | * **Why:** Foundation – everything else builds on reliable Matrix comms.
62 | * **Homeserver + Appservice:** Spin up Synapse (Docker) and minimal bridge that can spawn virtual agent users.
63 | * **Agent Registry:** In‑memory/SQLite list → enables discovery via `!search`.
64 | * **Federation Smoke‑test:** Bring up a second homeserver; confirm an agent on Server A can message a room on Server B.
65 |
66 | ### Phase 1 — Protocol Bridge
67 |
68 | * **Why:** Agents live everywhere; AutonomousSphere must speak their languages.
69 | * **Adapters:** Implement HTTP shims for A2A, MCP, ACP.
70 | * **Chat Orchestration:** Workers inspect each incoming Matrix event, decide if/which agent responds, and lock the query to one agent to avoid duplicates. This prevents “agent‑spam” races and makes multi‑agent rooms sane.
71 | * **Cross‑Server Test:** Verify an A2A agent behind Server B answers a question posted in Server A’s room.
72 |
73 | ### Phase 2 — Task Queues
74 |
75 | * **Why:** Structured work beats loose chat for real projects.
76 | * **Redis Queues:** Atomic `BLPOP` for agent pull, sorted‑set for room Kanban.
77 | * **Admin UI v0:** simple React or Svelte board; CRUD tasks; shows live status.
78 | * **Federation Test:** Task created on one server, remote agent marks done, board updates everywhere.
79 |
80 | ### Phase 3 — Custom Client
81 |
82 | Focused UX: built‑in agent sidebar, drag‑drop task board, branded theme. Quick‑invite agents and visualize status.
83 |
84 | ### Phase 4 — Extra Protocols
85 |
86 | Add **ANP** adapter and plugin scaffold so community can drop in new protocols without touching core.
87 |
88 | ### Phase 5 — UX & Collab
89 |
90 | Comment threads, file‑sharing, role permissions, responsive design → polish for public beta.
91 |
92 | ### Phase 6 — Security & Scale
93 |
94 | E2EE, RBAC, penetration test, Prometheus/Grafana dashboards, horizontal‑scaling guide.
95 |
96 | ### Phase 7 — Deployment & Launch
97 |
98 | Cloud SaaS & self‑host (Docker/Helm). Docs, onboarding, marketing comms.
99 |
100 | ### Phase 8 — Feedback Loop
101 |
102 | Analytics + community channels → iterate every quarter; keep roadmap in `/docs/ROADMAP_NEXT.md`.
103 |
--------------------------------------------------------------------------------
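Phase 2 of the roadmap above calls for Redis-backed queues: atomic `BLPOP` for agent pull and a sorted set for the room Kanban. A minimal sketch of that pattern using redis-py's asyncio client; the key names and task payload are assumptions, not part of the roadmap:

```python
# Sketch of the Phase 2 queue pattern. Key names ("tasks:<agent>",
# "kanban:<room>") and the JSON task payload are illustrative assumptions.
import json
import redis.asyncio as redis

r = redis.Redis(host="localhost", port=6379, decode_responses=True)

async def enqueue_task(agent_id: str, room_id: str, task: dict) -> None:
    payload = json.dumps(task)
    await r.rpush(f"tasks:{agent_id}", payload)                      # per-agent work queue
    await r.zadd(f"kanban:{room_id}", {payload: task["priority"]})   # room board ordered by priority

async def agent_worker(agent_id: str) -> None:
    while True:
        # BLPOP blocks until a task exists and pops it atomically, so two
        # workers never pick up the same task.
        _, payload = await r.blpop(f"tasks:{agent_id}")
        task = json.loads(payload)
        # ...execute the task, then send a Matrix notification for the update
```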
/docs/docs.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "AutonomousSphere Documentation",
3 | "logo": {
4 | "light": "/logo.png",
5 | "dark": "/logo.png"
6 | },
7 | "favicon": "/logo.png",
8 | "colors": {
9 | "primary": "#0D9373",
10 | "light": "#07C983",
11 | "dark": "#0D9373"
12 | },
13 | "topbarLinks": [
14 | {
15 | "name": "GitHub",
16 | "url": "https://github.com/cybertheory/AutonomousSphere"
17 | }
18 | ],
19 | "topbarCtaButton": {
20 | "name": "Get Started",
21 | "url": "/introduction"
22 | },
23 | "anchors": [
24 | {
25 | "name": "API Reference",
26 | "icon": "rectangle-terminal",
27 | "url": "api-reference"
28 | },
29 | {
30 | "name": "Roadmap",
31 | "icon": "road",
32 | "url": "roadmap"
33 | }
34 | ],
35 | "navigation": [
36 | {
37 | "group": "Getting Started",
38 | "pages": ["introduction", "quickstart", "architecture-overview"]
39 | },
40 | {
41 | "group": "Core Concepts",
42 | "pages": ["matrix-foundation", "appservice", "agent-registry", "federation"]
43 | },
44 | {
45 | "group": "Protocol Bridge",
46 | "pages": ["protocol-overview", "a2a-protocol", "mcp-protocol", "acp-protocol", "anp-protocol"]
47 | },
48 | {
49 | "group": "Chat Orchestration",
50 | "pages": ["chat-orchestration", "agent-autonomy", "message-routing", "multi-agent-sync"]
51 | },
52 | {
53 | "group": "Task Management",
54 | "pages": ["task-queues", "kanban-system", "task-federation"]
55 | },
56 | {
57 | "group": "Client & UX",
58 | "pages": ["custom-client", "agent-sidebar", "task-board"]
59 | },
60 | {
61 | "group": "Deployment",
62 | "pages": ["deployment-options", "security-considerations", "scaling"]
63 | },
64 | {
65 | "group": "API Reference",
66 | "pages": ["api-reference/agent-api", "api-reference/registry-api", "api-reference/task-api"]
67 | }
68 | ],
69 | "footerSocials": {
70 | "github": "https://github.com/cybertheory/AutonomousSphere"
71 | }
72 | }
--------------------------------------------------------------------------------
/docs/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/cybertheory/AutonomousSphere/HEAD/docs/logo.png
--------------------------------------------------------------------------------
/main.py:
--------------------------------------------------------------------------------
1 | from mautrix.appservice import AppService
2 | from appservice.base import AutonomousSphereBridge
3 |
4 | appservice = AppService(main_class=AutonomousSphereBridge)
5 | appservice.run()
6 |
--------------------------------------------------------------------------------
/poetry.lock:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [tool.poetry]
2 | name = "autonomoussphere"
3 | version = "0.1.0"
4 | description = "AutonomousSphere Matrix bot"
5 | authors = ["Your Name "]
6 |
7 | [tool.poetry.dependencies]
8 | python = "^3.11"
9 | mautrix = "^0.20.2"
10 | asyncpg = "^0.27.0"
11 | pgvector = "^0.2.0"
12 | aiohttp = "^3.8.4"
13 | ruamel.yaml = "^0.17.21"
14 |
15 | [tool.poetry.group.dev.dependencies]
16 | pytest = "^7.3.1"
17 |
18 | [build-system]
19 | requires = ["poetry-core>=1.0.0"]
20 | build-backend = "poetry.core.masonry.api"
--------------------------------------------------------------------------------
/registration.yaml:
--------------------------------------------------------------------------------
1 | id: "autonomoussphere"
2 | url: "http://localhost:29333"
3 | as_token: "YOUR_AS_TOKEN"
4 | hs_token: "YOUR_HS_TOKEN"
5 | sender_localpart: "_as_master"
6 |
7 | namespaces:
8 | users:
9 | - regex: "^@agent_[a-zA-Z0-9_-]+:yourdomain\\.com$"
10 | exclusive: true
11 | rooms:
12 | - regex: "^!as_.*:yourdomain\\.com$"
13 | exclusive: false
14 | aliases:
15 | - regex: "^#as_.*:yourdomain\\.com$"
16 | exclusive: false
17 |
--------------------------------------------------------------------------------
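The exclusive `users` namespace above should cover the MXIDs that `AgentManager.get_agent_user_id()` builds. A quick sanity check with the same regex:

```python
# The regex is copied from the users namespace in registration.yaml.
import re

USERS_REGEX = r"^@agent_[a-zA-Z0-9_-]+:yourdomain\.com$"

assert re.match(USERS_REGEX, "@agent_weather_bot:yourdomain.com")   # appservice-owned
assert not re.match(USERS_REGEX, "@alice:yourdomain.com")           # regular user, untouched
```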
/registry/models/agent.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseModel, Field, HttpUrl
2 | from typing import List, Dict, Any, Optional
3 | from datetime import datetime
4 | from enum import Enum
5 |
6 | class Protocol(str, Enum):
7 | MCP = "MCP"
8 | A2A = "A2A"
9 | ACP = "ACP"
10 |
11 | class Agent(BaseModel):
12 | id: str = Field(..., description="Unique agent identifier")
13 | matrix_id: Optional[str] = Field(None, description="Matrix user ID")
14 | display_name: str = Field(..., description="Human-readable name")
15 | description: Optional[str] = Field(None, description="Brief summary of the agent's functionality")
16 | protocol: Protocol = Field(..., description="Communication protocol used by the agent")
17 | tools: List[str] = Field(default=[], description="List of callable tools")
18 | skills: List[str] = Field(default=[], description="Freeform tags representing agent skills")
19 | languages: List[str] = Field(default=[], description="Languages the agent can communicate in")
20 | endpoint_url: Optional[HttpUrl] = Field(None, description="Public URL for agent communication")
21 | room_ids: List[str] = Field(default=[], description="Matrix room IDs the agent is active in")
22 | owner: Optional[str] = Field(None, description="Identifier for the entity or user that owns the agent")
23 | registered_at: datetime = Field(default_factory=datetime.now, description="Timestamp of agent registration")
24 | last_seen: datetime = Field(default_factory=datetime.now, description="Timestamp of the last heartbeat or activity")
25 | public: bool = Field(default=True, description="Indicates if the agent is publicly discoverable")
26 | custom_metadata: Dict[str, Any] = Field(default_factory=dict, description="Protocol-specific metadata")
--------------------------------------------------------------------------------
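An illustrative `Agent` record built from this model, assuming the repo root is on the import path (the tests add it in conftest.py); every field value below is made up, and the pydantic v1-style `.json()` call matches the `.dict()` usage elsewhere in the repo:

```python
# All field values here are illustrative placeholders.
from registry.models.agent import Agent, Protocol

agent = Agent(
    id="weather-bot",
    matrix_id="@agent_weather_bot:yourdomain.com",
    display_name="Weather Bot",
    description="Answers weather questions",
    protocol=Protocol.A2A,
    tools=["get_forecast"],
    skills=["weather"],
    languages=["en"],
    endpoint_url="https://agents.example.com/weather",
)

print(agent.json(indent=2))   # registered_at / last_seen default to now()
```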
/registry/models/search.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseModel
2 | from typing import List, Optional, Dict, Any
3 | from .agent import Protocol
4 |
5 | class SearchQuery(BaseModel):
6 | query: str
7 | filters: Optional[Dict[str, Any]] = None
--------------------------------------------------------------------------------
/registry/models/__init__.py:
--------------------------------------------------------------------------------
1 | from .agent import Agent, Protocol
2 | from .search import SearchQuery
3 |
4 | __all__ = ["Agent", "Protocol", "SearchQuery"]
--------------------------------------------------------------------------------
/registry/registry.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter, HTTPException, Depends, Body, Query, Path, status
2 | from typing import List, Dict, Any, Optional, Union
3 | import os
4 | import json
5 | import logging
6 | from datetime import datetime
7 | import uuid
8 |
9 | # Import models from the models directory
10 | from .models import Agent, Protocol, SearchQuery
11 |
12 | # Configure logging
13 | logging.basicConfig(level=logging.INFO)
14 | logger = logging.getLogger(__name__)
15 |
16 | # Initialize router
17 | router = APIRouter()
18 |
19 | # In-memory storage (replace with database in production)
20 | agents_registry = {}
21 |
22 | # Agent registry routes
23 | @router.post("/agents", response_model=Agent, status_code=status.HTTP_201_CREATED)
24 | async def register_agent(agent: Agent):
25 | """Register a new agent"""
26 | if agent.id in agents_registry:
27 | raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
28 | detail=f"Agent with ID {agent.id} already exists")
29 |
30 | # Set timestamps
31 | agent.registered_at = datetime.now()
32 | agent.last_seen = datetime.now()
33 |
34 | agents_registry[agent.id] = agent
35 | logger.info(f"Agent registered: {agent.id}")
36 | return agent
37 |
38 | @router.get("/agents", response_model=List[Agent])
39 | async def list_agents(
40 | protocol: Optional[Protocol] = Query(None, description="Filter agents by protocol"),
41 | public: Optional[bool] = Query(None, description="Filter agents by public visibility")
42 | ):
43 | """Retrieve a list of agents with optional filtering"""
44 | filtered_agents = list(agents_registry.values())
45 |
46 | # Apply protocol filter if provided
47 | if protocol:
48 | filtered_agents = [agent for agent in filtered_agents if agent.protocol == protocol]
49 |
50 | # Apply public visibility filter if provided
51 | if public is not None:
52 | filtered_agents = [agent for agent in filtered_agents if agent.public == public]
53 |
54 | return filtered_agents
55 |
56 | @router.get("/agents/{agent_id}", response_model=Agent)
57 | async def get_agent(agent_id: str = Path(..., description="Unique agent identifier")):
58 | """Retrieve a specific agent by ID"""
59 | if agent_id not in agents_registry:
60 | raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
61 | detail=f"Agent with ID {agent_id} not found")
62 | return agents_registry[agent_id]
63 |
64 | @router.put("/agents/{agent_id}", response_model=Agent)
65 | async def update_agent(
66 | agent_id: str = Path(..., description="Unique agent identifier"),
67 | agent: Agent = Body(...)
68 | ):
69 | """Update an existing agent"""
70 | if agent_id not in agents_registry:
71 | raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
72 | detail=f"Agent with ID {agent_id} not found")
73 |
74 | # Ensure ID consistency
75 | if agent.id != agent_id:
76 | raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST,
77 | detail="Agent ID in path must match ID in body")
78 |
79 | # Preserve registration timestamp
80 | agent.registered_at = agents_registry[agent_id].registered_at
81 | agent.last_seen = datetime.now()
82 |
83 | agents_registry[agent_id] = agent
84 | logger.info(f"Agent updated: {agent_id}")
85 | return agent
86 |
87 | @router.delete("/agents/{agent_id}", status_code=status.HTTP_204_NO_CONTENT)
88 | async def delete_agent(agent_id: str = Path(..., description="Unique agent identifier")):
89 | """Delete an agent"""
90 | if agent_id not in agents_registry:
91 | raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
92 | detail=f"Agent with ID {agent_id} not found")
93 |
94 | del agents_registry[agent_id]
95 | logger.info(f"Agent deleted: {agent_id}")
96 | return None
97 |
98 | @router.post("/agents/search", response_model=List[Agent])
99 | async def search_agents(search_query: SearchQuery):
100 | """Semantic search for agents"""
101 | # In a real implementation, this would use vector embeddings or a search engine
102 | # For now, we'll do a simple text-based search
103 | results = []
104 | query_lower = search_query.query.lower()
105 |
106 | for agent in agents_registry.values():
107 | # Simple text matching in name and description
108 | if (query_lower in agent.display_name.lower() or
109 | (agent.description and query_lower in agent.description.lower())):
110 |
111 | # Apply filters if provided
112 | if search_query.filters:
113 | # Protocol filter
114 | if "protocol" in search_query.filters and search_query.filters["protocol"]:
115 | if agent.protocol not in search_query.filters["protocol"]:
116 | continue
117 |
118 | # Language filter
119 | if "language" in search_query.filters and search_query.filters["language"]:
120 | if not any(lang in agent.languages for lang in search_query.filters["language"]):
121 | continue
122 |
123 | # Tools filter
124 | if "tools" in search_query.filters and search_query.filters["tools"]:
125 | if not any(tool in agent.tools for tool in search_query.filters["tools"]):
126 | continue
127 |
128 | results.append(agent)
129 |
130 | return results
131 |
132 | # Health check for registry
133 | @router.get("/health")
134 | async def registry_health():
135 | """Check the health of the registry service"""
136 | return {
137 | "status": "healthy",
138 | "agents_count": len(agents_registry)
139 | }
--------------------------------------------------------------------------------
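With the router mounted under `/registry` by `api.py`, registration and search can be driven over HTTP. A sketch against a local instance; the base URL and agent payload are illustrative:

```python
# Register an agent, then search for it. Assumes the API from api.py is
# running locally on port 8000; the agent payload is illustrative.
import httpx

BASE = "http://localhost:8000/registry"

agent = {
    "id": "weather-bot",
    "display_name": "Weather Bot",
    "description": "Answers weather questions",
    "protocol": "A2A",
}

with httpx.Client() as client:
    client.post(f"{BASE}/agents", json=agent).raise_for_status()      # 201 Created
    hits = client.post(f"{BASE}/agents/search", json={"query": "weather"})
    print([a["id"] for a in hits.json()])                             # ["weather-bot"]
```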
/scripts/inject-registration.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | CONFIG_PATH="/data/homeserver.yaml"
4 | REG_LINE=" - /data/registration.yaml"
5 |
6 | echo "[AutonomousSphere] Checking for registration file reference..."
7 |
8 | # If the app_service_config_files block is missing, add it
9 | if ! grep -q "app_service_config_files:" "$CONFIG_PATH"; then
10 | echo "[AutonomousSphere] Adding app_service_config_files block..."
11 | echo -e "\napp_service_config_files:\n$REG_LINE" >> "$CONFIG_PATH"
12 | else
13 | # Check if our registration file is already listed
14 | if ! grep -q "$REG_LINE" "$CONFIG_PATH"; then
15 | echo "[AutonomousSphere] Adding registration.yaml to config..."
16 | sed -i "/app_service_config_files:/a\\$REG_LINE" "$CONFIG_PATH"
17 | else
18 | echo "[AutonomousSphere] Registration file already listed."
19 | fi
20 | fi
21 |
22 | # Hand off to Synapse
23 | exec python -m synapse.app.homeserver \
24 | --config-path "$CONFIG_PATH"
25 |
--------------------------------------------------------------------------------
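Since `ruamel.yaml` is already installed in the Dockerfile, the same injection could be done as a structure-aware edit instead of grep/sed. A sketch using the paths from the script above:

```python
# Structure-aware alternative to the grep/sed injection: load homeserver.yaml,
# append the registration path if it is missing, and write the file back.
# ruamel.yaml round-trips comments and formatting.
from ruamel.yaml import YAML

CONFIG_PATH = "/data/homeserver.yaml"
REGISTRATION = "/data/registration.yaml"

yaml = YAML()
with open(CONFIG_PATH) as f:
    config = yaml.load(f) or {}

files = config.get("app_service_config_files") or []
if REGISTRATION not in files:
    files.append(REGISTRATION)
    config["app_service_config_files"] = files
    with open(CONFIG_PATH, "w") as f:
        yaml.dump(config, f)
```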
/search/mcp/search_mcp.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter, HTTPException, Request, Depends
2 | from fastapi.responses import StreamingResponse
3 | from typing import List, Dict, Any, Optional, AsyncGenerator
4 | import logging
5 | import asyncio
6 | import json
7 | import uuid
8 | import httpx
9 | from datetime import datetime
10 | import os
11 | import yaml
12 |
13 | # Import FastMCP
14 | from fastmcp import FastMCP
15 |
16 | # Import registry models and functions
17 | from AutonomousSphere.registry.models.search import SearchQuery
18 |
19 | # Import search models
20 | from AutonomousSphere.search.models import SearchResult, MCPServiceRegistration, MCPEvent
21 |
22 | # Configure logging
23 | logging.basicConfig(level=logging.INFO)
24 | logger = logging.getLogger(__name__)
25 |
26 | # Initialize router
27 | router = APIRouter()
28 |
29 | # Initialize MCP
30 | mcp = FastMCP(
31 | "AutonomousSphere Search MCP",
32 | "MCP server for unified search across agents and Matrix"
33 | )
34 |
35 | # Load configuration
36 | def get_config():
37 | config_path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(__file__))), "config.yaml")
38 | with open(config_path, "r") as f:
39 | return yaml.safe_load(f)
40 |
41 | # MCP search tool
42 | @mcp.tool()
43 | async def search(query: str, filters: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
44 | """
45 | Search across agents and Matrix rooms/messages
46 |
47 | Args:
48 | query: The search query
49 | filters: Optional filters to apply to the search
50 |
51 | Returns:
52 | Search results
53 | """
54 | try:
55 | # Create SearchQuery object
56 | search_query = SearchQuery(query=query, filters=filters or {})
57 |
58 | # Import the unified_search function here to avoid circular imports
59 | from AutonomousSphere.search.search import unified_search
60 |
61 | # Call the unified search function
62 | results = await unified_search(search_query)
63 |
64 | return results.dict()
65 | except Exception as e:
66 | logger.error(f"MCP search error: {str(e)}")
67 | return SearchResult(
68 | query=query,
69 | filters=filters,
70 | results={
71 | "agents": [],
72 | "matrix": {
73 | "rooms": [],
74 | "messages": []
75 | }
76 | },
77 | metadata={
78 | "total_results": 0,
79 | "search_time_ms": 0,
80 | "source": "mcp",
81 | "error": str(e)
82 | }
83 | ).dict()
84 |
85 | # SSE endpoint for MCP events
86 | @router.get("/sse")
87 | async def mcp_sse(request: Request):
88 | """
89 | Server-Sent Events (SSE) endpoint for MCP server events
90 | """
91 | async def event_generator() -> AsyncGenerator[str, None]:
92 | try:
93 | # Send initial connection message
94 | connected_event = MCPEvent(
95 | event="connected",
96 | data={"message": "Connected to MCP SSE stream"}
97 | )
98 | yield f"data: {json.dumps(connected_event.dict())}\n\n"
99 |
100 | # Create a queue for events
101 | queue = asyncio.Queue()
102 |
103 | # Register this client with the MCP server for events
104 | client_id = f"sse-{uuid.uuid4().hex}"
105 |
106 | # In a real implementation, you would register this queue with your MCP server
107 | # to receive events. For now, we'll just simulate some events.
108 |
109 | # Send periodic status updates
110 | count = 0
111 | while True:
112 | # Check if client disconnected
113 | if await request.is_disconnected():
114 | logger.info(f"SSE client {client_id} disconnected")
115 | break
116 |
117 | # In a real implementation, you would wait for events from the MCP server
118 | # For now, we'll just send a heartbeat every 10 seconds
119 | count += 1
120 | heartbeat_event = MCPEvent(
121 | event="heartbeat",
122 | data={"count": count}
123 | )
124 |
125 | yield f"data: {json.dumps(heartbeat_event.dict())}\n\n"
126 |
127 | await asyncio.sleep(10)
128 |
129 | except asyncio.CancelledError:
130 | logger.info(f"SSE connection cancelled")
131 | except Exception as e:
132 | logger.error(f"SSE error: {str(e)}")
133 | error_event = MCPEvent(
134 | event="error",
135 | data={"error": str(e)}
136 | )
137 | yield f"data: {json.dumps(error_event.dict())}\n\n"
138 |
139 | return StreamingResponse(
140 | event_generator(),
141 | media_type="text/event-stream",
142 | headers={
143 | "Cache-Control": "no-cache",
144 | "Connection": "keep-alive",
145 | "X-Accel-Buffering": "no" # Disable buffering in Nginx
146 | }
147 | )
148 |
149 | # Function to register the MCP service with the registry
150 | async def register_mcp_service():
151 | """Register the MCP search service with the registry"""
152 | # Import here to avoid circular imports
153 | import httpx
154 |
155 | # Get configuration
156 | config = get_config()
157 |
158 | # Determine the endpoint URL based on configuration
159 | host = os.environ.get("API_HOST", "localhost")
160 | port = int(os.environ.get("API_PORT", 8000))
161 |
162 | # Create service registration model
163 | service_data = MCPServiceRegistration(
164 | endpoint_url=f"ws://{host}:{port}/search/mcp",
165 | custom_metadata={
166 | "mcp_capabilities": ["search"],
167 | "mcp_server_url": f"ws://{host}:{port}/search/mcp"
168 | }
169 | )
170 |
171 | # Register with the registry
172 | try:
173 | async with httpx.AsyncClient() as client:
174 | registry_url = f"http://{host}:{port}/registry/agents"
175 | response = await client.post(registry_url, json=service_data.dict())
176 |
177 | if response.status_code == 201:
178 | logger.info(f"MCP search service registered successfully: {service_data.id}")
179 | return True
180 | else:
181 | logger.error(f"Failed to register MCP search service: {response.status_code} - {response.text}")
182 | return False
183 | except Exception as e:
184 | logger.error(f"Error registering MCP search service: {str(e)}")
185 | return False
186 |
187 | # Get the Starlette app for mounting
188 | starlette_app = mcp.create_starlette_app(debug=True)
189 |
190 | # Function to mount the MCP server to the FastAPI app
191 | def mount_mcp_server(app):
192 | """Mount the MCP server to the FastAPI app"""
193 | app.mount("/mcp", starlette_app)
194 |
195 | # Register the MCP service with the registry
196 | @app.on_event("startup")
197 | async def startup_event():
198 | await register_mcp_service()
--------------------------------------------------------------------------------
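The `/sse` route above streams `MCPEvent` objects as `data:` lines. A small client that tails the stream; the URL assumes the `/search/mcp` prefix applied in `search.py` and the API's default local port:

```python
# Tail the SSE heartbeat/error events emitted by the endpoint above.
import asyncio
import json

import httpx

async def tail_mcp_events() -> None:
    url = "http://localhost:8000/search/mcp/sse"   # assumes a default local deployment
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream("GET", url) as response:
            async for line in response.aiter_lines():
                if line.startswith("data: "):
                    event = json.loads(line[len("data: "):])
                    print(event["event"], event.get("data"))

if __name__ == "__main__":
    asyncio.run(tail_mcp_events())
```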
/search/models/mcp_models.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseModel, Field
2 | from typing import List, Dict, Any, Optional
3 | from datetime import datetime
4 | import uuid
5 |
6 | class MCPServiceRegistration(BaseModel):
7 | """
8 | Model for MCP service registration with the registry
9 | """
10 | id: str = Field(default_factory=lambda: f"search-mcp-{uuid.uuid4().hex[:8]}")
11 |     display_name: str = "AutonomousSphere Search MCP"
12 | description: str = "MCP server for unified search across agents and Matrix"
13 | protocol: str = "MCP"
14 | endpoint_url: str
15 | tools: List[str] = ["search"]
16 | skills: List[str] = ["search", "matrix_search", "agent_search"]
17 | public: bool = True
18 | custom_metadata: Dict[str, Any] = {
19 | "mcp_capabilities": ["search"]
20 | }
21 |
22 | class Config:
23 | schema_extra = {
24 | "example": {
25 | "id": "search-mcp-a1b2c3d4",
26 |                 "display_name": "AutonomousSphere Search MCP",
27 | "description": "MCP server for unified search across agents and Matrix",
28 | "protocol": "MCP",
29 | "endpoint_url": "ws://localhost:8000/search/mcp",
30 | "tools": ["search"],
31 | "skills": ["search", "matrix_search", "agent_search"],
32 | "public": True,
33 | "custom_metadata": {
34 | "mcp_capabilities": ["search"],
35 | "mcp_server_url": "ws://localhost:8000/search/mcp"
36 | }
37 | }
38 | }
39 |
40 | class MCPEvent(BaseModel):
41 | """
42 | Model for MCP server events
43 | """
44 | event: str
45 | timestamp: datetime = Field(default_factory=datetime.now)
46 | data: Optional[Dict[str, Any]] = None
47 |
48 | class Config:
49 | schema_extra = {
50 | "example": {
51 | "event": "search_completed",
52 | "timestamp": "2023-07-01T12:34:56.789Z",
53 | "data": {
54 | "query": "example query",
55 | "results_count": 10
56 | }
57 | }
58 | }
--------------------------------------------------------------------------------
/search/models/search_models.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseModel, Field
2 | from typing import List, Dict, Any, Optional
3 | from datetime import datetime
4 |
5 | class MatrixSearchRequest(BaseModel):
6 | """
7 | Model for Matrix search API requests
8 | """
9 | search_categories: Dict[str, Any] = Field(
10 | ...,
11 | description="Describes which categories to search in and their criteria"
12 | )
13 |
14 | class MatrixMessage(BaseModel):
15 | """
16 | Model for a Matrix message search result
17 | """
18 | event_id: str
19 | room_id: str
20 | sender: str
21 | content: Dict[str, Any]
22 | origin_server_ts: int
23 | rank: float = 0
24 |
25 | class MatrixRoom(BaseModel):
26 | """
27 | Model for a Matrix room search result
28 | """
29 | room_id: str
30 | name: Optional[str] = None
31 | topic: Optional[str] = None
32 | members_count: int = 0
33 |
34 | class MatrixResults(BaseModel):
35 | """
36 | Model for Matrix search results
37 | """
38 | messages: List[MatrixMessage] = []
39 | rooms: List[MatrixRoom] = []
40 | next_batch: Optional[str] = None
41 |
42 | class SearchMetadata(BaseModel):
43 | """
44 | Model for search metadata
45 | """
46 | total_results: int = 0
47 | search_time_ms: int = 0
48 | source: str = "api"
49 | timestamp: datetime = Field(default_factory=datetime.now)
50 |
51 | class SearchResult(BaseModel):
52 | """
53 | Model for unified search results
54 | """
55 | query: str
56 | filters: Optional[Dict[str, Any]] = None
57 | results: Dict[str, Any] = {
58 | "agents": [],
59 | "matrix": {
60 | "rooms": [],
61 | "messages": []
62 | }
63 | }
64 | metadata: SearchMetadata = Field(default_factory=SearchMetadata)
--------------------------------------------------------------------------------
/search/search.py:
--------------------------------------------------------------------------------
1 | from fastapi import APIRouter, HTTPException, Body, Depends, Header
2 | from typing import List, Dict, Any, Optional
3 | import logging
4 | import httpx
5 | import yaml
6 | import os
7 | import time
8 | import json
9 | import asyncio
10 |
11 | # Import the SearchQuery model from registry models
12 | from AutonomousSphere.registry.models.search import SearchQuery
13 |
14 | # Import search models
15 | from .models import MatrixSearchRequest, SearchResult
16 |
17 | # Import registry functions for agent search
18 | from AutonomousSphere.registry.registry import search_agents
19 |
20 | # Import MCP search module
21 | from .mcp import router as mcp_router, mount_mcp_server
22 |
23 | # Configure logging
24 | logging.basicConfig(level=logging.INFO)
25 | logger = logging.getLogger(__name__)
26 |
27 | # Initialize router
28 | router = APIRouter()
29 |
30 | # Load configuration
31 | def get_config():
32 | config_path = os.path.join(os.path.dirname(os.path.dirname(__file__)), "config.yaml")
33 | with open(config_path, "r") as f:
34 | return yaml.safe_load(f)
35 |
36 | # Function to perform Matrix search
37 | async def search_matrix(query: str, access_token: str, config: Dict[str, Any], next_batch: Optional[str] = None):
38 | """
39 | Perform a search on the Matrix homeserver
40 | """
41 | homeserver_url = config["homeserver"]["address"]
42 |
43 | # Construct search request
44 | search_request = MatrixSearchRequest(
45 | search_categories={
46 | "room_events": {
47 | "search_term": query,
48 | "order_by": "rank",
49 | "keys": ["content.body", "content.name", "content.topic"],
50 | "filter": {
51 | "limit": 20
52 | }
53 | }
54 | }
55 | )
56 |
57 | # Add next_batch if provided
58 | params = {}
59 | if next_batch:
60 | params["next_batch"] = next_batch
61 |
62 | # Make request to Matrix API
63 | async with httpx.AsyncClient() as client:
64 | try:
65 | url = f"{homeserver_url}/_matrix/client/v3/search"
66 | headers = {"Authorization": f"Bearer {access_token}"}
67 |
68 | logger.info(f"Searching Matrix at {url}")
69 | response = await client.post(
70 | url,
71 | json=search_request.dict(),
72 | headers=headers,
73 | params=params
74 | )
75 |
76 | if response.status_code == 200:
77 | return response.json()
78 | else:
79 | logger.error(f"Matrix search error: {response.status_code} - {response.text}")
80 | return {
81 | "error": f"Matrix search failed with status {response.status_code}",
82 | "details": response.text
83 | }
84 | except Exception as e:
85 | logger.error(f"Matrix search request error: {str(e)}")
86 | return {"error": f"Matrix search request failed: {str(e)}"}
87 |
88 | @router.post("/", response_model=SearchResult)
89 | async def unified_search(
90 | search_query: SearchQuery,
91 | authorization: Optional[str] = Header(None)
92 | ):
93 | """
94 | Unified search endpoint that searches across both the agent registry
95 | and Matrix rooms/messages.
96 |
97 | This endpoint combines results from:
98 | 1. Agent registry - searching for agents matching the query
99 | 2. Matrix API - searching for messages and rooms matching the query
100 |
101 | The search can be filtered using the filters parameter.
102 | """
103 | try:
104 | start_time = time.time()
105 | logger.info(f"Processing unified search query: {search_query.query}")
106 |
107 | # Get configuration
108 | config = get_config()
109 |
110 | # Initialize results structure
111 | results = SearchResult(
112 | query=search_query.query,
113 | filters=search_query.filters,
114 | results={
115 | "agents": [],
116 | "matrix": {
117 | "rooms": [],
118 | "messages": []
119 | }
120 | },
121 | metadata={
122 | "total_results": 0,
123 | "search_time_ms": 0,
124 | "source": "api"
125 | }
126 | )
127 |
128 | # 1. Search agents in registry
129 | agent_results = await search_agents(search_query)
130 | results.results["agents"] = agent_results
131 |
132 | # 2. Search Matrix if authorization token is provided
133 | matrix_results = None
134 | if authorization:
135 | # Extract token from Authorization header
136 | access_token = None
137 | if authorization.startswith("Bearer "):
138 | access_token = authorization.split(" ")[1]
139 |
140 | if access_token:
141 | matrix_results = await search_matrix(
142 | search_query.query,
143 | access_token,
144 | config
145 | )
146 |
147 | # Process Matrix results if successful
148 | if matrix_results and "search_categories" in matrix_results:
149 | room_events = matrix_results["search_categories"].get("room_events", {})
150 |
151 | # Extract messages
152 | if "results" in room_events:
153 | for result in room_events["results"]:
154 | # Add to messages list
155 | results.results["matrix"]["messages"].append({
156 | "event_id": result["result"]["event_id"],
157 | "room_id": result["result"]["room_id"],
158 | "sender": result["result"]["sender"],
159 | "content": result["result"]["content"],
160 | "origin_server_ts": result["result"]["origin_server_ts"],
161 | "rank": result.get("rank", 0)
162 | })
163 |
164 | # Extract room information from state if available
165 | if "state" in room_events:
166 | for room_id, state_events in room_events["state"].items():
167 | room_info = {
168 | "room_id": room_id,
169 | "name": None,
170 | "topic": None,
171 | "members_count": 0
172 | }
173 |
174 | # Extract room name and topic from state events
175 | for event in state_events:
176 | if event["type"] == "m.room.name":
177 | room_info["name"] = event["content"].get("name")
178 | elif event["type"] == "m.room.topic":
179 | room_info["topic"] = event["content"].get("topic")
180 |
181 | results.results["matrix"]["rooms"].append(room_info)
182 |
183 | # Add pagination token if available
184 | if matrix_results and "search_categories" in matrix_results and "room_events" in matrix_results["search_categories"]:
185 | next_batch = matrix_results["search_categories"]["room_events"].get("next_batch")
186 | if next_batch:
187 | results.results["matrix"]["next_batch"] = next_batch
188 |
189 | # Calculate total results
190 | total_agents = len(results.results["agents"])
191 | total_matrix_messages = len(results.results["matrix"]["messages"])
192 | total_matrix_rooms = len(results.results["matrix"]["rooms"])
193 | total_results = total_agents + total_matrix_messages + total_matrix_rooms
194 |
195 | # Update metadata
196 | results.metadata.total_results = total_results
197 | results.metadata.search_time_ms = int((time.time() - start_time) * 1000)
198 |
199 | logger.info(f"Search completed with {total_results} results in {results.metadata.search_time_ms}ms")
200 | return results
201 |
202 | except Exception as e:
203 | logger.error(f"Unified search error: {str(e)}")
204 | raise HTTPException(status_code=500, detail=f"Search error: {str(e)}")
205 |
206 | # Include MCP router
207 | router.include_router(mcp_router, prefix="/mcp", tags=["mcp"])
208 |
209 | # Mount the MCP server
210 | mount_mcp_server(router)
211 |
--------------------------------------------------------------------------------
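Calling the unified endpoint with a Matrix access token in the `Authorization` header makes it search both the agent registry and the homeserver; without the header only the registry is searched. A sketch with a placeholder token and base URL:

```python
# Unified search call; the token and base URL are placeholders.
import httpx

def call_unified_search(query: str, matrix_token: str) -> dict:
    response = httpx.post(
        "http://localhost:8000/search/",
        json={"query": query, "filters": {"protocol": ["MCP"]}},
        headers={"Authorization": f"Bearer {matrix_token}"},
    )
    response.raise_for_status()
    return response.json()

results = call_unified_search("search", "MATRIX_ACCESS_TOKEN")
print(results["metadata"]["total_results"])
```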
/search/__init__.py:
--------------------------------------------------------------------------------
1 | from .search import router
2 |
3 | __all__ = ["router"]
--------------------------------------------------------------------------------
/synapse/data/homeserver.yaml:
--------------------------------------------------------------------------------
1 |
2 | app_service_config_files:
3 | - /data/registration.yaml
4 |
--------------------------------------------------------------------------------
/synapse/data/registration.yaml:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/README.md:
--------------------------------------------------------------------------------
1 | # AutonomousSphere Tests
2 |
3 | This directory contains tests for the AutonomousSphere project. The tests are organized into three categories:
4 |
5 | 1. **Unit Tests**: Test individual components in isolation
6 | 2. **Integration Tests**: Test interactions between components
7 | 3. **End-to-End Tests**: Test the entire system
8 |
9 | ## Test Structure
10 |
11 | ```
12 | tests/
13 | ├── conftest.py # Common test fixtures and configuration
14 | ├── test_config.yaml # Test configuration
15 | ├── utils.py # Test utility functions
16 | ├── run_tests.py # Test runner script
17 | ├── unit/ # Unit tests
18 | │ ├── test_registry.py # Tests for the registry component
19 | │ ├── test_search.py # Tests for the search component
20 | │ └── test_mcp.py # Tests for the MCP component
21 | ├── integration/ # Integration tests
22 | │ ├── test_registry_api.py # Tests for the registry API
23 | │ ├── test_search_api.py # Tests for the search API
24 | │ └── test_mcp_registration.py # Tests for MCP registration
25 | └── e2e/ # End-to-end tests
26 | ├── test_docker_setup.py # Tests for Docker environment
27 | ├── test_matrix_integration.py # Tests for Matrix integration
28 | └── test_mcp_functionality.py # Tests for MCP functionality
29 | ```
30 |
31 | ## Running Tests
32 |
33 | You can run the tests using the provided `run_tests.py` script:
34 |
35 | ```bash
36 | # Run all tests
37 | python tests/run_tests.py
38 |
39 | # Run only unit tests
40 | python tests/run_tests.py --type unit
41 |
42 | # Run only integration tests
43 | python tests/run_tests.py --type integration
44 |
45 | # Run only end-to-end tests
46 | python tests/run_tests.py --type e2e
47 |
48 | # Run tests with verbose output
49 | python tests/run_tests.py --verbose
50 | ```
51 |
52 | Alternatively, you can use pytest directly:
53 |
54 | ```bash
55 | # Run all tests
56 | pytest tests/
57 |
58 | # Run only unit tests
59 | pytest tests/unit/
60 |
61 | # Run only integration tests
62 | pytest tests/integration/
63 |
64 | # Run only end-to-end tests
65 | pytest tests/e2e/
66 | ```
67 |
68 | ## Test Requirements
69 |
70 | The tests require the following dependencies:
71 |
72 | - pytest
73 | - pytest-asyncio
74 | - pytest-cov
75 | - httpx
76 | - docker-py
77 | - websockets
78 | - psycopg2-binary
79 |
80 | These dependencies are included in the project's `pyproject.toml` file.
81 |
82 | ## Docker Environment
83 |
84 | The integration and end-to-end tests require the Docker environment to be running. The `run_tests.py` script will automatically check and start the Docker environment if needed.
85 |
86 | If you want to skip the Docker environment check/start, you can use the `--no-docker` flag:
87 |
88 | ```bash
89 | python tests/run_tests.py --no-docker
90 | ```
91 |
92 | ## Test Coverage
93 |
94 | The tests generate coverage reports in both terminal output and HTML format. The HTML coverage report is saved to `tests/coverage/index.html`.
95 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 | import pytest
4 | import asyncio
5 | import httpx
6 | import yaml
7 | import time
8 | from fastapi.testclient import TestClient
9 | from fastapi import FastAPI
10 | import docker
11 |
12 | # Add project root to path
13 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
14 |
15 | # Import the FastAPI application (defined in api/api.py, not main.py)
16 | from api.api import app
17 |
18 | # Fixture for the FastAPI test client
19 | @pytest.fixture
20 | def client():
21 | with TestClient(app) as client:
22 | yield client
23 |
24 | # Fixture for async test client
25 | @pytest.fixture
26 | async def async_client():
27 | async with httpx.AsyncClient(app=app, base_url="http://testserver") as client:
28 | yield client
29 |
30 | # Fixture for test configuration
31 | @pytest.fixture
32 | def test_config():
33 | config_path = os.path.join(os.path.dirname(__file__), "test_config.yaml")
34 | with open(config_path, "r") as f:
35 | return yaml.safe_load(f)
36 |
37 | # Fixture for Docker client
38 | @pytest.fixture
39 | def docker_client():
40 | return docker.from_env()
41 |
42 | # Fixture to wait for services to be ready
43 | @pytest.fixture(scope="session")
44 | def wait_for_services():
45 | # Wait for services to be up (used in integration tests)
46 | max_retries = 30
47 | retry_interval = 2
48 |
49 | # Define service endpoints to check
50 | endpoints = {
51 | "synapse": "http://localhost:8008/_matrix/client/versions",
52 | "autonomoussphere": "http://localhost:8000/registry/health",
53 | }
54 |
55 | for service, url in endpoints.items():
56 | for i in range(max_retries):
57 | try:
58 | response = httpx.get(url, timeout=5)
59 | if response.status_code == 200:
60 | print(f"{service} is ready!")
61 | break
62 | except (httpx.ConnectError, httpx.ReadTimeout):
63 | pass
64 |
65 | if i < max_retries - 1:
66 | print(f"Waiting for {service}... ({i+1}/{max_retries})")
67 | time.sleep(retry_interval)
68 | else:
69 | pytest.fail(f"Service {service} did not become ready in time")
--------------------------------------------------------------------------------
/tests/e2e/test_docker_setup.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import docker
3 | import time
4 | import requests
5 | import psycopg2
6 | import os
7 |
8 | @pytest.fixture(scope="module")
9 | def docker_environment():
10 | """Set up the Docker environment for testing"""
11 | client = docker.from_env()
12 |
13 | # Check if containers are already running
14 | containers = {
15 | "postgres": None,
16 | "synapse": None,
17 | "autonomoussphere": None
18 | }
19 |
20 | for container in client.containers.list():
21 | for name in containers.keys():
22 | if name in container.name:
23 | containers[name] = container
24 |
25 | # If containers are not running, start them
26 | if not all(containers.values()):
27 | # Pull images if needed
28 | print("Starting Docker containers...")
29 | os.system("docker-compose up -d")
30 |
31 | # Wait for containers to be ready
32 | max_retries = 30
33 | retry_interval = 2
34 |
35 | for _ in range(max_retries):
36 | all_running = True
37 | for name in containers.keys():
38 | try:
39 | container = client.containers.get(name)
40 | if container.status != "running":
41 | all_running = False
42 | break
43 | except docker.errors.NotFound:
44 | all_running = False
45 | break
46 |
47 | if all_running:
48 | break
49 |
50 | time.sleep(retry_interval)
51 |
52 | # Update container references
53 | for name in containers.keys():
54 | try:
55 | containers[name] = client.containers.get(name)
56 | except docker.errors.NotFound:
57 | pass
58 |
59 | yield containers
60 |
61 | # Don't stop containers after tests - they might be used by other tests or development
62 |
63 | def test_postgres_container(docker_environment):
64 | """Test that the Postgres container is running and pgvector is installed"""
65 | postgres = docker_environment["postgres"]
66 | assert postgres is not None
67 | assert postgres.status == "running"
68 |
69 | # Test that pgvector is installed
70 | # Execute a command in the container to check if pgvector extension exists
71 | exit_code, output = postgres.exec_run(
72 | "psql -U synapse -d synapse -c \"SELECT * FROM pg_extension WHERE extname = 'vector';\"",
73 | environment={"PGPASSWORD": "synapsepass"}
74 | )
75 |
76 | assert exit_code == 0
77 | assert b"vector" in output
78 |
79 | def test_synapse_container(docker_environment):
80 | """Test that the Synapse container is running and responding to API requests"""
81 | synapse = docker_environment["synapse"]
82 | assert synapse is not None
83 | assert synapse.status == "running"
84 |
85 | # Test that Synapse API is responding
86 | max_retries = 10
87 | retry_interval = 2
88 |
89 | for i in range(max_retries):
90 | try:
91 | response = requests.get("http://localhost:8008/_matrix/client/versions")
92 | if response.status_code == 200:
93 | assert "versions" in response.json()
94 | break
95 | except requests.exceptions.ConnectionError:
96 | if i == max_retries - 1:
97 | pytest.fail("Could not connect to Synapse API")
98 | time.sleep(retry_interval)
99 |
100 | def test_autonomoussphere_container(docker_environment):
101 | """Test that the AutonomousSphere container is running and responding to API requests"""
102 | autonomoussphere = docker_environment["autonomoussphere"]
103 | assert autonomoussphere is not None
104 | assert autonomoussphere.status == "running"
105 |
106 | # Test that AutonomousSphere API is responding
107 | max_retries = 10
108 | retry_interval = 2
109 |
110 | for i in range(max_retries):
111 | try:
112 | response = requests.get("http://localhost:29333/health")
113 | if response.status_code == 200:
114 | assert response.json()["status"] == "healthy"
115 | break
116 | except requests.exceptions.ConnectionError:
117 | if i == max_retries - 1:
118 | pytest.fail("Could not connect to AutonomousSphere API")
119 | time.sleep(retry_interval)
--------------------------------------------------------------------------------
/tests/e2e/test_matrix_integration.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import requests
3 | import json
4 | import time
5 | import os
6 | from urllib.parse import quote
7 |
8 | @pytest.fixture(scope="module")
9 | def matrix_credentials():
10 | """Create a test user and get access token"""
11 | homeserver_url = "http://localhost:8008"
12 |
13 | # Create a random username for testing
14 | import random
15 | import string
16 | username = ''.join(random.choices(string.ascii_lowercase, k=8))
17 | password = "testpassword"
18 |
19 | # Register the user
20 | register_data = {
21 | "username": username,
22 | "password": password,
23 | "auth": {"type": "m.login.dummy"}
24 | }
25 |
26 | try:
27 | response = requests.post(
28 | f"{homeserver_url}/_matrix/client/v3/register",
29 | json=register_data
30 | )
31 |
32 | if response.status_code == 200:
33 | credentials = response.json()
34 | return {
35 | "user_id": credentials["user_id"],
36 | "access_token": credentials["access_token"],
37 | "homeserver_url": homeserver_url
38 | }
39 | else:
40 | # Try logging in instead
41 | login_data = {
42 | "type": "m.login.password",
43 | "user": username,
44 | "password": password
45 | }
46 |
47 | response = requests.post(
48 | f"{homeserver_url}/_matrix/client/v3/login",
49 | json=login_data
50 | )
51 |
52 | if response.status_code == 200:
53 | credentials = response.json()
54 | return {
55 | "user_id": credentials["user_id"],
56 | "access_token": credentials["access_token"],
57 | "homeserver_url": homeserver_url
58 | }
59 | except Exception as e:
60 | pytest.skip(f"Could not create Matrix test user: {str(e)}")
61 |
62 | pytest.skip("Could not create Matrix test user")
63 |
64 | def test_matrix_search(matrix_credentials):
65 | """Test that Matrix search is working through the AutonomousSphere API"""
66 | # Create a test room
67 | room_data = {
68 | "visibility": "private",
69 | "name": "Test Search Room",
70 | "topic": "A room for testing search functionality"
71 | }
72 |
73 | response = requests.post(
74 | f"{matrix_credentials['homeserver_url']}/_matrix/client/v3/createRoom",
75 | headers={"Authorization": f"Bearer {matrix_credentials['access_token']}"},
76 | json=room_data
77 | )
78 |
79 | assert response.status_code == 200
80 | room_id = response.json()["room_id"]
81 |
82 | # Send a test message
83 | test_message = "This is a unique test message for searching XYZ123"
84 | message_data = {
85 | "msgtype": "m.room.message",
86 | "body": test_message
87 | }
88 |
89 | response = requests.put(
90 | f"{matrix_credentials['homeserver_url']}/_matrix/client/v3/rooms/{quote(room_id)}/send/m.room.message/{int(time.time() * 1000)}",
91 | headers={"Authorization": f"Bearer {matrix_credentials['access_token']}"},
92 | json=message_data
93 | )
94 |
95 | assert response.status_code == 200
96 |
97 | # Wait for the message to be indexed
98 | time.sleep(2)
99 |
100 | # Search for the message through the AutonomousSphere API
101 | search_query = {
102 | "query": "XYZ123",
103 | "filters": {}
104 | }
105 |
106 | max_retries = 5
107 | for i in range(max_retries):
108 | response = requests.post(
109 | "http://localhost:8000/search/",
110 | headers={"Authorization": f"Bearer {matrix_credentials['access_token']}"},
111 | json=search_query
112 | )
113 |
114 | assert response.status_code == 200
115 | results = response.json()
116 |
117 | # Check if the message was found
118 | messages = results["results"]["matrix"]["messages"]
119 | if any(test_message in str(msg["content"]) for msg in messages):
120 | break
121 |
122 | if i == max_retries - 1:
123 | pytest.fail("Test message not found in search results")
124 |
125 | # Wait and retry
126 | time.sleep(2)
--------------------------------------------------------------------------------
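
The send step in `test_matrix_search` is easy to get subtly wrong: the event type (`m.room.message`) lives in the URL, the content's `msgtype` must be `m.text`, and the final path segment is a client-chosen transaction ID. A minimal helper capturing that shape is sketched below; the endpoint and auth header follow the same Matrix client-server API calls used in the test above.

```python
# Sketch: sending a text message via the Matrix client-server API,
# mirroring the request shape used in test_matrix_search above.
import time
from urllib.parse import quote

import requests


def send_text_message(homeserver_url: str, access_token: str, room_id: str, body: str) -> str:
    """PUT an m.room.message event with msgtype m.text; returns the event_id."""
    txn_id = str(int(time.time() * 1000))  # client-generated transaction ID, unique per request
    response = requests.put(
        f"{homeserver_url}/_matrix/client/v3/rooms/{quote(room_id)}/send/m.room.message/{txn_id}",
        headers={"Authorization": f"Bearer {access_token}"},
        json={"msgtype": "m.text", "body": body},
    )
    response.raise_for_status()
    return response.json()["event_id"]
```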
/tests/e2e/test_mcp_functionality.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import requests
3 | import json
4 | import time
5 | import asyncio
6 | import websockets
7 | import os
8 |
9 | @pytest.mark.asyncio
10 | async def test_mcp_websocket_connection():
11 | """Test that the MCP WebSocket server is running and accepting connections"""
12 | try:
13 | uri = "ws://localhost:8000/search/mcp"
14 | async with websockets.connect(uri) as websocket:
15 | # Send a ping message
16 | await websocket.send(json.dumps({
17 | "type": "ping",
18 | "id": "test-ping"
19 | }))
20 |
21 | # Wait for response
22 | response = await asyncio.wait_for(websocket.recv(), timeout=5)
23 | response_data = json.loads(response)
24 |
25 | assert response_data["type"] == "pong"
26 | assert response_data["id"] == "test-ping"
 27 |     except (websockets.exceptions.WebSocketException, OSError, asyncio.TimeoutError) as e:
28 | pytest.fail(f"Could not connect to MCP WebSocket server: {str(e)}")
29 |
30 | @pytest.mark.asyncio
31 | async def test_mcp_search_tool():
32 | """Test the MCP search tool functionality"""
33 | try:
34 | uri = "ws://localhost:8000/search/mcp"
35 | async with websockets.connect(uri) as websocket:
36 | # Send a search request
37 | await websocket.send(json.dumps({
38 | "type": "tool",
39 | "id": "test-search",
40 | "name": "search",
41 | "params": {
42 | "query": "test",
43 | "filters": {}
44 | }
45 | }))
46 |
47 | # Wait for response
48 | response = await asyncio.wait_for(websocket.recv(), timeout=10)
49 | response_data = json.loads(response)
50 |
51 | assert response_data["type"] == "tool_result"
52 | assert response_data["id"] == "test-search"
53 | assert "result" in response_data
54 | assert "query" in response_data["result"]
55 | assert response_data["result"]["query"] == "test"
 56 |     except (websockets.exceptions.WebSocketException, OSError, asyncio.TimeoutError) as e:
57 | pytest.fail(f"Error testing MCP search tool: {str(e)}")
58 |
59 | def test_mcp_registration():
60 | """Test that the MCP service is registered with the registry"""
61 | # Get the list of agents from the registry
62 | response = requests.get("http://localhost:8000/registry/agents")
63 |
64 | assert response.status_code == 200
65 | agents = response.json()
66 |
67 | # Check if the MCP service is registered
68 | mcp_agents = [agent for agent in agents if "mcp" in agent.get("id", "").lower()]
69 | assert len(mcp_agents) > 0
70 |
71 | # Check the MCP service details
72 | mcp_agent = mcp_agents[0]
73 | assert "mcp_capabilities" in mcp_agent.get("custom_metadata", {})
74 | assert "search" in mcp_agent["custom_metadata"]["mcp_capabilities"]
--------------------------------------------------------------------------------
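
The two WebSocket tests above assume a simple JSON message schema: `type`/`id` for pings answered by `pong`, and `type: "tool"` with `name` and `params` answered by `tool_result`. A thin client helper built on that assumed schema keeps the request/response pairing in one place; this is a sketch inferred from the tests, not an API the server is known to expose beyond what they exercise.

```python
# Sketch: a thin MCP-over-WebSocket helper using the message schema
# assumed by the tests above (tool request answered by tool_result).
import asyncio
import json

import websockets


async def call_mcp_tool(uri: str, name: str, params: dict, timeout: float = 10.0) -> dict:
    """Send one tool invocation and wait for the matching tool_result."""
    async with websockets.connect(uri) as websocket:
        request_id = f"call-{name}"
        await websocket.send(json.dumps({
            "type": "tool",
            "id": request_id,
            "name": name,
            "params": params,
        }))
        raw = await asyncio.wait_for(websocket.recv(), timeout=timeout)
        message = json.loads(raw)
        assert message["type"] == "tool_result" and message["id"] == request_id
        return message["result"]


# Usage (endpoint as used in the tests above):
#   result = asyncio.run(call_mcp_tool("ws://localhost:8000/search/mcp", "search",
#                                      {"query": "test", "filters": {}}))
```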
/tests/e2e/test_pgvector.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import psycopg2
3 | import numpy as np
4 | from psycopg2.extras import execute_values
5 |
6 | @pytest.fixture
7 | def db_connection():
8 | """Create a connection to the Postgres database"""
9 | try:
10 | conn = psycopg2.connect(
11 | host="localhost",
12 | port=5432,
13 | user="synapse",
14 | password="synapsepass",
15 | database="synapse"
16 | )
17 | yield conn
18 | conn.close()
19 | except Exception as e:
20 | pytest.skip(f"Could not connect to database: {str(e)}")
21 |
22 | def test_pgvector_extension(db_connection):
23 | """Test that the pgvector extension is installed and working"""
24 | cursor = db_connection.cursor()
25 |
26 | try:
27 | # Check if vector extension is installed
28 | cursor.execute("SELECT * FROM pg_extension WHERE extname = 'vector';")
29 | result = cursor.fetchone()
30 | assert result is not None, "pgvector extension is not installed"
31 |
32 | # Create a test table with vector column
33 | cursor.execute("DROP TABLE IF EXISTS test_vectors;")
34 | cursor.execute("CREATE TABLE test_vectors (id serial PRIMARY KEY, embedding vector(3));")
35 |
36 | # Insert some test vectors
37 | test_vectors = [
38 | (np.array([1.0, 2.0, 3.0]),),
39 | (np.array([4.0, 5.0, 6.0]),),
40 | (np.array([7.0, 8.0, 9.0]),)
41 | ]
42 |
43 | execute_values(
44 | cursor,
45 | "INSERT INTO test_vectors (embedding) VALUES %s",
46 | [(f"[{v[0][0]},{v[0][1]},{v[0][2]}]",) for v in test_vectors]
47 | )
48 |
49 | # Test vector operations
50 | cursor.execute("SELECT embedding <-> '[1,2,3]' AS distance FROM test_vectors ORDER BY distance LIMIT 1;")
51 | result = cursor.fetchone()
52 | assert result is not None, "Vector distance calculation failed"
53 |
54 | # Clean up
55 | cursor.execute("DROP TABLE test_vectors;")
56 | db_connection.commit()
57 |
58 | except Exception as e:
59 | db_connection.rollback()
60 | pytest.fail(f"pgvector test failed: {str(e)}")
61 | finally:
62 | cursor.close()
--------------------------------------------------------------------------------
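
The vectors above are inserted as literal strings like `[1,2,3]`, which pgvector parses directly. On larger tables the same `<->` (L2 distance) queries benefit from an index; the sketch below shows what that might look like against the same `test_vectors` table, assuming it still exists. The index parameters are illustrative only.

```python
# Sketch: nearest-neighbour query with an (illustrative) IVFFlat index,
# assuming the test_vectors table from the test above still exists.
import psycopg2

conn = psycopg2.connect(host="localhost", port=5432, user="synapse",
                        password="synapsepass", database="synapse")
cur = conn.cursor()

# IVFFlat speeds up approximate nearest-neighbour search on large tables.
cur.execute(
    "CREATE INDEX IF NOT EXISTS test_vectors_embedding_idx "
    "ON test_vectors USING ivfflat (embedding vector_l2_ops) WITH (lists = 100);"
)

# Same distance operator as the test: <-> is L2 distance in pgvector.
cur.execute(
    "SELECT id, embedding <-> %s AS distance FROM test_vectors ORDER BY distance LIMIT 5;",
    ("[1,2,3]",),
)
print(cur.fetchall())

conn.commit()
cur.close()
conn.close()
```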
/tests/integration/test_mcp_registration.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import asyncio
3 | import httpx
 4 | from unittest.mock import patch, MagicMock, AsyncMock
5 |
6 | @pytest.mark.asyncio
7 | async def test_mcp_registration(async_client):
8 | # Mock the register_mcp_service function
9 | with patch('AutonomousSphere.search.mcp.search_mcp.register_mcp_service') as mock_register:
10 | mock_register.return_value = True
11 |
12 | # Trigger the startup event by calling a health endpoint
13 | response = await async_client.get("/registry/health")
14 | assert response.status_code == 200
15 |
16 | # Verify that register_mcp_service was called
17 | mock_register.assert_called_once()
18 |
19 | @pytest.mark.asyncio
20 | async def test_mcp_registration_process():
21 | from AutonomousSphere.search.mcp.search_mcp import register_mcp_service
22 |
23 | # Mock httpx.AsyncClient
24 | with patch('httpx.AsyncClient') as mock_client_class:
25 | # Set up the mock client
 26 |         mock_client = AsyncMock()  # AsyncMock so "async with" and awaited post() work
27 | mock_client.__aenter__.return_value = mock_client
28 | mock_client_class.return_value = mock_client
29 |
30 | # Set up the mock response
31 | mock_response = MagicMock()
32 | mock_response.status_code = 201
33 | mock_client.post.return_value = mock_response
34 |
35 | # Mock get_config
36 | with patch('AutonomousSphere.search.mcp.search_mcp.get_config') as mock_get_config:
37 | mock_get_config.return_value = {}
38 |
39 | # Mock environment variables
40 | with patch.dict('os.environ', {'API_HOST': 'testhost', 'API_PORT': '8000'}):
41 | # Call the register_mcp_service function
42 | result = await register_mcp_service()
43 |
44 | # Verify the result
45 | assert result is True
46 |
47 | # Verify that the client.post was called with the correct URL and data
48 | mock_client.post.assert_called_once()
49 | call_args = mock_client.post.call_args
50 | assert call_args[0][0] == "http://testhost:8000/registry/agents"
--------------------------------------------------------------------------------
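
Mocking `httpx.AsyncClient` for code that does `async with httpx.AsyncClient() as client: await client.post(...)` requires awaitable mocks, which is why the test above (and the unit variant in tests/unit/test_mcp.py) uses `AsyncMock` for the client while the response stays a plain `MagicMock`. The pattern in isolation, as a self-contained sketch:

```python
# Sketch: the AsyncMock pattern used above for `async with httpx.AsyncClient()`.
import asyncio
from unittest.mock import AsyncMock, MagicMock, patch

import httpx


async def post_health(url: str) -> int:
    async with httpx.AsyncClient() as client:
        response = await client.post(url, json={"status": "ok"})
        return response.status_code


def test_post_health_mocked():
    with patch("httpx.AsyncClient") as mock_client_class:
        mock_client = AsyncMock()
        mock_client.__aenter__.return_value = mock_client  # "async with" yields the client itself
        mock_client_class.return_value = mock_client

        mock_response = MagicMock(status_code=201)   # response attributes are plain values
        mock_client.post.return_value = mock_response  # awaited post() resolves to it

        assert asyncio.run(post_health("http://example.invalid/agents")) == 201
        mock_client.post.assert_called_once()
```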
/tests/integration/test_registry_api.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import asyncio
3 | import httpx
4 | import json
5 | from AutonomousSphere.registry.models import Protocol
6 |
7 | @pytest.mark.asyncio
8 | async def test_registry_health(async_client):
9 | response = await async_client.get("/registry/health")
10 | assert response.status_code == 200
11 | data = response.json()
12 | assert "status" in data
13 | assert data["status"] == "healthy"
14 |
15 | @pytest.mark.asyncio
16 | async def test_register_and_get_agent(async_client):
17 | # Test agent data
18 | test_agent = {
19 | "id": "integration-test-agent",
20 | "display_name": "Integration Test Agent",
21 | "description": "An agent for integration testing",
22 | "protocol": Protocol.MATRIX.value,
23 | "endpoint_url": "https://example.com/integration-agent",
24 | "public": True,
25 | "languages": ["en"],
26 | "tools": ["test"]
27 | }
28 |
29 | # Register the agent
30 | response = await async_client.post("/registry/agents", json=test_agent)
31 | assert response.status_code == 201
32 | data = response.json()
33 | assert data["id"] == "integration-test-agent"
34 | assert data["registered_at"] is not None
35 |
36 | # Get the agent
37 | response = await async_client.get(f"/registry/agents/{test_agent['id']}")
38 | assert response.status_code == 200
39 | data = response.json()
40 | assert data["id"] == test_agent["id"]
41 | assert data["display_name"] == test_agent["display_name"]
42 |
43 | # Clean up - delete the agent
44 | response = await async_client.delete(f"/registry/agents/{test_agent['id']}")
45 | assert response.status_code == 204
46 |
47 | @pytest.mark.asyncio
48 | async def test_list_agents(async_client):
49 | # Register a couple of test agents
50 | test_agents = [
51 | {
52 | "id": "integration-test-agent-1",
53 | "display_name": "Integration Test Agent 1",
54 | "description": "An agent for integration testing",
55 | "protocol": Protocol.MATRIX.value,
56 | "endpoint_url": "https://example.com/integration-agent-1",
57 | "public": True,
58 | "languages": ["en"],
59 | "tools": ["test"]
60 | },
61 | {
62 | "id": "integration-test-agent-2",
63 | "display_name": "Integration Test Agent 2",
64 | "description": "Another agent for integration testing",
65 | "protocol": Protocol.HTTP.value,
66 | "endpoint_url": "https://example.com/integration-agent-2",
67 | "public": False,
68 | "languages": ["en", "fr"],
69 | "tools": ["search", "calculator"]
70 | }
71 | ]
72 |
73 | # Register the agents
74 | for agent in test_agents:
75 | response = await async_client.post("/registry/agents", json=agent)
76 | assert response.status_code == 201
77 |
78 | # List all agents
79 | response = await async_client.get("/registry/agents")
80 | assert response.status_code == 200
81 | data = response.json()
82 | assert len(data) >= 2 # There might be other agents registered
83 |
84 | # Filter by protocol
85 | response = await async_client.get("/registry/agents?protocol=matrix")
86 | assert response.status_code == 200
87 | data = response.json()
88 | assert any(agent["id"] == "integration-test-agent-1" for agent in data)
89 | assert not any(agent["id"] == "integration-test-agent-2" for agent in data)
90 |
91 | # Filter by public visibility
92 | response = await async_client.get("/registry/agents?public=true")
93 | assert response.status_code == 200
94 | data = response.json()
95 | assert any(agent["id"] == "integration-test-agent-1" for agent in data)
96 | assert not any(agent["id"] == "integration-test-agent-2" for agent in data)
97 |
98 | # Clean up
99 | for agent in test_agents:
100 | response = await async_client.delete(f"/registry/agents/{agent['id']}")
101 | assert response.status_code == 204
102 |
103 | @pytest.mark.asyncio
104 | async def test_search_agents(async_client):
105 | # Register test agents
106 | test_agents = [
107 | {
108 | "id": "search-test-agent-1",
109 | "display_name": "Search Test Agent",
110 | "description": "An agent for testing search functionality",
111 | "protocol": Protocol.MATRIX.value,
112 | "endpoint_url": "https://example.com/search-agent-1",
113 | "public": True,
114 | "languages": ["en"],
115 | "tools": ["search"]
116 | },
117 | {
118 | "id": "search-test-agent-2",
119 | "display_name": "Another Search Agent",
120 | "description": "A different agent with search capabilities",
121 | "protocol": Protocol.HTTP.value,
122 | "endpoint_url": "https://example.com/search-agent-2",
123 | "public": True,
124 | "languages": ["en", "es"],
125 | "tools": ["search", "calculator"]
126 | }
127 | ]
128 |
129 | # Register the agents
130 | for agent in test_agents:
131 | response = await async_client.post("/registry/agents", json=agent)
132 | assert response.status_code == 201
133 |
134 | # Search for agents
135 | search_query = {
136 | "query": "search",
137 | "filters": {}
138 | }
139 | response = await async_client.post("/registry/agents/search", json=search_query)
140 | assert response.status_code == 200
141 | data = response.json()
142 | assert len(data) >= 2
143 | assert any(agent["id"] == "search-test-agent-1" for agent in data)
144 | assert any(agent["id"] == "search-test-agent-2" for agent in data)
145 |
146 | # Search with filters
147 | search_query = {
148 | "query": "search",
149 | "filters": {
150 | "protocol": ["matrix"]
151 | }
152 | }
153 | response = await async_client.post("/registry/agents/search", json=search_query)
154 | assert response.status_code == 200
155 | data = response.json()
156 | assert any(agent["id"] == "search-test-agent-1" for agent in data)
157 | assert not any(agent["id"] == "search-test-agent-2" for agent in data)
158 |
159 | # Clean up
160 | for agent in test_agents:
161 | response = await async_client.delete(f"/registry/agents/{agent['id']}")
162 | assert response.status_code == 204
--------------------------------------------------------------------------------
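
These integration tests (and the search ones below) rely on an `async_client` fixture from tests/conftest.py, which is not reproduced in this dump. A minimal fixture consistent with how it is used would look roughly like the sketch below; the app import path is an assumption and the real conftest may differ.

```python
# Sketch of an `async_client` fixture consistent with its use in these tests.
# The app import path is an assumption; the real tests/conftest.py may differ.
import httpx
import pytest_asyncio

from AutonomousSphere.api.api import app  # assumed location of the FastAPI app


@pytest_asyncio.fixture
async def async_client():
    transport = httpx.ASGITransport(app=app)
    async with httpx.AsyncClient(transport=transport, base_url="http://testserver") as client:
        yield client
```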
/tests/integration/test_search_api.py:
--------------------------------------------------------------------------------
1 | import pytest
 2 | import json
3 | import httpx
4 | from unittest.mock import patch
5 | from AutonomousSphere.registry.models.search import SearchQuery
6 |
7 | @pytest.mark.asyncio
8 | async def test_unified_search_endpoint(async_client):
9 | # Mock the search_agents function
10 | with patch('AutonomousSphere.search.search.search_agents') as mock_search_agents:
11 | # Set up the mock to return some test agents
12 | mock_search_agents.return_value = [
13 | {
14 | "id": "test-agent-1",
15 | "display_name": "Test Agent 1",
16 | "description": "A test agent",
17 | "protocol": "matrix",
18 | "endpoint_url": "https://example.com/agent1",
19 | "public": True,
20 | "languages": ["en"],
21 | "tools": ["search"],
22 | "registered_at": "2023-01-01T00:00:00",
23 | "last_seen": "2023-01-01T00:00:00"
24 | }
25 | ]
26 |
27 | # Mock the search_matrix function
28 | with patch('AutonomousSphere.search.search.search_matrix') as mock_search_matrix:
29 | # Set up the mock to return some test matrix results
30 | mock_search_matrix.return_value = {
31 | "search_categories": {
32 | "room_events": {
33 | "results": [
34 | {
35 | "result": {
36 | "event_id": "event1",
37 | "room_id": "room1",
38 | "sender": "user1",
39 | "content": {"body": "test message"},
40 | "origin_server_ts": 1609459200000
41 | },
42 | "rank": 1.0
43 | }
44 | ],
45 | "state": {
46 | "room1": [
47 | {
48 | "type": "m.room.name",
49 | "content": {"name": "Test Room"}
50 | },
51 | {
52 | "type": "m.room.topic",
53 | "content": {"topic": "Test Topic"}
54 | }
55 | ]
56 | }
57 | }
58 | }
59 | }
60 |
61 | # Call the unified search endpoint
62 | search_query = {
63 | "query": "test query",
64 | "filters": {}
65 | }
66 | response = await async_client.post("/search/", json=search_query, headers={"Authorization": "Bearer test_token"})
67 |
68 | # Verify the response
69 | assert response.status_code == 200
70 | data = response.json()
71 | assert data["query"] == "test query"
72 | assert len(data["results"]["agents"]) == 1
73 | assert len(data["results"]["matrix"]["messages"]) == 1
74 | assert data["results"]["matrix"]["messages"][0]["event_id"] == "event1"
75 | assert data["metadata"]["total_results"] > 0
76 |
77 | @pytest.mark.asyncio
78 | async def test_mcp_sse_endpoint(async_client):
79 | # Test the SSE endpoint
80 | async with httpx.AsyncClient(app=async_client.app, base_url="http://testserver") as client:
 81 |         async with client.stream("GET", "/search/mcp/sse") as response:
 82 |             assert response.status_code == 200
 83 |             assert response.headers["content-type"].startswith("text/event-stream")
 84 | 
 85 |             # Read the first event (connected event)
 86 |             async for line in response.aiter_lines():
 87 |                 if line.startswith("data: "):
88 | data = json.loads(line[6:])
89 | assert data["event"] == "connected"
90 | assert "message" in data["data"]
91 | break
--------------------------------------------------------------------------------
/tests/requirements.txt:
--------------------------------------------------------------------------------
1 | pytest>=7.0.0
2 | pytest-asyncio>=0.18.0
3 | pytest-cov>=3.0.0
4 | httpx>=0.23.0
5 | docker>=6.0.0
6 | websockets>=10.0
7 | psycopg2-binary>=2.9.0
8 | numpy>=1.22.0
--------------------------------------------------------------------------------
/tests/run_tests.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | import os
3 | import sys
4 | import subprocess
5 | import argparse
6 | import time
7 |
8 | def run_tests(test_type=None, verbose=False):
9 | """Run the specified tests"""
10 | # Determine the pytest command
11 | pytest_cmd = ["pytest"]
12 |
13 | # Add verbosity if requested
14 | if verbose:
15 | pytest_cmd.append("-v")
16 |
17 | # Add test selection based on type
18 | if test_type == "unit":
19 | pytest_cmd.append("tests/unit/")
20 | elif test_type == "integration":
21 | pytest_cmd.append("tests/integration/")
22 | elif test_type == "e2e":
23 | pytest_cmd.append("tests/e2e/")
24 | else:
25 | # Run all tests if no type specified
26 | pytest_cmd.append("tests/")
27 |
28 | # Add coverage reporting
29 | pytest_cmd.extend(["--cov=AutonomousSphere", "--cov-report=term", "--cov-report=html:tests/coverage"])
30 |
31 | # Run the tests
32 | result = subprocess.run(pytest_cmd)
33 | return result.returncode
34 |
35 | def check_docker_status():
36 | """Check if Docker containers are running"""
37 | result = subprocess.run(["docker", "ps"], capture_output=True, text=True)
38 |
39 | # Check for required containers
40 | required_containers = ["postgres", "synapse", "autonomoussphere"]
41 | running_containers = result.stdout.lower()
42 |
43 | missing_containers = []
44 | for container in required_containers:
45 | if container not in running_containers:
46 | missing_containers.append(container)
47 |
48 | return missing_containers
49 |
50 | def start_docker_environment():
51 | """Start the Docker environment if not already running"""
52 | missing_containers = check_docker_status()
53 |
54 | if missing_containers:
55 | print(f"Starting Docker containers: {', '.join(missing_containers)}")
56 | subprocess.run(["docker-compose", "up", "-d"])
57 |
58 | # Wait for containers to be ready
59 | print("Waiting for containers to be ready...")
60 | time.sleep(10) # Give containers some time to start
61 |
62 | # Check again
63 | still_missing = check_docker_status()
64 | if still_missing:
65 | print(f"Warning: Some containers may not have started: {', '.join(still_missing)}")
66 | return False
67 |
68 | return True
69 |
70 | if __name__ == "__main__":
71 | parser = argparse.ArgumentParser(description="Run AutonomousSphere tests")
72 | parser.add_argument("--type", choices=["unit", "integration", "e2e"], help="Type of tests to run")
73 | parser.add_argument("--verbose", "-v", action="store_true", help="Verbose output")
74 | parser.add_argument("--no-docker", action="store_true", help="Skip Docker environment check/start")
75 |
76 | args = parser.parse_args()
77 |
78 | # Check/start Docker environment if needed for integration or e2e tests
79 | if not args.no_docker and (args.type in ["integration", "e2e"] or args.type is None):
80 | if not start_docker_environment():
81 | print("Warning: Docker environment may not be fully ready")
82 |
83 | # Run the tests
84 | exit_code = run_tests(args.type, args.verbose)
85 | sys.exit(exit_code)
--------------------------------------------------------------------------------
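
Because run_tests.py shells out to `pytest`, it depends on the executable being on PATH and a fresh interpreter per run. An equivalent in-process call via `pytest.main()` (a documented pytest API) avoids that and returns the same exit code; the sketch below shows the alternative, not a change to the script above.

```python
# Sketch: running the same test selection in-process with pytest.main()
# instead of a subprocess; returns the pytest exit code as an int.
import sys
from typing import Optional

import pytest


def run_tests_inprocess(test_type: Optional[str] = None, verbose: bool = False) -> int:
    args = ["-v"] if verbose else []
    args.append(f"tests/{test_type}/" if test_type in ("unit", "integration", "e2e") else "tests/")
    args.extend(["--cov=AutonomousSphere", "--cov-report=term", "--cov-report=html:tests/coverage"])
    return int(pytest.main(args))


if __name__ == "__main__":
    sys.exit(run_tests_inprocess("unit", verbose=True))
```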
/tests/test_config.yaml:
--------------------------------------------------------------------------------
1 | # Test configuration for AutonomousSphere
2 |
3 | # Matrix homeserver configuration
4 | homeserver:
5 | address: "http://localhost:8008"
6 | domain: "localhost"
7 |
8 | # Database configuration
9 | database:
10 | host: "localhost"
11 | port: 5432
12 | user: "synapse"
13 | password: "synapsepass"
14 | database: "synapse"
15 |
16 | # API configuration
17 | api:
18 | host: "localhost"
19 | port: 8000
20 |
21 | # Test user credentials
22 | test_user:
23 | username: "test_user"
24 | password: "test_password"
--------------------------------------------------------------------------------
/tests/unit/test_mcp.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import asyncio
 3 | from unittest.mock import patch, MagicMock, AsyncMock
4 | from fastapi import FastAPI
5 | from fastapi.testclient import TestClient
6 |
7 | @pytest.mark.asyncio
8 | async def test_mcp_search_tool():
9 | from AutonomousSphere.search.mcp.search_mcp import search
10 |
11 | # Mock the unified_search function
12 | with patch('AutonomousSphere.search.search.unified_search') as mock_unified_search:
13 | # Set up the mock to return a test result
14 | mock_result = MagicMock()
15 | mock_result.dict.return_value = {
16 | "query": "test query",
17 | "filters": {},
18 | "results": {
19 | "agents": [{"id": "test-agent"}],
20 | "matrix": {
21 | "rooms": [],
22 | "messages": []
23 | }
24 | },
25 | "metadata": {
26 | "total_results": 1,
27 | "search_time_ms": 100,
28 | "source": "mcp"
29 | }
30 | }
31 | mock_unified_search.return_value = mock_result
32 |
33 | # Call the MCP search tool
34 | result = await search("test query", {"filter_key": "filter_value"})
35 |
36 | # Verify the result
37 | assert result["query"] == "test query"
38 | assert result["results"]["agents"] == [{"id": "test-agent"}]
39 | assert result["metadata"]["total_results"] == 1
40 |
41 | # Verify that unified_search was called with the correct parameters
42 | mock_unified_search.assert_called_once()
43 | call_args = mock_unified_search.call_args[0][0]
44 | assert call_args.query == "test query"
45 | assert call_args.filters == {"filter_key": "filter_value"}
46 |
47 | @pytest.mark.asyncio
48 | async def test_register_mcp_service():
49 | from AutonomousSphere.search.mcp.search_mcp import register_mcp_service
50 |
51 | # Mock httpx.AsyncClient
52 | with patch('httpx.AsyncClient') as mock_client_class:
53 | # Set up the mock client
 54 |         mock_client = AsyncMock()  # AsyncMock so "async with" and awaited post() work
55 | mock_client.__aenter__.return_value = mock_client
56 | mock_client_class.return_value = mock_client
57 |
58 | # Set up the mock response
59 | mock_response = MagicMock()
60 | mock_response.status_code = 201
61 | mock_client.post.return_value = mock_response
62 |
63 | # Mock get_config
64 | with patch('AutonomousSphere.search.mcp.search_mcp.get_config') as mock_get_config:
65 | mock_get_config.return_value = {}
66 |
67 | # Mock environment variables
68 | with patch.dict('os.environ', {'API_HOST': 'testhost', 'API_PORT': '8000'}):
69 | # Call the register_mcp_service function
70 | result = await register_mcp_service()
71 |
72 | # Verify the result
73 | assert result is True
74 |
75 | # Verify that the client.post was called with the correct URL and data
76 | mock_client.post.assert_called_once()
77 | call_args = mock_client.post.call_args
78 | assert call_args[0][0] == "http://testhost:8000/registry/agents"
79 | assert "endpoint_url" in call_args[1]["json"]
80 | assert call_args[1]["json"]["endpoint_url"] == "ws://testhost:8000/search/mcp"
--------------------------------------------------------------------------------
/tests/unit/test_registry.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import asyncio
3 | from datetime import datetime
4 | from AutonomousSphere.registry.models import Agent, Protocol, SearchQuery
5 |
6 | # Mock the registry storage for unit tests
7 | @pytest.fixture
8 | def mock_registry():
9 | from AutonomousSphere.registry.registry import agents_registry
10 |
11 | # Save original registry
12 | original_registry = agents_registry.copy()
13 |
14 | # Clear registry for tests
15 | agents_registry.clear()
16 |
17 | # Add some test agents
18 | test_agents = [
19 | Agent(
20 | id="test-agent-1",
21 | display_name="Test Agent 1",
22 | description="A test agent for unit testing",
23 | protocol=Protocol.MATRIX,
24 | endpoint_url="https://example.com/agent1",
25 | public=True,
26 | languages=["en"],
27 | tools=["search", "calculator"],
28 | registered_at=datetime.now(),
29 | last_seen=datetime.now()
30 | ),
31 | Agent(
32 | id="test-agent-2",
33 | display_name="Test Agent 2",
34 | description="Another test agent with different capabilities",
35 | protocol=Protocol.HTTP,
36 | endpoint_url="https://example.com/agent2",
37 | public=False,
38 | languages=["en", "es"],
39 | tools=["weather", "news"],
40 | registered_at=datetime.now(),
41 | last_seen=datetime.now()
42 | )
43 | ]
44 |
45 | for agent in test_agents:
46 | agents_registry[agent.id] = agent
47 |
48 | yield agents_registry
49 |
50 | # Restore original registry
51 | agents_registry.clear()
52 | agents_registry.update(original_registry)
53 |
54 | @pytest.mark.asyncio
55 | async def test_register_agent(mock_registry):
56 | from AutonomousSphere.registry.registry import register_agent
57 |
58 | new_agent = Agent(
59 | id="new-test-agent",
60 | display_name="New Test Agent",
61 | description="A newly registered test agent",
62 | protocol=Protocol.MATRIX,
63 | endpoint_url="https://example.com/new-agent",
64 | public=True,
65 | languages=["en"],
66 | tools=["search"],
67 | )
68 |
69 | result = await register_agent(new_agent)
70 |
71 | assert result.id == "new-test-agent"
72 | assert result.display_name == "New Test Agent"
73 | assert result.registered_at is not None
74 | assert result.last_seen is not None
75 | assert "new-test-agent" in mock_registry
76 |
77 | @pytest.mark.asyncio
78 | async def test_list_agents(mock_registry):
79 | from AutonomousSphere.registry.registry import list_agents
80 |
81 | # Test listing all agents
82 | all_agents = await list_agents(protocol=None, public=None)
83 | assert len(all_agents) == 2
84 |
85 | # Test filtering by protocol
86 | matrix_agents = await list_agents(protocol=Protocol.MATRIX, public=None)
87 | assert len(matrix_agents) == 1
88 | assert matrix_agents[0].id == "test-agent-1"
89 |
90 | # Test filtering by public visibility
91 | public_agents = await list_agents(protocol=None, public=True)
92 | assert len(public_agents) == 1
93 | assert public_agents[0].id == "test-agent-1"
94 |
95 | @pytest.mark.asyncio
96 | async def test_search_agents(mock_registry):
97 | from AutonomousSphere.registry.registry import search_agents
98 |
99 | # Basic search
100 | results = await search_agents(SearchQuery(query="test agent", filters={}))
101 | assert len(results) == 2
102 |
103 | # Search with protocol filter
104 | results = await search_agents(SearchQuery(
105 | query="test agent",
106 | filters={"protocol": [Protocol.HTTP]}
107 | ))
108 | assert len(results) == 1
109 | assert results[0].id == "test-agent-2"
110 |
111 | # Search with tools filter
112 | results = await search_agents(SearchQuery(
113 | query="test agent",
114 | filters={"tools": ["search"]}
115 | ))
116 | assert len(results) == 1
117 | assert results[0].id == "test-agent-1"
--------------------------------------------------------------------------------
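
For reference while reading these tests: the fields exercised here suggest an `Agent`/`Protocol` shape roughly like the sketch below. The real definitions live in registry/models/agent.py and are not shown in this dump, so treat this purely as an assumption derived from the tests, not the actual model.

```python
# Sketch of an Agent/Protocol shape consistent with the fields used in these
# tests. Assumption only -- the real model is in registry/models/agent.py.
from datetime import datetime
from enum import Enum
from typing import Any, Dict, List, Optional

from pydantic import BaseModel


class Protocol(str, Enum):
    MATRIX = "matrix"
    HTTP = "http"


class Agent(BaseModel):
    id: str
    display_name: str
    description: str
    protocol: Protocol
    endpoint_url: str
    public: bool = True
    languages: List[str] = []
    tools: List[str] = []
    custom_metadata: Dict[str, Any] = {}
    registered_at: Optional[datetime] = None
    last_seen: Optional[datetime] = None
```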
/tests/unit/test_search.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import asyncio
3 | from unittest.mock import patch, MagicMock
4 | from AutonomousSphere.registry.models.search import SearchQuery
5 | from AutonomousSphere.search.models import SearchResult
6 |
7 | @pytest.mark.asyncio
8 | async def test_unified_search():
9 | from AutonomousSphere.search.search import unified_search
10 |
11 | # Create a mock search query
12 | search_query = SearchQuery(
13 | query="test query",
14 | filters={}
15 | )
16 |
17 | # Mock the search_agents function
18 | with patch('AutonomousSphere.search.search.search_agents') as mock_search_agents:
19 | # Set up the mock to return some test agents
20 | mock_search_agents.return_value = [
21 | {
22 | "id": "test-agent-1",
23 | "display_name": "Test Agent 1",
24 | "description": "A test agent",
25 | "protocol": "matrix",
26 | "endpoint_url": "https://example.com/agent1",
27 | "public": True,
28 | "languages": ["en"],
29 | "tools": ["search"],
30 | "registered_at": "2023-01-01T00:00:00",
31 | "last_seen": "2023-01-01T00:00:00"
32 | }
33 | ]
34 |
35 | # Mock the search_matrix function
36 | with patch('AutonomousSphere.search.search.search_matrix') as mock_search_matrix:
37 | # Set up the mock to return some test matrix results
38 | mock_search_matrix.return_value = {
39 | "search_categories": {
40 | "room_events": {
41 | "results": [
42 | {
43 | "result": {
44 | "event_id": "event1",
45 | "room_id": "room1",
46 | "sender": "user1",
47 | "content": {"body": "test message"},
48 | "origin_server_ts": 1609459200000
49 | },
50 | "rank": 1.0
51 | }
52 | ],
53 | "state": {
54 | "room1": [
55 | {
56 | "type": "m.room.name",
57 | "content": {"name": "Test Room"}
58 | },
59 | {
60 | "type": "m.room.topic",
61 | "content": {"topic": "Test Topic"}
62 | }
63 | ]
64 | }
65 | }
66 | }
67 | }
68 |
69 | # Mock the get_config function
70 | with patch('AutonomousSphere.search.search.get_config') as mock_get_config:
71 | mock_get_config.return_value = {
72 | "homeserver": {
73 | "address": "http://localhost:8008"
74 | }
75 | }
76 |
77 | # Call the unified_search function
78 | result = await unified_search(search_query, authorization="Bearer test_token")
79 |
80 | # Verify the result
81 | assert isinstance(result, SearchResult)
82 | assert result.query == "test query"
83 | assert len(result.results["agents"]) == 1
84 | assert len(result.results["matrix"]["messages"]) == 1
85 | assert result.results["matrix"]["messages"][0]["event_id"] == "event1"
86 | assert result.metadata.total_results > 0
--------------------------------------------------------------------------------
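
The mocked `search_matrix` payload here follows Synapse's `/search` response shape (`search_categories.room_events.results[].result` plus per-room `state`), while the assertions read a flattened `results["matrix"]["messages"]` list. The transformation `unified_search` is expected to perform is therefore roughly the following; a sketch inferred from this test's inputs and assertions, not the actual implementation in search/search.py.

```python
# Sketch: flattening a Synapse search response into the messages list these
# tests assert on. Inferred from the mocked payload above, not the real code.
from typing import Any, Dict, List


def flatten_matrix_messages(matrix_response: Dict[str, Any]) -> List[Dict[str, Any]]:
    room_events = matrix_response.get("search_categories", {}).get("room_events", {})
    messages = []
    for hit in room_events.get("results", []):
        event = hit.get("result", {})
        messages.append({
            "event_id": event.get("event_id"),
            "room_id": event.get("room_id"),
            "sender": event.get("sender"),
            "content": event.get("content", {}),
            "origin_server_ts": event.get("origin_server_ts"),
            "rank": hit.get("rank"),
        })
    return messages
```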
/tests/utils.py:
--------------------------------------------------------------------------------
1 | import asyncio
2 | import httpx
3 | import json
4 | import time
5 | import os
6 | import yaml
7 | import psycopg2
8 | from typing import Dict, Any, Optional
9 |
10 | # Load test configuration
11 | def load_test_config():
12 | config_path = os.path.join(os.path.dirname(__file__), "test_config.yaml")
13 | with open(config_path, "r") as f:
14 | return yaml.safe_load(f)
15 |
16 | # Create a Matrix test user
17 | async def create_matrix_test_user(username: str, password: str) -> Dict[str, Any]:
18 | """Create a test user on the Matrix homeserver and return credentials"""
19 | config = load_test_config()
20 | homeserver_url = config["homeserver"]["address"]
21 |
22 | # Register the user
23 | register_data = {
24 | "username": username,
25 | "password": password,
26 | "auth": {"type": "m.login.dummy"}
27 | }
28 |
29 | async with httpx.AsyncClient() as client:
30 | try:
31 | response = await client.post(
32 | f"{homeserver_url}/_matrix/client/v3/register",
33 | json=register_data
34 | )
35 |
36 | if response.status_code == 200:
37 | credentials = response.json()
38 | return {
39 | "user_id": credentials["user_id"],
40 | "access_token": credentials["access_token"],
41 | "homeserver_url": homeserver_url
42 | }
43 | else:
44 | # Try logging in instead
45 | login_data = {
46 | "type": "m.login.password",
47 | "user": username,
48 | "password": password
49 | }
50 |
51 | response = await client.post(
52 | f"{homeserver_url}/_matrix/client/v3/login",
53 | json=login_data
54 | )
55 |
56 | if response.status_code == 200:
57 | credentials = response.json()
58 | return {
59 | "user_id": credentials["user_id"],
60 | "access_token": credentials["access_token"],
61 | "homeserver_url": homeserver_url
62 | }
63 | except Exception as e:
64 | print(f"Error creating Matrix test user: {str(e)}")
65 |
66 | return None
67 |
68 | # Connect to the Postgres database
69 | def connect_to_db():
70 | """Connect to the Postgres database and return connection"""
71 | config = load_test_config()
72 | db_config = config["database"]
73 |
74 | try:
75 | conn = psycopg2.connect(
76 | host=db_config["host"],
77 | port=db_config["port"],
78 | user=db_config["user"],
79 | password=db_config["password"],
80 | database=db_config["database"]
81 | )
82 | return conn
83 | except Exception as e:
84 | print(f"Error connecting to database: {str(e)}")
85 | return None
86 |
87 | # Wait for a service to be ready
88 | async def wait_for_service(url: str, max_retries: int = 30, retry_interval: int = 2) -> bool:
89 | """Wait for a service to be ready by polling the URL"""
90 | async with httpx.AsyncClient() as client:
91 | for i in range(max_retries):
92 | try:
93 | response = await client.get(url, timeout=5)
94 | if response.status_code == 200:
95 | return True
96 | except (httpx.ConnectError, httpx.ReadTimeout):
97 | pass
98 |
99 | if i < max_retries - 1:
100 | await asyncio.sleep(retry_interval)
101 |
102 | return False
--------------------------------------------------------------------------------
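
The helpers in tests/utils.py are written to compose into fixtures: for example, a session-scoped fixture could wait for the API and the homeserver, then provision a throwaway Matrix user. A sketch of that wiring follows; the fixture name and import path are assumptions, and the URLs follow tests/test_config.yaml.

```python
# Sketch: composing the utils above into a session-scoped fixture.
# Fixture name and import path are assumptions; URLs follow tests/test_config.yaml.
import uuid

import pytest
import pytest_asyncio

from tests.utils import create_matrix_test_user, wait_for_service  # assumes tests/ is importable as a package


@pytest_asyncio.fixture(scope="session")
async def provisioned_matrix_user():
    assert await wait_for_service("http://localhost:8000/health"), "API not ready"
    assert await wait_for_service("http://localhost:8008/_matrix/client/versions"), "Synapse not ready"

    credentials = await create_matrix_test_user(f"testuser{uuid.uuid4().hex[:8]}", "testpassword")
    if credentials is None:
        pytest.skip("Could not provision a Matrix test user")
    return credentials
```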