├── tests
│   ├── Feature
│   │   └── .gitkeep
│   ├── TestCase.php
│   └── Unit
│       ├── DummyClientTest.php
│       └── LlmManagerTest.php
├── assets
│   ├── logo
│   │   └── laravel-llm-suite-logo.png
│   └── examples
│       └── example-load-conversation.png
├── .gitignore
├── src
│   ├── Contracts
│   │   ├── LlmClient.php
│   │   ├── ImageClient.php
│   │   ├── ChatClient.php
│   │   └── ConversationStore.php
│   ├── Exceptions
│   │   ├── LlmException.php
│   │   ├── ProviderConfigException.php
│   │   └── ProviderRequestException.php
│   ├── Support
│   │   ├── ImageResponse.php
│   │   ├── ChatMessage.php
│   │   ├── ImageRequest.php
│   │   ├── ChatResponse.php
│   │   ├── TokenUsage.php
│   │   └── Conversation.php
│   ├── Facades
│   │   └── Llm.php
│   ├── LlmSuiteServiceProvider.php
│   ├── ConversationStores
│   │   ├── SessionStore.php
│   │   └── DatabaseStore.php
│   ├── Clients
│   │   ├── Dummy
│   │   │   └── DummyClient.php
│   │   ├── Anthropic
│   │   │   └── AnthropicClient.php
│   │   ├── LmStudio
│   │   │   └── LmStudioClient.php
│   │   └── OpenAI
│   │       └── OpenAIClient.php
│   ├── Helpers
│   │   └── LlmFake.php
│   └── Managers
│       └── LlmManager.php
├── .editorconfig
├── phpunit.xml
├── database
│   └── migrations
│       └── 2024_01_01_000000_create_llm_conversations_table.php
├── LICENSE
├── .github
│   └── workflows
│       └── workflow.yml
├── composer.json
├── SECURITY.md
├── CHANGELOG.md
├── config
│   └── llm-suite.php
├── CONTRIBUTING.md
├── CODE_OF_CONDUCT.md
└── README.md
/tests/Feature/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/assets/logo/laravel-llm-suite-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oziriemeka/laravel-llm-suite/HEAD/assets/logo/laravel-llm-suite-logo.png
--------------------------------------------------------------------------------
/assets/examples/example-load-conversation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/oziriemeka/laravel-llm-suite/HEAD/assets/examples/example-load-conversation.png
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /vendor/
2 | /node_modules/
3 | .env
4 | .env.backup
5 | .phpunit.result.cache
6 | .phpunit.cache
7 | composer.lock
8 | *.log
9 | .DS_Store
10 | Thumbs.db
11 | .idea/
12 | .vscode/
13 | *.swp
14 | *.swo
15 | .local/
16 |
17 |
--------------------------------------------------------------------------------
/phpunit.xml:
--------------------------------------------------------------------------------
1 |
2 |
8 |
9 |
10 | tests/Unit
11 |
12 |
13 | tests/Feature
14 |
15 |
16 |
17 |
18 | src
19 |
20 |
21 |
22 |
23 |
--------------------------------------------------------------------------------
/database/migrations/2024_01_01_000000_create_llm_conversations_table.php:
--------------------------------------------------------------------------------
1 | id();
16 | $table->string('conversation_id')->unique();
17 | $table->text('system_prompt')->nullable();
18 | $table->json('messages');
19 | $table->timestamps();
20 |
21 | $table->index('conversation_id');
22 | $table->index('updated_at');
23 | });
24 | }
25 |
26 | /**
27 | * Reverse the migrations.
28 | */
29 | public function down(): void
30 | {
31 | Schema::dropIfExists('llm_conversations');
32 | }
33 | };
34 |
35 |
--------------------------------------------------------------------------------
/src/Support/ImageResponse.php:
--------------------------------------------------------------------------------
1 | url !== null && $this->url !== '';
25 | }
26 |
27 | /**
28 | * Check if the response contains base64 data.
29 | */
30 | public function hasBase64(): bool
31 | {
32 | return $this->base64 !== null && $this->base64 !== '';
33 | }
34 |
35 | /**
36 | * Get the image data (URL or base64).
37 | */
38 | public function getData(): ?string
39 | {
40 | return $this->url ?? $this->base64;
41 | }
42 |
43 | /**
44 | * Get the raw response data.
45 | */
46 | public function getRaw(): array
47 | {
48 | return $this->raw;
49 | }
50 | }
51 |
52 |
--------------------------------------------------------------------------------
/src/Support/ChatMessage.php:
--------------------------------------------------------------------------------
1 | $this->role,
52 | 'content' => $this->content,
53 | ];
54 | }
55 | }
56 |
57 |
--------------------------------------------------------------------------------
/composer.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "oziri/laravel-llm-suite",
3 | "description": "A unified driver-based LLM toolkit for Laravel.",
4 | "type": "library",
5 | "license": "MIT",
6 | "keywords": [
7 | "laravel",
8 | "llm",
9 | "openai",
10 | "anthropic",
11 | "claude",
12 | "gpt",
13 | "ai",
14 | "chat",
15 | "image-generation"
16 | ],
17 | "authors": [
18 | {
19 | "name": "Oziri",
20 | "email": "oziri@example.com"
21 | }
22 | ],
23 | "require": {
24 | "php": "^8.1",
25 | "illuminate/support": "^10.0|^11.0|^12.0",
26 | "guzzlehttp/guzzle": "^7.0"
27 | },
28 | "require-dev": {
29 | "orchestra/testbench": "^8.0|^9.0|^10.0",
30 | "phpunit/phpunit": "^10.0|^11.0"
31 | },
32 | "autoload": {
33 | "psr-4": {
34 | "Oziri\\LlmSuite\\": "src/"
35 | }
36 | },
37 | "autoload-dev": {
38 | "psr-4": {
39 | "Oziri\\LlmSuite\\Tests\\": "tests/"
40 | }
41 | },
42 | "extra": {
43 | "laravel": {
44 | "providers": [
45 | "Oziri\\LlmSuite\\LlmSuiteServiceProvider"
46 | ],
47 | "aliases": {
48 | "Llm": "Oziri\\LlmSuite\\Facades\\Llm"
49 | }
50 | }
51 | },
52 | "config": {
53 | "sort-packages": true
54 | },
55 | "minimum-stability": "stable",
56 | "prefer-stable": true
57 | }
--------------------------------------------------------------------------------
/tests/TestCase.php:
--------------------------------------------------------------------------------
1 | \Oziri\LlmSuite\Facades\Llm::class,
28 | ];
29 | }
30 |
31 | protected function defineEnvironment($app): void
32 | {
33 | // Use SQLite in-memory for testing
34 | $app['config']->set('database.default', 'testing');
35 | $app['config']->set('database.connections.testing', [
36 | 'driver' => 'sqlite',
37 | 'database' => ':memory:',
38 | 'prefix' => '',
39 | ]);
40 |
41 | // Set default LLM config for tests
42 | $app['config']->set('llm-suite.default', 'dummy');
43 | $app['config']->set('llm-suite.providers.dummy', [
44 | 'driver' => 'dummy',
45 | ]);
46 |
47 | // Conversation config for tests
48 | $app['config']->set('llm-suite.conversation', [
49 | 'driver' => 'session',
50 | 'table' => 'llm_conversations',
51 | ]);
52 | }
53 | }
54 |
55 |
--------------------------------------------------------------------------------
/src/Exceptions/ProviderRequestException.php:
--------------------------------------------------------------------------------
1 | response = $response;
20 | }
21 |
22 | /**
23 | * Get the HTTP response that caused the exception.
24 | */
25 | public function getResponse(): ?Response
26 | {
27 | return $this->response;
28 | }
29 |
30 | /**
31 | * Get the response body as an array.
32 | */
33 | public function getResponseBody(): ?array
34 | {
35 | return $this->response?->json();
36 | }
37 |
38 | /**
39 | * Get the HTTP status code.
40 | */
41 | public function getStatusCode(): ?int
42 | {
43 | return $this->response?->status();
44 | }
45 |
46 | /**
47 | * Create an exception from an HTTP response.
48 | */
49 | public static function fromResponse(string $message, Response $response): static
50 | {
51 | $body = $response->json();
52 | $errorMessage = $body['error']['message'] ?? $message;
53 |
54 | return new static(
55 | message: "{$message}: {$errorMessage}",
56 | response: $response,
57 | code: $response->status()
58 | );
59 | }
60 | }
61 |
62 |
--------------------------------------------------------------------------------
/src/Support/ImageRequest.php:
--------------------------------------------------------------------------------
1 | $this->prompt,
43 | 'size' => $this->size,
44 | 'n' => $this->n,
45 | ];
46 |
47 | if ($this->model !== null) {
48 | $data['model'] = $this->model;
49 | }
50 |
51 | if ($this->quality !== null) {
52 | $data['quality'] = $this->quality;
53 | }
54 |
55 | if ($this->style !== null) {
56 | $data['style'] = $this->style;
57 | }
58 |
59 | return $data;
60 | }
61 | }
62 |
63 |
--------------------------------------------------------------------------------
/src/Support/ChatResponse.php:
--------------------------------------------------------------------------------
1 | tokenUsage ??= TokenUsage::empty();
22 | }
23 |
24 | /**
25 | * Get the response content as a string.
26 | */
27 | public function __toString(): string
28 | {
29 | return $this->content;
30 | }
31 |
32 | /**
33 | * Get the raw response data.
34 | */
35 | public function getRaw(): array
36 | {
37 | return $this->raw;
38 | }
39 |
40 | /**
41 | * Check if the response is empty.
42 | */
43 | public function isEmpty(): bool
44 | {
45 | return empty($this->content);
46 | }
47 |
48 | /**
49 | * Get the total tokens used in this request.
50 | */
51 | public function getTotalTokens(): int
52 | {
53 | return $this->tokenUsage->totalTokens;
54 | }
55 |
56 | /**
57 | * Get the prompt/input tokens used.
58 | */
59 | public function getPromptTokens(): int
60 | {
61 | return $this->tokenUsage->promptTokens;
62 | }
63 |
64 | /**
65 | * Get the completion/output tokens used.
66 | */
67 | public function getCompletionTokens(): int
68 | {
69 | return $this->tokenUsage->completionTokens;
70 | }
71 | }
72 |
--------------------------------------------------------------------------------
/src/Support/TokenUsage.php:
--------------------------------------------------------------------------------
1 | totalTokens === 0 && ($this->promptTokens > 0 || $this->completionTokens > 0)) {
19 | $this->totalTokens = $this->promptTokens + $this->completionTokens;
20 | }
21 | }
22 |
23 | /**
24 | * Create a TokenUsage instance from an array.
25 | */
26 | public static function fromArray(array $data): static
27 | {
28 | return new static(
29 | promptTokens: $data['prompt_tokens'] ?? $data['input_tokens'] ?? 0,
30 | completionTokens: $data['completion_tokens'] ?? $data['output_tokens'] ?? 0,
31 | totalTokens: $data['total_tokens'] ?? 0,
32 | );
33 | }
34 |
35 | /**
36 | * Create an empty TokenUsage instance.
37 | */
38 | public static function empty(): static
39 | {
40 | return new static(0, 0, 0);
41 | }
42 |
43 | /**
44 | * Check if token usage data is available.
45 | */
46 | public function hasData(): bool
47 | {
48 | return $this->totalTokens > 0 || $this->promptTokens > 0 || $this->completionTokens > 0;
49 | }
50 |
51 | /**
52 | * Convert to array.
53 | */
54 | public function toArray(): array
55 | {
56 | return [
57 | 'prompt_tokens' => $this->promptTokens,
58 | 'completion_tokens' => $this->completionTokens,
59 | 'total_tokens' => $this->totalTokens,
60 | ];
61 | }
62 | }
63 |
64 |
--------------------------------------------------------------------------------
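
A minimal usage sketch (not part of the package source; it assumes the published `llm-suite` config is in place): token usage from the value objects above can be read straight off a `ChatResponse`.

```php
use Oziri\LlmSuite\Managers\LlmManager;

// Build a manager from the package config, as the service provider does.
$manager = new LlmManager(config('llm-suite'));

// Clients return a ChatResponse; the manager's chat() helper returns a plain string.
$response = $manager->client()->chat('Summarise the changelog in one line.');

$total = $response->getTotalTokens();        // prompt + completion tokens
$usage = $response->tokenUsage->toArray();   // ['prompt_tokens' => ..., 'completion_tokens' => ..., 'total_tokens' => ...]

if (! $response->tokenUsage->hasData()) {
    // The provider did not report usage for this request.
}
```
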
/src/LlmSuiteServiceProvider.php:
--------------------------------------------------------------------------------
1 | mergeConfigFrom(
21 | __DIR__ . '/../config/llm-suite.php',
22 | 'llm-suite'
23 | );
24 |
25 | $this->app->singleton(LlmManager::class, function ($app) {
26 | return new LlmManager($app['config']->get('llm-suite'));
27 | });
28 |
29 | $this->app->alias(LlmManager::class, 'llm-suite');
30 | }
31 |
32 | /**
33 | * Bootstrap the service provider.
34 | */
35 | public function boot(): void
36 | {
37 | if ($this->app->runningInConsole()) {
38 | // Publish config
39 | $this->publishes([
40 | __DIR__ . '/../config/llm-suite.php' => config_path('llm-suite.php'),
41 | ], 'llm-suite-config');
42 |
43 | // Publish migrations
44 | $this->publishes([
45 | __DIR__ . '/../database/migrations/2024_01_01_000000_create_llm_conversations_table.php' => database_path('migrations/2024_01_01_000000_create_llm_conversations_table.php'),
46 | ], 'llm-suite-migrations');
47 |
48 | // Publish both config and migrations together
49 | $this->publishes([
50 | __DIR__ . '/../config/llm-suite.php' => config_path('llm-suite.php'),
51 | __DIR__ . '/../database/migrations/2024_01_01_000000_create_llm_conversations_table.php' => database_path('migrations/2024_01_01_000000_create_llm_conversations_table.php'),
52 | ], 'llm-suite');
53 | }
54 | }
55 |
56 | /**
57 | * Get the services provided by the provider.
58 | */
59 | public function provides(): array
60 | {
61 | return [
62 | LlmManager::class,
63 | 'llm-suite',
64 | ];
65 | }
66 | }
67 |
68 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | tokenUsage`
25 | - **LM Studio Improvements**
26 | - Added `protocol` option (http/https)
27 | - Added `base_url` option for full URL override
28 | - Database migration for conversation storage
29 |
30 | ### Changed
31 | - Default conversation driver changed from `session` to `database`
32 | - Updated all clients to return token usage in responses
33 |
34 | ## [0.1.2] - 2024-12-08
35 |
36 | ### Added
37 | - LM Studio support for local LLM testing
38 | - Configurable host, port, timeout
39 | - `isAvailable()` method to check server status
40 | - `getAvailableModels()` method to list loaded models
41 | - OpenAI `getAvailableModels()` and `isAvailable()` methods
42 | - Package logo
43 |
44 | ### Changed
45 | - Extracted hardcoded values to class constants across all clients
46 | - Improved README documentation
47 |
48 | ## [0.1.1] - 2024-12-08
49 |
50 | ### Added
51 | - Laravel 12.x support
52 |
53 | ### Fixed
54 | - Composer dependency constraints for Laravel 12
55 |
56 | ## [0.1.0] - 2024-12-08
57 |
58 | ### Added
59 | - Initial release
60 | - **Chat API** - Unified interface for chat completions
61 | - OpenAI (GPT-4, GPT-4.1-mini, etc.)
62 | - Anthropic (Claude 3.5 Sonnet, etc.)
63 | - Dummy provider for testing
64 | - **Image Generation** - OpenAI DALL-E support
65 | - **Driver Pattern** - Switch providers with `Llm::using('provider')`
66 | - Laravel service provider with auto-discovery
67 | - Configuration file with environment variable support
68 | - `Llm` facade with method hints
69 | - Testing support with `Llm::fake()` and HTTP fakes
70 | - Comprehensive documentation
71 |
72 | [Unreleased]: https://github.com/OziriEmeka/laravel-llm-suite/compare/v0.2.0...HEAD
73 | [0.2.0]: https://github.com/OziriEmeka/laravel-llm-suite/compare/v0.1.2...v0.2.0
74 | [0.1.2]: https://github.com/OziriEmeka/laravel-llm-suite/compare/v0.1.1...v0.1.2
75 | [0.1.1]: https://github.com/OziriEmeka/laravel-llm-suite/compare/v0.1.0...v0.1.1
76 | [0.1.0]: https://github.com/OziriEmeka/laravel-llm-suite/releases/tag/v0.1.0
77 |
78 |
--------------------------------------------------------------------------------
/config/llm-suite.php:
--------------------------------------------------------------------------------
1 | env('LLM_SUITE_DEFAULT', 'openai'),
17 |
18 | /*
19 | |--------------------------------------------------------------------------
20 | | LLM Providers
21 | |--------------------------------------------------------------------------
22 | |
23 | | Here you may configure the LLM providers for your application. You may
24 | | configure as many providers as you wish, and you may even configure
25 | | multiple providers of the same driver.
26 | |
27 | */
28 |
29 | 'providers' => [
30 |
31 | 'openai' => [
32 | 'driver' => 'openai',
33 | 'api_key' => env('OPENAI_API_KEY'),
34 | 'base_url' => env('OPENAI_BASE_URL', 'https://api.openai.com/v1'),
35 | 'chat_model' => env('OPENAI_CHAT_MODEL', 'gpt-4.1-mini'),
36 | 'image_model' => env('OPENAI_IMAGE_MODEL', 'dall-e-3'),
37 | ],
38 |
39 | 'anthropic' => [
40 | 'driver' => 'anthropic',
41 | 'api_key' => env('ANTHROPIC_API_KEY'),
42 | 'base_url' => env('ANTHROPIC_BASE_URL', 'https://api.anthropic.com/v1'),
43 | 'chat_model' => env('ANTHROPIC_CHAT_MODEL', 'claude-3-5-sonnet-20241022'),
44 | ],
45 |
46 | 'lmstudio' => [
47 | 'driver' => 'lmstudio',
48 | 'protocol' => env('LMSTUDIO_PROTOCOL', 'http'),
49 | 'host' => env('LMSTUDIO_HOST', '127.0.0.1'),
50 | 'port' => env('LMSTUDIO_PORT', 1234),
51 | // 'base_url' => env('LMSTUDIO_BASE_URL'), // Optional: override protocol/host/port
52 | 'api_key' => env('LMSTUDIO_API_KEY'), // Optional - LM Studio doesn't require auth by default
53 | 'chat_model' => env('LMSTUDIO_CHAT_MODEL', 'local-model'),
54 | 'timeout' => env('LMSTUDIO_TIMEOUT', 120), // Local models can be slow
55 | ],
56 |
57 | 'dummy' => [
58 | 'driver' => 'dummy',
59 | // Optional: set default responses for testing
60 | // 'chat_response' => 'This is a test response.',
61 | // 'image_url' => 'https://example.com/test-image.png',
62 | ],
63 |
64 | ],
65 |
66 | /*
67 | |--------------------------------------------------------------------------
68 | | Conversation Settings
69 | |--------------------------------------------------------------------------
70 | |
71 | | Configure how conversation history is stored. Available drivers:
72 | | - 'database': Store in database (default, requires migration)
73 | | - 'session': Store in Laravel session (good for temporary chats)
74 | |
75 | */
76 |
77 | 'conversation' => [
78 | 'driver' => env('LLM_CONVERSATION_DRIVER', 'database'),
79 | 'table' => 'llm_conversations',
80 | ],
81 |
82 | ];
83 |
84 |
--------------------------------------------------------------------------------
/src/ConversationStores/SessionStore.php:
--------------------------------------------------------------------------------
1 | getKey($conversationId), []);
35 |
36 | return $data['messages'] ?? [];
37 | }
38 |
39 | /**
40 | * Save messages for a conversation.
41 | */
42 | public function saveMessages(string $conversationId, array $messages): void
43 | {
44 | $data = Session::get($this->getKey($conversationId), []);
45 | $data['messages'] = $messages;
46 |
47 | Session::put($this->getKey($conversationId), $data);
48 | }
49 |
50 | /**
51 | * Add a message to a conversation.
52 | */
53 | public function addMessage(string $conversationId, array $message): void
54 | {
55 | $messages = $this->getMessages($conversationId);
56 | $messages[] = $message;
57 |
58 | $this->saveMessages($conversationId, $messages);
59 | }
60 |
61 | /**
62 | * Get the system prompt for a conversation.
63 | */
64 | public function getSystemPrompt(string $conversationId): ?string
65 | {
66 | $data = Session::get($this->getKey($conversationId), []);
67 |
68 | return $data['system_prompt'] ?? null;
69 | }
70 |
71 | /**
72 | * Set the system prompt for a conversation.
73 | */
74 | public function setSystemPrompt(string $conversationId, string $prompt): void
75 | {
76 | $data = Session::get($this->getKey($conversationId), []);
77 | $data['system_prompt'] = $prompt;
78 |
79 | Session::put($this->getKey($conversationId), $data);
80 | }
81 |
82 | /**
83 | * Clear all messages from a conversation.
84 | */
85 | public function clear(string $conversationId): void
86 | {
87 | $data = Session::get($this->getKey($conversationId), []);
88 | $data['messages'] = [];
89 |
90 | Session::put($this->getKey($conversationId), $data);
91 | }
92 |
93 | /**
94 | * Check if a conversation exists.
95 | */
96 | public function exists(string $conversationId): bool
97 | {
98 | return Session::has($this->getKey($conversationId));
99 | }
100 |
101 | /**
102 | * Delete a conversation entirely.
103 | */
104 | public function delete(string $conversationId): void
105 | {
106 | Session::forget($this->getKey($conversationId));
107 | }
108 |
109 | /**
110 | * Get all conversation IDs from the session.
111 | */
112 | public function all(): array
113 | {
114 | $conversations = [];
115 | $prefix = self::SESSION_PREFIX;
116 |
117 | foreach (Session::all() as $key => $value) {
118 | if (str_starts_with($key, $prefix)) {
119 | $conversations[] = substr($key, strlen($prefix));
120 | }
121 | }
122 |
123 | return $conversations;
124 | }
125 | }
126 |
127 |
--------------------------------------------------------------------------------
/tests/Unit/DummyClientTest.php:
--------------------------------------------------------------------------------
1 | chat('Hello');
18 |
19 | $this->assertInstanceOf(ChatResponse::class, $response);
20 | $this->assertStringContainsString('Hello', $response->content);
21 | }
22 |
23 | public function test_returns_custom_chat_response(): void
24 | {
25 | $client = new DummyClient(['chat_response' => 'Custom response']);
26 | $response = $client->chat('Hello');
27 |
28 | $this->assertEquals('Custom response', $response->content);
29 | }
30 |
31 | public function test_can_set_chat_response(): void
32 | {
33 | $client = new DummyClient();
34 | $client->setChatResponse('Modified response');
35 | $response = $client->chat('Hello');
36 |
37 | $this->assertEquals('Modified response', $response->content);
38 | }
39 |
40 | public function test_returns_default_image_response(): void
41 | {
42 | $client = new DummyClient();
43 | $response = $client->generate(['prompt' => 'A cat']);
44 |
45 | $this->assertInstanceOf(ImageResponse::class, $response);
46 | $this->assertEquals('https://example.com/dummy-image.png', $response->url);
47 | }
48 |
49 | public function test_returns_custom_image_url(): void
50 | {
51 | $client = new DummyClient(['image_url' => 'https://custom.com/image.png']);
52 | $response = $client->generate(['prompt' => 'A cat']);
53 |
54 | $this->assertEquals('https://custom.com/image.png', $response->url);
55 | }
56 |
57 | public function test_can_set_image_url(): void
58 | {
59 | $client = new DummyClient();
60 | $client->setImageUrl('https://modified.com/image.png');
61 | $response = $client->generate(['prompt' => 'A cat']);
62 |
63 | $this->assertEquals('https://modified.com/image.png', $response->url);
64 | }
65 |
66 | public function test_tracks_chat_history(): void
67 | {
68 | $client = new DummyClient();
69 | $client->chat('First message');
70 | $client->chat('Second message', ['model' => 'test']);
71 |
72 | $history = $client->getChatHistory();
73 |
74 | $this->assertCount(2, $history);
75 | $this->assertEquals('First message', $history[0]['prompt']);
76 | $this->assertEquals('Second message', $history[1]['prompt']);
77 | $this->assertEquals('test', $history[1]['options']['model']);
78 | }
79 |
80 | public function test_tracks_image_history(): void
81 | {
82 | $client = new DummyClient();
83 | $client->generate(['prompt' => 'A cat']);
84 | $client->generate(['prompt' => 'A dog', 'size' => '512x512']);
85 |
86 | $history = $client->getImageHistory();
87 |
88 | $this->assertCount(2, $history);
89 | $this->assertEquals('A cat', $history[0]['prompt']);
90 | $this->assertEquals('A dog', $history[1]['prompt']);
91 | $this->assertEquals('512x512', $history[1]['size']);
92 | }
93 |
94 | public function test_can_clear_history(): void
95 | {
96 | $client = new DummyClient();
97 | $client->chat('Hello');
98 | $client->generate(['prompt' => 'A cat']);
99 |
100 | $client->clearHistory();
101 |
102 | $this->assertEmpty($client->getChatHistory());
103 | $this->assertEmpty($client->getImageHistory());
104 | }
105 | }
106 |
107 |
--------------------------------------------------------------------------------
/src/Clients/Dummy/DummyClient.php:
--------------------------------------------------------------------------------
1 | chatResponse = $config['chat_response'] ?? null;
43 | $this->imageUrl = $config['image_url'] ?? null;
44 | }
45 |
46 | /**
47 | * Set a custom chat response for testing.
48 | */
49 | public function setChatResponse(string $response): static
50 | {
51 | $this->chatResponse = $response;
52 |
53 | return $this;
54 | }
55 |
56 | /**
57 | * Set a custom image URL for testing.
58 | */
59 | public function setImageUrl(string $url): static
60 | {
61 | $this->imageUrl = $url;
62 |
63 | return $this;
64 | }
65 |
66 | /**
67 | * Get the chat request history.
68 | */
69 | public function getChatHistory(): array
70 | {
71 | return $this->chatHistory;
72 | }
73 |
74 | /**
75 | * Get the image request history.
76 | */
77 | public function getImageHistory(): array
78 | {
79 | return $this->imageHistory;
80 | }
81 |
82 | /**
83 | * Clear all request history.
84 | */
85 | public function clearHistory(): static
86 | {
87 | $this->chatHistory = [];
88 | $this->imageHistory = [];
89 |
90 | return $this;
91 | }
92 |
93 | /**
94 | * Return a dummy chat response.
95 | */
96 | public function chat(string $prompt, array $options = []): ChatResponse
97 | {
98 | $this->chatHistory[] = [
99 | 'prompt' => $prompt,
100 | 'options' => $options,
101 | ];
102 |
103 | $content = $this->chatResponse ?? "This is a dummy response to: {$prompt}";
104 |
105 | // Simulate token usage (rough estimate: ~4 chars per token)
106 | $promptTokens = (int) ceil(strlen($prompt) / 4);
107 | $completionTokens = (int) ceil(strlen($content) / 4);
108 |
109 | return new ChatResponse(
110 | content: $content,
111 | raw: [
112 | 'dummy' => true,
113 | 'prompt' => $prompt,
114 | 'options' => $options,
115 | ],
116 | model: self::DEFAULT_MODEL,
117 | id: self::DEFAULT_ID_PREFIX . uniqid(),
118 | latencyMs: 0.0,
119 | tokenUsage: new TokenUsage($promptTokens, $completionTokens),
120 | );
121 | }
122 |
123 | /**
124 | * Return a dummy image response.
125 | */
126 | public function generate(array $params): ImageResponse
127 | {
128 | $this->imageHistory[] = $params;
129 |
130 | $url = $this->imageUrl ?? self::DEFAULT_IMAGE_URL;
131 |
132 | return new ImageResponse(
133 | url: $url,
134 | base64: null,
135 | raw: [
136 | 'dummy' => true,
137 | 'params' => $params,
138 | ],
139 | revisedPrompt: $params['prompt'] ?? null,
140 | );
141 | }
142 | }
143 |
144 |
--------------------------------------------------------------------------------
/src/Clients/Anthropic/AnthropicClient.php:
--------------------------------------------------------------------------------
1 | $this->config['api_key'],
61 | 'anthropic-version' => self::API_VERSION,
62 | ])
63 | ->baseUrl($this->config['base_url'] ?? self::DEFAULT_BASE_URL)
64 | ->acceptJson()
65 | ->asJson();
66 | }
67 |
68 | /**
69 | * Send a chat message to Anthropic Claude.
70 | */
71 | public function chat(string $prompt, array $options = []): ChatResponse
72 | {
73 | $startTime = microtime(true);
74 |
75 | // Anthropic uses a different message format
76 | $messages = $options['messages'] ?? [
77 | ['role' => 'user', 'content' => $prompt],
78 | ];
79 |
80 | $payload = [
81 | 'model' => $options['model'] ?? $this->config['chat_model'] ?? self::DEFAULT_CHAT_MODEL,
82 | 'messages' => $messages,
83 | 'max_tokens' => $options['max_tokens'] ?? self::DEFAULT_MAX_TOKENS,
84 | ];
85 |
86 | // Anthropic handles system prompts separately
87 | if (isset($options['system'])) {
88 | $payload['system'] = $options['system'];
89 | }
90 |
91 | // Add optional parameters if provided
92 | if (isset($options['temperature'])) {
93 | $payload['temperature'] = $options['temperature'];
94 | }
95 |
96 | if (isset($options['top_p'])) {
97 | $payload['top_p'] = $options['top_p'];
98 | }
99 |
100 | if (isset($options['top_k'])) {
101 | $payload['top_k'] = $options['top_k'];
102 | }
103 |
104 | $response = $this->http()->post(self::ENDPOINT_MESSAGES, $payload);
105 |
106 | if (! $response->successful()) {
107 | throw ProviderRequestException::fromResponse(self::ERROR_CHAT_FAILED, $response);
108 | }
109 |
110 | $latencyMs = (microtime(true) - $startTime) * 1000;
111 |
112 | $data = $response->json();
113 |
114 | // Anthropic returns content as an array of content blocks
115 | $content = '';
116 | if (isset($data['content']) && is_array($data['content'])) {
117 | foreach ($data['content'] as $block) {
118 | if (($block['type'] ?? '') === 'text') {
119 | $content .= $block['text'] ?? '';
120 | }
121 | }
122 | }
123 |
124 | // Parse token usage from response (Anthropic uses input_tokens/output_tokens)
125 | $tokenUsage = isset($data['usage'])
126 | ? TokenUsage::fromArray($data['usage'])
127 | : TokenUsage::empty();
128 |
129 | return new ChatResponse(
130 | content: $content,
131 | raw: $data,
132 | model: $data['model'] ?? null,
133 | id: $data['id'] ?? null,
134 | latencyMs: $latencyMs,
135 | tokenUsage: $tokenUsage,
136 | );
137 | }
138 | }
139 |
140 |
--------------------------------------------------------------------------------
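
A hedged call sketch for the client above, using the driver-switching pattern noted in the changelog (`Llm::using('provider')`); it assumes the `Llm` facade proxies `LlmManager::using()` and `chat()`, and that `chat()` forwards the options array to the client.

```php
use Oziri\LlmSuite\Facades\Llm;

// Anthropic takes the system prompt as a separate 'system' field,
// so it is passed as an option rather than as a message.
$answer = Llm::using('anthropic')->chat('Explain PSR-12 in one paragraph.', [
    'system'      => 'You are a concise Laravel mentor.',
    'temperature' => 0.2,
    'max_tokens'  => 512,
]);

// The manager's chat() helper returns the reply as a plain string.
echo $answer;
```
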
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to Laravel LLM Suite
2 |
3 | Thank you for your interest in contributing to Laravel LLM Suite! This document provides guidelines and instructions for contributing.
4 |
5 | ## Code of Conduct
6 |
7 | By participating in this project, you agree to maintain a respectful and inclusive environment for everyone.
8 |
9 | ## How to Contribute
10 |
11 | ### Reporting Bugs
12 |
13 | 1. **Search existing issues** to avoid duplicates
14 | 2. **Create a new issue** with:
15 | - Clear, descriptive title
16 | - Steps to reproduce
17 | - Expected vs actual behavior
18 | - Laravel version, PHP version, package version
19 | - Relevant code snippets or error messages
20 |
21 | ### Suggesting Features
22 |
23 | 1. **Search existing issues** to see if it's been suggested
24 | 2. **Create a new issue** with:
25 | - Clear description of the feature
26 | - Use case / why it's needed
27 | - Proposed API or implementation (optional)
28 |
29 | ### Pull Requests
30 |
31 | 1. **Fork the repository**
32 | 2. **Create a feature branch** from `main`:
33 | ```bash
34 | git checkout -b feature/your-feature-name
35 | ```
36 | 3. **Make your changes** following our coding standards
37 | 4. **Write/update tests** for your changes
38 | 5. **Update documentation** (README, docblocks)
39 | 6. **Commit with clear messages**:
40 | ```bash
41 | git commit -m "feat: Add support for XYZ"
42 | ```
43 | 7. **Push and create a Pull Request**
44 |
45 | ## Development Setup
46 |
47 | ### Requirements
48 |
49 | - PHP 8.1+
50 | - Composer
51 | - Git
52 |
53 | ### Installation
54 |
55 | ```bash
56 | # Clone your fork
57 | git clone https://github.com/YOUR_USERNAME/laravel-llm-suite.git
58 | cd laravel-llm-suite
59 |
60 | # Install dependencies
61 | composer install
62 |
63 | # Run tests
64 | ./vendor/bin/phpunit
65 | ```
66 |
67 | ### Running Tests
68 |
69 | ```bash
70 | # All tests
71 | ./vendor/bin/phpunit
72 |
73 | # Specific test file
74 | ./vendor/bin/phpunit tests/Unit/LlmManagerTest.php
75 |
76 | # With coverage
77 | ./vendor/bin/phpunit --coverage-html coverage
78 | ```
79 |
80 | ## Coding Standards
81 |
82 | ### PHP Style
83 |
84 | - Follow **PSR-12** coding standard
85 | - Use `declare(strict_types=1);` in all PHP files
86 | - Use PHP 8.1+ features appropriately
87 |
88 | ### Code Structure
89 |
90 | ```php
91 | property = $property;
109 | }
110 |
111 | // 4. Public methods
112 | public function doSomething(): string
113 | {
114 | return $this->helperMethod();
115 | }
116 |
117 | // 5. Protected/Private methods
118 | protected function helperMethod(): string
119 | {
120 | return self::DEFAULT_VALUE;
121 | }
122 | }
123 | ```
124 |
125 | ### Constants for Magic Values
126 |
127 | Always use class constants instead of hardcoded strings:
128 |
129 | ```php
130 | // Good
131 | protected const ENDPOINT_CHAT = '/chat/completions';
132 | $response = $this->http()->post(self::ENDPOINT_CHAT, $payload);
133 |
134 | // Bad
135 | $response = $this->http()->post('/chat/completions', $payload);
136 | ```
137 |
138 | ### Type Hints
139 |
140 | Always use type hints:
141 |
142 | ```php
143 | public function process(string $input, array $options = []): Result
144 | ```
145 |
146 | ## Commit Messages
147 |
148 | Format: `<type>: <description>`
149 |
150 | | Type | Description |
151 | |------|-------------|
152 | | `feat` | New feature |
153 | | `fix` | Bug fix |
154 | | `docs` | Documentation changes |
155 | | `refactor` | Code refactoring |
156 | | `test` | Adding or updating tests |
157 | | `chore` | Maintenance tasks |
158 |
159 | Examples:
160 | ```
161 | feat: Add Google Gemini provider
162 | fix: Handle null response from API
163 | docs: Add conversation examples to README
164 | test: Add tests for TokenUsage class
165 | ```
166 |
167 | ## Adding a New Provider
168 |
169 | 1. Create client in `src/Clients/NewProvider/NewProviderClient.php`
170 | 2. Implement `ChatClient` and/or `ImageClient` interfaces
171 | 3. Register in `LlmManager::resolve()` method
172 | 4. Add configuration in `config/llm-suite.php`
173 | 5. Write tests in `tests/Unit/`
174 | 6. Update README with usage examples
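
As a starting point, a new provider client might look like the sketch below. This is illustrative only: `NewProviderClient` is a hypothetical name, and the `ChatClient` contract is assumed to mirror the `chat(string $prompt, array $options = []): ChatResponse` signature used by the bundled clients.

```php
<?php

declare(strict_types=1);

namespace Oziri\LlmSuite\Clients\NewProvider;

use Oziri\LlmSuite\Contracts\ChatClient;
use Oziri\LlmSuite\Support\ChatResponse;
use Oziri\LlmSuite\Support\TokenUsage;

class NewProviderClient implements ChatClient
{
    protected const DEFAULT_CHAT_MODEL = 'new-provider-model';

    public function __construct(protected array $config = [])
    {
    }

    public function chat(string $prompt, array $options = []): ChatResponse
    {
        // Call the provider's HTTP API here, then map its payload into ChatResponse.
        return new ChatResponse(
            content: 'stubbed response',
            raw: [],
            model: $options['model'] ?? $this->config['chat_model'] ?? self::DEFAULT_CHAT_MODEL,
            id: null,
            latencyMs: 0.0,
            tokenUsage: TokenUsage::empty(),
        );
    }
}
```
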
175 |
176 | ## Documentation
177 |
178 | - Update README.md for user-facing changes
179 | - Add PHPDoc comments to all public methods
180 | - Include `@param`, `@return`, and `@throws` tags
181 |
182 | ## Questions?
183 |
184 | If you have questions about contributing, feel free to:
185 | - Open a GitHub Discussion
186 | - Create an issue with the "question" label
187 |
188 | Thank you for contributing!
189 |
190 |
--------------------------------------------------------------------------------
/tests/Unit/LlmManagerTest.php:
--------------------------------------------------------------------------------
1 | 'dummy',
20 | 'providers' => [
21 | 'dummy' => [
22 | 'driver' => 'dummy',
23 | ],
24 | ],
25 | ];
26 |
27 | return new LlmManager(array_merge($defaultConfig, $config));
28 | }
29 |
30 | public function test_can_get_default_provider(): void
31 | {
32 | $manager = $this->getManager();
33 |
34 | $this->assertEquals('dummy', $manager->getDefaultProvider());
35 | }
36 |
37 | public function test_can_resolve_client(): void
38 | {
39 | $manager = $this->getManager();
40 | $client = $manager->client();
41 |
42 | $this->assertInstanceOf(DummyClient::class, $client);
43 | }
44 |
45 | public function test_client_implements_chat_interface(): void
46 | {
47 | $manager = $this->getManager();
48 | $client = $manager->client();
49 |
50 | $this->assertInstanceOf(ChatClient::class, $client);
51 | }
52 |
53 | public function test_client_implements_image_interface(): void
54 | {
55 | $manager = $this->getManager();
56 | $client = $manager->client();
57 |
58 | $this->assertInstanceOf(ImageClient::class, $client);
59 | }
60 |
61 | public function test_can_switch_provider_using_using_method(): void
62 | {
63 | $manager = $this->getManager([
64 | 'providers' => [
65 | 'dummy' => ['driver' => 'dummy'],
66 | 'another' => ['driver' => 'dummy'],
67 | ],
68 | ]);
69 |
70 | $result = $manager->using('another');
71 |
72 | $this->assertSame($manager, $result);
73 | }
74 |
75 | public function test_throws_exception_for_missing_provider(): void
76 | {
77 | $manager = $this->getManager();
78 |
79 | $this->expectException(ProviderConfigException::class);
80 | $this->expectExceptionMessage('LLM provider [nonexistent] is not configured.');
81 |
82 | $manager->client('nonexistent');
83 | }
84 |
85 | public function test_throws_exception_for_unsupported_driver(): void
86 | {
87 | $manager = new LlmManager([
88 | 'default' => 'test',
89 | 'providers' => [
90 | 'test' => ['driver' => 'unsupported'],
91 | ],
92 | ]);
93 |
94 | $this->expectException(ProviderConfigException::class);
95 | $this->expectExceptionMessage('Unsupported LLM driver [unsupported].');
96 |
97 | $manager->client();
98 | }
99 |
100 | public function test_can_send_chat_message(): void
101 | {
102 | $manager = $this->getManager();
103 | $response = $manager->chat('Hello');
104 |
105 | $this->assertIsString($response);
106 | $this->assertStringContainsString('Hello', $response);
107 | }
108 |
109 | public function test_can_generate_image(): void
110 | {
111 | $manager = $this->getManager();
112 | $response = $manager->image()->generate(['prompt' => 'A cat']);
113 |
114 | $this->assertNotNull($response->url);
115 | }
116 |
117 | public function test_can_extend_with_custom_driver(): void
118 | {
119 | $manager = $this->getManager([
120 | 'providers' => [
121 | 'custom' => ['driver' => 'custom'],
122 | ],
123 | ]);
124 |
125 | $customClient = new DummyClient(['chat_response' => 'Custom response']);
126 |
127 | $manager->extend('custom', function () use ($customClient) {
128 | return $customClient;
129 | });
130 |
131 | $result = $manager->using('custom')->chat('Test');
132 |
133 | $this->assertEquals('Custom response', $result);
134 | }
135 |
136 | public function test_clients_are_cached(): void
137 | {
138 | $manager = $this->getManager();
139 |
140 | $client1 = $manager->client('dummy');
141 | $client2 = $manager->client('dummy');
142 |
143 | $this->assertSame($client1, $client2);
144 | }
145 |
146 | public function test_can_forget_cached_client(): void
147 | {
148 | $manager = $this->getManager();
149 |
150 | $client1 = $manager->client('dummy');
151 | $manager->forgetClient('dummy');
152 | $client2 = $manager->client('dummy');
153 |
154 | $this->assertNotSame($client1, $client2);
155 | }
156 |
157 | public function test_can_list_providers(): void
158 | {
159 | $manager = $this->getManager([
160 | 'providers' => [
161 | 'dummy' => ['driver' => 'dummy'],
162 | 'another' => ['driver' => 'dummy'],
163 | ],
164 | ]);
165 |
166 | $providers = $manager->getProviders();
167 |
168 | $this->assertContains('dummy', $providers);
169 | $this->assertContains('another', $providers);
170 | }
171 | }
172 |
173 |
--------------------------------------------------------------------------------
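
Condensing the `extend()` test above into application code, a custom driver can be registered with a factory closure and selected with `using()`. A sketch (in practice the closure would build a real client rather than the dummy):

```php
use Oziri\LlmSuite\Clients\Dummy\DummyClient;
use Oziri\LlmSuite\Managers\LlmManager;

$manager = new LlmManager([
    'default'   => 'dummy',
    'providers' => [
        'dummy'  => ['driver' => 'dummy'],
        'custom' => ['driver' => 'custom'],
    ],
]);

// Register the 'custom' driver with a factory closure.
$manager->extend('custom', fn () => new DummyClient(['chat_response' => 'Custom response']));

echo $manager->using('custom')->chat('Test'); // "Custom response"
```
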
/src/Support/Conversation.php:
--------------------------------------------------------------------------------
1 | id = $id;
42 | $this->store = $store;
43 | $this->client = $client;
44 | $this->provider = $provider;
45 | }
46 |
47 | /**
48 | * Get the conversation ID.
49 | */
50 | public function getId(): string
51 | {
52 | return $this->id;
53 | }
54 |
55 | /**
56 | * Set the system prompt for this conversation.
57 | */
58 | public function system(string $prompt): static
59 | {
60 | $this->store->setSystemPrompt($this->id, $prompt);
61 |
62 | return $this;
63 | }
64 |
65 | /**
66 | * Get the system prompt for this conversation.
67 | */
68 | public function getSystemPrompt(): ?string
69 | {
70 | return $this->store->getSystemPrompt($this->id);
71 | }
72 |
73 | /**
74 | * Send a chat message and maintain history.
75 | */
76 | public function chat(string $message, array $options = []): ChatResponse
77 | {
78 | // Add user message to history
79 | $this->addMessage('user', $message);
80 |
81 | // Get all messages for context
82 | $messages = $this->getMessages();
83 |
84 | // Build options with message history
85 | $chatOptions = array_merge($options, [
86 | 'messages' => $messages,
87 | ]);
88 |
89 | // Add system prompt if set
90 | $systemPrompt = $this->getSystemPrompt();
91 | if ($systemPrompt && ! isset($options['system'])) {
92 | $chatOptions['system'] = $systemPrompt;
93 | }
94 |
95 | // Send to LLM
96 | $response = $this->client->chat('', $chatOptions);
97 |
98 | // Add assistant response to history
99 | $this->addMessage('assistant', $response->content);
100 |
101 | return $response;
102 | }
103 |
104 | /**
105 | * Add a message to the conversation history.
106 | */
107 | public function addMessage(string $role, string $content): static
108 | {
109 | $this->store->addMessage($this->id, [
110 | 'role' => $role,
111 | 'content' => $content,
112 | ]);
113 |
114 | return $this;
115 | }
116 |
117 | /**
118 | * Get all messages in the conversation.
119 | */
120 | public function getMessages(): array
121 | {
122 | return $this->store->getMessages($this->id);
123 | }
124 |
125 | /**
126 | * Get the number of messages in the conversation.
127 | */
128 | public function getMessageCount(): int
129 | {
130 | return count($this->getMessages());
131 | }
132 |
133 | /**
134 | * Clear the conversation history (keeps system prompt).
135 | */
136 | public function clear(): static
137 | {
138 | $this->store->clear($this->id);
139 |
140 | return $this;
141 | }
142 |
143 | /**
144 | * Delete the entire conversation (including system prompt).
145 | */
146 | public function delete(): void
147 | {
148 | $this->store->delete($this->id);
149 | }
150 |
151 | /**
152 | * Check if the conversation has any messages.
153 | */
154 | public function hasMessages(): bool
155 | {
156 | return $this->getMessageCount() > 0;
157 | }
158 |
159 | /**
160 | * Get the last message in the conversation.
161 | */
162 | public function getLastMessage(): ?array
163 | {
164 | $messages = $this->getMessages();
165 |
166 | return empty($messages) ? null : end($messages);
167 | }
168 |
169 | /**
170 | * Get the last N messages.
171 | */
172 | public function getLastMessages(int $count): array
173 | {
174 | $messages = $this->getMessages();
175 |
176 | return array_slice($messages, -$count);
177 | }
178 |
179 | /**
180 | * Load messages from an external source.
181 | */
182 | public function loadHistory(array $messages): static
183 | {
184 | $this->store->saveMessages($this->id, $messages);
185 |
186 | return $this;
187 | }
188 |
189 | /**
190 | * Export the conversation history.
191 | */
192 | public function export(): array
193 | {
194 | return [
195 | 'id' => $this->id,
196 | 'provider' => $this->provider,
197 | 'system_prompt' => $this->getSystemPrompt(),
198 | 'messages' => $this->getMessages(),
199 | ];
200 | }
201 |
202 | /**
203 | * Get the chat client being used.
204 | */
205 | public function getClient(): ChatClient
206 | {
207 | return $this->client;
208 | }
209 |
210 | /**
211 | * Get the provider name.
212 | */
213 | public function getProvider(): string
214 | {
215 | return $this->provider;
216 | }
217 | }
218 |
219 |
--------------------------------------------------------------------------------
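
A usage sketch for the conversation object above. The constructor argument order `($id, $store, $client, $provider)` is inferred from the property assignments and may differ from the actual signature, and `SessionStore` is assumed to need no constructor arguments:

```php
use Oziri\LlmSuite\Clients\Dummy\DummyClient;
use Oziri\LlmSuite\ConversationStores\SessionStore;
use Oziri\LlmSuite\Support\Conversation;

$conversation = new Conversation(
    'support-thread-42',
    new SessionStore(),
    new DummyClient(),
    'dummy'
);

$conversation->system('You are a helpful support agent.');
$conversation->chat('My invoice is missing.');
$conversation->chat('Can you resend it to my new email?');

// Full history (id, provider, system_prompt, messages) for persistence or display.
$export = $conversation->export();
```
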
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | oziriemeka@gmail.com.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/src/ConversationStores/DatabaseStore.php:
--------------------------------------------------------------------------------
1 | table = $config['table'] ?? 'llm_conversations';
24 | }
25 |
26 | /**
27 | * Get the messages for a conversation.
28 | */
29 | public function getMessages(string $conversationId): array
30 | {
31 | $record = $this->getRecord($conversationId);
32 |
33 | if (! $record) {
34 | return [];
35 | }
36 |
37 | return json_decode($record->messages, true) ?? [];
38 | }
39 |
40 | /**
41 | * Save messages for a conversation.
42 | */
43 | public function saveMessages(string $conversationId, array $messages): void
44 | {
45 | $record = $this->getRecord($conversationId);
46 |
47 | if ($record) {
48 | DB::table($this->table)
49 | ->where('conversation_id', $conversationId)
50 | ->update([
51 | 'messages' => json_encode($messages),
52 | 'updated_at' => now(),
53 | ]);
54 | } else {
55 | DB::table($this->table)->insert([
56 | 'conversation_id' => $conversationId,
57 | 'messages' => json_encode($messages),
58 | 'system_prompt' => null,
59 | 'created_at' => now(),
60 | 'updated_at' => now(),
61 | ]);
62 | }
63 | }
64 |
65 | /**
66 | * Add a message to a conversation.
67 | */
68 | public function addMessage(string $conversationId, array $message): void
69 | {
70 | $messages = $this->getMessages($conversationId);
71 | $messages[] = $message;
72 |
73 | $this->saveMessages($conversationId, $messages);
74 | }
75 |
76 | /**
77 | * Get the system prompt for a conversation.
78 | */
79 | public function getSystemPrompt(string $conversationId): ?string
80 | {
81 | $record = $this->getRecord($conversationId);
82 |
83 | return $record?->system_prompt;
84 | }
85 |
86 | /**
87 | * Set the system prompt for a conversation.
88 | */
89 | public function setSystemPrompt(string $conversationId, string $prompt): void
90 | {
91 | $record = $this->getRecord($conversationId);
92 |
93 | if ($record) {
94 | DB::table($this->table)
95 | ->where('conversation_id', $conversationId)
96 | ->update([
97 | 'system_prompt' => $prompt,
98 | 'updated_at' => now(),
99 | ]);
100 | } else {
101 | DB::table($this->table)->insert([
102 | 'conversation_id' => $conversationId,
103 | 'messages' => json_encode([]),
104 | 'system_prompt' => $prompt,
105 | 'created_at' => now(),
106 | 'updated_at' => now(),
107 | ]);
108 | }
109 | }
110 |
111 | /**
112 | * Clear all messages from a conversation.
113 | */
114 | public function clear(string $conversationId): void
115 | {
116 | DB::table($this->table)
117 | ->where('conversation_id', $conversationId)
118 | ->update([
119 | 'messages' => json_encode([]),
120 | 'updated_at' => now(),
121 | ]);
122 | }
123 |
124 | /**
125 | * Check if a conversation exists.
126 | */
127 | public function exists(string $conversationId): bool
128 | {
129 | return $this->getRecord($conversationId) !== null;
130 | }
131 |
132 | /**
133 | * Delete a conversation entirely.
134 | */
135 | public function delete(string $conversationId): void
136 | {
137 | DB::table($this->table)
138 | ->where('conversation_id', $conversationId)
139 | ->delete();
140 | }
141 |
142 | /**
143 | * Get a conversation record from the database.
144 | */
145 | protected function getRecord(string $conversationId): ?object
146 | {
147 | return DB::table($this->table)
148 | ->where('conversation_id', $conversationId)
149 | ->first();
150 | }
151 |
152 | /**
153 | * Get all conversation IDs.
154 | */
155 | public function all(): array
156 | {
157 | return DB::table($this->table)
158 | ->orderBy('updated_at', 'desc')
159 | ->pluck('conversation_id')
160 | ->toArray();
161 | }
162 |
163 | /**
164 | * Get all conversations with metadata.
165 | */
166 | public function allWithMetadata(): array
167 | {
168 | return DB::table($this->table)
169 | ->orderBy('updated_at', 'desc')
170 | ->get()
171 | ->map(function ($record) {
172 | return [
173 | 'id' => $record->conversation_id,
174 | 'system_prompt' => $record->system_prompt,
175 | 'message_count' => count(json_decode($record->messages, true) ?? []),
176 | 'created_at' => $record->created_at,
177 | 'updated_at' => $record->updated_at,
178 | ];
179 | })
180 | ->toArray();
181 | }
182 | }
183 |
184 |
--------------------------------------------------------------------------------
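
A small sketch showing how the store above could back a conversation index page; it assumes `DatabaseStore` accepts its config array directly when constructed outside the container:

```php
use Oziri\LlmSuite\ConversationStores\DatabaseStore;

$store = new DatabaseStore(['table' => 'llm_conversations']);

foreach ($store->allWithMetadata() as $conversation) {
    // Each entry exposes: id, system_prompt, message_count, created_at, updated_at.
    echo "{$conversation['id']}: {$conversation['message_count']} messages\n";
}
```
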
/src/Clients/LmStudio/LmStudioClient.php:
--------------------------------------------------------------------------------
1 | config['base_url'])) {
73 | return rtrim($this->config['base_url'], '/');
74 | }
75 |
76 | $protocol = $this->config['protocol'] ?? self::DEFAULT_PROTOCOL;
77 | $host = $this->config['host'] ?? self::DEFAULT_HOST;
78 | $port = $this->config['port'] ?? self::DEFAULT_PORT;
79 |
80 | return "{$protocol}://{$host}:{$port}/v1";
81 | }
82 |
83 | /**
84 | * Get a configured HTTP client for LM Studio API requests.
85 | */
86 | protected function http(): PendingRequest
87 | {
88 | $request = Http::baseUrl($this->getBaseUrl())
89 | ->acceptJson()
90 | ->asJson()
91 | ->timeout($this->config['timeout'] ?? self::DEFAULT_TIMEOUT);
92 |
93 | // LM Studio doesn't require auth, but accepts it if provided
94 | if (! empty($this->config['api_key'])) {
95 | $request->withToken($this->config['api_key']);
96 | }
97 |
98 | return $request;
99 | }
100 |
101 | /**
102 | * Send a chat message to LM Studio.
103 | */
104 | public function chat(string $prompt, array $options = []): ChatResponse
105 | {
106 | $startTime = microtime(true);
107 |
108 | $messages = $options['messages'] ?? [
109 | ['role' => 'user', 'content' => $prompt],
110 | ];
111 |
112 | // If a system prompt is provided, prepend it
113 | if (isset($options['system'])) {
114 | array_unshift($messages, ['role' => 'system', 'content' => $options['system']]);
115 | }
116 |
117 | $payload = [
118 | 'model' => $options['model'] ?? $this->config['chat_model'] ?? self::DEFAULT_CHAT_MODEL,
119 | 'messages' => $messages,
120 | ];
121 |
122 | // Add optional parameters if provided
123 | if (isset($options['temperature'])) {
124 | $payload['temperature'] = $options['temperature'];
125 | }
126 |
127 | if (isset($options['max_tokens'])) {
128 | $payload['max_tokens'] = $options['max_tokens'];
129 | }
130 |
131 | if (isset($options['top_p'])) {
132 | $payload['top_p'] = $options['top_p'];
133 | }
134 |
135 | // LM Studio specific: stop sequences
136 | if (isset($options['stop'])) {
137 | $payload['stop'] = $options['stop'];
138 | }
139 |
140 | $response = $this->http()->post(self::ENDPOINT_CHAT, $payload);
141 |
142 | if (! $response->successful()) {
143 | throw ProviderRequestException::fromResponse(self::ERROR_CHAT_FAILED, $response);
144 | }
145 |
146 | $latencyMs = (microtime(true) - $startTime) * 1000;
147 |
148 | $data = $response->json();
149 | $content = $data['choices'][0]['message']['content'] ?? '';
150 |
151 | // Parse token usage from response (OpenAI-compatible format)
152 | $tokenUsage = isset($data['usage'])
153 | ? TokenUsage::fromArray($data['usage'])
154 | : TokenUsage::empty();
155 |
156 | return new ChatResponse(
157 | content: $content,
158 | raw: $data,
159 | model: $data['model'] ?? null,
160 | id: $data['id'] ?? null,
161 | latencyMs: $latencyMs,
162 | tokenUsage: $tokenUsage,
163 | );
164 | }
165 |
166 | /**
167 | * Check if LM Studio server is running and accessible.
168 | */
169 | public function isAvailable(): bool
170 | {
171 | try {
172 | $response = $this->http()->get(self::ENDPOINT_MODELS);
173 | return $response->successful();
174 | } catch (\Exception $e) {
175 | return false;
176 | }
177 | }
178 |
179 | /**
180 | * Get the list of available models from LM Studio.
181 | */
182 | public function getAvailableModels(): array
183 | {
184 | try {
185 | $response = $this->http()->get(self::ENDPOINT_MODELS);
186 |
187 | if (! $response->successful()) {
188 | return [];
189 | }
190 |
191 | $data = $response->json();
192 | return array_column($data['data'] ?? [], 'id');
193 | } catch (\Exception $e) {
194 | return [];
195 | }
196 | }
197 | }
198 |
199 |
--------------------------------------------------------------------------------
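
Because a local LM Studio server may simply be offline, a caller can probe it before chatting. A sketch (assuming the manager's `client()` accessor, as used in the tests):

```php
use Oziri\LlmSuite\Managers\LlmManager;

$client = app(LlmManager::class)->client('lmstudio');

if ($client->isAvailable()) {
    $models   = $client->getAvailableModels();  // model ids reported by the server
    $response = $client->chat('Ping', ['model' => $models[0] ?? 'local-model']);

    echo $response->content;
} else {
    // Fall back to another provider or surface a "server offline" message.
}
```
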
/src/Helpers/LlmFake.php:
--------------------------------------------------------------------------------
1 | client = new DummyClient();
23 | $this->createFakeManager();
24 | }
25 |
26 | /**
27 | * Create a fake LLM manager that uses our custom dummy client.
28 | */
29 | protected function createFakeManager(): void
30 | {
31 | $config = [
32 | 'default' => 'fake',
33 | 'providers' => [
34 | 'fake' => [
35 | 'driver' => 'fake',
36 | ],
37 | ],
38 | ];
39 |
40 | $this->manager = new LlmManager($config);
41 |
42 | // Register our custom fake driver that returns our client instance
43 | $client = $this->client;
44 | $this->manager->extend('fake', function () use ($client) {
45 | return $client;
46 | });
47 |
48 | // Override the manager binding in the container
49 | App::instance(LlmManager::class, $this->manager);
50 | App::instance('llm-suite', $this->manager);
51 | }
52 |
53 | /**
54 | * Set the chat response that should be returned.
55 | */
56 | public function shouldReturnChat(string $response): static
57 | {
58 | $this->client->setChatResponse($response);
59 |
60 | return $this;
61 | }
62 |
63 | /**
64 | * Set the image URL that should be returned.
65 | */
66 | public function shouldReturnImage(string $url): static
67 | {
68 | $this->client->setImageUrl($url);
69 |
70 | return $this;
71 | }
72 |
73 | /**
74 | * Get the chat request history.
75 | */
76 | public function getChatHistory(): array
77 | {
78 | return $this->client->getChatHistory();
79 | }
80 |
81 | /**
82 | * Get the image request history.
83 | */
84 | public function getImageHistory(): array
85 | {
86 | return $this->client->getImageHistory();
87 | }
88 |
89 | /**
90 | * Assert that a chat request was made with the given prompt.
91 | */
92 | public function assertChatSent(string $prompt): static
93 | {
94 | $history = $this->getChatHistory();
95 | $found = false;
96 |
97 | foreach ($history as $request) {
98 | if ($request['prompt'] === $prompt) {
99 | $found = true;
100 | break;
101 | }
102 | }
103 |
104 | if (! $found) {
105 | throw new \PHPUnit\Framework\AssertionFailedError(
106 | "Expected chat request with prompt [{$prompt}] was not sent."
107 | );
108 | }
109 |
110 | return $this;
111 | }
112 |
113 | /**
114 | * Assert that an image request was made with the given prompt.
115 | */
116 | public function assertImageSent(string $prompt): static
117 | {
118 | $history = $this->getImageHistory();
119 | $found = false;
120 |
121 | foreach ($history as $request) {
122 | if (($request['prompt'] ?? '') === $prompt) {
123 | $found = true;
124 | break;
125 | }
126 | }
127 |
128 | if (! $found) {
129 | throw new \PHPUnit\Framework\AssertionFailedError(
130 | "Expected image request with prompt [{$prompt}] was not sent."
131 | );
132 | }
133 |
134 | return $this;
135 | }
136 |
137 | /**
138 | * Assert that no chat requests were made.
139 | */
140 | public function assertNoChatSent(): static
141 | {
142 | $history = $this->getChatHistory();
143 |
144 | if (count($history) > 0) {
145 | throw new \PHPUnit\Framework\AssertionFailedError(
146 | 'Expected no chat requests to be sent, but ' . count($history) . ' were sent.'
147 | );
148 | }
149 |
150 | return $this;
151 | }
152 |
153 | /**
154 | * Assert that no image requests were made.
155 | */
156 | public function assertNoImageSent(): static
157 | {
158 | $history = $this->getImageHistory();
159 |
160 | if (count($history) > 0) {
161 | throw new \PHPUnit\Framework\AssertionFailedError(
162 | 'Expected no image requests to be sent, but ' . count($history) . ' were sent.'
163 | );
164 | }
165 |
166 | return $this;
167 | }
168 |
169 | /**
170 | * Assert a specific number of chat requests were made.
171 | */
172 | public function assertChatCount(int $count): static
173 | {
174 | $actual = count($this->getChatHistory());
175 |
176 | if ($actual !== $count) {
177 | throw new \PHPUnit\Framework\AssertionFailedError(
178 | "Expected {$count} chat requests, but {$actual} were sent."
179 | );
180 | }
181 |
182 | return $this;
183 | }
184 |
185 | /**
186 | * Assert a specific number of image requests were made.
187 | */
188 | public function assertImageCount(int $count): static
189 | {
190 | $actual = count($this->getImageHistory());
191 |
192 | if ($actual !== $count) {
193 | throw new \PHPUnit\Framework\AssertionFailedError(
194 | "Expected {$count} image requests, but {$actual} were sent."
195 | );
196 | }
197 |
198 | return $this;
199 | }
200 |
201 | /**
202 | * Clear all request history.
203 | */
204 | public function clearHistory(): static
205 | {
206 | $this->client->clearHistory();
207 |
208 | return $this;
209 | }
210 |
211 | /**
212 | * Get the underlying dummy client.
213 | */
214 | public function getClient(): DummyClient
215 | {
216 | return $this->client;
217 | }
218 |
219 | /**
220 | * Get the fake manager.
221 | */
222 | public function getManager(): LlmManager
223 | {
224 | return $this->manager;
225 | }
226 |
227 | /**
228 | * Static factory method for cleaner usage.
229 | */
230 | public static function create(): static
231 | {
232 | return new static();
233 | }
234 | }
235 |
--------------------------------------------------------------------------------
/src/Clients/OpenAI/OpenAIClient.php:
--------------------------------------------------------------------------------
1 | config['api_key'])
77 | ->baseUrl($this->config['base_url'] ?? self::DEFAULT_BASE_URL)
78 | ->acceptJson()
79 | ->asJson();
80 | }
81 |
82 | /**
83 | * Send a chat message to OpenAI.
84 | */
85 | public function chat(string $prompt, array $options = []): ChatResponse
86 | {
87 | $startTime = microtime(true);
88 |
89 | $messages = $options['messages'] ?? [
90 | ['role' => 'user', 'content' => $prompt],
91 | ];
92 |
93 | // If a system prompt is provided, prepend it
94 | if (isset($options['system'])) {
95 | array_unshift($messages, ['role' => 'system', 'content' => $options['system']]);
96 | }
97 |
98 | $payload = [
99 | 'model' => $options['model'] ?? $this->config['chat_model'] ?? self::DEFAULT_CHAT_MODEL,
100 | 'messages' => $messages,
101 | ];
102 |
103 | // Add optional parameters if provided
104 | if (isset($options['temperature'])) {
105 | $payload['temperature'] = $options['temperature'];
106 | }
107 |
108 | if (isset($options['max_tokens'])) {
109 | $payload['max_tokens'] = $options['max_tokens'];
110 | }
111 |
112 | if (isset($options['top_p'])) {
113 | $payload['top_p'] = $options['top_p'];
114 | }
115 |
116 | $response = $this->http()->post(self::ENDPOINT_CHAT, $payload);
117 |
118 | if (! $response->successful()) {
119 | throw ProviderRequestException::fromResponse(self::ERROR_CHAT_FAILED, $response);
120 | }
121 |
122 | $latencyMs = (microtime(true) - $startTime) * 1000;
123 |
124 | $data = $response->json();
125 | $content = $data['choices'][0]['message']['content'] ?? '';
126 |
127 | // Parse token usage from response
128 | $tokenUsage = isset($data['usage'])
129 | ? TokenUsage::fromArray($data['usage'])
130 | : TokenUsage::empty();
131 |
132 | return new ChatResponse(
133 | content: $content,
134 | raw: $data,
135 | model: $data['model'] ?? null,
136 | id: $data['id'] ?? null,
137 | latencyMs: $latencyMs,
138 | tokenUsage: $tokenUsage,
139 | );
140 | }
141 |
142 | /**
143 | * Generate an image using OpenAI's DALL-E.
144 | */
145 | public function generate(array $params): ImageResponse
146 | {
147 | $payload = [
148 | 'model' => $params['model'] ?? $this->config['image_model'] ?? self::DEFAULT_IMAGE_MODEL,
149 | 'prompt' => $params['prompt'] ?? '',
150 | 'size' => $params['size'] ?? self::DEFAULT_IMAGE_SIZE,
151 | 'n' => $params['n'] ?? 1,
152 | ];
153 |
154 | // Add optional parameters
155 | if (isset($params['quality'])) {
156 | $payload['quality'] = $params['quality'];
157 | }
158 |
159 | if (isset($params['style'])) {
160 | $payload['style'] = $params['style'];
161 | }
162 |
163 | if (isset($params['response_format'])) {
164 | $payload['response_format'] = $params['response_format'];
165 | }
166 |
167 | $response = $this->http()->post(self::ENDPOINT_IMAGES, $payload);
168 |
169 | if (! $response->successful()) {
170 | throw ProviderRequestException::fromResponse(self::ERROR_IMAGE_FAILED, $response);
171 | }
172 |
173 | $data = $response->json();
174 | $imageData = $data['data'][0] ?? [];
175 |
176 | return new ImageResponse(
177 | url: $imageData['url'] ?? null,
178 | base64: $imageData['b64_json'] ?? null,
179 | raw: $data,
180 | revisedPrompt: $imageData['revised_prompt'] ?? null,
181 | );
182 | }
183 |
184 | /**
185 | * Check if the OpenAI API is accessible.
186 | */
187 | public function isAvailable(): bool
188 | {
189 | try {
190 | $response = $this->http()->get(self::ENDPOINT_MODELS);
191 | return $response->successful();
192 | } catch (\Exception $e) {
193 | return false;
194 | }
195 | }
196 |
197 | /**
198 | * Get the list of available models from OpenAI.
199 | */
200 | public function getAvailableModels(): array
201 | {
202 | try {
203 | $response = $this->http()->get(self::ENDPOINT_MODELS);
204 |
205 | if (! $response->successful()) {
206 | return [];
207 | }
208 |
209 | $data = $response->json();
210 | return array_column($data['data'] ?? [], 'id');
211 | } catch (\Exception $e) {
212 | return [];
213 | }
214 | }
215 | }
216 |
217 |
--------------------------------------------------------------------------------
/src/Managers/LlmManager.php:
--------------------------------------------------------------------------------
1 | config = $config;
58 | }
59 |
60 | /**
61 | * Switch to a different provider for the next operation.
62 | */
63 | public function using(string $name): static
64 | {
65 | $this->current = $name;
66 |
67 | return $this;
68 | }
69 |
70 | /**
71 | * Get the name of the default provider.
72 | */
73 | public function getDefaultProvider(): string
74 | {
75 | return $this->config['default'] ?? 'openai';
76 | }
77 |
78 | /**
79 | * Send a chat message using the current provider.
80 | */
81 | public function chat(string $prompt, array $options = []): string
82 | {
83 | return $this->chatClient()->chat($prompt, $options)->content;
84 | }
85 |
86 | /**
87 | * Send a chat message and get the full response object.
88 | */
89 | public function chatWithResponse(string $prompt, array $options = []): ChatResponse
90 | {
91 | return $this->chatClient()->chat($prompt, $options);
92 | }
93 |
94 | /**
95 | * Get the chat client for the current provider.
96 | */
97 | public function chatClient(): ChatClient
98 | {
99 | $name = $this->getCurrentProviderName();
100 | $client = $this->client();
101 |
102 | if (! $client instanceof ChatClient) {
103 | throw new \InvalidArgumentException("LLM provider [{$name}] does not support chat.");
104 | }
105 |
106 | return $client;
107 | }
108 |
109 | /**
110 | * Get the image client for the current provider.
111 | */
112 | public function image(): ImageClient
113 | {
114 | $name = $this->getCurrentProviderName();
115 | $client = $this->client();
116 |
117 | if (! $client instanceof ImageClient) {
118 | throw new \InvalidArgumentException("LLM provider [{$name}] does not support image generation.");
119 | }
120 |
121 | return $client;
122 | }
123 |
124 | /**
125 | * Generate an image using the current provider.
126 | * Convenience method that wraps image()->generate().
127 | */
128 | public function generateImage(array $params): ImageResponse
129 | {
130 | return $this->image()->generate($params);
131 | }
132 |
133 | /**
134 | * Get the client for a specific provider or the current/default provider.
135 | */
136 | public function client(?string $name = null): LlmClient
137 | {
138 | $name = $name ?? $this->getCurrentProviderName();
139 |
140 | // Reset current after getting the name
141 | $this->current = null;
142 |
143 | if (! isset($this->clients[$name])) {
144 | $this->clients[$name] = $this->resolve($name);
145 | }
146 |
147 | return $this->clients[$name];
148 | }
149 |
150 | /**
151 | * Get the current provider name (from using() or default).
152 | */
153 | protected function getCurrentProviderName(): string
154 | {
155 | return $this->current ?? $this->getDefaultProvider();
156 | }
157 |
158 | /**
159 | * Resolve a provider by name.
160 | */
161 | protected function resolve(string $name): LlmClient
162 | {
163 | $config = Arr::get($this->config, "providers.{$name}");
164 |
165 | if (! $config) {
166 | throw ProviderConfigException::missingProvider($name);
167 | }
168 |
169 | $driver = $config['driver'] ?? null;
170 |
171 | // Check for custom creators first
172 | if (isset($this->customCreators[$driver])) {
173 | return $this->customCreators[$driver]($config);
174 | }
175 |
176 | return match ($driver) {
177 | 'openai' => $this->createOpenAiClient($config),
178 | 'anthropic' => $this->createAnthropicClient($config),
179 | 'lmstudio' => $this->createLmStudioClient($config),
180 | 'dummy' => $this->createDummyClient($config),
181 | default => throw ProviderConfigException::unsupportedDriver($driver ?? 'null'),
182 | };
183 | }
184 |
185 | /**
186 | * Create an OpenAI client instance.
187 | */
188 | protected function createOpenAiClient(array $config): OpenAIClient
189 | {
190 | return new OpenAIClient($config);
191 | }
192 |
193 | /**
194 | * Create an Anthropic client instance.
195 | */
196 | protected function createAnthropicClient(array $config): AnthropicClient
197 | {
198 | return new AnthropicClient($config);
199 | }
200 |
201 | /**
202 | * Create an LM Studio client instance.
203 | */
204 | protected function createLmStudioClient(array $config): LmStudioClient
205 | {
206 | return new LmStudioClient($config);
207 | }
208 |
209 | /**
210 | * Create a Dummy client instance.
211 | */
212 | protected function createDummyClient(array $config): DummyClient
213 | {
214 | return new DummyClient($config);
215 | }
216 |
217 | /**
218 | * Register a custom driver creator.
219 | */
220 | public function extend(string $driver, callable $callback): static
221 | {
222 | $this->customCreators[$driver] = $callback;
223 |
224 | return $this;
225 | }
226 |
227 | /**
228 | * Get all registered provider names.
229 | */
230 | public function getProviders(): array
231 | {
232 | return array_keys($this->config['providers'] ?? []);
233 | }
234 |
235 | /**
236 | * Forget a cached client instance.
237 | */
238 | public function forgetClient(string $name): static
239 | {
240 | unset($this->clients[$name]);
241 |
242 | return $this;
243 | }
244 |
245 | /**
246 | * Forget all cached client instances.
247 | */
248 | public function forgetAllClients(): static
249 | {
250 | $this->clients = [];
251 |
252 | return $this;
253 | }
254 |
255 | /**
256 | * Get the full configuration array.
257 | */
258 | public function getConfig(): array
259 | {
260 | return $this->config;
261 | }
262 |
263 | /**
264 | * Start or resume a conversation.
265 | *
266 | * @param string|null $conversationId Unique ID for the conversation (auto-generated if null)
267 | * @param string|null $provider Provider to use for this conversation
268 | * @return Conversation
269 | */
270 | public function conversation(?string $conversationId = null, ?string $provider = null): Conversation
271 | {
272 | $conversationId = $conversationId ?? $this->generateConversationId();
273 | $provider = $provider ?? $this->getCurrentProviderName();
274 |
275 | // Reset current after getting the name
276 | $this->current = null;
277 |
278 | $client = $this->client($provider);
279 |
280 | if (! $client instanceof ChatClient) {
281 | throw new \InvalidArgumentException("LLM provider [{$provider}] does not support chat.");
282 | }
283 |
284 | return new Conversation(
285 | id: $conversationId,
286 | store: $this->getConversationStore(),
287 | client: $client,
288 | provider: $provider
289 | );
290 | }
291 |
292 | /**
293 | * Get the conversation store instance.
294 | */
295 | public function getConversationStore(): ConversationStore
296 | {
297 | if ($this->conversationStore === null) {
298 | $this->conversationStore = $this->createConversationStore();
299 | }
300 |
301 | return $this->conversationStore;
302 | }
303 |
304 | /**
305 | * Set a custom conversation store.
306 | */
307 | public function setConversationStore(ConversationStore $store): static
308 | {
309 | $this->conversationStore = $store;
310 |
311 | return $this;
312 | }
313 |
314 | /**
315 | * Create the conversation store based on configuration.
316 | */
317 | protected function createConversationStore(): ConversationStore
318 | {
319 | $config = $this->config['conversation'] ?? [];
320 | $driver = $config['driver'] ?? 'session';
321 |
322 | return match ($driver) {
323 | 'session' => new SessionStore(),
324 | 'database' => new DatabaseStore($config),
325 | default => new SessionStore(),
326 | };
327 | }
328 |
329 | /**
330 | * Generate a unique conversation ID.
331 | */
332 | protected function generateConversationId(): string
333 | {
334 | return Str::uuid()->toString();
335 | }
336 |
337 | /**
338 | * Get all conversation IDs.
339 | */
340 | public function conversations(): array
341 | {
342 | return $this->getConversationStore()->all();
343 | }
344 | }
345 |
346 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | # Laravel LLM Suite
6 |
7 | A unified, driver-based Laravel toolkit for chat, image generation, and more across multiple LLM providers.
8 |
9 | ## Features
10 |
11 | - **Unified API** - Same interface regardless of provider (OpenAI, Anthropic, LM Studio, etc.)
12 | - **Driver Pattern** - Switch providers like Laravel's Storage or Mail systems
13 | - **Conversation Management** - Automatic message history with session or database storage
14 | - **Token Usage Tracking** - Monitor token consumption for cost management
15 | - **Local LLM Support** - Run models locally with LM Studio for development and testing
16 | - **Laravel Native** - Config files, facades, service providers
17 | - **Testable** - Built-in faking support for testing without API calls
18 |
19 | ## Supported Providers
20 |
21 | | Provider | Driver | Chat | Image | Models List |
22 | |----------|--------|:----:|:-----:|:-----------:|
23 | | **OpenAI** | `openai` | Yes | Yes | Yes |
24 | | **Anthropic** | `anthropic` | Yes | - | - |
25 | | **LM Studio** | `lmstudio` | Yes | - | Yes |
26 | | **Dummy** | `dummy` | Yes | Yes | - |
27 |
28 | - **OpenAI** - GPT-4, GPT-4.1, DALL-E 3, and other OpenAI models
29 | - **Anthropic** - Claude 3.5 Sonnet, Claude 3 Opus, and other Claude models
30 | - **LM Studio** - Run any open-source LLM locally (Llama, Mistral, Phi, etc.)
31 | - **Dummy** - For testing and offline development (returns configurable mock responses)
32 |
33 | ## Requirements
34 |
35 | - PHP 8.1+
36 | - Laravel 10.x, 11.x, or 12.x
37 |
38 | ## Installation
39 |
40 | Install via Composer:
41 |
42 | ```bash
43 | composer require oziri/laravel-llm-suite
44 | ```
45 |
46 | Publish the configuration file and migrations:
47 |
48 | ```bash
49 | # Publish both config and migrations (recommended)
50 | php artisan vendor:publish --tag=llm-suite
51 |
52 | # Or publish separately:
53 | php artisan vendor:publish --tag=llm-suite-config
54 | php artisan vendor:publish --tag=llm-suite-migrations
55 | ```
56 |
57 | ## Configuration
58 |
59 | Add your API keys to your `.env` file (keep them secret and never commit them to version control):
60 |
61 | ```env
62 | # Default provider
63 | LLM_SUITE_DEFAULT=openai
64 |
65 | # OpenAI
66 | OPENAI_API_KEY=your-openai-api-key
67 | OPENAI_CHAT_MODEL=gpt-4.1-mini
68 | OPENAI_IMAGE_MODEL=dall-e-3
69 |
70 | # Anthropic
71 | ANTHROPIC_API_KEY=your-anthropic-api-key
72 | ANTHROPIC_CHAT_MODEL=claude-3-5-sonnet-20241022
73 |
74 | # LM Studio (local)
75 | LMSTUDIO_PROTOCOL=http # http or https
76 | LMSTUDIO_HOST=127.0.0.1
77 | LMSTUDIO_PORT=1234
78 | LMSTUDIO_API_KEY= # Optional - leave empty if not using authentication
79 | LMSTUDIO_TIMEOUT=120
80 | # LMSTUDIO_BASE_URL= # Optional - override protocol/host/port with full URL
81 |
82 | # Conversation Storage (optional - database is default)
83 | LLM_CONVERSATION_DRIVER=database # or 'session'
84 | ```
85 |
86 | The configuration file (`config/llm-suite.php`) allows you to customize providers:
87 |
88 | ```php
89 | return [
90 | 'default' => env('LLM_SUITE_DEFAULT', 'openai'),
91 |
92 | 'providers' => [
93 | 'openai' => [
94 | 'driver' => 'openai',
95 | 'api_key' => env('OPENAI_API_KEY'),
96 | 'base_url' => env('OPENAI_BASE_URL', 'https://api.openai.com/v1'),
97 | 'chat_model' => env('OPENAI_CHAT_MODEL', 'gpt-4.1-mini'),
98 | 'image_model' => env('OPENAI_IMAGE_MODEL', 'dall-e-3'),
99 | ],
100 |
101 | 'anthropic' => [
102 | 'driver' => 'anthropic',
103 | 'api_key' => env('ANTHROPIC_API_KEY'),
104 | 'base_url' => env('ANTHROPIC_BASE_URL', 'https://api.anthropic.com/v1'),
105 | 'chat_model' => env('ANTHROPIC_CHAT_MODEL', 'claude-3-5-sonnet-20241022'),
106 | ],
107 |
108 | 'lmstudio' => [
109 | 'driver' => 'lmstudio',
110 | 'protocol' => env('LMSTUDIO_PROTOCOL', 'http'),
111 | 'host' => env('LMSTUDIO_HOST', '127.0.0.1'),
112 | 'port' => env('LMSTUDIO_PORT', 1234),
113 | 'api_key' => env('LMSTUDIO_API_KEY'),
114 | 'chat_model' => env('LMSTUDIO_CHAT_MODEL', 'local-model'),
115 | 'timeout' => env('LMSTUDIO_TIMEOUT', 120),
116 | ],
117 |
118 | 'dummy' => [
119 | 'driver' => 'dummy',
120 | ],
121 | ],
122 |
123 | // Conversation storage settings
124 | 'conversation' => [
125 | 'driver' => env('LLM_CONVERSATION_DRIVER', 'database'),
126 | 'table' => 'llm_conversations',
127 | ],
128 | ];
129 | ```
130 |
131 | ## Usage
132 |
133 | ### Basic Chat
134 |
135 | ```php
136 | use Llm;
137 |
138 | // Simple chat - returns string
139 | $response = Llm::chat('Explain transformers in simple terms.');
140 |
141 | // Get full response object with metadata
142 | $response = Llm::chatWithResponse('Explain transformers in simple terms.');
143 | echo $response->content;
144 | echo $response->model;
145 | echo $response->latencyMs;
146 | ```
147 |
148 | ### Switching Providers
149 |
150 | ```php
151 | use Llm;
152 |
153 | // Use default provider (from config)
154 | $response = Llm::chat('Hello!');
155 |
156 | // Switch to Anthropic for this request
157 | $response = Llm::using('anthropic')->chat('Write a Laravel policy example.');
158 |
159 | // Switch to dummy for offline development
160 | $response = Llm::using('dummy')->chat('Test message');
161 |
162 | // Use LM Studio for local models
163 | $response = Llm::using('lmstudio')->chat('Hello from local LLM!');
164 | ```
165 |
166 | ### Override Model Per Request
167 |
168 | ```php
169 | use Llm;
170 |
171 | $response = Llm::chat('Explain queues in Laravel.', [
172 | 'model' => 'gpt-4.1',
173 | 'temperature' => 0.7,
174 | 'max_tokens' => 1000,
175 | ]);
176 | ```
177 |
178 | ### System Prompts
179 |
180 | ```php
181 | use Llm;
182 |
183 | $response = Llm::chat('What is 2 + 2?', [
184 | 'system' => 'You are a helpful math tutor. Always explain your reasoning.',
185 | ]);
186 | ```
187 |
188 | ### Conversations (Multi-turn Chat)
189 |
190 | Build chatbots and maintain context across multiple messages:
191 |
192 | ```php
193 | use Llm;
194 |
195 | // Start a NEW conversation (auto-generates UUID)
196 | $conversation = Llm::conversation();
197 | $conversation->system('You are a helpful assistant.');
198 |
199 | // Chat with automatic context - the LLM remembers previous messages!
200 | $response = $conversation->chat('My name is John.');
201 | $response = $conversation->chat('What is my name?'); // "Your name is John."
202 |
203 | // Get the conversation ID for later use
204 | $conversationId = $conversation->getId();
205 | // e.g., "550e8400-e29b-41d4-a716-446655440000"
206 | ```
207 |
208 | **Resume an existing conversation:**
209 |
210 | ```php
211 | // Resume conversation using the saved ID
212 | $conversation = Llm::conversation($conversationId);
213 | $response = $conversation->chat('What else do you remember about me?');
214 | ```
215 |
216 | **Use a specific provider for conversations:**
217 |
218 | ```php
219 | $conversation = Llm::using('openai')->conversation();
220 | // or
221 | $conversation = Llm::using('lmstudio')->conversation();
222 | ```
223 |
224 | **Practical API example:**
225 |
226 | ```php
227 | // Start new chat
228 | Route::post('/chat/new', function (Request $request) {
229 | $conversation = Llm::conversation();
230 | $conversation->system('You are a helpful assistant.');
231 | $response = $conversation->chat($request->input('message'));
232 |
233 | return [
234 | 'conversation_id' => $conversation->getId(),
235 | 'response' => $response->content,
236 | 'tokens' => $response->tokenUsage->totalTokens,
237 | ];
238 | });
239 |
240 | // Continue existing chat
241 | Route::post('/chat/{id}', function (Request $request, string $id) {
242 | $conversation = Llm::conversation($id);
243 | $response = $conversation->chat($request->input('message'));
244 |
245 | return [
246 | 'response' => $response->content,
247 | 'tokens' => $response->tokenUsage->totalTokens,
248 | ];
249 | });
250 | ```
251 |
252 | **Other conversation methods:**
253 |
254 | ```php
255 | $conversation->getMessages(); // Get all messages
256 | $conversation->getMessageCount(); // Count messages
257 | $conversation->getSystemPrompt(); // Get system prompt
258 | $conversation->clear(); // Clear history (keeps system prompt)
259 | $conversation->delete(); // Delete entire conversation
260 | $conversation->export(); // Export as array
261 | ```
262 |
263 | **List all conversations:**
264 |
265 | ```php
266 | use Llm;
267 |
268 | // Get all conversation IDs
269 | $conversationIds = Llm::conversations();
270 | // ['550e8400-e29b-41d4-a716-446655440000', '6ba7b810-9dad-11d1-80b4-00c04fd430c8', ...]
271 |
272 | // With database driver, you can also get metadata
273 | $store = Llm::getConversationStore();
274 | if ($store instanceof \Oziri\LlmSuite\ConversationStores\DatabaseStore) {
275 | $conversations = $store->allWithMetadata();
276 | // [['id' => '...', 'message_count' => 5, 'created_at' => '...', 'updated_at' => '...'], ...]
277 | }
278 | ```
279 |
280 | **Storage Drivers:**
281 |
282 | | Driver | Storage | Best For |
283 | |--------|---------|----------|
284 | | `database` | Database table | Persistent storage, chat history (default) |
285 | | `session` | Laravel session | Temporary chats, no database setup |
286 |
287 | **Database Driver (Default):**
288 |
289 | Conversations are persisted in the database so they survive across requests and sessions. Publish and run the migration:
290 |
291 | ```bash
292 | php artisan vendor:publish --tag=llm-suite-migrations
293 | php artisan migrate
294 | ```
295 |
296 | This creates the `llm_conversations` table for storing conversation history.
297 |
298 | **Session Driver:**
299 |
300 | For temporary chats that don't need persistence (history expires with the session):
301 |
302 | ```env
303 | LLM_CONVERSATION_DRIVER=session
304 | ```
305 |
306 | No migration required for session driver.
307 |
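**Swapping the store at runtime:**

`setConversationStore()` is available on the underlying manager (and therefore via the facade), so you can override the configured driver programmatically. A minimal sketch that forces the session store:

```php
use Llm;
use Oziri\LlmSuite\ConversationStores\SessionStore;

// Override the configured driver and keep this conversation in the session only
Llm::setConversationStore(new SessionStore());

$conversation = Llm::conversation();
$conversation->chat('This history lives in the session only.');
```
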
308 | ### Token Usage
309 |
310 | Track token consumption for cost monitoring:
311 |
312 | ```php
313 | use Llm;
314 |
315 | $response = Llm::chatWithResponse('Explain Laravel in one paragraph.');
316 |
317 | // Access token usage
318 | echo $response->tokenUsage->promptTokens; // Input tokens
319 | echo $response->tokenUsage->completionTokens; // Output tokens
320 | echo $response->tokenUsage->totalTokens; // Total tokens
321 |
322 | // Helper methods
323 | echo $response->getTotalTokens();
324 | echo $response->getPromptTokens();
325 | echo $response->getCompletionTokens();
326 |
327 | // As array
328 | $usage = $response->tokenUsage->toArray();
329 | // ['prompt_tokens' => 10, 'completion_tokens' => 50, 'total_tokens' => 60]
330 | ```
331 |
332 | ### Image Generation
333 |
334 | ```php
335 | use Llm;
336 |
337 | // Generate an image
338 | $image = Llm::image()->generate([
339 | 'prompt' => 'A minimalist logo for a Laravel AI package',
340 | 'size' => '1024x1024',
341 | ]);
342 |
343 | echo $image->url;
344 |
345 | // Or use the convenience method
346 | $image = Llm::generateImage([
347 | 'prompt' => 'A futuristic cityscape',
348 | 'size' => '512x512',
349 | 'quality' => 'hd',
350 | ]);
351 | ```
352 |
353 | ### Listing Available Models
354 |
355 | ```php
356 | use Llm;
357 |
358 | // Get available models from OpenAI
359 | $client = Llm::client('openai');
360 | $models = $client->getAvailableModels();
361 | print_r($models);
362 | // ['gpt-4.1-mini', 'gpt-4.1', 'dall-e-3', ...]
363 |
364 | // Check if the API is accessible
365 | if ($client->isAvailable()) {
366 | echo "OpenAI API is accessible!";
367 | }
368 |
369 | // Same works for LM Studio
370 | $lmClient = Llm::client('lmstudio');
371 | if ($lmClient->isAvailable()) {
372 | $localModels = $lmClient->getAvailableModels();
373 | print_r($localModels);
374 | }
375 | ```
376 |
377 | ### Using LM Studio (Local LLMs)
378 |
379 | LM Studio lets you run open-source LLMs locally, making it ideal for development, testing, and privacy-sensitive applications.
380 |
381 | **Setup:**
382 | 1. Download [LM Studio](https://lmstudio.ai/)
383 | 2. Load a model (e.g., Llama, Mistral, Phi)
384 | 3. Start the local server (default: `http://localhost:1234`)
385 |
386 | **Usage:**
387 | ```php
388 | use Llm;
389 |
390 | // Basic chat with local model
391 | $response = Llm::using('lmstudio')->chat('Explain Laravel middleware.');
392 |
393 | // Check if LM Studio is running
394 | $client = Llm::using('lmstudio')->client();
395 | if ($client->isAvailable()) {
396 | echo "LM Studio is running!";
397 | }
398 |
399 | // List available models
400 | $models = $client->getAvailableModels();
401 | print_r($models);
402 |
403 | // Use a specific local model
404 | $response = Llm::using('lmstudio')->chat('Hello!', [
405 | 'model' => 'mistral-7b-instruct',
406 | 'temperature' => 0.7,
407 | ]);
408 | ```
409 |
410 | **Set as default for local development:**
411 | ```env
412 | LLM_SUITE_DEFAULT=lmstudio
413 | ```
414 |
415 | ### Working with Message History
416 |
417 | ```php
418 | use Llm;
419 |
420 | $response = Llm::chat('What is the capital of France?', [
421 | 'messages' => [
422 | ['role' => 'system', 'content' => 'You are a geography expert.'],
423 | ['role' => 'user', 'content' => 'What continent is Brazil in?'],
424 | ['role' => 'assistant', 'content' => 'Brazil is in South America.'],
425 | ['role' => 'user', 'content' => 'What is the capital of France?'],
426 | ],
427 | ]);
428 | ```
429 |
430 | ## Testing
431 |
432 | ### Using Laravel HTTP Fakes
433 |
434 | The simplest approach is to use Laravel's built-in HTTP faking:
435 |
436 | ```php
437 | use Illuminate\Support\Facades\Http;
438 | use Llm;
439 |
440 | Http::fake([
441 | 'api.openai.com/*' => Http::response([
442 | 'id' => 'chatcmpl-test',
443 | 'model' => 'gpt-4.1-mini',
444 | 'choices' => [
445 | ['message' => ['content' => 'Fake response']],
446 | ],
447 | ]),
448 | ]);
449 |
450 | $response = Llm::chat('Test');
451 | $this->assertEquals('Fake response', $response);
452 | ```
453 |
454 | ### Using LlmFake
455 |
456 | For more control, use the built-in fake helper:
457 |
458 | ```php
459 | use Llm;
460 |
461 | // Set up the fake
462 | $fake = Llm::fake()
463 | ->shouldReturnChat('Hello world')
464 | ->shouldReturnImage('https://example.com/image.png');
465 |
466 | // Make requests
467 | $chatResponse = Llm::chat('Hi there');
468 | $imageResponse = Llm::image()->generate(['prompt' => 'A cat']);
469 |
470 | // Assert requests were made
471 | $fake->assertChatSent('Hi there');
472 | $fake->assertImageSent('A cat');
473 | $fake->assertChatCount(1);
474 | $fake->assertImageCount(1);
475 | ```
476 |
477 | ### Using the Dummy Provider
478 |
479 | You can also use the dummy provider directly in your tests:
480 |
481 | ```php
482 | use Llm;
483 |
484 | // Switch to dummy provider
485 | $response = Llm::using('dummy')->chat('Test message');
486 | // Returns: "This is a dummy response to: Test message"
487 | ```
488 |
489 | ## Extending with Custom Drivers
490 |
491 | You can register custom drivers for other LLM providers:
492 |
493 | ```php
494 | use Oziri\LlmSuite\Facades\Llm;
495 | use Oziri\LlmSuite\Contracts\ChatClient;
496 | use Oziri\LlmSuite\Support\ChatResponse;
497 |
498 | // Create your custom client
499 | class MyCustomClient implements ChatClient
500 | {
501 | public function __construct(protected array $config) {}
502 |
503 | public function chat(string $prompt, array $options = []): ChatResponse
504 | {
505 | // Your implementation here
506 | return new ChatResponse(
507 | content: 'Response from custom provider',
508 | raw: [],
509 | model: 'custom-model',
510 | );
511 | }
512 | }
513 |
514 | // Register the driver (in a service provider)
515 | Llm::extend('custom', function (array $config) {
516 | return new MyCustomClient($config);
517 | });
518 |
519 | // Add to config/llm-suite.php
520 | 'providers' => [
521 | 'my-custom' => [
522 | 'driver' => 'custom',
523 | 'api_key' => env('CUSTOM_API_KEY'),
524 | ],
525 | ],
526 |
527 | // Use it
528 | $response = Llm::using('my-custom')->chat('Hello!');
529 | ```
530 |
531 | ## Available Methods
532 |
533 | ### Facade Methods
534 |
535 | | Method | Description |
536 | |--------|-------------|
537 | | `Llm::chat($prompt, $options)` | Send a chat message, returns string |
538 | | `Llm::chatWithResponse($prompt, $options)` | Send a chat message, returns ChatResponse |
539 | | `Llm::using($provider)` | Switch to a different provider |
540 | | `Llm::image()` | Get the image client |
541 | | `Llm::generateImage($params)` | Generate an image |
542 | | `Llm::extend($driver, $callback)` | Register a custom driver |
543 | | `Llm::fake()` | Create a fake for testing |
544 | | `Llm::getProviders()` | List available providers |
545 | | `Llm::getDefaultProvider()` | Get the default provider name |
546 | | `Llm::client($name)` | Get the underlying client instance |
547 | | `Llm::conversation($id)` | Start new or resume existing conversation |
548 | | `Llm::conversations()` | Get all conversation IDs |
549 |
550 | ### Client Methods (OpenAI, LM Studio)
551 |
552 | You can access the underlying client instance using `Llm::client('provider')` to call provider-specific methods:
553 |
554 | ```php
555 | $client = Llm::client('openai'); // or 'lmstudio'
556 | ```
557 |
558 | | Method | Description |
559 | |--------|-------------|
560 | | `$client->isAvailable()` | Check if the API/server is accessible |
561 | | `$client->getAvailableModels()` | List available models from the provider |
562 |
563 | ### ChatResponse Properties
564 |
565 | | Property | Type | Description |
566 | |----------|------|-------------|
567 | | `content` | string | The response text |
568 | | `raw` | array | Raw API response data |
569 | | `model` | string\|null | Model used for the request |
570 | | `id` | string\|null | Request ID from the provider |
571 | | `latencyMs` | float\|null | Request latency in milliseconds |
572 | | `tokenUsage` | TokenUsage | Token usage statistics |
573 |
574 | ### TokenUsage Properties
575 |
576 | | Property | Type | Description |
577 | |----------|------|-------------|
578 | | `promptTokens` | int | Number of tokens in the prompt/input |
579 | | `completionTokens` | int | Number of tokens in the completion/output |
580 | | `totalTokens` | int | Total tokens used |
581 |
582 | **Methods:**
583 | - `toArray()` - Convert to array
584 | - `hasData()` - Check if usage data is available
585 | - `TokenUsage::fromArray($data)` - Create from API response
586 | - `TokenUsage::empty()` - Create empty instance
587 |
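`TokenUsage` is a small value object, so it can also be built directly from an OpenAI-style `usage` array, which is handy when normalising responses in a custom driver. A short sketch:

```php
use Oziri\LlmSuite\Support\TokenUsage;

$usage = TokenUsage::fromArray([
    'prompt_tokens' => 10,
    'completion_tokens' => 50,
    'total_tokens' => 60,
]);

if ($usage->hasData()) {
    echo $usage->totalTokens; // 60
}
```
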
588 | ### ImageResponse Properties
589 |
590 | | Property | Type | Description |
591 | |----------|------|-------------|
592 | | `url` | string\|null | URL of the generated image |
593 | | `base64` | string\|null | Base64 encoded image data |
594 | | `raw` | array | Raw API response data |
595 | | `revisedPrompt` | string\|null | Revised prompt (if modified by provider) |
596 |
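Depending on the provider and the requested `response_format`, either `url` or `base64` will be populated. A minimal sketch for persisting the image with Laravel's `Storage` facade (assumes the OpenAI provider and the `b64_json` response format):

```php
use Illuminate\Support\Facades\Storage;
use Llm;

$image = Llm::generateImage([
    'prompt' => 'A watercolor lighthouse at dusk',
    'response_format' => 'b64_json',
]);

if ($image->base64 !== null) {
    Storage::put('images/lighthouse.png', base64_decode($image->base64));
} elseif ($image->url !== null) {
    // The provider returned a hosted URL instead of inline data
    echo $image->url;
}
```
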
597 | ## Roadmap
598 |
599 | - [x] LM Studio support (local LLMs)
600 | - [x] Conversation management (session & database storage)
601 | - [x] Token usage tracking
602 | - [ ] Streaming support
603 | - [ ] Tool/Function calling
604 | - [ ] Embeddings API
605 | - [ ] RAG helpers
606 | - [ ] Additional providers (Gemini, Groq, Ollama)
607 | - [ ] Rate limiting
608 | - [ ] Caching layer
609 |
610 | ## License
611 |
612 | MIT License. See [LICENSE](LICENSE) for details.
613 |
614 |
--------------------------------------------------------------------------------