├── examples ├── completion.php ├── embedding.php ├── completion-streamed.php ├── chat.php └── chat-streamed.php ├── src ├── Ollama.php ├── Resources │ ├── EmbeddingsInterface.php │ ├── CompletionInterface.php │ ├── Embeddings.php │ ├── ChatInterface.php │ ├── Completion.php │ └── Chat.php ├── Responses │ ├── Chat │ │ ├── CreateResponseMessage.php │ │ ├── CreateStreamedResponseMessage.php │ │ ├── CreateResponse.php │ │ └── CreateStreamedResponse.php │ ├── Embeddings │ │ └── CreateResponse.php │ └── Completion │ │ ├── CreateStreamedResponse.php │ │ └── CreateResponse.php ├── Factory.php ├── Client.php └── ClientInterface.php ├── LICENSE ├── .readme.yaml ├── composer.json └── README.md /examples/completion.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | use ModelflowAi\Ollama\Ollama; 15 | 16 | require_once __DIR__ . '/../vendor/autoload.php'; 17 | 18 | $client = Ollama::client(); 19 | 20 | $response = $client->completion()->create([ 21 | 'model' => 'llama2', 22 | 'prompt' => 'Hello world!', 23 | ]); 24 | 25 | echo $response->response . \PHP_EOL; 26 | -------------------------------------------------------------------------------- /examples/embedding.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | use ModelflowAi\Ollama\Ollama; 15 | 16 | require_once __DIR__ . '/../vendor/autoload.php'; 17 | 18 | $client = Ollama::client(); 19 | 20 | $response = $client->embeddings()->create([ 21 | 'model' => 'llama2', 22 | 'prompt' => 'You are an angry bot!', 23 | ]); 24 | 25 | \var_dump(\count($response->embedding)); 26 | echo \PHP_EOL; 27 | -------------------------------------------------------------------------------- /examples/completion-streamed.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | use ModelflowAi\Ollama\Ollama; 15 | 16 | require_once __DIR__ . '/../vendor/autoload.php'; 17 | 18 | $client = Ollama::client(); 19 | 20 | $stream = $client->completion()->createStreamed([ 21 | 'model' => 'llama2', 22 | 'prompt' => 'Hello world!', 23 | ]); 24 | 25 | foreach ($stream as $response) { 26 | echo $response->delta; 27 | } 28 | -------------------------------------------------------------------------------- /src/Ollama.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 
12 | */ 13 | 14 | namespace ModelflowAi\Ollama; 15 | 16 | final class Ollama 17 | { 18 | private function __construct() 19 | { 20 | } 21 | 22 | public static function client(): ClientInterface 23 | { 24 | return self::factory()->make(); 25 | } 26 | 27 | public static function factory(): Factory 28 | { 29 | return new Factory(); 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /examples/chat.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | use ModelflowAi\Ollama\Ollama; 15 | 16 | require_once __DIR__ . '/../vendor/autoload.php'; 17 | 18 | $client = Ollama::client(); 19 | 20 | $response = $client->chat()->create([ 21 | 'model' => 'llama2', 22 | 'messages' => [ 23 | ['role' => 'system', 'content' => 'You are an angry bot!'], 24 | ['role' => 'user', 'content' => 'Hello world!'], 25 | ], 26 | ]); 27 | 28 | echo $response->message->content . \PHP_EOL; 29 | -------------------------------------------------------------------------------- /src/Resources/EmbeddingsInterface.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Resources; 15 | 16 | use ModelflowAi\Ollama\Responses\Embeddings\CreateResponse; 17 | 18 | interface EmbeddingsInterface 19 | { 20 | /** 21 | * @param array{ 22 | * model: string, 23 | * prompt: string, 24 | * options?: array, 25 | * } $parameters 26 | */ 27 | public function create(array $parameters): CreateResponse; 28 | } 29 | -------------------------------------------------------------------------------- /examples/chat-streamed.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | use ModelflowAi\Ollama\Ollama; 15 | 16 | require_once __DIR__ . '/../vendor/autoload.php'; 17 | 18 | $client = Ollama::client(); 19 | 20 | $stream = $client->chat()->createStreamed([ 21 | 'model' => 'llama2', 22 | 'messages' => [ 23 | ['role' => 'system', 'content' => 'You are an angry bot!'], 24 | ['role' => 'user', 'content' => 'Hello world!'], 25 | ], 26 | ]); 27 | 28 | foreach ($stream as $response) { 29 | if (0 === $response->index) { 30 | echo $response->message->role . ': '; 31 | } 32 | 33 | echo $response->message->delta; 34 | } 35 | -------------------------------------------------------------------------------- /src/Responses/Chat/CreateResponseMessage.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 
12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Responses\Chat; 15 | 16 | final readonly class CreateResponseMessage 17 | { 18 | private function __construct( 19 | public string $role, 20 | public ?string $content, 21 | ) { 22 | } 23 | 24 | /** 25 | * @param array{ 26 | * role: string, 27 | * content: ?string, 28 | * } $attributes 29 | */ 30 | public static function from(array $attributes): self 31 | { 32 | return new self( 33 | $attributes['role'], 34 | $attributes['content'] ?? null, 35 | ); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/Responses/Chat/CreateStreamedResponseMessage.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Responses\Chat; 15 | 16 | final readonly class CreateStreamedResponseMessage 17 | { 18 | private function __construct( 19 | public string $role, 20 | public ?string $delta, 21 | ) { 22 | } 23 | 24 | /** 25 | * @param array{ 26 | * role: string, 27 | * content: ?string, 28 | * } $attributes 29 | */ 30 | public static function from(array $attributes): self 31 | { 32 | return new self( 33 | $attributes['role'], 34 | $attributes['content'] ?? null, 35 | ); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Johannes Wachter 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /src/Factory.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 
12 | */ 13 | 14 | namespace ModelflowAi\Ollama; 15 | 16 | use ModelflowAi\ApiClient\Transport\SymfonyHttpTransporter; 17 | use Symfony\Component\HttpClient\HttpClient; 18 | use Symfony\Contracts\HttpClient\HttpClientInterface; 19 | 20 | final class Factory 21 | { 22 | private HttpClientInterface $httpClient; 23 | 24 | private string $baseUrl = 'http://localhost:11434/api/'; 25 | 26 | public function withHttpClient(HttpClientInterface $client): self 27 | { 28 | $this->httpClient = $client; 29 | 30 | return $this; 31 | } 32 | 33 | public function withBaseUrl(string $baseUrl): self 34 | { 35 | $this->baseUrl = \rtrim($baseUrl, '/') . '/'; 36 | 37 | return $this; 38 | } 39 | 40 | public function make(): ClientInterface 41 | { 42 | $transporter = new SymfonyHttpTransporter($this->httpClient ?? HttpClient::create(), $this->baseUrl); 43 | 44 | return new Client($transporter); 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /src/Client.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | namespace ModelflowAi\Ollama; 15 | 16 | use ModelflowAi\ApiClient\Transport\TransportInterface; 17 | use ModelflowAi\Ollama\Resources\Chat; 18 | use ModelflowAi\Ollama\Resources\ChatInterface; 19 | use ModelflowAi\Ollama\Resources\Completion; 20 | use ModelflowAi\Ollama\Resources\CompletionInterface; 21 | use ModelflowAi\Ollama\Resources\Embeddings; 22 | use ModelflowAi\Ollama\Resources\EmbeddingsInterface; 23 | 24 | final readonly class Client implements ClientInterface 25 | { 26 | public function __construct( 27 | private TransportInterface $transport, 28 | ) { 29 | } 30 | 31 | public function chat(): ChatInterface 32 | { 33 | return new Chat($this->transport); 34 | } 35 | 36 | public function completion(): CompletionInterface 37 | { 38 | return new Completion($this->transport); 39 | } 40 | 41 | public function embeddings(): EmbeddingsInterface 42 | { 43 | return new Embeddings($this->transport); 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/Responses/Embeddings/CreateResponse.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 
12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Responses\Embeddings; 15 | 16 | use ModelflowAi\ApiClient\Responses\MetaInformation; 17 | use ModelflowAi\ApiClient\Responses\Usage; 18 | 19 | final readonly class CreateResponse 20 | { 21 | /** 22 | * @param float[] $embedding 23 | */ 24 | private function __construct( 25 | public array $embedding, 26 | public Usage $usage, 27 | public MetaInformation $meta, 28 | ) { 29 | } 30 | 31 | /** 32 | * @param array{ 33 | * embedding: float[], 34 | * } $attributes 35 | */ 36 | public static function from(array $attributes, MetaInformation $meta): self 37 | { 38 | return new self( 39 | $attributes['embedding'], 40 | Usage::from([ 41 | 'prompt_tokens' => 0, 42 | 'completion_tokens' => null, 43 | 'total_tokens' => 0, 44 | ]), 45 | $meta, 46 | ); 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /src/Resources/CompletionInterface.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Resources; 15 | 16 | use ModelflowAi\Ollama\Responses\Completion\CreateResponse; 17 | use ModelflowAi\Ollama\Responses\Completion\CreateStreamedResponse; 18 | 19 | interface CompletionInterface 20 | { 21 | /** 22 | * @param array{ 23 | * model: string, 24 | * prompt: string, 25 | * format?: "json", 26 | * options?: array, 27 | * template?: string, 28 | * context?: float[], 29 | * } $parameters 30 | */ 31 | public function create(array $parameters): CreateResponse; 32 | 33 | /** 34 | * @param array{ 35 | * model: string, 36 | * prompt: string, 37 | * format?: "json", 38 | * options?: array, 39 | * template?: string, 40 | * context?: float[], 41 | * } $parameters 42 | * 43 | * @return \Iterator 44 | */ 45 | public function createStreamed(array $parameters): \Iterator; 46 | } 47 | -------------------------------------------------------------------------------- /src/ClientInterface.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | namespace ModelflowAi\Ollama; 15 | 16 | use ModelflowAi\Ollama\Resources\ChatInterface; 17 | use ModelflowAi\Ollama\Resources\CompletionInterface; 18 | use ModelflowAi\Ollama\Resources\EmbeddingsInterface; 19 | 20 | interface ClientInterface 21 | { 22 | /** 23 | * Given a chat conversation, the model will return a chat completion response. 24 | * 25 | * @see https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-chat-completion 26 | */ 27 | public function chat(): ChatInterface; 28 | 29 | /** 30 | * Given a prompt, the model will return a text completion response. 31 | * 32 | * @see https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-a-completion 33 | */ 34 | public function completion(): CompletionInterface; 35 | 36 | /** 37 | * Get a vector representation of a given input that can be easily consumed by machine learning models and algorithms. 
38 | * 39 | * @see https://github.com/jmorganca/ollama/blob/main/docs/api.md#generate-embeddings 40 | */ 41 | public function embeddings(): EmbeddingsInterface; 42 | } 43 | -------------------------------------------------------------------------------- /src/Responses/Completion/CreateStreamedResponse.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Responses\Completion; 15 | 16 | use ModelflowAi\ApiClient\Responses\MetaInformation; 17 | 18 | final readonly class CreateStreamedResponse 19 | { 20 | private function __construct( 21 | public string $model, 22 | public int $createdAt, 23 | public int $index, 24 | public string $delta, 25 | public bool $done, 26 | public MetaInformation $meta, 27 | ) { 28 | } 29 | 30 | /** 31 | * @param array{ 32 | * model: string, 33 | * created_at: string, 34 | * response: string, 35 | * context: float[], 36 | * done: bool, 37 | * } $attributes 38 | */ 39 | public static function from(int $index, array $attributes, MetaInformation $meta): self 40 | { 41 | return new self( 42 | $attributes['model'], 43 | (new \DateTimeImmutable($attributes['created_at']))->getTimestamp(), 44 | $index, 45 | $attributes['response'], 46 | $attributes['done'], 47 | $meta, 48 | ); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /src/Resources/Embeddings.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Resources; 15 | 16 | use ModelflowAi\ApiClient\Transport\Payload; 17 | use ModelflowAi\ApiClient\Transport\TransportInterface; 18 | use ModelflowAi\Ollama\Responses\Embeddings\CreateResponse; 19 | use Webmozart\Assert\Assert; 20 | 21 | final readonly class Embeddings implements EmbeddingsInterface 22 | { 23 | public function __construct( 24 | private TransportInterface $transport, 25 | ) { 26 | } 27 | 28 | public function create(array $parameters): CreateResponse 29 | { 30 | $this->validateParameters($parameters); 31 | 32 | $payload = Payload::create('embeddings', $parameters); 33 | 34 | $response = $this->transport->requestObject($payload); 35 | 36 | // @phpstan-ignore-next-line 37 | return CreateResponse::from($response->data, $response->meta); 38 | } 39 | 40 | /** 41 | * @param array $parameters 42 | */ 43 | private function validateParameters(array $parameters): void 44 | { 45 | Assert::keyExists($parameters, 'model'); 46 | Assert::string($parameters['model']); 47 | 48 | Assert::keyExists($parameters, 'prompt'); 49 | Assert::string($parameters['prompt']); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/Resources/ChatInterface.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 
12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Resources; 15 | 16 | use ModelflowAi\Ollama\Responses\Chat\CreateResponse; 17 | use ModelflowAi\Ollama\Responses\Chat\CreateStreamedResponse; 18 | 19 | interface ChatInterface 20 | { 21 | /** 22 | * @param array{ 23 | * model: string, 24 | * messages: array, 29 | * format?: "json", 30 | * options?: array, 31 | * template?: string, 32 | * } $parameters 33 | */ 34 | public function create(array $parameters): CreateResponse; 35 | 36 | /** 37 | * @param array{ 38 | * model: string, 39 | * messages: array, 44 | * format?: "json", 45 | * options?: array, 46 | * template?: string, 47 | * } $parameters 48 | * 49 | * @return \Iterator 50 | */ 51 | public function createStreamed(array $parameters): \Iterator; 52 | } 53 | -------------------------------------------------------------------------------- /.readme.yaml: -------------------------------------------------------------------------------- 1 | title: Ollama 2 | description: | 3 | Ollama is a PHP package that provides an easy-to-use client for the ollama API. 4 | shortDescription: A comprehensive API client for Ollama. 5 | examples: true 6 | usage: | 7 | ```php 8 | use ModelflowAi\Ollama\Ollama; 9 | 10 | // Create a client instance 11 | $client = Ollama::client(); 12 | 13 | // Use the client 14 | $chat = $client->chat(); 15 | $completion = $client->completion(); 16 | $embeddings = $client->embeddings(); 17 | 18 | // Example usage of chat 19 | $chatResponse = $chat->create([ 20 | 'model' => 'llama2', 21 | 'messages' => [['role' => 'user', 'content' => 'Hello, world!']], 22 | ]); 23 | echo $chatResponse->message->content; 24 | 25 | // Example usage of completion 26 | $completionResponse = $completion->create([ 27 | 'model' => 'llama2', 28 | 'prompt' => 'Once upon a time', 29 | ]); 30 | echo $completionResponse->response; 31 | 32 | // Example usage of embeddings 33 | $embeddingsResponse = $embeddings->create(['prompt' => 'Hello, world!']); 34 | echo $embeddingsResponse->embedding; 35 | ``` 36 | 37 | For more examples, see the [examples](examples) directory. 38 | 39 | ## Testing & Code Quality 40 | 41 | To run the tests and all the code quality tools with the following commands: 42 | 43 | ```bash 44 | composer fix 45 | composer lint 46 | composer test 47 | ``` 48 | 49 | ## Open Points 50 | 51 | ### Model API 52 | 53 | The Model API is another area that we are actively working on. Once completed, this will provide users with the ability 54 | to manage and interact with their AI models directly from the Ollama package. 55 | independent: true 56 | -------------------------------------------------------------------------------- /src/Responses/Chat/CreateResponse.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 
12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Responses\Chat; 15 | 16 | use ModelflowAi\ApiClient\Responses\MetaInformation; 17 | use ModelflowAi\ApiClient\Responses\Usage; 18 | 19 | final readonly class CreateResponse 20 | { 21 | private function __construct( 22 | public string $model, 23 | public int $createdAt, 24 | public CreateResponseMessage $message, 25 | public bool $done, 26 | public int $totalDuration, 27 | public int $loadDuration, 28 | public int $promptEvalDuration, 29 | public int $evalDuration, 30 | public Usage $usage, 31 | public MetaInformation $meta, 32 | ) { 33 | } 34 | 35 | /** 36 | * @param array{ 37 | * model: string, 38 | * created_at: string, 39 | * message: array{ 40 | * role: string, 41 | * content: ?string, 42 | * }, 43 | * done: bool, 44 | * total_duration: int, 45 | * load_duration: int, 46 | * prompt_eval_count?: int|null, 47 | * prompt_eval_duration: int, 48 | * eval_count: int, 49 | * eval_duration: int, 50 | * } $attributes 51 | */ 52 | public static function from(array $attributes, MetaInformation $meta): self 53 | { 54 | return new self( 55 | $attributes['model'], 56 | (new \DateTimeImmutable($attributes['created_at']))->getTimestamp(), 57 | CreateResponseMessage::from($attributes['message']), 58 | $attributes['done'], 59 | $attributes['total_duration'], 60 | $attributes['load_duration'], 61 | $attributes['prompt_eval_duration'], 62 | $attributes['eval_duration'], 63 | Usage::from([ 64 | 'prompt_tokens' => $attributes['prompt_eval_count'] ?? 0, 65 | 'completion_tokens' => $attributes['eval_count'], 66 | 'total_tokens' => ($attributes['prompt_eval_count'] ?? 0) + $attributes['eval_count'], 67 | ]), 68 | $meta, 69 | ); 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /src/Responses/Completion/CreateResponse.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 
12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Responses\Completion; 15 | 16 | use ModelflowAi\ApiClient\Responses\MetaInformation; 17 | use ModelflowAi\ApiClient\Responses\Usage; 18 | 19 | final readonly class CreateResponse 20 | { 21 | /** 22 | * @param float[] $context 23 | */ 24 | private function __construct( 25 | public string $model, 26 | public int $createdAt, 27 | public string $response, 28 | public array $context, 29 | public bool $done, 30 | public int $totalDuration, 31 | public int $loadDuration, 32 | public int $promptEvalDuration, 33 | public int $evalDuration, 34 | public Usage $usage, 35 | public MetaInformation $meta, 36 | ) { 37 | } 38 | 39 | /** 40 | * @param array{ 41 | * model: string, 42 | * created_at: string, 43 | * response: string, 44 | * context: float[], 45 | * done: bool, 46 | * total_duration: int, 47 | * load_duration: int, 48 | * prompt_eval_count?: int|null, 49 | * prompt_eval_duration: int, 50 | * eval_count: int, 51 | * eval_duration: int, 52 | * } $attributes 53 | */ 54 | public static function from(array $attributes, MetaInformation $meta): self 55 | { 56 | return new self( 57 | $attributes['model'], 58 | (new \DateTimeImmutable($attributes['created_at']))->getTimestamp(), 59 | $attributes['response'], 60 | $attributes['context'], 61 | $attributes['done'], 62 | $attributes['total_duration'], 63 | $attributes['load_duration'], 64 | $attributes['prompt_eval_duration'], 65 | $attributes['eval_duration'], 66 | Usage::from([ 67 | 'prompt_tokens' => $attributes['prompt_eval_count'] ?? 0, 68 | 'completion_tokens' => $attributes['eval_count'], 69 | 'total_tokens' => ($attributes['prompt_eval_count'] ?? 0) + $attributes['eval_count'], 70 | ]), 71 | $meta, 72 | ); 73 | } 74 | } 75 | -------------------------------------------------------------------------------- /src/Responses/Chat/CreateStreamedResponse.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Responses\Chat; 15 | 16 | use ModelflowAi\ApiClient\Responses\MetaInformation; 17 | use ModelflowAi\ApiClient\Responses\Usage; 18 | 19 | final readonly class CreateStreamedResponse 20 | { 21 | private function __construct( 22 | public string $model, 23 | public int $createdAt, 24 | public int $index, 25 | public CreateStreamedResponseMessage $message, 26 | public bool $done, 27 | public ?int $totalDuration, 28 | public ?int $loadDuration, 29 | public ?int $promptEvalDuration, 30 | public ?int $evalDuration, 31 | public ?Usage $usage, 32 | public MetaInformation $meta, 33 | ) { 34 | } 35 | 36 | /** 37 | * @param array{ 38 | * model: string, 39 | * created_at: string, 40 | * message: array{ 41 | * role: string, 42 | * content: ?string, 43 | * }, 44 | * done: bool, 45 | * total_duration?: int, 46 | * load_duration?: int, 47 | * prompt_eval_count?: int|null, 48 | * prompt_eval_duration?: int, 49 | * eval_count?: int, 50 | * eval_duration?: int, 51 | * } $attributes 52 | */ 53 | public static function from(int $index, array $attributes, MetaInformation $meta): self 54 | { 55 | return new self( 56 | $attributes['model'], 57 | (new \DateTimeImmutable($attributes['created_at']))->getTimestamp(), 58 | $index, 59 | CreateStreamedResponseMessage::from($attributes['message']), 60 | $attributes['done'], 61 | $attributes['total_duration'] ?? null, 62 | $attributes['load_duration'] ?? 
null, 63 | $attributes['prompt_eval_duration'] ?? null, 64 | $attributes['eval_duration'] ?? null, 65 | $attributes['eval_count'] ?? null ? Usage::from([ 66 | 'prompt_tokens' => $attributes['prompt_eval_count'] ?? 0, 67 | 'completion_tokens' => $attributes['eval_count'], 68 | 'total_tokens' => ($attributes['prompt_eval_count'] ?? 0) + $attributes['eval_count'], 69 | ]) : null, 70 | $meta, 71 | ); 72 | } 73 | } 74 | -------------------------------------------------------------------------------- /src/Resources/Completion.php: -------------------------------------------------------------------------------- 1 | 9 | * 10 | * For the full copyright and license information, please view the LICENSE 11 | * file that was distributed with this source code. 12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Resources; 15 | 16 | use ModelflowAi\ApiClient\Resources\Concerns\Streamable; 17 | use ModelflowAi\ApiClient\Transport\Payload; 18 | use ModelflowAi\ApiClient\Transport\TransportInterface; 19 | use ModelflowAi\Ollama\Responses\Completion\CreateResponse; 20 | use ModelflowAi\Ollama\Responses\Completion\CreateStreamedResponse; 21 | use Webmozart\Assert\Assert; 22 | 23 | final readonly class Completion implements CompletionInterface 24 | { 25 | use Streamable; 26 | 27 | public function __construct( 28 | private TransportInterface $transport, 29 | ) { 30 | } 31 | 32 | public function create(array $parameters): CreateResponse 33 | { 34 | $this->ensureNotStreamed($parameters); 35 | $this->validateParameters($parameters); 36 | $parameters['stream'] = false; 37 | 38 | $payload = Payload::create('generate', $parameters); 39 | 40 | $response = $this->transport->requestObject($payload); 41 | 42 | // @phpstan-ignore-next-line 43 | return CreateResponse::from($response->data, $response->meta); 44 | } 45 | 46 | public function createStreamed(array $parameters): \Iterator 47 | { 48 | $this->validateParameters($parameters); 49 | $parameters['stream'] = true; 50 | 51 | $payload = Payload::create('generate', $parameters); 52 | 53 | foreach ($this->transport->requestStream($payload) as $index => $response) { 54 | // @phpstan-ignore-next-line 55 | yield CreateStreamedResponse::from($index, $response->data, $response->meta); 56 | } 57 | } 58 | 59 | /** 60 | * @param array $parameters 61 | */ 62 | private function validateParameters(array $parameters): void 63 | { 64 | Assert::keyExists($parameters, 'model'); 65 | Assert::string($parameters['model']); 66 | 67 | Assert::keyExists($parameters, 'prompt'); 68 | Assert::string($parameters['prompt']); 69 | 70 | if (isset($parameters['format'])) { 71 | Assert::string($parameters['format']); 72 | Assert::inArray($parameters['format'], ['json']); 73 | } 74 | 75 | if (isset($parameters['template'])) { 76 | Assert::string($parameters['template']); 77 | } 78 | 79 | if (isset($parameters['context'])) { 80 | Assert::allFloat($parameters['context']); 81 | } 82 | } 83 | } 84 | -------------------------------------------------------------------------------- /composer.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "modelflow-ai/ollama", 3 | "description": "Client for ollama API.", 4 | "type": "library", 5 | "license": "MIT", 6 | "keywords": [ 7 | "ai", 8 | "ollama", 9 | "api", 10 | "client" 11 | ], 12 | "autoload": { 13 | "psr-4": { 14 | "ModelflowAi\\Ollama\\": "src/" 15 | } 16 | }, 17 | "autoload-dev": { 18 | "psr-4": { 19 | "ModelflowAi\\Ollama\\Tests\\": "tests/" 20 | } 21 | }, 22 | "authors": [ 23 | { 24 | "name": "Johannes 
Wachter", 25 | "email": "johannes@sulu.io" 26 | } 27 | ], 28 | "require": { 29 | "php": "^8.2", 30 | "modelflow-ai/api-client": "^0.2", 31 | "webmozart/assert": "^1.11" 32 | }, 33 | "require-dev": { 34 | "php-cs-fixer/shim": "^3.15", 35 | "phpstan/extension-installer": "^1.2", 36 | "phpstan/phpstan": "^1.10, <1.10.55", 37 | "phpstan/phpstan-phpunit": "^1.3@stable", 38 | "phpunit/phpunit": "^10.3", 39 | "rector/rector": "^0.18.1", 40 | "jangregor/phpstan-prophecy": "^1.0", 41 | "phpspec/prophecy-phpunit": "^2.1@stable", 42 | "asapo/remove-vendor-plugin": "^0.1" 43 | }, 44 | "scripts": { 45 | "test-with-coverage": "@test --coverage-php var/reports/coverage.cov --coverage-cobertura=var/cobertura-coverage.xml --coverage-html var/reports/html --log-junit var/reports/junit.xml", 46 | "test": [ 47 | "Composer\\Config::disableProcessTimeout", 48 | "vendor/bin/phpunit" 49 | ], 50 | "phpstan": "@php vendor/bin/phpstan analyze", 51 | "lint-rector": "@php vendor/bin/rector process --dry-run", 52 | "lint-php-cs": "@php vendor/bin/php-cs-fixer fix --verbose --diff --dry-run", 53 | "lint": [ 54 | "@phpstan", 55 | "@lint-php-cs", 56 | "@lint-rector", 57 | "@lint-composer" 58 | ], 59 | "lint-composer": "@composer validate --strict", 60 | "rector": "@php vendor/bin/rector process", 61 | "php-cs-fix": "@php vendor/bin/php-cs-fixer fix", 62 | "fix": [ 63 | "@rector", 64 | "@php-cs-fix" 65 | ] 66 | }, 67 | "repositories": [ 68 | { 69 | "type": "path", 70 | "url": "./../*", 71 | "options": { 72 | "symlink": false 73 | } 74 | } 75 | ], 76 | "minimum-stability": "dev", 77 | "config": { 78 | "allow-plugins": { 79 | "phpstan/extension-installer": true, 80 | "asapo/remove-vendor-plugin": true 81 | } 82 | }, 83 | "extra": { 84 | "remove-folders": [ 85 | "modelflow-ai/*/vendor" 86 | ] 87 | } 88 | } 89 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | 
3 | Ollama Logo
4 | 
5 | 
6 | 
7 | Modelflow AI
8 | Ollama
9 | 
10 | 
11 | 
12 | 
13 | 
14 | 
15 | Ollama is a PHP package that provides an easy-to-use client for the ollama API.
16 | 
17 | 
18 | 
19 | 
20 | > **Note**:
21 | > This is part of the `modelflow-ai` project. Please create issues in the [main repository](https://github.com/modelflow-ai/.github).
22 | 
23 | > **Note**:
24 | > This project is heavily under development and any feedback is greatly appreciated.
25 | 
26 | 
27 | 
28 | ## Installation
29 | 
30 | To install the Ollama package, you need to have PHP 8.2 or higher and Composer installed on your machine. Then, you can
31 | add the package to your project by running the following command:
32 | 
33 | ```bash
34 | composer require modelflow-ai/ollama
35 | ```
36 | 
37 | ## Examples
38 | 
39 | Here are some examples of how you can use the Ollama package in your PHP applications. You can find more detailed
40 | examples in the [examples directory](examples).
41 | 
42 | ## Usage
43 | 
44 | ```php
45 | use ModelflowAi\Ollama\Ollama;
46 | 
47 | // Create a client instance
48 | $client = Ollama::client();
49 | 
50 | // Use the client
51 | $chat = $client->chat();
52 | $completion = $client->completion();
53 | $embeddings = $client->embeddings();
54 | 
55 | // Example usage of chat
56 | $chatResponse = $chat->create([
57 |     'model' => 'llama2',
58 |     'messages' => [['role' => 'user', 'content' => 'Hello, world!']],
59 | ]);
60 | echo $chatResponse->message->content;
61 | 
62 | // Example usage of completion
63 | $completionResponse = $completion->create([
64 |     'model' => 'llama2',
65 |     'prompt' => 'Once upon a time',
66 | ]);
67 | echo $completionResponse->response;
68 | 
69 | // Example usage of embeddings
70 | $embeddingsResponse = $embeddings->create(['model' => 'llama2', 'prompt' => 'Hello, world!']);
71 | echo count($embeddingsResponse->embedding); // size of the embedding vector
72 | ```
73 | 
74 | For more examples, see the [examples](examples) directory.
75 | 
76 | ## Testing & Code Quality
77 | 
78 | To run the tests and all the code quality tools, use the following commands:
79 | 
80 | ```bash
81 | composer fix
82 | composer lint
83 | composer test
84 | ```
85 | 
86 | ## Open Points
87 | 
88 | ### Model API
89 | 
90 | The Model API is another area that we are actively working on. Once completed, this will provide users with the ability
91 | to manage and interact with their AI models directly from the Ollama package.
92 | 
93 | ## Contributing
94 | 
95 | Contributions are welcome. Please open an issue or submit a pull request in the main repository
96 | at [https://github.com/modelflow-ai/.github](https://github.com/modelflow-ai/.github).
97 | 
98 | ## License
99 | 
100 | This project is licensed under the MIT License. For the full copyright and license information, please view the LICENSE
101 | file that was distributed with this source code.
102 | 
--------------------------------------------------------------------------------
/src/Resources/Chat.php:
--------------------------------------------------------------------------------
1 | 
9 |  *
10 |  * For the full copyright and license information, please view the LICENSE
11 |  * file that was distributed with this source code.
12 | */ 13 | 14 | namespace ModelflowAi\Ollama\Resources; 15 | 16 | use ModelflowAi\ApiClient\Resources\Concerns\Streamable; 17 | use ModelflowAi\ApiClient\Transport\Payload; 18 | use ModelflowAi\ApiClient\Transport\TransportInterface; 19 | use ModelflowAi\Ollama\Responses\Chat\CreateResponse; 20 | use ModelflowAi\Ollama\Responses\Chat\CreateStreamedResponse; 21 | use Webmozart\Assert\Assert; 22 | 23 | final readonly class Chat implements ChatInterface 24 | { 25 | use Streamable; 26 | 27 | public function __construct( 28 | private TransportInterface $transport, 29 | ) { 30 | } 31 | 32 | public function create(array $parameters): CreateResponse 33 | { 34 | $this->ensureNotStreamed($parameters); 35 | $this->validateParameters($parameters); 36 | $parameters['stream'] = false; 37 | 38 | $payload = Payload::create('chat', $parameters); 39 | 40 | $response = $this->transport->requestObject($payload); 41 | 42 | // @phpstan-ignore-next-line 43 | return CreateResponse::from($response->data, $response->meta); 44 | } 45 | 46 | public function createStreamed(array $parameters): \Iterator 47 | { 48 | $this->validateParameters($parameters); 49 | $parameters['stream'] = true; 50 | 51 | $payload = Payload::create('chat', $parameters); 52 | 53 | foreach ($this->transport->requestStream($payload) as $index => $response) { 54 | // @phpstan-ignore-next-line 55 | yield CreateStreamedResponse::from($index, $response->data, $response->meta); 56 | } 57 | } 58 | 59 | /** 60 | * @param array $parameters 61 | */ 62 | private function validateParameters(array $parameters): void 63 | { 64 | Assert::keyExists($parameters, 'model'); 65 | Assert::string($parameters['model']); 66 | 67 | Assert::keyExists($parameters, 'messages'); 68 | Assert::isArray($parameters['messages']); 69 | foreach ($parameters['messages'] as $message) { 70 | Assert::keyExists($message, 'role'); 71 | Assert::string($message['role']); 72 | Assert::inArray($message['role'], ['system', 'user', 'assistant']); 73 | Assert::keyExists($message, 'content'); 74 | Assert::string($message['content']); 75 | 76 | if (isset($message['images'])) { 77 | Assert::isArray($message['images']); 78 | Assert::allString($message['images']); 79 | } 80 | } 81 | 82 | if (isset($parameters['format'])) { 83 | Assert::string($parameters['format']); 84 | Assert::inArray($parameters['format'], ['json']); 85 | } 86 | 87 | if (isset($parameters['template'])) { 88 | Assert::string($parameters['template']); 89 | } 90 | } 91 | } 92 | --------------------------------------------------------------------------------
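The resources above compose directly: `Ollama::factory()` exposes the `Factory`, `Factory::withBaseUrl()` points the client at a non-default Ollama server, and `ChatInterface::createStreamed()` yields the per-chunk deltas used in `examples/chat-streamed.php`. A minimal sketch combining the two — the base URL is a placeholder, and it assumes an Ollama server that has the `llama2` model pulled:

```php
<?php

use ModelflowAi\Ollama\Ollama;

require_once __DIR__ . '/vendor/autoload.php';

// Build a client against a non-default Ollama host (placeholder URL).
$client = Ollama::factory()
    ->withBaseUrl('http://ollama.internal:11434/api')
    ->make();

// Stream a chat completion and print the deltas as they arrive.
$stream = $client->chat()->createStreamed([
    'model' => 'llama2',
    'messages' => [
        ['role' => 'system', 'content' => 'You are an angry bot!'],
        ['role' => 'user', 'content' => 'Hello world!'],
    ],
]);

foreach ($stream as $response) {
    if (0 === $response->index) {
        echo $response->message->role . ': ';
    }

    echo $response->message->delta;
}
```

`withBaseUrl()` normalizes the trailing slash (`\rtrim($baseUrl, '/') . '/'`), so both `.../api` and `.../api/` resolve to the same endpoint.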