borah/llm-port-laravel

Wrapper around the most popular LLMs that allows drop-in replacement of large language models in Laravel.

1.0.6 2024-11-26 11:38 UTC

This package is auto-updated.

Last update: 2024-11-26 11:38:25 UTC


README

Wrapper around the most popular LLMs that allows drop-in replacement of large language models in Laravel.

Installation

You can install the package via composer:

composer require borah/llm-port-laravel

You can publish the config file with:

php artisan vendor:publish --tag="llm-port-laravel-config"

This is the contents of the published config file:

return [
    'default' => env('LLMPORT_DEFAULT_DRIVER', 'openai'),
    'drivers' => [
        'openai' => [
            'key' => env('OPENAI_API_KEY'),
            'default_model' => env('OPENAI_MODEL', 'gpt-4o-mini'),
            'organization' => env('OPENAI_ORGANIZATION'),
            'base_uri' => env('OPENAI_BASE_URI'),
        ],
        'gemini' => [
            'key' => env('GEMINI_API_KEY'),
            'default_model' => env('GEMINI_MODEL', 'gemini-1.5-flash-latest'),
        ],
        'anthropic' => [
            'key' => env('ANTHROPIC_API_KEY'),
            'default_model' => env('ANTHROPIC_MODEL', 'claude-3-5-sonnet-20240620'),
        ],
        'replicate' => [
            'key' => env('REPLICATE_API_KEY'),
            'default_model' => env('REPLICATE_MODEL', 'meta/meta-llama-3-8b-instruct'),
            'poll_interval' => env('REPLICATE_POLL_INTERVAL', 100000),
        ],
        'groq' => [
            'key' => env('GROQ_API_KEY'),
            'default_model' => env('GROQ_MODEL', 'llama-3.1-8b-instant'),
        ],
        'nebius' => [
            'key' => env('NEBIUS_API_KEY'),
            'default_model' => env('NEBIUS_MODEL', 'meta-llama/Meta-Llama-3.1-8B-Instruct'),
        ],
    ],
];

Usage

use Borah\LLMPort\Facades\LLMPort;
use Borah\LLMPort\Enums\MessageRole;
use Borah\LLMPort\ValueObjects\ChatMessage;
use Borah\LLMPort\ValueObjects\ChatRequest;

$response = LLMPort::chat(new ChatRequest(
    messages: [
        new ChatMessage(role: MessageRole::System, content: 'You are an AI assistant that just replies with Yes or No'),
        new ChatMessage(role: MessageRole::User, content: 'Are you an AI model?'),
    ]
));

echo $response->id; // 'chatcmpl-...'
echo $response->content; // 'Yes'
echo $response->finishReason; // 'stop'
echo $response->usage?->inputTokens; // 5
echo $response->usage?->outputTokens; // 10
echo $response->usage?->totalTokens(); // 15

You can also choose the model to use:

use Borah\LLMPort\Facades\LLMPort;
use Borah\LLMPort\Enums\MessageRole;
use Borah\LLMPort\ValueObjects\ChatMessage;
use Borah\LLMPort\ValueObjects\ChatRequest;

$response = LLMPort::driver('openai')->using('gpt-4o-mini')->chat(new ChatRequest(
    messages: [
        new ChatMessage(role: MessageRole::System, content: 'You are an AI assistant that just replies with Yes or No'),
        new ChatMessage(role: MessageRole::User, content: 'Are you an AI model?'),
    ]
));

Or define a specific driver:

use Borah\LLMPort\Facades\LLMPort;
use Borah\LLMPort\Enums\MessageRole;
use Borah\LLMPort\ValueObjects\ChatMessage;
use Borah\LLMPort\ValueObjects\ChatRequest;

$response = LLMPort::driver('gemini')->chat(new ChatRequest(
    messages: [
        new ChatMessage(role: MessageRole::System, content: 'You are an AI assistant that just replies with Yes or No'),
        new ChatMessage(role: MessageRole::User, content: 'Are you an AI model?'),
    ]
));

The supported drivers are: `openai`, `gemini`, `anthropic`, `replicate`, `groq`, and `nebius`.

You can also create your own driver:

use Borah\LLMPort\Contracts\CanListModels;
use Borah\LLMPort\Contracts\CanStreamChat;
use Borah\LLMPort\Drivers\LlmProvider;
use Borah\LLMPort\Enums\MessageRole;
use Borah\LLMPort\ValueObjects\ChatMessage;
use Borah\LLMPort\ValueObjects\ChatRequest;

class MyAwesomeDriver extends LlmProvider implements CanListModels, CanStreamChat
{
    public function models(): Collection
    {
        return collect([
          new LlmModel(name: 'model-1'),
          new LlmModel(name: 'model-2'),
        ]);
    }

    public function chat(ChatRequest $request): ChatResponse
    {
        // Your implementation
    }

    public function chatStream(ChatRequest $request, Closure $onOutput): ChatResponse
    {
        // Your implementation

        // When you get the server event: `$onOutput($delta, $fullContent);`
    }

    public function driver(): ?string
    {
      return 'my_awesome_driver';
    }
}
use Borah\LLMPort\Facades\LLMPort;

LLMPort::register('my_awesome_driver', MyAwesomeDriver::class);

The key returned by your `driver()` method must also be registered under `drivers` in the `llmport.php` config file.

License

The MIT License (MIT). Please see License File for more information.