From 260c3ff8a4942799e285921f58eb84323a77d5f2 Mon Sep 17 00:00:00 2001 From: devsahm Date: Mon, 9 Feb 2026 12:26:03 +0100 Subject: [PATCH] modelslab provider integration --- config/prism.php | 4 + docs/providers/modelslab.md | 257 +++++++++++++ src/Enums/Provider.php | 1 + src/PrismManager.php | 12 + .../Concerns/HandlesAsyncRequests.php | 48 +++ .../ModelsLab/Concerns/MapsFinishReason.php | 21 ++ .../ModelsLab/Concerns/ValidatesResponse.php | 26 ++ src/Providers/ModelsLab/Handlers/Audio.php | 176 +++++++++ src/Providers/ModelsLab/Handlers/Images.php | 163 +++++++++ src/Providers/ModelsLab/Handlers/Stream.php | 281 +++++++++++++++ src/Providers/ModelsLab/Handlers/Text.php | 107 ++++++ .../ModelsLab/Maps/FinishReasonMap.php | 21 ++ .../ModelsLab/Maps/ImageRequestMap.php | 66 ++++ src/Providers/ModelsLab/Maps/MessageMap.php | 102 ++++++ .../ModelsLab/Maps/SpeechToTextRequestMap.php | 39 ++ .../ModelsLab/Maps/TextToSpeechRequestMap.php | 41 +++ src/Providers/ModelsLab/ModelsLab.php | 161 +++++++++ .../modelslab/stream-basic-text-1.sse | 35 ++ tests/Providers/ModelsLab/AudioTest.php | 341 ++++++++++++++++++ .../ModelsLab/ExceptionHandlingTest.php | 239 ++++++++++++ tests/Providers/ModelsLab/ImagesTest.php | 232 ++++++++++++ tests/Providers/ModelsLab/StreamTest.php | 110 ++++++ tests/Providers/ModelsLab/TextTest.php | 239 ++++++++++++ 23 files changed, 2722 insertions(+) create mode 100644 docs/providers/modelslab.md create mode 100644 src/Providers/ModelsLab/Concerns/HandlesAsyncRequests.php create mode 100644 src/Providers/ModelsLab/Concerns/MapsFinishReason.php create mode 100644 src/Providers/ModelsLab/Concerns/ValidatesResponse.php create mode 100644 src/Providers/ModelsLab/Handlers/Audio.php create mode 100644 src/Providers/ModelsLab/Handlers/Images.php create mode 100644 src/Providers/ModelsLab/Handlers/Stream.php create mode 100644 src/Providers/ModelsLab/Handlers/Text.php create mode 100644 src/Providers/ModelsLab/Maps/FinishReasonMap.php create mode 100644 
src/Providers/ModelsLab/Maps/ImageRequestMap.php create mode 100644 src/Providers/ModelsLab/Maps/MessageMap.php create mode 100644 src/Providers/ModelsLab/Maps/SpeechToTextRequestMap.php create mode 100644 src/Providers/ModelsLab/Maps/TextToSpeechRequestMap.php create mode 100644 src/Providers/ModelsLab/ModelsLab.php create mode 100644 tests/Fixtures/modelslab/stream-basic-text-1.sse create mode 100644 tests/Providers/ModelsLab/AudioTest.php create mode 100644 tests/Providers/ModelsLab/ExceptionHandlingTest.php create mode 100644 tests/Providers/ModelsLab/ImagesTest.php create mode 100644 tests/Providers/ModelsLab/StreamTest.php create mode 100644 tests/Providers/ModelsLab/TextTest.php diff --git a/config/prism.php b/config/prism.php index 58ff34d82..84701b789 100644 --- a/config/prism.php +++ b/config/prism.php @@ -61,5 +61,9 @@ 'x_title' => env('OPENROUTER_SITE_X_TITLE', null), ], ], + 'modelslab' => [ + 'api_key' => env('MODELSLAB_API_KEY', ''), + 'url' => env('MODELSLAB_URL', 'https://modelslab.com/api/v6/'), + ], ], ]; diff --git a/docs/providers/modelslab.md b/docs/providers/modelslab.md new file mode 100644 index 000000000..99159dcd0 --- /dev/null +++ b/docs/providers/modelslab.md @@ -0,0 +1,257 @@ +# ModelsLab + +ModelsLab provides access to image generation, text-to-speech, speech-to-text, and LLM capabilities through a unified API. For the full list of available models and parameters, see the [ModelsLab API Documentation](https://docs.modelslab.com). + +## Configuration + +```php +'modelslab' => [ + 'api_key' => env('MODELSLAB_API_KEY', ''), + 'url' => env('MODELSLAB_URL', 'https://modelslab.com/api/v6/'), +], +``` + +## Image Generation + +ModelsLab supports text-to-image and image-to-image generation with async processing. 
+ +### Text-to-Image + +```php +use Prism\Prism\Facades\Prism; + +$response = Prism::image() + ->using('modelslab', 'flux') + ->withPrompt('A serene mountain landscape at sunset') + ->generate(); + +$imageUrl = $response->images[0]->url; +``` + +### With Provider Options + +```php +$response = Prism::image() + ->using('modelslab', 'flux') + ->withPrompt('A futuristic city skyline') + ->withProviderOptions([ + 'negative_prompt' => 'blurry, low quality', + 'width' => 1024, + 'height' => 1024, + 'samples' => 1, + 'seed' => 12345, + 'guidance_scale' => 7.5, + 'num_inference_steps' => 30, + 'scheduler' => 'UniPCMultistepScheduler', + 'safety_checker' => false, + ]) + ->generate(); +``` + +### Image-to-Image + +```php +use Prism\Prism\Facades\Prism; +use Prism\Prism\ValueObjects\Media\Image; + +$sourceImage = Image::fromUrl('https://example.com/image.jpg'); + +$response = Prism::image() + ->using('modelslab', 'flux') + ->withPrompt('Transform this into an oil painting', [$sourceImage]) + ->withProviderOptions([ + 'scheduler' => 'UniPCMultistepScheduler', + ]) + ->generate(); +``` + +## Text-to-Speech + +Generate audio from text with customizable voices and languages. 
+ +### Basic Usage + +```php +use Prism\Prism\Facades\Prism; + +$response = Prism::audio() + ->using('modelslab', 'tts') + ->withInput('Hello, welcome to ModelsLab!') + ->withVoice('henry') + ->asAudio(); + +// Save the audio file +$audioContent = base64_decode($response->audio->base64); +file_put_contents('output.mp3', $audioContent); +``` + +### With Language and Speed Options + +```php + $response = Prism::audio() + ->using('modelslab', 'tts') + ->withInput('Bonjour, bienvenue!') + ->withVoice('elodie') + ->withProviderOptions([ + 'language' => 'french', + 'speed' => 1.2, + 'emotion' => true, + ]) + ->asAudio(); +``` + +### Provider Options for Text-to-Speech + +| Option | Description | +|--------|-------------| +| `language` | Language for speech (default: `american english`) | +| `speed` | Speech speed multiplier | +| `emotion` | Emotional tone of the speech, true or false | +| `webhook` | Webhook URL for async notifications | +| `track_id` | Custom tracking ID | + +## Speech-to-Text + +Transcribe audio files to text. 
+ +### Basic Usage + +```php +use Prism\Prism\Facades\Prism; +use Prism\Prism\ValueObjects\Media\Audio; + +$audioFile = Audio::fromPath('/path/to/recording.mp3'); + +$response = Prism::audio() + ->using('modelslab', 'stt') + ->withInput($audioFile) + ->asText(); + +echo $response->text; +``` + +### From URL + +```php +$audioFile = Audio::fromUrl('https://example.com/audio.mp3'); + +$response = Prism::audio() + ->using('modelslab', 'stt') + ->withInput($audioFile) + ->asText(); +``` + +### With Options + +```php +$response = Prism::audio() + ->using('modelslab', 'stt') + ->withInput($audioFile) + ->withProviderOptions([ + 'language' => 'en', + 'timestamp_level' => 'word', + ]) + ->asText(); +``` + +### Provider Options for Speech-to-Text + +| Option | Description | +|--------|-------------| +| `language` | Language code for transcription | +| `timestamp_level` | Timestamp granularity (`word`, `sentence`) | +| `webhook` | Webhook URL for async notifications | +| `track_id` | Custom tracking ID | + +## Async Processing + +ModelsLab uses async processing for image generation and audio operations. The provider automatically handles polling for results, so you don't need to manage this manually. Results are returned once processing is complete. + +## Text/LLM Generation + +ModelsLab provides access to both open-source and closed-source language models through their unified API. 
+ +### Basic Usage + +```php +use Prism\Prism\Facades\Prism; + +$response = Prism::text() + ->using('modelslab', 'gpt-5-mini') + ->withPrompt('Explain quantum computing in simple terms') + ->asText(); +``` + +### With System Prompt + +```php +$response = Prism::text() + ->using('modelslab', 'gpt-5-mini') + ->withSystemPrompt('You are a helpful coding assistant.') + ->withPrompt('Write a function to reverse a string in PHP') + ->asText(); +``` + +### Model Parameters + +```php +$response = Prism::text() + ->using('modelslab', 'claude-3.5-sonnet') + ->withTemperature(0.7) + ->withTopP(0.9) + ->withMaxTokens(1000) + ->withPrompt('Write a creative story') + ->asText(); +``` + +## Streaming + +ModelsLab supports streaming responses for text generation, allowing you to process tokens as they arrive. + +### Basic Streaming + +```php +use Prism\Prism\Facades\Prism; +use Prism\Prism\Streaming\Events\TextDeltaEvent; + +$stream = Prism::text() + ->using('modelslab', 'gpt-5-mini') + ->withPrompt('Write a short story') + ->asStream(); + +foreach ($stream as $event) { + if ($event instanceof TextDeltaEvent) { + echo $event->delta; + } +} +``` + +### Server-Sent Events + +For real-time web applications: + +```php +return Prism::text() + ->using('modelslab', 'llama-3.3-70b') + ->withPrompt(request('message')) + ->asEventStreamResponse(); +``` + +## Error Handling + +```php +use Prism\Prism\Exceptions\PrismException; +use Prism\Prism\Exceptions\PrismRateLimitedException; + +try { + $response = Prism::text() + ->using('modelslab', 'gpt-4o') + ->withPrompt('Hello') + ->asText(); +} catch (PrismRateLimitedException $e) { + // Handle rate limiting +} catch (PrismException $e) { + // Handle other errors + echo $e->getMessage(); +} +``` diff --git a/src/Enums/Provider.php b/src/Enums/Provider.php index 21c7a17f6..d24330e7d 100644 --- a/src/Enums/Provider.php +++ b/src/Enums/Provider.php @@ -17,4 +17,5 @@ enum Provider: string case Gemini = 'gemini'; case VoyageAI = 'voyageai'; case 
ElevenLabs = 'elevenlabs'; + case ModelsLab = 'modelslab'; } diff --git a/src/PrismManager.php b/src/PrismManager.php index 035a21f99..5ce1db902 100644 --- a/src/PrismManager.php +++ b/src/PrismManager.php @@ -14,6 +14,7 @@ use Prism\Prism\Providers\Gemini\Gemini; use Prism\Prism\Providers\Groq\Groq; use Prism\Prism\Providers\Mistral\Mistral; +use Prism\Prism\Providers\ModelsLab\ModelsLab; use Prism\Prism\Providers\Ollama\Ollama; use Prism\Prism\Providers\OpenAI\OpenAI; use Prism\Prism\Providers\OpenRouter\OpenRouter; @@ -225,4 +226,15 @@ protected function createElevenlabsProvider(array $config): ElevenLabs url: $config['url'] ?? 'https://api.elevenlabs.io/v1/', ); } + + /** + * @param array $config + */ + protected function createModelslabProvider(array $config): ModelsLab + { + return new ModelsLab( + apiKey: $config['api_key'] ?? '', + url: $config['url'] ?? 'https://modelslab.com/api/v6/', + ); + } } diff --git a/src/Providers/ModelsLab/Concerns/HandlesAsyncRequests.php b/src/Providers/ModelsLab/Concerns/HandlesAsyncRequests.php new file mode 100644 index 000000000..b12e9191b --- /dev/null +++ b/src/Providers/ModelsLab/Concerns/HandlesAsyncRequests.php @@ -0,0 +1,48 @@ + + * + * @throws PrismException + */ + protected function pollForResult( + PendingRequest $client, + string $fetchUrl, + string $apiKey, + int $maxAttempts = 60, + int $delaySeconds = 5 + ): array { + $attempts = 0; + + while ($attempts < $maxAttempts) { + $response = $client->post($fetchUrl, ['key' => $apiKey]); + $data = $response->json(); + + if (($data['status'] ?? '') === 'success') { + return $data; + } + + if (in_array($data['status'] ?? '', ['failed', 'error'], true)) { + throw PrismException::providerResponseError( + $data['message'] ?? 
'Generation failed' + ); + } + + sleep($delaySeconds); + $attempts++; + } + + throw PrismException::providerResponseError('Request timed out waiting for ModelsLab generation'); + } +} diff --git a/src/Providers/ModelsLab/Concerns/MapsFinishReason.php b/src/Providers/ModelsLab/Concerns/MapsFinishReason.php new file mode 100644 index 000000000..f364b4190 --- /dev/null +++ b/src/Providers/ModelsLab/Concerns/MapsFinishReason.php @@ -0,0 +1,21 @@ + $data + */ + protected function mapFinishReason(array $data): FinishReason + { + return FinishReasonMap::map( + data_get($data, 'choices.0.finish_reason', '') + ); + } +} diff --git a/src/Providers/ModelsLab/Concerns/ValidatesResponse.php b/src/Providers/ModelsLab/Concerns/ValidatesResponse.php new file mode 100644 index 000000000..41e5e1dc7 --- /dev/null +++ b/src/Providers/ModelsLab/Concerns/ValidatesResponse.php @@ -0,0 +1,26 @@ + $data + */ + protected function validateResponse(array $data): void + { + $error = $data['error'] ?? null; + + if ($error !== null) { + $message = is_array($error) + ? ($error['message'] ?? 'Unknown error from ModelsLab API') + : (is_string($error) ? $error : 'Unknown error from ModelsLab API'); + + throw PrismException::providerResponseError($message); + } + } +} diff --git a/src/Providers/ModelsLab/Handlers/Audio.php b/src/Providers/ModelsLab/Handlers/Audio.php new file mode 100644 index 000000000..7e0127bae --- /dev/null +++ b/src/Providers/ModelsLab/Handlers/Audio.php @@ -0,0 +1,176 @@ +client->post( + 'voice/text_to_speech', + TextToSpeechRequestMap::map($request, $this->apiKey) + ); + + $data = $response->json(); + + $this->validateResponse($data); + + if (($data['status'] ?? '') === 'processing') { + $data = $this->pollForResult( + $this->client, + $data['fetch_result'] ?? '', + $this->apiKey + ); + } + + $audioUrl = $data['output'][0] ?? null; + + if (! 
$audioUrl) { + throw PrismException::providerResponseError('No audio URL in response'); + } + + /** @var Response $audioResponse */ + $audioResponse = Http::get($audioUrl); + $audioContent = $audioResponse->body(); + $base64Audio = base64_encode($audioContent); + + return new AudioResponse( + audio: new GeneratedAudio( + base64: $base64Audio, + ), + ); + } + + public function handleSpeechToText(SpeechToTextRequest $request): TextResponse + { + $audioInput = $request->input(); + + $audioUrl = $audioInput->isUrl() + ? ($audioInput->url() ?? '') + : 'data:'.$audioInput->mimeType().';base64,'.$audioInput->base64(); + + $response = $this->client->post( + 'voice/speech_to_text', + SpeechToTextRequestMap::map($request, $this->apiKey, $audioUrl) + ); + + $data = $response->json(); + + $this->validateResponse($data); + + if (($data['status'] ?? '') === 'processing') { + $data = $this->pollForResult( + $this->client, + $data['fetch_result'] ?? '', + $this->apiKey + ); + } + + $text = $this->extractTranscriptionText($data); + + return new TextResponse( + text: $text, + additionalContent: $data, + ); + } + + /** + * Extract transcription text from the response data. + * Handles direct string output, array output with text key, and URL-based JSON output. + * + * @param array $data + */ + protected function extractTranscriptionText(array $data): string + { + $output = $data['output'] ?? ''; + + if (is_string($output)) { + return $output; + } + + if (is_array($output)) { + if (isset($output['text'])) { + return $output['text']; + } + + $firstItem = $output[0] ?? ''; + + if (is_string($firstItem) && str_starts_with($firstItem, 'http')) { + /** @var Response $response */ + $response = Http::get($firstItem); + $jsonData = $response->json(); + + if (is_array($jsonData) && isset($jsonData[0]['text'])) { + return $jsonData[0]['text']; + } + + return ''; + } + + if (is_array($firstItem) && isset($firstItem['text'])) { + return $firstItem['text']; + } + + return is_string($firstItem) ? 
$firstItem : ''; + } + + return ''; + } + + /** + * @param array $data + */ + protected function validateResponse(array $data): void + { + if (($data['status'] ?? '') === 'error') { + $message = $data['message'] ?? $data['messege'] ?? 'Unknown error from ModelsLab API'; + + throw PrismException::providerResponseError( + $this->formatErrorMessage($message) + ); + } + } + + /** + * Format error message from various response formats. + */ + protected function formatErrorMessage(mixed $message): string + { + if (is_string($message)) { + return $message; + } + + if (is_array($message)) { + $errors = []; + foreach ($message as $fieldErrors) { + $errors[] = is_array($fieldErrors) ? implode(' ', $fieldErrors) : (string) $fieldErrors; + } + + return implode(' ', $errors); + } + + return 'Unknown error from ModelsLab API'; + } +} diff --git a/src/Providers/ModelsLab/Handlers/Images.php b/src/Providers/ModelsLab/Handlers/Images.php new file mode 100644 index 000000000..126caf707 --- /dev/null +++ b/src/Providers/ModelsLab/Handlers/Images.php @@ -0,0 +1,163 @@ +sendRequest($request); + + $data = $response->json(); + + $this->validateResponse($data); + + if (($data['status'] ?? '') === 'processing') { + $data = $this->pollForResult( + $this->client, + $data['fetch_result'] ?? '', + $this->apiKey + ); + } + + return $this->buildResponse($request, $data); + } + + protected function sendRequest(Request $request): ClientResponse + { + if ($request->additionalContent()) { + return $this->sendImg2ImgRequest($request); + } + + /** @var ClientResponse $response */ + $response = $this->client->post( + 'images/text2img', + ImageRequestMap::map($request, $this->apiKey) + ); + + return $response; + } + + protected function sendImg2ImgRequest(Request $request): ClientResponse + { + $images = $request->additionalContent(); + $firstImage = $images[0] ?? null; + + if (! 
$firstImage) { + throw PrismException::providerResponseError('No image provided for img2img request'); + } + + $initImage = $firstImage->hasUrl() + ? ($firstImage->url() ?? '') + : 'data:'.$firstImage->mimeType().';base64,'.$firstImage->base64(); + + /** @var ClientResponse $response */ + $response = $this->client->post( + 'images/img2img', + ImageRequestMap::mapImg2Img($request, $this->apiKey, $initImage) + ); + + return $response; + } + + /** + * @param array $data + */ + protected function validateResponse(array $data): void + { + if (($data['status'] ?? '') === 'error') { + $message = $data['message'] ?? $data['messege'] ?? 'Unknown error from ModelsLab API'; + + throw PrismException::providerResponseError( + $this->formatErrorMessage($message) + ); + } + } + + /** + * Format error message from various response formats. + */ + protected function formatErrorMessage(mixed $message): string + { + if (is_string($message)) { + return $message; + } + + if (is_array($message)) { + $errors = []; + foreach ($message as $fieldErrors) { + $errors[] = is_array($fieldErrors) ? implode(' ', $fieldErrors) : (string) $fieldErrors; + } + + return implode(' ', $errors); + } + + return 'Unknown error from ModelsLab API'; + } + + /** + * @param array $data + */ + protected function buildResponse(Request $request, array $data): Response + { + $images = $this->extractImages($data); + + $responseBuilder = new ResponseBuilder( + usage: new Usage( + promptTokens: 0, + completionTokens: 0, + ), + meta: new Meta( + id: (string) ($data['id'] ?? ''), + model: $request->model(), + ), + images: $images, + additionalContent: [ + 'generation_time' => $data['generationTime'] ?? null, + 'seed' => $data['meta']['seed'] ?? null, + ], + raw: $data, + ); + + return $responseBuilder->toResponse(); + } + + /** + * @param array $data + * @return GeneratedImage[] + */ + protected function extractImages(array $data): array + { + $images = []; + $outputs = $data['output'] ?? 
[]; + + foreach ($outputs as $output) { + $images[] = new GeneratedImage( + url: $output, + ); + } + + return $images; + } +} diff --git a/src/Providers/ModelsLab/Handlers/Stream.php b/src/Providers/ModelsLab/Handlers/Stream.php new file mode 100644 index 000000000..872310564 --- /dev/null +++ b/src/Providers/ModelsLab/Handlers/Stream.php @@ -0,0 +1,281 @@ +state = new StreamState; + } + + /** + * @return Generator + */ + public function handle(Request $request): Generator + { + $response = $this->sendRequest($request); + + yield from $this->processStream($response, $request); + } + + /** + * @return Generator + */ + protected function processStream(Response $response, Request $request): Generator + { + $this->state->reset(); + + $text = ''; + + while (! $response->getBody()->eof()) { + $data = $this->parseNextDataLine($response->getBody()); + + if ($data === null) { + continue; + } + + if ($this->state->shouldEmitStreamStart()) { + $this->state->withMessageId(EventID::generate())->markStreamStarted(); + + yield new StreamStartEvent( + id: EventID::generate(), + timestamp: time(), + model: $request->model(), + provider: 'modelslab' + ); + } + + if ($this->state->shouldEmitStepStart()) { + $this->state->markStepStarted(); + + yield new StepStartEvent( + id: EventID::generate(), + timestamp: time() + ); + } + + if ($this->hasError($data)) { + yield from $this->handleErrors($data); + + continue; + } + + $content = data_get($data, 'choices.0.delta.content', '') ?? 
''; + + if ($content !== '') { + if ($this->state->shouldEmitTextStart()) { + $this->state->markTextStarted(); + + yield new TextStartEvent( + id: EventID::generate(), + timestamp: time(), + messageId: $this->state->messageId() + ); + } + + $text .= $content; + + yield new TextDeltaEvent( + id: EventID::generate(), + timestamp: time(), + delta: $content, + messageId: $this->state->messageId() + ); + } + + $rawFinishReason = data_get($data, 'choices.0.finish_reason'); + if ($rawFinishReason !== null) { + $finishReason = FinishReasonMap::map($rawFinishReason); + + if ($this->state->hasTextStarted() && $text !== '') { + $this->state->markTextCompleted(); + + yield new TextCompleteEvent( + id: EventID::generate(), + timestamp: time(), + messageId: $this->state->messageId() + ); + } + + $this->state->withFinishReason($finishReason); + + $usage = $this->extractUsage($data); + if ($usage instanceof Usage) { + $this->state->addUsage($usage); + } + } + } + + $this->state->markStepFinished(); + yield new StepFinishEvent( + id: EventID::generate(), + timestamp: time() + ); + + yield new StreamEndEvent( + id: EventID::generate(), + timestamp: time(), + finishReason: $this->state->finishReason() ?? FinishReason::Stop, + usage: $this->state->usage() + ); + } + + /** + * @return array|null + */ + protected function parseNextDataLine(StreamInterface $stream): ?array + { + $line = $this->readLine($stream); + + if (! 
str_starts_with($line, 'data:')) { + return null; + } + + $line = trim(substr($line, strlen('data: '))); + + if ($line === '' || $line === '[DONE]') { + return null; + } + + try { + return json_decode($line, true, flags: JSON_THROW_ON_ERROR); + } catch (Throwable $e) { + throw new PrismStreamDecodeException('ModelsLab', $e); + } + } + + /** + * @param array $data + */ + protected function hasError(array $data): bool + { + return data_get($data, 'error') !== null; + } + + /** + * @param array $data + */ + protected function extractUsage(array $data): ?Usage + { + $usage = data_get($data, 'usage'); + + if (! $usage) { + return null; + } + + return new Usage( + promptTokens: (int) data_get($usage, 'prompt_tokens', 0), + completionTokens: (int) data_get($usage, 'completion_tokens', 0) + ); + } + + protected function sendRequest(Request $request): Response + { + try { + /** @var Response $response */ + $response = $this + ->client + ->withOptions(['stream' => true]) + ->throw() + ->post('v7/llm/chat/completions', + array_merge([ + 'stream' => true, + 'model' => $request->model(), + 'messages' => (new MessageMap($request->messages(), $request->systemPrompts()))(), + ], Arr::whereNotNull([ + 'max_tokens' => $request->maxTokens(), + 'temperature' => $request->temperature(), + 'top_p' => $request->topP(), + 'presence_penalty' => $request->providerOptions('presence_penalty'), + 'frequency_penalty' => $request->providerOptions('frequency_penalty'), + ])) + ); + + return $response; + } catch (RequestException $e) { + if ($e->response->getStatusCode() === 429) { + throw new PrismRateLimitedException([]); + } + + throw PrismException::providerRequestError($request->model(), $e); + } + } + + protected function readLine(StreamInterface $stream): string + { + $buffer = ''; + + while (! 
$stream->eof()) { + $byte = $stream->read(1); + + if ($byte === '') { + return $buffer; + } + + $buffer .= $byte; + + if ($byte === "\n") { + break; + } + } + + return $buffer; + } + + /** + * @param array $data + * @return Generator + */ + protected function handleErrors(array $data): Generator + { + $error = data_get($data, 'error', []); + $type = is_array($error) ? data_get($error, 'type', 'unknown_error') : 'unknown_error'; + $message = is_array($error) ? data_get($error, 'message', 'No error message provided') : (string) $error; + + if ($type === 'rate_limit_exceeded') { + throw new PrismRateLimitedException([]); + } + + yield new ErrorEvent( + id: EventID::generate(), + timestamp: time(), + errorType: $type, + message: $message, + recoverable: false + ); + } +} diff --git a/src/Providers/ModelsLab/Handlers/Text.php b/src/Providers/ModelsLab/Handlers/Text.php new file mode 100644 index 000000000..3bdfed8c6 --- /dev/null +++ b/src/Providers/ModelsLab/Handlers/Text.php @@ -0,0 +1,107 @@ +responseBuilder = new ResponseBuilder; + } + + public function handle(Request $request): TextResponse + { + $data = $this->sendRequest($request); + + $this->validateResponse($data); + + return match ($this->mapFinishReason($data)) { + FinishReason::Stop => $this->handleStop($data, $request), + FinishReason::Length => throw new PrismException('ModelsLab: max tokens exceeded'), + default => throw new PrismException('ModelsLab: unknown finish reason'), + }; + } + + /** + * @param array $data + */ + protected function handleStop(array $data, Request $request): TextResponse + { + $this->addStep($data, $request); + + return $this->responseBuilder->toResponse(); + } + + /** + * @return array + */ + protected function sendRequest(Request $request): array + { + /** @var Response $response */ + $response = $this->client->post( + 'v7/llm/chat/completions', + array_merge([ + 'model' => $request->model(), + 'messages' => (new MessageMap($request->messages(), $request->systemPrompts()))(), + 
], Arr::whereNotNull([ + 'max_tokens' => $request->maxTokens(), + 'temperature' => $request->temperature(), + 'top_p' => $request->topP(), + 'presence_penalty' => $request->providerOptions('presence_penalty'), + 'frequency_penalty' => $request->providerOptions('frequency_penalty'), + ])) + ); + + return $response->json(); + } + + /** + * @param array $data + */ + protected function addStep(array $data, Request $request): void + { + $this->responseBuilder->addStep(new Step( + text: data_get($data, 'choices.0.message.content') ?? '', + finishReason: $this->mapFinishReason($data), + toolCalls: [], + toolResults: [], + providerToolCalls: [], + usage: new Usage( + data_get($data, 'usage.prompt_tokens', 0), + data_get($data, 'usage.completion_tokens', 0), + ), + meta: new Meta( + id: data_get($data, 'id', ''), + model: data_get($data, 'model', ''), + ), + messages: $request->messages(), + systemPrompts: $request->systemPrompts(), + additionalContent: [], + raw: $data, + )); + } +} diff --git a/src/Providers/ModelsLab/Maps/FinishReasonMap.php b/src/Providers/ModelsLab/Maps/FinishReasonMap.php new file mode 100644 index 000000000..135329b2c --- /dev/null +++ b/src/Providers/ModelsLab/Maps/FinishReasonMap.php @@ -0,0 +1,21 @@ + FinishReason::Stop, + 'tool_calls' => FinishReason::ToolCalls, + 'length' => FinishReason::Length, + 'content_filter' => FinishReason::ContentFilter, + default => FinishReason::Unknown, + }; + } +} diff --git a/src/Providers/ModelsLab/Maps/ImageRequestMap.php b/src/Providers/ModelsLab/Maps/ImageRequestMap.php new file mode 100644 index 000000000..e49ed9ca6 --- /dev/null +++ b/src/Providers/ModelsLab/Maps/ImageRequestMap.php @@ -0,0 +1,66 @@ + + */ + public static function map(Request $request, string $apiKey): array + { + $providerOptions = $request->providerOptions(); + + $baseData = [ + 'key' => $apiKey, + 'prompt' => $request->prompt(), + 'model_id' => $request->model(), + ]; + + $supportedOptions = [ + 'negative_prompt' => 
$providerOptions['negative_prompt'] ?? null, + 'width' => $providerOptions['width'] ?? null, + 'height' => $providerOptions['height'] ?? null, + 'samples' => $providerOptions['samples'] ?? null, + 'seed' => $providerOptions['seed'] ?? null, + 'safety_checker' => $providerOptions['safety_checker'] ?? null, + 'base64' => $providerOptions['base64'] ?? null, + 'webhook' => $providerOptions['webhook'] ?? null, + 'track_id' => $providerOptions['track_id'] ?? null, + 'guidance_scale' => $providerOptions['guidance_scale'] ?? null, + 'num_inference_steps' => $providerOptions['num_inference_steps'] ?? null, + 'scheduler' => $providerOptions['scheduler'] ?? null, + 'enhance_prompt' => $providerOptions['enhance_prompt'] ?? null, + 'model_id' => $providerOptions['model_id'] ?? null, + ]; + + $additionalOptions = array_diff_key($providerOptions, $supportedOptions); + + return array_merge( + $baseData, + Arr::whereNotNull($supportedOptions), + $additionalOptions + ); + } + + /** + * @return array + */ + public static function mapImg2Img(Request $request, string $apiKey, string $initImage): array + { + $baseMap = self::map($request, $apiKey); + + $baseMap['init_image'] = $initImage; + + if ($strength = $request->providerOptions('strength')) { + $baseMap['strength'] = $strength; + } + + return $baseMap; + } +} diff --git a/src/Providers/ModelsLab/Maps/MessageMap.php b/src/Providers/ModelsLab/Maps/MessageMap.php new file mode 100644 index 000000000..381c21c21 --- /dev/null +++ b/src/Providers/ModelsLab/Maps/MessageMap.php @@ -0,0 +1,102 @@ + */ + protected array $mappedMessages = []; + + /** + * @param array $messages + * @param SystemMessage[] $systemPrompts + */ + public function __construct( + protected array $messages, + protected array $systemPrompts + ) { + $this->messages = array_merge( + $this->systemPrompts, + $this->messages + ); + } + + /** + * @return array + */ + public function __invoke(): array + { + array_map( + $this->mapMessage(...), + $this->messages + ); + + 
return $this->mappedMessages; + } + + protected function mapMessage(Message $message): void + { + match ($message::class) { + UserMessage::class => $this->mapUserMessage($message), + AssistantMessage::class => $this->mapAssistantMessage($message), + ToolResultMessage::class => $this->mapToolResultMessage($message), + SystemMessage::class => $this->mapSystemMessage($message), + default => throw new Exception('Could not map message type '.$message::class), + }; + } + + protected function mapSystemMessage(SystemMessage $message): void + { + $this->mappedMessages[] = [ + 'role' => 'system', + 'content' => $message->content, + ]; + } + + protected function mapToolResultMessage(ToolResultMessage $message): void + { + foreach ($message->toolResults as $toolResult) { + $this->mappedMessages[] = [ + 'role' => 'tool', + 'tool_call_id' => $toolResult->toolCallId, + 'content' => $toolResult->result, + ]; + } + } + + protected function mapUserMessage(UserMessage $message): void + { + $this->mappedMessages[] = [ + 'role' => 'user', + 'content' => $message->text(), + ]; + } + + protected function mapAssistantMessage(AssistantMessage $message): void + { + $toolCalls = array_map(fn (ToolCall $toolCall): array => [ + 'id' => $toolCall->id, + 'type' => 'function', + 'function' => [ + 'name' => $toolCall->name, + 'arguments' => json_encode($toolCall->arguments()), + ], + ], $message->toolCalls); + + $this->mappedMessages[] = array_filter([ + 'role' => 'assistant', + 'content' => $message->content, + 'tool_calls' => $toolCalls, + ]); + } +} diff --git a/src/Providers/ModelsLab/Maps/SpeechToTextRequestMap.php b/src/Providers/ModelsLab/Maps/SpeechToTextRequestMap.php new file mode 100644 index 000000000..a5a655204 --- /dev/null +++ b/src/Providers/ModelsLab/Maps/SpeechToTextRequestMap.php @@ -0,0 +1,39 @@ + + */ + public static function map(SpeechToTextRequest $request, string $apiKey, string $audioUrl): array + { + $providerOptions = $request->providerOptions(); + + $baseData = [ + 
'key' => $apiKey, + 'init_audio' => $audioUrl, + ]; + + $supportedOptions = [ + 'language' => $providerOptions['language'] ?? null, + 'timestamp_level' => $providerOptions['timestamp_level'] ?? null, + 'webhook' => $providerOptions['webhook'] ?? null, + 'track_id' => $providerOptions['track_id'] ?? null, + ]; + + $additionalOptions = array_diff_key($providerOptions, $supportedOptions); + + return array_merge( + $baseData, + Arr::whereNotNull($supportedOptions), + $additionalOptions + ); + } +} diff --git a/src/Providers/ModelsLab/Maps/TextToSpeechRequestMap.php b/src/Providers/ModelsLab/Maps/TextToSpeechRequestMap.php new file mode 100644 index 000000000..4315815a6 --- /dev/null +++ b/src/Providers/ModelsLab/Maps/TextToSpeechRequestMap.php @@ -0,0 +1,41 @@ + + */ + public static function map(TextToSpeechRequest $request, string $apiKey): array + { + $providerOptions = $request->providerOptions(); + + $baseData = [ + 'key' => $apiKey, + 'prompt' => $request->input(), + 'voice_id' => $request->voice(), + ]; + + $supportedOptions = [ + 'language' => $providerOptions['language'] ?? 'american english', + 'speed' => $providerOptions['speed'] ?? null, + 'emotion' => $providerOptions['emotion'] ?? null, + 'webhook' => $providerOptions['webhook'] ?? null, + 'track_id' => $providerOptions['track_id'] ?? 
null, + ]; + + $additionalOptions = array_diff_key($providerOptions, $supportedOptions); + + return array_merge( + $baseData, + Arr::whereNotNull($supportedOptions), + $additionalOptions + ); + } +} diff --git a/src/Providers/ModelsLab/ModelsLab.php b/src/Providers/ModelsLab/ModelsLab.php new file mode 100644 index 000000000..336af5403 --- /dev/null +++ b/src/Providers/ModelsLab/ModelsLab.php @@ -0,0 +1,161 @@ +chatClient($request->clientOptions(), $request->clientRetry()), + $this->apiKey + ); + + try { + return $handler->handle($request); + } catch (RequestException $e) { + $this->handleRequestException($request->model(), $e); + } + } + + #[\Override] + public function images(ImagesRequest $request): ImagesResponse + { + $handler = new Images( + $this->client($request->clientOptions(), $request->clientRetry()), + $this->apiKey + ); + + try { + return $handler->handle($request); + } catch (RequestException $e) { + $this->handleRequestException($request->model(), $e); + } + } + + #[\Override] + public function textToSpeech(TextToSpeechRequest $request): TextToSpeechResponse + { + $handler = new Audio( + $this->client($request->clientOptions(), $request->clientRetry()), + $this->apiKey + ); + + try { + return $handler->handleTextToSpeech($request); + } catch (RequestException $e) { + $this->handleRequestException($request->model(), $e); + } + } + + #[\Override] + public function speechToText(SpeechToTextRequest $request): SpeechToTextResponse + { + $handler = new Audio( + $this->client($request->clientOptions(), $request->clientRetry()), + $this->apiKey + ); + + try { + return $handler->handleSpeechToText($request); + } catch (RequestException $e) { + $this->handleRequestException($request->model(), $e); + } + } + + /** + * @return Generator + */ + #[\Override] + public function stream(TextRequest $request): Generator + { + $handler = new Stream( + $this->chatClient($request->clientOptions(), $request->clientRetry()), + $this->apiKey + ); + + return 
$handler->handle($request); + } + + #[\Override] + public function handleRequestException(string $model, RequestException $e): never + { + $response = $e->response; + $body = $response->json() ?? []; + $status = $response->status(); + + $message = $body['message'] ?? $body['messege'] ?? $body['error'] ?? 'Unknown error from ModelsLab API'; + + if ($status === 429) { + throw PrismRateLimitedException::make([]); + } + + throw PrismException::providerResponseError( + vsprintf('ModelsLab Error [%s]: %s', [$status, $message]) + ); + } + + /** + * @param array $options + * @param array $retry + */ + protected function client(array $options = [], array $retry = []): PendingRequest + { + return $this->baseClient() + ->withHeaders([ + 'Content-Type' => 'application/json', + ]) + ->withOptions($options) + ->when($retry !== [], fn ($client) => $client->retry(...$retry)) + ->baseUrl($this->url); + } + + /** + * Client for the chat completions endpoint (uses Bearer token auth). + * + * @param array $options + * @param array $retry + */ + protected function chatClient(array $options = [], array $retry = []): PendingRequest + { + return $this->baseClient() + ->withToken($this->apiKey) + ->withHeaders([ + 'Content-Type' => 'application/json', + ]) + ->withOptions($options) + ->when($retry !== [], fn ($client) => $client->retry(...$retry)) + ->baseUrl('https://modelslab.com/api/'); + } +} diff --git a/tests/Fixtures/modelslab/stream-basic-text-1.sse b/tests/Fixtures/modelslab/stream-basic-text-1.sse new file mode 100644 index 000000000..8a57c6059 --- /dev/null +++ b/tests/Fixtures/modelslab/stream-basic-text-1.sse @@ -0,0 +1,35 @@ +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"role":"assistant","content":""},"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":"Hello"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":"!"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":" I'm"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":" a"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":" helpful"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":" AI"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":" assistant"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":"."},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":" How"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":" can"},"finish_reason":null}]} + +data: 
{"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":" I"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":" help"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":" you"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":" today"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":"?"},"finish_reason":null}]} + +data: {"id":"chatcmpl-ml123456","object":"chat.completion.chunk","created":1736272055,"model":"llama-3.3-70b","choices":[{"index":0,"delta":{"content":""},"finish_reason":"stop"}],"usage":{"prompt_tokens":10,"completion_tokens":20,"total_tokens":30}} + +data: [DONE] diff --git a/tests/Providers/ModelsLab/AudioTest.php b/tests/Providers/ModelsLab/AudioTest.php new file mode 100644 index 000000000..40ab8595e --- /dev/null +++ b/tests/Providers/ModelsLab/AudioTest.php @@ -0,0 +1,341 @@ +set('prism.providers.modelslab.api_key', 'test-api-key'); +}); + +describe('Text-to-Speech', function (): void { + it('can generate audio with text-to-speech', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/text_to_speech' => Http::response([ + 'status' => 'success', + 'output' => ['https://example.com/audio.mp3'], + ], 200), + 'example.com/audio.mp3' => Http::response('fake-audio-content', 200), + ]); + + $response = Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput('Hello world!') + ->withVoice('madison') + ->asAudio(); + + 
expect($response->audio)->not->toBeNull(); + expect($response->audio->hasBase64())->toBeTrue(); + expect($response->audio->base64)->toBe(base64_encode('fake-audio-content')); + + Http::assertSent(function (Request $request): bool { + if (! str_contains($request->url(), 'text_to_speech')) { + return true; + } + + $data = $request->data(); + + return $request->url() === 'https://modelslab.com/api/v6/voice/text_to_speech' && + $data['prompt'] === 'Hello world!' && + $data['voice_id'] === 'madison' && + $data['key'] === 'test-api-key'; + }); + }); + + it('can generate audio with language option', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/text_to_speech' => Http::response([ + 'status' => 'success', + 'output' => ['https://example.com/spanish-audio.mp3'], + ], 200), + 'example.com/spanish-audio.mp3' => Http::response('fake-spanish-audio', 200), + ]); + + $response = Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput('Hola mundo!') + ->withVoice('sofia') + ->withProviderOptions([ + 'language' => 'spanish', + ]) + ->asAudio(); + + Http::assertSent(function (Request $request): bool { + if (! str_contains($request->url(), 'text_to_speech')) { + return true; + } + + $data = $request->data(); + + return $data['language'] === 'spanish'; + }); + }); + + it('can generate audio with speed option', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/text_to_speech' => Http::response([ + 'status' => 'success', + 'output' => ['https://example.com/fast-audio.mp3'], + ], 200), + 'example.com/fast-audio.mp3' => Http::response('fake-fast-audio', 200), + ]); + + $response = Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput('Quick message') + ->withVoice('madison') + ->withProviderOptions([ + 'speed' => 1.5, + ]) + ->asAudio(); + + Http::assertSent(function (Request $request): bool { + if (! 
str_contains($request->url(), 'text_to_speech')) { + return true; + } + + $data = $request->data(); + + return $data['speed'] === 1.5; + }); + }); + + it('uses default american english language', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/text_to_speech' => Http::response([ + 'status' => 'success', + 'output' => ['https://example.com/audio.mp3'], + ], 200), + 'example.com/audio.mp3' => Http::response('fake-audio', 200), + ]); + + $response = Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput('Test') + ->withVoice('madison') + ->asAudio(); + + Http::assertSent(function (Request $request): bool { + if (! str_contains($request->url(), 'text_to_speech')) { + return true; + } + + $data = $request->data(); + + return $data['language'] === 'american english'; + }); + }); + + it('handles async processing status for text-to-speech', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/text_to_speech' => Http::response([ + 'status' => 'processing', + 'fetch_result' => 'https://modelslab.com/api/v6/voice/fetch/12345', + ], 200), + 'modelslab.com/api/v6/voice/fetch/12345' => Http::response([ + 'status' => 'success', + 'output' => ['https://example.com/async-audio.mp3'], + ], 200), + 'example.com/async-audio.mp3' => Http::response('async-audio-content', 200), + ]); + + $response = Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput('Hello async world!') + ->withVoice('madison') + ->asAudio(); + + expect($response->audio)->not->toBeNull(); + expect($response->audio->base64)->toBe(base64_encode('async-audio-content')); + }); +}); + +describe('Speech-to-Text', function (): void { + it('can transcribe audio with speech-to-text from base64', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/speech_to_text' => Http::response([ + 'status' => 'success', + 'output' => 'Hello, this is a transcription test.', + ], 200), + ]); + + $audioFile = Audio::fromBase64( + base64_encode('fake-audio-content'), + 
'audio/mp3' + ); + + $response = Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput($audioFile) + ->asText(); + + expect($response->text)->toBe('Hello, this is a transcription test.'); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $request->url() === 'https://modelslab.com/api/v6/voice/speech_to_text' && + $data['key'] === 'test-api-key' && + str_starts_with((string) $data['init_audio'], 'data:audio/mp3;base64,'); + }); + }); + + it('can transcribe audio from URL', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/speech_to_text' => Http::response([ + 'status' => 'success', + 'output' => 'Transcription from URL audio.', + ], 200), + ]); + + $audioFile = Audio::fromUrl('https://example.com/audio-file.mp3'); + + $response = Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput($audioFile) + ->asText(); + + expect($response->text)->toBe('Transcription from URL audio.'); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $data['init_audio'] === 'https://example.com/audio-file.mp3'; + }); + }); + + it('can transcribe with language option', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/speech_to_text' => Http::response([ + 'status' => 'success', + 'output' => 'Bonjour, ceci est un test.', + ], 200), + ]); + + $audioFile = Audio::fromBase64(base64_encode('french-audio'), 'audio/wav'); + + $response = Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput($audioFile) + ->withProviderOptions([ + 'language' => 'fr', + ]) + ->asText(); + + expect($response->text)->toBe('Bonjour, ceci est un test.'); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $data['language'] === 'fr'; + }); + }); + + it('can transcribe with timestamp_level option', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/speech_to_text' => Http::response([ + 'status' => 
'success', + 'output' => [ + 'text' => 'Hello world', + 'timestamps' => [ + ['word' => 'Hello', 'start' => 0.0, 'end' => 0.5], + ['word' => 'world', 'start' => 0.5, 'end' => 1.0], + ], + ], + ], 200), + ]); + + $audioFile = Audio::fromBase64(base64_encode('audio-content'), 'audio/mp3'); + + $response = Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput($audioFile) + ->withProviderOptions([ + 'timestamp_level' => 'word', + ]) + ->asText(); + + expect($response->text)->toBe('Hello world'); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $data['timestamp_level'] === 'word'; + }); + }); + + it('includes additional content in response', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/speech_to_text' => Http::response([ + 'status' => 'success', + 'output' => 'Test transcription', + 'language' => 'en', + 'duration' => 5.5, + ], 200), + ]); + + $audioFile = Audio::fromBase64(base64_encode('audio'), 'audio/mp3'); + + $response = Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput($audioFile) + ->asText(); + + expect($response->additionalContent['status'])->toBe('success'); + expect($response->additionalContent['language'])->toBe('en'); + expect($response->additionalContent['duration'])->toBe(5.5); + }); + + it('handles async processing status for speech-to-text', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/speech_to_text' => Http::response([ + 'status' => 'processing', + 'fetch_result' => 'https://modelslab.com/api/v6/voice/fetch/67890', + ], 200), + 'modelslab.com/api/v6/voice/fetch/67890' => Http::response([ + 'status' => 'success', + 'output' => 'Async transcription result.', + ], 200), + ]); + + $audioFile = Audio::fromBase64(base64_encode('audio-content'), 'audio/mp3'); + + $response = Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput($audioFile) + ->asText(); + + expect($response->text)->toBe('Async transcription result.'); + }); 
+ + it('handles URL-based JSON output for speech-to-text', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/speech_to_text' => Http::response([ + 'status' => 'processing', + 'fetch_result' => 'https://modelslab.com/api/v6/voice/fetch/99999', + ], 200), + 'modelslab.com/api/v6/voice/fetch/99999' => Http::response([ + 'status' => 'success', + 'output' => ['https://pub-example.r2.dev/generations/transcript.txt'], + ], 200), + 'pub-example.r2.dev/generations/transcript.txt' => Http::response([ + ['text' => 'This is the transcribed text from the URL.'], + ], 200), + ]); + + $audioFile = Audio::fromBase64(base64_encode('long-audio'), 'audio/mp3'); + + $response = Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput($audioFile) + ->asText(); + + expect($response->text)->toBe('This is the transcribed text from the URL.'); + }); +}); diff --git a/tests/Providers/ModelsLab/ExceptionHandlingTest.php b/tests/Providers/ModelsLab/ExceptionHandlingTest.php new file mode 100644 index 000000000..88316909c --- /dev/null +++ b/tests/Providers/ModelsLab/ExceptionHandlingTest.php @@ -0,0 +1,239 @@ +set('prism.providers.modelslab.api_key', 'test-api-key'); +}); + +describe('Image Generation Errors', function (): void { + it('throws PrismRateLimitedException on 429 response', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'error', + 'message' => 'Rate limit exceeded', + ], 429), + ]); + + Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('Test prompt') + ->generate(); + })->throws(PrismRateLimitedException::class); + + it('throws PrismException on error status in response', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'error', + 'message' => 'Invalid prompt', + ], 200), + ]); + + Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('Test prompt') + ->generate(); + 
})->throws(PrismException::class, 'Invalid prompt'); + + it('throws PrismException on async generation failure', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'processing', + 'id' => 12349, + 'fetch_result' => 'https://modelslab.com/api/v6/images/fetch/12349', + ], 200), + 'modelslab.com/api/v6/images/fetch/12349' => Http::response([ + 'status' => 'failed', + 'message' => 'Generation failed due to content policy', + ], 200), + ]); + + Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('Test prompt') + ->generate(); + })->throws(PrismException::class); + + it('throws PrismException on server error', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'error', + 'message' => 'Internal server error', + ], 500), + ]); + + Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('Test prompt') + ->generate(); + })->throws(PrismException::class); +}); + +describe('Audio Generation Errors', function (): void { + it('throws PrismException on TTS error status', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/text_to_speech' => Http::response([ + 'status' => 'error', + 'message' => 'Invalid voice ID', + ], 200), + ]); + + Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput('Test text') + ->withVoice('invalid-voice') + ->asAudio(); + })->throws(PrismException::class, 'Invalid voice ID'); + + it('throws PrismException on STT error status', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/speech_to_text' => Http::response([ + 'status' => 'error', + 'message' => 'Unsupported audio format', + ], 200), + ]); + + $audioFile = Audio::fromBase64(base64_encode('invalid-audio'), 'audio/unknown'); + + Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput($audioFile) + ->asText(); + })->throws(PrismException::class, 'Unsupported audio format'); + + it('throws 
PrismRateLimitedException on 429 for TTS', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/text_to_speech' => Http::response([ + 'status' => 'error', + 'message' => 'Rate limit exceeded', + ], 429), + ]); + + Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput('Test text') + ->withVoice('madison') + ->asAudio(); + })->throws(PrismRateLimitedException::class); + + it('throws PrismRateLimitedException on 429 for STT', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/speech_to_text' => Http::response([ + 'status' => 'error', + 'message' => 'Rate limit exceeded', + ], 429), + ]); + + $audioFile = Audio::fromBase64(base64_encode('audio'), 'audio/mp3'); + + Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput($audioFile) + ->asText(); + })->throws(PrismRateLimitedException::class); + + it('throws PrismException when no audio URL in TTS response', function (): void { + Http::fake([ + 'modelslab.com/api/v6/voice/text_to_speech' => Http::response([ + 'status' => 'success', + 'output' => [], + ], 200), + ]); + + Prism::audio() + ->using(Provider::ModelsLab, 'voice') + ->withInput('Test text') + ->withVoice('madison') + ->asAudio(); + })->throws(PrismException::class, 'No audio URL in response'); +}); + +describe('Error Message Handling', function (): void { + it('extracts message from error response', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'error', + 'message' => 'Custom error message', + ], 400), + ]); + + try { + Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('Test') + ->generate(); + } catch (PrismException $e) { + expect($e->getMessage())->toContain('Custom error message'); + } + }); + + it('handles missing message in error response', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'error', + ], 400), + ]); + + try { + Prism::image() + 
->using(Provider::ModelsLab, 'flux') + ->withPrompt('Test') + ->generate(); + } catch (PrismException $e) { + expect($e->getMessage())->toContain('Unknown error'); + } + }); + + it('handles array validation error messages', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'error', + 'message' => [ + 'scheduler' => [ + 'The scheduler field is required unless model id is in flux-kontext-dev.', + ], + ], + ], 200), + ]); + + try { + Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('Test') + ->generate(); + } catch (PrismException $e) { + expect($e->getMessage())->toContain('scheduler field is required'); + } + }); + + it('handles messege typo in error response', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'error', + 'messege' => [ + 'scheduler' => [ + 'The scheduler field is required.', + ], + ], + ], 200), + ]); + + try { + Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('Test') + ->generate(); + } catch (PrismException $e) { + expect($e->getMessage())->toContain('scheduler field is required'); + } + }); +}); diff --git a/tests/Providers/ModelsLab/ImagesTest.php b/tests/Providers/ModelsLab/ImagesTest.php new file mode 100644 index 000000000..b88b605ff --- /dev/null +++ b/tests/Providers/ModelsLab/ImagesTest.php @@ -0,0 +1,232 @@ +set('prism.providers.modelslab.api_key', 'test-api-key'); +}); + +it('can generate an image with text2img', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'success', + 'id' => 12345, + 'output' => ['https://example.com/generated-image.png'], + 'meta' => [ + 'prompt' => 'A cute baby sea otter', + 'seed' => 123456, + ], + 'generationTime' => 2.5, + ], 200), + ]); + + $response = Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('A cute baby sea otter') + ->generate(); + + 
expect($response->firstImage())->not->toBeNull(); + expect($response->firstImage()->url)->toBe('https://example.com/generated-image.png'); + expect($response->firstImage()->hasUrl())->toBeTrue(); + expect($response->imageCount())->toBe(1); + expect($response->meta->id)->toBe('12345'); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $request->url() === 'https://modelslab.com/api/v6/images/text2img' && + $data['prompt'] === 'A cute baby sea otter' && + $data['key'] === 'test-api-key'; + }); +}); + +it('can generate an image with provider options', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'success', + 'id' => 12346, + 'output' => ['https://example.com/hd-image.png'], + ], 200), + ]); + + $response = Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('A sunset over mountains') + ->withProviderOptions([ + 'width' => 1024, + 'height' => 1024, + 'samples' => 1, + 'negative_prompt' => 'blurry, low quality', + ]) + ->generate(); + + expect($response->firstImage()->url)->toBe('https://example.com/hd-image.png'); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $data['width'] === 1024 && + $data['height'] === 1024 && + $data['negative_prompt'] === 'blurry, low quality'; + }); +}); + +it('can generate multiple images', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'success', + 'id' => 12347, + 'output' => [ + 'https://example.com/image-1.png', + 'https://example.com/image-2.png', + ], + ], 200), + ]); + + $response = Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('Abstract art') + ->withProviderOptions([ + 'samples' => 2, + ]) + ->generate(); + + expect($response->imageCount())->toBe(2); + expect($response->images[0]->url)->toBe('https://example.com/image-1.png'); + 
expect($response->images[1]->url)->toBe('https://example.com/image-2.png'); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $data['samples'] === 2; + }); +}); + +it('can edit an image with img2img', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/img2img' => Http::response([ + 'status' => 'success', + 'id' => 12348, + 'output' => ['https://example.com/edited-image.png'], + ], 200), + ]); + + $originalImage = Image::fromLocalPath('tests/Fixtures/diamond.png'); + + $response = Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('Add a vaporwave sunset to the background', [$originalImage]) + ->generate(); + + expect($response->firstImage())->not->toBeNull(); + expect($response->firstImage()->url)->toBe('https://example.com/edited-image.png'); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $request->url() === 'https://modelslab.com/api/v6/images/img2img' && + $data['prompt'] === 'Add a vaporwave sunset to the background' && + isset($data['init_image']); + }); +}); + +it('can edit an image with img2img using URL', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/img2img' => Http::response([ + 'status' => 'success', + 'id' => 12349, + 'output' => ['https://example.com/edited-image-from-url.png'], + ], 200), + ]); + + $originalImage = Image::fromUrl('https://example.com/source-image.png'); + + $response = Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('Transform this image', [$originalImage]) + ->generate(); + + expect($response->firstImage()->url)->toBe('https://example.com/edited-image-from-url.png'); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $data['init_image'] === 'https://example.com/source-image.png'; + }); +}); + +it('handles async processing status', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => 
Http::response([ + 'status' => 'processing', + 'id' => 12350, + 'fetch_result' => 'https://modelslab.com/api/v6/images/fetch/12350', + ], 200), + 'modelslab.com/api/v6/images/fetch/12350' => Http::response([ + 'status' => 'success', + 'id' => 12350, + 'output' => ['https://example.com/async-image.png'], + ], 200), + ]); + + $response = Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('Complex image generation') + ->generate(); + + expect($response->firstImage()->url)->toBe('https://example.com/async-image.png'); +}); + +it('includes additional content in response', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'success', + 'id' => 12351, + 'output' => ['https://example.com/image.png'], + 'generationTime' => 3.5, + 'meta' => [ + 'seed' => 987654, + ], + ], 200), + ]); + + $response = Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('Test image') + ->generate(); + + expect($response->additionalContent['generation_time'])->toBe(3.5); + expect($response->additionalContent['seed'])->toBe(987654); +}); + +it('passes model_id in provider options', function (): void { + Http::fake([ + 'modelslab.com/api/v6/images/text2img' => Http::response([ + 'status' => 'success', + 'id' => 12352, + 'output' => ['https://example.com/sdxl-image.png'], + ], 200), + ]); + + $response = Prism::image() + ->using(Provider::ModelsLab, 'flux') + ->withPrompt('A beautiful landscape') + ->withProviderOptions([ + 'model_id' => 'sdxl', + ]) + ->generate(); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $data['model_id'] === 'sdxl'; + }); +}); diff --git a/tests/Providers/ModelsLab/StreamTest.php b/tests/Providers/ModelsLab/StreamTest.php new file mode 100644 index 000000000..8d29e2495 --- /dev/null +++ b/tests/Providers/ModelsLab/StreamTest.php @@ -0,0 +1,110 @@ +set('prism.providers.modelslab.api_key', 'test-api-key'); +}); + +it('can generate 
text with a basic stream', function (): void { + FixtureResponse::fakeStreamResponses('v7/llm/chat/completions', 'modelslab/stream-basic-text'); + + $response = Prism::text() + ->using(Provider::ModelsLab, 'llama-3.3-70b') + ->withPrompt('Who are you?') + ->asStream(); + + $text = ''; + $events = []; + + foreach ($response as $event) { + $events[] = $event; + + if ($event instanceof TextDeltaEvent) { + $text .= $event->delta; + } + } + + expect($events) + ->not->toBeEmpty() + ->and($text)->not->toBeEmpty() + ->and($text)->toContain('Hello! I\'m a helpful AI assistant.'); + + $lastEvent = end($events); + expect($lastEvent)->toBeInstanceOf(StreamEndEvent::class); + expect($lastEvent->finishReason)->toBe(FinishReason::Stop); +}); + +it('emits step start and step finish events', function (): void { + FixtureResponse::fakeStreamResponses('v7/llm/chat/completions', 'modelslab/stream-basic-text'); + + $response = Prism::text() + ->using(Provider::ModelsLab, 'llama-3.3-70b') + ->withPrompt('Who are you?') + ->asStream(); + + $events = []; + foreach ($response as $event) { + $events[] = $event; + } + + $stepStartEvents = array_filter($events, fn (StreamEvent $event): bool => $event instanceof StepStartEvent); + $stepFinishEvents = array_filter($events, fn (StreamEvent $event): bool => $event instanceof StepFinishEvent); + + expect($stepStartEvents)->toHaveCount(1); + expect($stepFinishEvents)->toHaveCount(1); +}); + +it('includes correct token counts in StreamEndEvent', function (): void { + FixtureResponse::fakeStreamResponses('v7/llm/chat/completions', 'modelslab/stream-basic-text'); + + $response = Prism::text() + ->using(Provider::ModelsLab, 'llama-3.3-70b') + ->withPrompt('Who are you?') + ->asStream(); + + $streamEndEvent = null; + + foreach ($response as $event) { + if ($event instanceof StreamEndEvent) { + $streamEndEvent = $event; + } + } + + expect($streamEndEvent)->not->toBeNull(); + expect($streamEndEvent->usage->promptTokens)->toBe(10); + 
expect($streamEndEvent->usage->completionTokens)->toBe(20); +}); + +it('emits only one StreamStartEvent and StreamEndEvent', function (): void { + FixtureResponse::fakeStreamResponses('v7/llm/chat/completions', 'modelslab/stream-basic-text'); + + $response = Prism::text() + ->using(Provider::ModelsLab, 'llama-3.3-70b') + ->withPrompt('Who are you?') + ->asStream(); + + $events = []; + foreach ($response as $event) { + $events[] = $event; + } + + $streamStartEvents = array_filter($events, fn (StreamEvent $event): bool => $event instanceof StreamStartEvent); + $streamEndEvents = array_filter($events, fn (StreamEvent $event): bool => $event instanceof StreamEndEvent); + + expect($streamStartEvents)->toHaveCount(1); + expect($streamEndEvents)->toHaveCount(1); +}); diff --git a/tests/Providers/ModelsLab/TextTest.php b/tests/Providers/ModelsLab/TextTest.php new file mode 100644 index 000000000..2684ed374 --- /dev/null +++ b/tests/Providers/ModelsLab/TextTest.php @@ -0,0 +1,239 @@ +set('prism.providers.modelslab.api_key', 'test-api-key'); +}); + +it('can generate text', function (): void { + Http::fake([ + 'modelslab.com/api/v7/llm/chat/completions' => Http::response([ + 'id' => 'chatcmpl-123', + 'object' => 'chat.completion', + 'model' => 'llama-3.3-70b', + 'choices' => [ + [ + 'index' => 0, + 'message' => [ + 'role' => 'assistant', + 'content' => 'Hello! How can I help you today?', + ], + 'finish_reason' => 'stop', + ], + ], + 'usage' => [ + 'prompt_tokens' => 10, + 'completion_tokens' => 15, + 'total_tokens' => 25, + ], + ], 200), + ]); + + $response = Prism::text() + ->using(Provider::ModelsLab, 'llama-3.3-70b') + ->withPrompt('Hello!') + ->generate(); + + expect($response->text)->toBe('Hello! 
How can I help you today?'); + expect($response->finishReason)->toBe(FinishReason::Stop); + expect($response->usage->promptTokens)->toBe(10); + expect($response->usage->completionTokens)->toBe(15); + expect($response->meta->id)->toBe('chatcmpl-123'); + expect($response->meta->model)->toBe('llama-3.3-70b'); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $request->url() === 'https://modelslab.com/api/v7/llm/chat/completions' && + $data['model'] === 'llama-3.3-70b' && + $data['messages'][0]['role'] === 'user' && + $data['messages'][0]['content'] === 'Hello!' && + $request->hasHeader('Authorization', 'Bearer test-api-key'); + }); +}); + +it('can generate text with system prompt', function (): void { + Http::fake([ + 'modelslab.com/api/v7/llm/chat/completions' => Http::response([ + 'id' => 'chatcmpl-124', + 'model' => 'llama-3.3-70b', + 'choices' => [ + [ + 'message' => [ + 'role' => 'assistant', + 'content' => 'I am a helpful assistant.', + ], + 'finish_reason' => 'stop', + ], + ], + 'usage' => [ + 'prompt_tokens' => 20, + 'completion_tokens' => 10, + ], + ], 200), + ]); + + $response = Prism::text() + ->using(Provider::ModelsLab, 'llama-3.3-70b') + ->withSystemPrompt('You are a helpful assistant.') + ->withPrompt('Who are you?') + ->generate(); + + expect($response->text)->toBe('I am a helpful assistant.'); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $data['messages'][0]['role'] === 'system' && + $data['messages'][0]['content'] === 'You are a helpful assistant.' 
&& + $data['messages'][1]['role'] === 'user' && + $data['messages'][1]['content'] === 'Who are you?'; + }); +}); + +it('can generate text with temperature and top_p', function (): void { + Http::fake([ + 'modelslab.com/api/v7/llm/chat/completions' => Http::response([ + 'id' => 'chatcmpl-125', + 'model' => 'llama-3.3-70b', + 'choices' => [ + [ + 'message' => [ + 'role' => 'assistant', + 'content' => 'Creative response here.', + ], + 'finish_reason' => 'stop', + ], + ], + 'usage' => [ + 'prompt_tokens' => 10, + 'completion_tokens' => 5, + ], + ], 200), + ]); + + Prism::text() + ->using(Provider::ModelsLab, 'llama-3.3-70b') + ->withPrompt('Be creative') + ->usingTemperature(0.9) + ->usingTopP(0.95) + ->generate(); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $data['temperature'] === 0.9 && + $data['top_p'] === 0.95; + }); +}); + +it('can generate text with max tokens', function (): void { + Http::fake([ + 'modelslab.com/api/v7/llm/chat/completions' => Http::response([ + 'id' => 'chatcmpl-126', + 'model' => 'llama-3.3-70b', + 'choices' => [ + [ + 'message' => [ + 'role' => 'assistant', + 'content' => 'Short response.', + ], + 'finish_reason' => 'stop', + ], + ], + 'usage' => [ + 'prompt_tokens' => 10, + 'completion_tokens' => 3, + ], + ], 200), + ]); + + Prism::text() + ->using(Provider::ModelsLab, 'llama-3.3-70b') + ->withPrompt('Test') + ->withMaxTokens(100) + ->generate(); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $data['max_tokens'] === 100; + }); +}); + +it('can generate text with provider options', function (): void { + Http::fake([ + 'modelslab.com/api/v7/llm/chat/completions' => Http::response([ + 'id' => 'chatcmpl-127', + 'model' => 'llama-3.3-70b', + 'choices' => [ + [ + 'message' => [ + 'role' => 'assistant', + 'content' => 'Response with penalties.', + ], + 'finish_reason' => 'stop', + ], + ], + 'usage' => [ + 'prompt_tokens' =>
10, + 'completion_tokens' => 5, + ], + ], 200), + ]); + + Prism::text() + ->using(Provider::ModelsLab, 'llama-3.3-70b') + ->withPrompt('Test') + ->withProviderOptions([ + 'presence_penalty' => 0.5, + 'frequency_penalty' => 0.3, + ]) + ->generate(); + + Http::assertSent(function (Request $request): bool { + $data = $request->data(); + + return $data['presence_penalty'] === 0.5 && + $data['frequency_penalty'] === 0.3; + }); +}); + +it('includes raw response data', function (): void { + Http::fake([ + 'modelslab.com/api/v7/llm/chat/completions' => Http::response([ + 'id' => 'chatcmpl-130', + 'model' => 'llama-3.3-70b', + 'choices' => [ + [ + 'message' => [ + 'role' => 'assistant', + 'content' => 'Test response', + ], + 'finish_reason' => 'stop', + ], + ], + 'usage' => [ + 'prompt_tokens' => 5, + 'completion_tokens' => 3, + ], + ], 200), + ]); + + $response = Prism::text() + ->using(Provider::ModelsLab, 'llama-3.3-70b') + ->withPrompt('Test') + ->generate(); + + expect($response->raw['id'])->toBe('chatcmpl-130'); + expect($response->raw['model'])->toBe('llama-3.3-70b'); +});