<?php

declare(strict_types=1);

namespace app\service\LLM;

use GuzzleHttp\Client;
use GuzzleHttp\Exception\RequestException;
use RuntimeException;
use Throwable;

class OpenAICompatibleClient
{
    /** Guzzle client pre-configured with the provider's base URI and timeouts. */
    private Client $client;

    /** Provider configuration: base_url, api_key, timeouts, optional organization/project. */
    private array $config;

    /**
     * @param array|null $config Explicit provider configuration; when null,
     *                           falls back to config('LLMapi.default').
     */
    public function __construct(?array $config = null)
    {
        $this->config = $config ?? config('LLMapi.default', []);
        $baseUrl = rtrim((string) ($this->config['base_url'] ?? ''), '/');

        $this->client = new Client([
            // Trailing slash so the relative path 'chat/completions' resolves under the base path.
            'base_uri' => $baseUrl . '/',
            // Guzzle timeouts are seconds and accept fractions; cast to float so a
            // configured 0.5 is not truncated to 0 (which Guzzle treats as "no timeout").
            'timeout' => (float) ($this->config['timeout'] ?? 60),
            'connect_timeout' => (float) ($this->config['connect_timeout'] ?? 10),
        ]);
    }

    /**
     * Whether both a non-blank API key and base URL are configured.
     */
    public function isConfigured(): bool
    {
        return trim((string) ($this->config['api_key'] ?? '')) !== ''
            && trim((string) ($this->config['base_url'] ?? '')) !== '';
    }

    /**
     * Send a chat request and decode the model's reply as JSON.
     *
     * Markdown code fences and surrounding prose are stripped via extractJson()
     * before decoding, since models often wrap JSON in ```json fences.
     *
     * @param array $messages OpenAI-style message list.
     * @param array $options  Per-request overrides (see chat()).
     * @return array Decoded JSON structure.
     * @throws RuntimeException When the reply cannot be decoded to an array.
     */
    public function chatJson(array $messages, array $options = []): array
    {
        $content = $this->chat($messages, $options);
        $decoded = json_decode($this->extractJson($content), true);

        if (!is_array($decoded)) {
            // Surface the decoder diagnostic so malformed model output is debuggable.
            throw new RuntimeException('LLM response is not valid JSON: ' . json_last_error_msg());
        }

        return $decoded;
    }

    /**
     * Send an OpenAI-compatible chat completion request and return the
     * assistant message content.
     *
     * @param array $messages OpenAI-style message list ([{role, content}, ...]).
     * @param array $options  Per-request overrides: model, temperature, max_tokens,
     *                        stream, response_format, thinking, request_id, user_id.
     * @return string Assistant message content (never empty).
     * @throws RuntimeException    When unconfigured, on transport failure, or
     *                             when the response carries no content.
     * @throws LLMRequestException On HTTP-level provider errors.
     */
    public function chat(array $messages, array $options = []): string
    {
        if (!$this->isConfigured()) {
            throw new RuntimeException('LLM API is not configured.');
        }

        $headers = [
            'Authorization' => 'Bearer ' . $this->config['api_key'],
            'Content-Type' => 'application/json',
        ];

        if (!empty($this->config['organization'])) {
            $headers['OpenAI-Organization'] = $this->config['organization'];
        }

        if (!empty($this->config['project'])) {
            $headers['OpenAI-Project'] = $this->config['project'];
        }

        $body = [
            'model' => $options['model'] ?? config('LLMapi.chat.model'),
            'messages' => $messages,
            'temperature' => $options['temperature'] ?? config('LLMapi.chat.temperature', 0.2),
            'max_tokens' => $options['max_tokens'] ?? config('LLMapi.chat.max_tokens', 1200),
            // NOTE(review): the response handling below parses a single non-streaming
            // JSON body; passing stream=true will make content extraction fail with
            // "response is empty" — confirm whether stream should be rejected here.
            'stream' => (bool) ($options['stream'] ?? config('LLMapi.chat.stream', false)),
        ];

        // Optional passthrough fields, forwarded only when explicitly provided
        // with the expected type.
        if (array_key_exists('response_format', $options) && is_array($options['response_format'])) {
            $body['response_format'] = $options['response_format'];
        }

        if (array_key_exists('thinking', $options) && is_array($options['thinking'])) {
            $body['thinking'] = $options['thinking'];
        }

        if (array_key_exists('request_id', $options) && is_string($options['request_id'])) {
            $body['request_id'] = $options['request_id'];
        }

        if (array_key_exists('user_id', $options) && is_string($options['user_id'])) {
            $body['user_id'] = $options['user_id'];
        }

        try {
            $response = $this->client->post('chat/completions', [
                'headers' => $headers,
                'json' => $body,
            ]);
        } catch (RequestException $exception) {
            // HTTP-level failure: convert to a structured provider exception.
            throw $this->requestException($exception);
        } catch (Throwable $exception) {
            // Transport-level failure (DNS, TLS, etc.): keep the original as $previous.
            throw new RuntimeException('LLM chat request failed: ' . $exception->getMessage(), 0, $exception);
        }

        $payload = json_decode((string) $response->getBody(), true);
        // The null-coalescing chain tolerates both missing keys and a non-array payload.
        $content = $payload['choices'][0]['message']['content'] ?? null;

        if (!is_string($content) || trim($content) === '') {
            throw new RuntimeException('LLM chat response is empty.');
        }

        return $content;
    }

    /**
     * Best-effort extraction of a JSON document from free-form model output.
     *
     * Strips Markdown code fences, then trims any prose surrounding the
     * outermost JSON object or array. Returns the input unchanged when no
     * JSON delimiters are found.
     */
    private function extractJson(string $content): string
    {
        $content = trim($content);
        // Remove a leading ``` or ```json fence and a trailing ``` fence.
        $content = preg_replace('/^```(?:json)?\s*/i', '', $content) ?? $content;
        $content = preg_replace('/\s*```$/', '', $content) ?? $content;

        // Anchor on whichever delimiter opens first so a top-level JSON array
        // ("[{...}]") is kept intact instead of being truncated to its first object.
        $objectStart = strpos($content, '{');
        $arrayStart = strpos($content, '[');

        if ($arrayStart !== false && ($objectStart === false || $arrayStart < $objectStart)) {
            $start = $arrayStart;
            $closer = ']';
        } else {
            $start = $objectStart;
            $closer = '}';
        }

        $end = $start === false ? false : strrpos($content, $closer);

        if ($start !== false && $end !== false && $end > $start) {
            return substr($content, $start, $end - $start + 1);
        }

        return $content;
    }

    /**
     * Build a structured LLMRequestException from a Guzzle RequestException,
     * extracting the HTTP status plus the provider's error code/message
     * ({"error": {"code", "message"}}) when the response body carries them.
     */
    private function requestException(RequestException $exception): LLMRequestException
    {
        $statusCode = $exception->getResponse()?->getStatusCode();
        $body = $exception->getResponse() ? (string) $exception->getResponse()->getBody() : '';
        $payload = json_decode($body, true);
        $providerCode = null;
        $providerMessage = null;

        if (is_array($payload)) {
            $providerCode = isset($payload['error']['code']) ? (string) $payload['error']['code'] : null;
            $providerMessage = isset($payload['error']['message']) ? (string) $payload['error']['message'] : null;
        }

        $message = 'LLM chat request failed';
        if ($statusCode !== null) {
            $message .= " with HTTP {$statusCode}";
        }
        if ($providerCode !== null) {
            $message .= " and provider code {$providerCode}";
        }
        if ($providerMessage !== null) {
            $message .= ": {$providerMessage}";
        } else {
            // No provider message in the body: fall back to Guzzle's own message.
            $message .= ': ' . $exception->getMessage();
        }

        return new LLMRequestException($message, $statusCode, $providerCode, is_array($payload) ? $payload : null);
    }
}