<?php

declare(strict_types=1);

namespace App\Services\LLM;

use App\Models\LlmMessage;
use App\Models\LlmSession;
use Illuminate\Support\Facades\Auth;
use Illuminate\Support\Facades\Http;
use Illuminate\Support\Facades\Log;
use Illuminate\Support\Str;

/**
 * Proxies streamed chat completions from a DeepSeek-compatible API to the
 * client as Server-Sent Events, persisting both the user message and the
 * assistant reply to the session history.
 */
class DeepSeekService
{
    /**
     * Stream a chat completion to the client as SSE.
     *
     * @param array       $llmConfig ['key' => API key, 'model' => model name, 'base_url' => endpoint URL]
     * @param array       $message   OpenAI-style message list; the last entry is the new user message.
     * @param string|null $sessionId Existing session UUID, or null to create a new session.
     */
    public static function streamedResponseChat($llmConfig, $message, $sessionId = null): void
    {
        // Disable output buffering so each SSE chunk reaches the client immediately.
        ini_set('output_buffering', 'off');

        $headers = [
            'Authorization' => 'Bearer ' . $llmConfig['key'],
            'Content-Type' => 'application/json',
        ];
        $body = [
            'model' => $llmConfig['model'],
            'messages' => $message,
            'stream' => true,
            'max_tokens' => 8192,
            'temperature' => 0.6,
            'top_p' => 0.7,
            'top_k' => 50,
            'frequency_penalty' => 2,
        ];

        // Persist the user's message (creating the session if needed) before streaming.
        $session = self::saveUserMessage($message, $sessionId, $llmConfig['model']);

        // Call the DeepSeek streaming API. The Guzzle-level 'stream' => true option
        // exposes the response body as a readable stream instead of buffering it.
        $response = Http::withHeaders($headers)->timeout(300)->send('POST', $llmConfig['base_url'], [
            'json' => $body,
            'stream' => true,
        ]);

        $stream = $response->toPsrResponse()->getBody();
        $buffer = '';
        $saveContent = '';
        $saveReasoningContent = '';
        $done = false;

        // Forward SSE events until upstream signals [DONE], the stream ends,
        // or the client disconnects.
        while (!$done && !$stream->eof() && $stream->isReadable()) {
            $buffer .= $stream->read(500);

            // SSE events are delimited by a blank line ("\n\n").
            while (($pos = strpos($buffer, "\n\n")) !== false) {
                $event = substr($buffer, 0, $pos);
                $buffer = substr($buffer, $pos + 2);

                $startStr = 'data: ';
                if (!str_starts_with($event, $startStr)) {
                    continue;
                }
                $jsonEvent = substr($event, strlen($startStr));

                // Upstream end-of-stream marker. Fix: the original `break` only exited
                // the inner loop, leaving the outer loop reading until EOF; set a flag
                // so both loops terminate.
                if ($jsonEvent === '[DONE]') {
                    $done = true;
                    break;
                }

                try {
                    $data = json_decode($jsonEvent, true, 512, JSON_THROW_ON_ERROR);
                    $reasoningContent = $data['choices'][0]['delta']['reasoning_content'] ?? '';
                    $content = $data['choices'][0]['delta']['content'] ?? '';
                    $saveContent .= $content;
                    $saveReasoningContent .= $reasoningContent;

                    if ($reasoningContent !== '') {
                        self::emit(['reasoning_content' => $reasoningContent]);
                    }
                    // Fix: an empty content delta (normal while reasoning streams) used
                    // to `break` out of event processing entirely; just skip emitting it.
                    if ($content !== '') {
                        self::emit(['content' => $content]);
                    }
                } catch (\JsonException $e) {
                    // Skip a malformed event rather than aborting the whole stream.
                    Log::error('JSON 解析失败: ' . $e->getMessage());
                }
            }

            // Stop streaming once the client has closed the connection.
            if (connection_aborted()) {
                break;
            }
        }

        // Terminal SSE event. Fix: an SSE event must end with a blank line, so the
        // original single "\n" never terminated the [DONE] event.
        echo "data: [DONE]\n\n";
        self::flushOutput();
        if (ob_get_level() > 0) {
            ob_end_flush();
        }

        // Persist the assistant's full reply once streaming is complete.
        self::saveAssistantMessage($session->id, $saveContent, $saveReasoningContent);
    }

    /**
     * Write one SSE data event and push it to the client.
     */
    private static function emit(array $payload): void
    {
        echo 'data: ' . json_encode($payload, JSON_UNESCAPED_UNICODE) . "\n\n";
        self::flushOutput();
    }

    /**
     * Flush PHP's output buffers to the client.
     *
     * Fix: calling ob_flush() with no active output buffer raises a notice,
     * so only flush the buffer when one exists.
     */
    private static function flushOutput(): void
    {
        if (ob_get_level() > 0) {
            ob_flush();
        }
        flush();
    }

    /**
     * Persist the latest user message and create or touch its session.
     *
     * @param array       $messages  OpenAI-style message list; the last entry is stored.
     * @param string|null $sessionId Existing session UUID, or null to create a new session.
     * @param string|null $model     Model name recorded on newly created sessions.
     * @return LlmSession
     *
     * @throws \InvalidArgumentException When the message list is empty or malformed.
     */
    private static function saveUserMessage($messages, $sessionId = null, $model = null)
    {
        // Fix: end([]) returns false, and the subsequent ['content'] access
        // would be a fatal error — fail fast with a clear message instead.
        $userMessage = end($messages);
        if ($userMessage === false || !isset($userMessage['content'])) {
            throw new \InvalidArgumentException('Message list must contain at least one message with content.');
        }

        if (!$sessionId) {
            // New session: derive a short title from the first 50 characters
            // of the message (mb_* keeps multi-byte text intact).
            $title = mb_substr($userMessage['content'], 0, 50) . (mb_strlen($userMessage['content']) > 50 ? '...' : '');
            $session = LlmSession::create([
                'session_id' => Str::uuid(),
                'user_id' => Auth::id() ?? 0,
                'model' => $model,
                'title' => $title,
            ]);
        } else {
            // Existing session: bump updated_at so it sorts to the top of recent lists.
            $session = LlmSession::where('session_id', $sessionId)->firstOrFail();
            $session->updated_at = now();
            $session->save();
        }

        LlmMessage::create([
            'session_id' => $session->id,
            'role' => 'user',
            'content' => $userMessage['content'],
        ]);

        return $session;
    }

    /**
     * Persist the assistant's reply (and optional reasoning trace).
     *
     * @param int         $sessionId        Primary key of the owning session.
     * @param string      $content          Accumulated reply text.
     * @param string|null $reasoningContent Accumulated reasoning text, if any.
     * @return LlmMessage
     */
    private static function saveAssistantMessage($sessionId, $content, $reasoningContent = null)
    {
        return LlmMessage::create([
            'session_id' => $sessionId,
            'role' => 'assistant',
            'content' => trim($content),
            // Fix: trim(null) is deprecated since PHP 8.1 — coalesce to '' first.
            'reasoning_content' => trim($reasoningContent ?? ''),
        ]);
    }
}