// NOTE(review): the line below appears to be the tail of a truncated statement —
// presumably `public function mount() { $this->` precedes it outside this view.
// It loads the 25 oldest chats (stable-ordered by created_at, then id) for the
// chatable model into the transcript. Confirm against the full file.
messages = $this->chatable->chats()->orderByRaw('created_at, id')->limit(25)->get(['role', 'content'])->toArray();
}

/**
 * Validate the user's prompt, persist it, and kick off a streamed completion.
 *
 * @param string|null $suggestedPrompt Optional canned prompt (from a suggested-prompt
 *                                     chip) that overrides the input box contents.
 */
public function startCompletion($suggestedPrompt = null)
{
    // Refuse while a stream is already in flight, or when the user is rate limited.
    if ($this->isRateLimited() || $this->streaming) {
        $this->messages[] = [
            'role' => 'assistant',
            'content' => __('Hang on! You\'re doing that too much.'),
        ];
        $this->js('scrollChatWindow(250)');

        return;
    }

    if ($suggestedPrompt) {
        $this->prompt = $suggestedPrompt;
    }

    // Whitespace-only prompts get a gentle nudge instead of an API call.
    if (empty(trim($this->prompt))) {
        $this->resetPrompt();
        $this->messages[] = ['role' => 'assistant', 'content' => __('Feel free to ask me a question!')];
        $this->js('scrollChatWindow(250)');

        return;
    }

    // Persist and echo the user message, then hand off to generateCompletion()
    // via a browser round-trip so the UI renders the streaming state first.
    $this->chatable->chats()->save(new AiChat(['role' => 'user', 'content' => $this->prompt]));
    $this->messages[] = ['role' => 'user', 'content' => $this->prompt];
    $this->js('scrollChatWindow(250)');
    $this->resetPrompt();
    $this->streaming = true;
    $this->js('$wire.generateCompletion()');
}

/**
 * Stream a chat completion from OpenAI into the UI, then persist the answer.
 *
 * Invoked from the browser (queued by startCompletion) so the long-running
 * request starts only after the streaming indicator is visible.
 */
public function generateCompletion(): void
{
    try {
        $client = $this->createOpenAiClient();
        $stream = $client->chat()->createStreamed([
            'model' => config('openai.model'),
            'messages' => [
                // Ground the model with today's date plus the component's system prompt.
                ['role' => 'system', 'content' => "Today's date is ".now()->toDateString().".\n\n".$this->system_prompt],
                // Only the 10 most recent turns are sent, to bound the context window.
                ...array_slice($this->messages, -10),
            ],
        ]);
    } catch (\Exception $e) {
        // NOTE(review): this surfaces the raw exception message (which may contain
        // API/internal details) directly to the end user AND stores it in chat
        // history — consider logging it and showing a generic error instead.
        $this->chatable->chats()->save(new AiChat(['role' => 'assistant', 'content' => $e->getMessage()]));
        $this->messages[] = ['role' => 'assistant', 'content' => $e->getMessage()];
        $this->resetPrompt();

        return;
    }

    // Clear the answer placeholder, then forward each streamed token to the browser
    // while accumulating the full answer server-side.
    $this->stream(to: 'answer', content: '', replace: true);

    foreach ($stream as $response) {
        if (! empty($response->choices[0]->delta->content)) {
            $this->stream(to: 'answer', content: $response->choices[0]->delta->content, replace: false);
            $this->answer .= $response->choices[0]->delta->content;
        }
        $this->js('scrollChatWindow()');
    }

    // Persist the completed answer and queue follow-up prompt suggestions.
    $this->chatable->chats()->save(new AiChat(['role' => 'assistant', 'content' => $this->answer]));
    $this->messages[] = ['role' => 'assistant', 'content' => $this->answer];
    $this->resetPrompt();
    $this->js('$wire.generateSuggestedPrompts()');
}

/**
 * Ask the model for 1-5 short follow-up questions based on the recent
 * conversation, constrained by a strict JSON schema.
 *
 * On any failure the suggestions are cleared and the error is surfaced
 * via $this->error(); the transcript itself is unaffected.
 */
public function generateSuggestedPrompts(): void
{
    try {
        $client = $this->createOpenAiClient();
        $suggested_prompts = $client->chat()->create([
            'model' => config('openai.model'),
            // Structured output: the response must match this schema exactly.
            'response_format' => [
                'type' => 'json_schema',
                'json_schema' => [
                    'name' => 'suggested_prompts_schema',
                    'strict' => true,
                    'schema' => [
                        "type" => "object",
                        "properties" => [
                            "suggested_prompts" => [
                                "type" => "array",
                                "items" => [
                                    "type" => "object",
                                    "properties" => [
                                        "text" => [
                                            "type" => "string",
                                            "description" => "The suggested prompt question (no more than 5 words)",
                                        ],
                                        "value" => [
                                            "type" => "string",
                                            "description" => "The detailed version of the question",
                                        ],
                                    ],
                                    "required" => ["text", "value"],
                                    "additionalProperties" => false,
                                ],
                            ],
                        ],
                        "required" => ["suggested_prompts"],
                        "additionalProperties" => false,
                    ],
                ],
            ],
            'messages' => [
                // FIX: corrected "ensure the you questions you recommend" typo.
                ['role' => 'system', 'content' => " Your role is to assist investors in asking thoughtful questions of their investment advisors. When you help investors ask good questions, you should ensure the questions you recommend are based on the provided context. Be sure to keep the questions short! The questions you recommend might be based on natural follow up from the given context, requests to further refine a previous response, clarify undefined terms, common decision frameworks, possible risks or benefits, or commonly understood investing concepts that may require additional explanation. Your response should only include valid JSON. "],
                // Only the last 4 messages are included as context for suggestions.
                ['role' => 'user', 'content' => " Generate between 1 and 5 (no more than 5) follow up questions a savvy investor might ask their advisor based on the following conversation: \n\n ".json_encode(array_slice($this->messages, -4))],
            ],
        ]);

        // FIX: decode defensively. Previously malformed model output made
        // json_decode() return null, which was then indexed and assigned null to
        // $suggested_prompts — breaking the template's @foreach. JsonException
        // extends Exception, so the existing catch handles the throw.
        $decoded = json_decode($suggested_prompts->choices[0]->message->content, true, 512, JSON_THROW_ON_ERROR);
        $this->suggested_prompts = $decoded['suggested_prompts'] ?? [];
    } catch (\Exception $e) {
        $this->suggested_prompts = [];
        $this->error($e->getMessage());

        return;
    }
}

/**
 * Reset the input box, the streamed answer buffer, and the streaming flag.
 */
public function resetPrompt(): void
{
    $this->answer = null;
    $this->prompt = null;
    $this->streaming = false;
}

/**
 * Throttle to 20 attempts per 60 seconds, keyed per user per chatable.
 *
 * Note: every non-limited call records a hit, so simply checking consumes
 * an attempt from the window.
 */
public function isRateLimited(): bool
{
    $rateLimitKey = auth()->id().'/'.$this->chatable->id;

    if (RateLimiter::tooManyAttempts($rateLimitKey, 20)) {
        return true;
    }

    RateLimiter::hit($rateLimitKey, 60);

    return false;
}

/**
 * Build an OpenAI client from config, with the assistants v2 beta header and
 * a bounded HTTP timeout (openai.request_timeout, default 30s).
 */
private function createOpenAiClient()
{
    $apiKey = config('openai.api_key');
    $organization = config('openai.organization');
    $baseUri = config('openai.base_uri');

    return OpenAI::factory()
        ->withApiKey($apiKey)
        ->withOrganization($organization)
        ->withHttpHeader('OpenAI-Beta', 'assistants=v2')
        ->withHttpClient(new \GuzzleHttp\Client(['timeout' => config('openai.request_timeout', 30)]))
        ->withBaseUri($baseUri)
        ->make();
}
}; ?>
{{-- toggle button --}} {{-- popup --}}

{{ __('AI Chat') }}

{{-- chat window --}}

AI {{ __('Hi, how can I help?') }}

@foreach($messages as $message) @if ($message['role'] == 'user')

{{ __('You') }} {{ $message['content'] }}

@else
AI {!! Str::markdown($message['content']) !!}
@endif @endforeach @if($streaming)

AI {{ $answer }}

@endif
{{-- prompt input --}}
@foreach($suggested_prompts as $prompt) {{ $prompt['text'] }} @endforeach

{{ __('Advice generated by AI may contain errors. Use at your own risk. Always consult a licensed investment advisor.') }}