Last active
May 5, 2024 09:40
-
-
Save goran-popovic/b104e2cc3c16db9312cd1e90d4e5e256 to your computer and use it in GitHub Desktop.
Using Laravel to interact with OpenAI's Assistant API (with Vision) - Basic Example (https://geoligard.com/using-laravel-to-interact-with-openai-s-assistants-api-with-vision)
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<?php

namespace App\Http\Controllers;

use Illuminate\Http\Request;
use Illuminate\Support\Facades\Log;
use OpenAI\Laravel\Facades\OpenAI;
class AssistantController extends Controller
{
    /**
     * Handle a chat request: forward the user's message (optionally with an
     * image URL appended) to an OpenAI Assistant, resolve any tool calls,
     * and return the assistant's reply as JSON.
     *
     * Expects `message` (string) and optionally an `image` upload on the request.
     *
     * @param  Request  $request
     * @return \Illuminate\Http\JsonResponse
     */
    public function generateAssistantsResponse(Request $request)
    {
        $assistantId = 'asst_someId'; // hard coded for now
        $userMessage = $request->message;

        if ($request->file('image')) {
            // validate image
            // store the image and get the url
            $imageURL = 'https://images.unsplash.com/photo-1575936123452-b67c3203c357'; // hard coded example
            // The assistant's `describe_image` tool extracts the URL from the
            // message text, so it is concatenated rather than sent separately.
            $userMessage = $userMessage . ' ' . $imageURL;
        }

        // Create a new thread, post the user's message, start a run,
        // then block until the run reaches a terminal/action state.
        [$thread, $message, $run] = $this->createThreadAndRun($assistantId, $userMessage);
        $run = $this->waitOnRun($run, $thread->id);
        $run = $this->processRunFunctions($run); // resolve tool calls, if any

        if ($run->status !== 'completed') {
            // FIX: previously this branch only logged and fell through,
            // returning an empty 200 response to the client.
            Log::error("Assistant run finished with status '{$run->status}' instead of 'completed'");

            return response()->json([
                'error' => "Something went wrong; assistant run wasn't completed successfully",
            ], 500);
        }

        // Fetch only the messages created after the user's message,
        // i.e. the assistant's reply (possibly split across several messages).
        $messages = $this->getMessages($run->threadId, 'asc', $message->id);
        $messagesData = $messages->data;

        if (empty($messagesData)) {
            Log::error("Something went wrong; assistant didn't respond");

            return response()->json([
                'error' => "Something went wrong; assistant didn't respond",
            ], 500);
        }

        // Join multi-part replies with blank lines; a single message passes
        // through unchanged. (Also fixes the old loop variable shadowing
        // the `$message` object fetched above.)
        $parts = [];
        foreach ($messagesData as $assistantMessage) {
            $parts[] = $assistantMessage->content[0]->text->value;
        }

        return response()->json([
            'assistant_response' => implode("\n\n", $parts),
        ]);
    }

    /**
     * List messages on a thread.
     *
     * @param  string       $threadId
     * @param  string       $order      Sort order, 'asc' or 'desc'.
     * @param  string|null  $messageId  When given, only messages created
     *                                  after this message ID are returned.
     * @return object  The OpenAI message-list response.
     */
    private function getMessages($threadId, $order = 'asc', $messageId = null)
    {
        $params = [
            'order' => $order,
            'limit' => 10,
        ];

        if ($messageId) {
            $params['after'] = $messageId;
        }

        return OpenAI::threads()->messages()->list($threadId, $params);
    }

    /**
     * Create a fresh thread, post the user's message to it, and start a run.
     *
     * @param  string  $assistantId
     * @param  string  $userMessage
     * @return array{0: object, 1: object, 2: object}  [$thread, $message, $run]
     */
    private function createThreadAndRun($assistantId, $userMessage)
    {
        $thread = OpenAI::threads()->create([]);
        [$message, $run] = $this->submitMessage($assistantId, $thread->id, $userMessage);

        return [
            $thread,
            $message,
            $run,
        ];
    }

    /**
     * Add a user message to an existing thread and kick off a run on it.
     *
     * @param  string  $assistantId
     * @param  string  $threadId
     * @param  string  $userMessage
     * @return array{0: object, 1: object}  [$message, $run]
     */
    private function submitMessage($assistantId, $threadId, $userMessage)
    {
        $message = OpenAI::threads()->messages()->create($threadId, [
            'role' => 'user',
            'content' => $userMessage,
        ]);

        $run = OpenAI::threads()->runs()->create(
            threadId: $threadId,
            parameters: [
                'assistant_id' => $assistantId,
            ],
        );

        return [
            $message,
            $run,
        ];
    }

    /**
     * Poll a run until it leaves the 'queued'/'in_progress' states
     * (e.g. becomes 'completed', 'requires_action', or 'failed').
     *
     * FIX: the sleep now happens *before* each retrieve instead of after,
     * so we no longer waste a second after fetching a terminal run.
     *
     * NOTE(review): there is still no upper bound on polling; a run stuck
     * in 'in_progress' would loop until the PHP request times out.
     *
     * @param  object  $run
     * @param  string  $threadId
     * @return object  The run in a non-polling state.
     */
    private function waitOnRun($run, $threadId)
    {
        while ($run->status === 'queued' || $run->status === 'in_progress') {
            sleep(1); // avoid hammering the API

            $run = OpenAI::threads()->runs()->retrieve(
                threadId: $threadId,
                runId: $run->id,
            );
        }

        return $run;
    }

    /**
     * Resolve 'requires_action' tool calls on a run by executing the
     * requested functions and submitting their outputs, looping until
     * the run no longer requires action.
     *
     * @param  object  $run
     * @return object  The run after all tool calls have been resolved.
     */
    private function processRunFunctions($run)
    {
        while ($run->status === 'requires_action' && $run->requiredAction->type === 'submit_tool_outputs') {
            $toolCalls = $run->requiredAction->submitToolOutputs->toolCalls; // multiple calls possible
            $toolOutputs = [];

            foreach ($toolCalls as $toolCall) {
                $name = $toolCall->function->name;
                $arguments = json_decode($toolCall->function->arguments);

                if ($name === 'describe_image') {
                    // Answer the tool call with a separate vision-capable
                    // chat completion.
                    // NOTE(review): 'gpt-4-vision-preview' is a deprecated
                    // model name — consider migrating to 'gpt-4o'.
                    $visionResponse = OpenAI::chat()->create([
                        'model' => 'gpt-4-vision-preview',
                        'messages' => [
                            [
                                'role' => 'user',
                                'content' => [
                                    [
                                        "type" => "text",
                                        "text" => $arguments?->user_message
                                    ],
                                    [
                                        "type" => "image_url",
                                        "image_url" => [
                                            "url" => $arguments?->image,
                                        ],
                                    ],
                                ]
                            ],
                        ],
                        'max_tokens' => 2048
                    ]);

                    $toolOutputs[] = [
                        'tool_call_id' => $toolCall->id,
                        'output' => $visionResponse?->choices[0]?->message?->content, // you get 1 choice by default
                    ];
                } else {
                    // FIX: unknown tool names used to be silently ignored.
                    Log::error("Unknown tool call '{$name}' requested by assistant");
                }
            }

            if (empty($toolOutputs)) {
                // FIX: submitting an empty output list would fail the API call
                // and previously risked looping forever on the same action.
                break;
            }

            $run = OpenAI::threads()->runs()->submitToolOutputs(
                threadId: $run->threadId,
                runId: $run->id,
                parameters: [
                    'tool_outputs' => $toolOutputs,
                ]
            );
            $run = $this->waitOnRun($run, $run->threadId);
        }

        return $run;
    }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment