@vman
Created October 18, 2023 12:01
// Required namespaces for this snippet (the enclosing Program class and the
// CallChatGPT/CallMSGraph helpers are not shown in the gist):
// using System;
// using System.Collections.Generic;
// using System.Text.Json;
// using System.Threading.Tasks;
// using Azure;
// using Azure.AI.OpenAI;
static async Task Main(string[] args)
{
    var openaiApiKey = "<azure-openai-key>";
    var openaiEndpoint = "<azure-openai-endpoint>";
    var modelDeploymentName = "gpt-35-turbo"; // Azure OpenAI deployment name

    var credential = new AzureKeyCredential(openaiApiKey);
    var openAIClient = new OpenAIClient(new Uri(openaiEndpoint), credential);

    // Get the user's question from the console.
    Console.WriteLine("Ask a question about your user directory: ");
    string userQuestion = Console.ReadLine();

    // 1. Call the OpenAI Chat API with the user's question (a sketch of CallChatGPT follows Main).
    Response<ChatCompletions> result = await CallChatGPT(userQuestion, modelDeploymentName, openAIClient);

    // 2. Check whether the Chat API decided that a Microsoft Graph function call is needed to answer the question.
    var functionCall = result.Value.Choices[0].Message.FunctionCall;
    if (functionCall != null)
    {
        Console.WriteLine($"Function Name: {functionCall.Name}, Params: {functionCall.Arguments}");
        if (functionCall.Name == "msgraph_search_users")
        {
            // 3. The Chat API also returns the arguments to pass to the function; deserialize them.
            var userSearchParams = JsonSerializer.Deserialize<UserSearchParams>(functionCall.Arguments);

            // 4. Call the Microsoft Graph with the parameters provided by the Chat API (a sketch of CallMSGraph follows Main).
            var functionResponse = await CallMSGraph(userSearchParams);
            Console.WriteLine($"Graph Response: {functionResponse}");

            // 5. Call the Chat API again, this time including the function call and its response.
            var functionMessages = new List<ChatMessage>();
            functionMessages.Add(new ChatMessage(ChatRole.Assistant, functionCall.Arguments) { Name = functionCall.Name });
            functionMessages.Add(new ChatMessage(ChatRole.Function, functionResponse) { Name = functionCall.Name });
            result = await CallChatGPT(userQuestion, modelDeploymentName, openAIClient, functionMessages);

            // 6. Print the final response from the Chat API.
            Console.WriteLine("------------------");
            Console.WriteLine(result.Value.Choices[0].Message.Content);
        }
    }
    else
    {
        // If the model decided that a function call is not needed, print its response directly.
        Console.WriteLine(result.Value.Choices[0].Message.Content);
    }
}
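
The CallChatGPT helper is referenced above but not included in this snippet. The sketch below shows one way it could look, assuming an Azure.AI.OpenAI 1.0.0-beta release that exposes ChatMessage, FunctionDefinition, and the GetChatCompletionsAsync(deploymentName, options) overload (the same API style Main uses); the msgraph_search_users schema, its searchQuery parameter, and the system prompt are illustrative assumptions, not taken from the original gist.

// Hedged sketch of CallChatGPT. The function schema and the "searchQuery" parameter
// name are assumptions for illustration; they must match whatever UserSearchParams expects.
static async Task<Response<ChatCompletions>> CallChatGPT(
    string userQuestion,
    string modelDeploymentName,
    OpenAIClient openAIClient,
    List<ChatMessage> functionMessages = null)
{
    // Describe the Microsoft Graph user-search function so the model can decide to call it.
    var searchUsersFunction = new FunctionDefinition()
    {
        Name = "msgraph_search_users",
        Description = "Searches for users in the organisation's directory via Microsoft Graph.",
        Parameters = BinaryData.FromObjectAsJson(new
        {
            type = "object",
            properties = new
            {
                searchQuery = new
                {
                    type = "string",
                    description = "Text to search for in user display names."
                }
            },
            required = new[] { "searchQuery" }
        })
    };

    var options = new ChatCompletionsOptions();
    options.Messages.Add(new ChatMessage(ChatRole.System, "You answer questions about the user directory."));
    options.Messages.Add(new ChatMessage(ChatRole.User, userQuestion));

    // On the second round trip, replay the function call and its result so the model can
    // compose the final answer from the Graph data.
    if (functionMessages != null)
    {
        foreach (var message in functionMessages)
        {
            options.Messages.Add(message);
        }
    }

    options.Functions.Add(searchUsersFunction);

    return await openAIClient.GetChatCompletionsAsync(modelDeploymentName, options);
}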
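
CallMSGraph and the UserSearchParams type are likewise not part of this snippet. A minimal sketch follows, assuming the Microsoft Graph .NET SDK v5 with an Azure.Identity client-secret credential; the SearchQuery property, the placeholder tenant/client values, and the choice of returned fields are all assumptions for illustration.

// Hedged sketch of UserSearchParams and CallMSGraph. Assumes Microsoft Graph .NET SDK v5
// and Azure.Identity (add: using Azure.Identity; using Microsoft.Graph; using System.Linq;
// using System.Text.Json.Serialization;). Property names are assumptions and must match
// the function schema advertised to the Chat API.
public class UserSearchParams
{
    [JsonPropertyName("searchQuery")]
    public string SearchQuery { get; set; }
}

static async Task<string> CallMSGraph(UserSearchParams userSearchParams)
{
    // Placeholder app registration details.
    var credential = new ClientSecretCredential("<tenant-id>", "<client-id>", "<client-secret>");
    var graphClient = new GraphServiceClient(credential);

    // Search users by display name; $search requires the eventual-consistency header.
    var users = await graphClient.Users.GetAsync(requestConfiguration =>
    {
        requestConfiguration.QueryParameters.Search = $"\"displayName:{userSearchParams.SearchQuery}\"";
        requestConfiguration.QueryParameters.Select = new[] { "displayName", "mail", "jobTitle" };
        requestConfiguration.Headers.Add("ConsistencyLevel", "eventual");
    });

    // Serialize a compact projection of the results so it can be passed back to the Chat API
    // as the function response message.
    var results = users?.Value?.Select(u => new { u.DisplayName, u.Mail, u.JobTitle });
    return JsonSerializer.Serialize(results);
}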