Forked from stevenaldinger/vertex-ai-gemini-example.py
Created April 15, 2024 01:47
import vertexai
from vertexai.generative_models import GenerativeModel, ChatSession

# project, location, and formatted_file_contents are assumed to be defined
# elsewhere: your GCP project ID, a Vertex AI region such as "us-central1",
# and the repository's files concatenated into a single string.
vertexai.init(project=project, location=location)

model = GenerativeModel("gemini-1.5-pro-preview-0409")
chat = model.start_chat()


def get_chat_response(chat: ChatSession, prompt: str) -> str:
    """Send a prompt to the chat session and return the full streamed response."""
    text_response = []
    responses = chat.send_message(prompt, stream=True)
    for chunk in responses:
        text_response.append(chunk.text)
    return "".join(text_response)


system_message = f"""\
Answer questions about the following code repository. \
First, the files will be displayed. Then, you will be asked questions about them. \
For your first response, do not analyze the code, just say you successfully received the files.

File contents:
{formatted_file_contents}
"""

# Prime the chat with the repository contents, then ask questions about it.
print(get_chat_response(chat, system_message))

question = "What is the base image of the Dockerfile?"
print(get_chat_response(chat, question))

question = "What is the container name in the docker compose file?"
print(get_chat_response(chat, question))
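The gist assumes formatted_file_contents already exists before system_message is built. A minimal sketch of one way it might be produced from a local checkout of the repository follows; the directory path, the helper name format_repo_files, and the per-file header format are assumptions for illustration, not part of the original gist.

# --- Hypothetical helper (not part of the original gist) ---
# Walks a local repository checkout and joins every readable text file into
# one string, each preceded by a small header naming the file. Define this
# before constructing system_message above.
import os

def format_repo_files(repo_dir: str) -> str:
    parts = []
    for root, _dirs, files in os.walk(repo_dir):
        for name in files:
            path = os.path.join(root, name)
            rel_path = os.path.relpath(path, repo_dir)
            try:
                with open(path, "r", encoding="utf-8") as f:
                    contents = f.read()
            except UnicodeDecodeError:
                continue  # skip binary files
            parts.append(f"--- {rel_path} ---\n{contents}")
    return "\n\n".join(parts)

formatted_file_contents = format_repo_files("path/to/repo")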