Created
March 28, 2024 00:45
-
-
Save labeveryday/5e4edef0ff067c88d7967fabd179fc49 to your computer and use it in GitHub Desktop.
Here is an example of sending a prompt to Claude 2.1 on Amazon Bedrock.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
# Docs https://docs.anthropic.com/claude/reference/messages_post | |
# Announcement: https://aws.amazon.com/blogs/machine-learning/unlocking-innovation-aws-and-anthropic-push-the-boundaries-of-generative-ai-together/ | |
import boto3 | |
import json | |
# Bedrock runtime client used below to invoke the model; the gist targets
# us-west-2, one of the regions where Claude 2.1 is available.
bedrock = boto3.client("bedrock-runtime", region_name="us-west-2")
def send_prompt(prompt: str) -> str:
    """Send a prompt to Claude 2.1 on Amazon Bedrock and return the completion.

    Args:
        prompt: The user's question or instruction (plain text).

    Returns:
        The model's completion string, or ``None`` if the response body
        carried no ``"completion"`` field.
    """
    # Claude's legacy Text Completions API requires the prompt to start with
    # "\n\nHuman:" and end with "\n\nAssistant:" — Bedrock rejects prompts
    # missing the double newlines with a validation error, so the original
    # f"Human: {prompt} Assistant:" form would fail.
    prompt_config = {
        "prompt": f"\n\nHuman: {prompt}\n\nAssistant:",
        "max_tokens_to_sample": 4096,
        "temperature": 0.5,
        "top_k": 250,
        "top_p": 0.5,
        "stop_sequences": [],
    }
    response = bedrock.invoke_model(
        body=json.dumps(prompt_config),
        modelId="anthropic.claude-v2:1",
        accept="application/json",
        contentType="application/json",
    )
    # The "body" entry is a streaming object; read it fully before parsing.
    response_body = json.loads(response.get("body").read())
    return response_body.get("completion")
# Quick manual smoke test when the file is executed directly as a script.
if __name__ == "__main__":
    print(send_prompt("What is AWS?"))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.