Forked from robshox/gist:6f7ad341d1878d8a988b2d6a0eb69bf4
Created
November 7, 2023 17:54
-
-
Save Sandy4321/ae5da5145c1a8609d6a0bdd0fa72cbd7 to your computer and use it in GitHub Desktop.
Function Code and JSON prompt for tutorial from Rob Shocks on AWS LLAMA 2
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
## LAMBDA FUNCTION lambda_function.py
import json
import os

import boto3

# Endpoint name is read from the environment so deployments can point at a
# different SageMaker endpoint without a code change; the tutorial's
# JumpStart Llama-2-7B endpoint remains the default for backward compatibility.
ENDPOINT_NAME = os.environ.get(
    "ENDPOINT_NAME", "jumpstart-dft-meta-textgeneration-llama-2-7b-rs"
)

# SageMaker runtime client used to invoke the inference endpoint.
runtime = boto3.client('runtime.sagemaker')
def lambda_handler(event, context):
    """Proxy an API Gateway request body to the SageMaker Llama-2 endpoint.

    Parameters
    ----------
    event : dict
        Lambda proxy event; ``event["body"]`` is forwarded verbatim as the
        JSON request payload for the model endpoint.
    context : object
        Lambda context object (unused).

    Returns
    -------
    dict
        API Gateway proxy response: ``statusCode`` 200 and the model's
        JSON result re-serialized into ``body``.
    """
    response = runtime.invoke_endpoint(
        EndpointName=ENDPOINT_NAME,
        ContentType='application/json',
        Body=event['body'],
        # JumpStart Llama-2 models require explicit EULA acceptance on invoke.
        CustomAttributes="accept_eula=true",
    )
    # The endpoint returns a streaming body; decode and parse it as JSON.
    response_content = response['Body'].read().decode()
    result = json.loads(response_content)
    return {
        "statusCode": 200,
        "body": json.dumps(result)
    }
## POSTMAN JSON PAYLOAD
{
  "inputs": [
    [
      {"role": "system", "content": "You are an expert in copywriting"},
      {"role": "user", "content": "Write me a tweet about super conductors"}
    ]
  ],
  "parameters": {"max_new_tokens": 256, "top_p": 0.9, "temperature": 0.6}
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment