@jmarbach
Created February 28, 2024 21:02
ChatGPT Vision to Grafana Cloud Metrics Example
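The Ruby script below does the work end to end: it sends three images to the gpt-4-vision-preview model, parses the JSON match count out of the reply, and pushes that count to Grafana Cloud as an Influx line protocol sample. A note on prerequisites, inferred from the code rather than stated in the gist: OpenAI::Client appears to come from the ruby-openai gem (required as "openai"), and the environment variables listed in the comment below must be set before running.

#
# Environment variables read by this script:
#   OPENAI_API_KEY, OPENAI_API_ORGANIZATION_ID
#   GRAFANA_CLOUD_METRICS_USER_ID, GRAFANA_CLOUD_METRICS_API_KEY
#   GRAFANA_CLOUD_METRICS_INFLUX_PROXY_ENDPOINT
#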
require 'base64'
require "json"
require "logger"
require "openai"
require "net/http"
#
# Initialize logger
#
logger = Logger.new(STDOUT)
logger.info 'Begin image parser...'
#
# Initialize OpenAI client
#
@openai_client = OpenAI::Client.new(
  access_token: ENV["OPENAI_API_KEY"],
  organization_id: ENV["OPENAI_API_ORGANIZATION_ID"]
)
logger.info("OpenAI client initialized")
#
# Prepare input for the ChatGPT Vision model, asking how many objects from the first two images appear in the third image.
#
system_context = ""
system_context = "You are an expert image analyst capable of identifying patterns between images. You count a match when you find an object in the third image that looks like a car or truck from the first or second images. Only count a match if you're very confident a match exists."
user_messages = ""
user_messages = [
{ "type": "text", "text": "How many times does the object from the first or second image appear in the third image? Be precise."},
{ "type": "image_url",
"image_url": {
"url": "https://images.unsplash.com/photo-1616549972169-0a0d961c9905",
},
},
{ "type": "image_url",
"image_url": {
"url": "https://images.unsplash.com/photo-1544601640-b256c49a192d",
},
},
{ "type": "image_url",
"image_url": {
"url": "https://www.nps.gov/webcams-yell/mammoth_arch.jpg",
},
}
]
example_output = ""
example_output = '
Example response object:
{
"matches": integer,
}
'
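# For illustration only: given the example_output prompt above, the model is expected
# to answer with a bare JSON object in the message content, e.g. {"matches": 2}
# (the count here is invented).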
logger.info("Prompts prepared for OpenAI API")
#
# Call OpenAI API
#
response = ''
begin
  response = @openai_client.chat(
    parameters: {
      model: "gpt-4-vision-preview",
      messages: [
        { role: "system", content: system_context },
        { role: "system", content: example_output },
        { role: "user", content: user_messages }
      ],
      temperature: 0.4,
      max_tokens: 100
    })
rescue => err
  logger.fatal(err)
  exit 1
else
  logger.info("OpenAI API response received and successfully processed")
  logger.info("Response:\n#{response}")
end
#
# Parse and print the number of matches reported by the model
#
hash_results = {}
if response != ''
  hash_results = JSON.parse(response.dig("choices", 0, "message", "content"))
  puts "Summary result:\n#{hash_results}"
end
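#
# An assumption worth noting, not something this gist handles: vision models sometimes
# wrap JSON replies in Markdown code fences, which would make JSON.parse raise. A
# minimal workaround would be to strip the fences first, e.g.
#   content = response.dig("choices", 0, "message", "content").to_s
#   hash_results = JSON.parse(content.gsub(/```(json)?/, "").strip)
#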
#
# Save results to Grafana Cloud
#
logger.info ""
logger.info "Saving results to Grafana Cloud..."
#
# Initialize Grafana Cloud auth variables
#
@grafana_base64_encoded_auth_token = Base64.strict_encode64(ENV['GRAFANA_CLOUD_METRICS_USER_ID'] + ':' + ENV['GRAFANA_CLOUD_METRICS_API_KEY'])
#
# Set metrics payload
#
metrics_payload = ""
metrics_payload = "nps_entrance,park=yellowstone vehicles=#{hash_results['matches']}"
#
# Push metric to Grafana Cloud using the Influx Line Protocol
#
begin
  uri = URI.parse(ENV['GRAFANA_CLOUD_METRICS_INFLUX_PROXY_ENDPOINT'])
  response = Net::HTTP.start(uri.host, uri.port, use_ssl: true) do |client|
    request = Net::HTTP::Post.new(uri.path)
    request.body = metrics_payload
    request["Authorization"] = "Basic #{@grafana_base64_encoded_auth_token}"
    request["Content-Type"] = "text/plain"
    client.request(request)
  end
rescue => err
  logger.fatal('!!!')
  logger.fatal("Caught exception; exiting")
  logger.fatal(err)
  logger.fatal('!!!')
  exit 1
else
  logger.info 'Grafana Cloud response:'
  logger.info response.code
  logger.info ''
end
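#
# A 2xx response code here indicates Grafana Cloud accepted the metric sample.
#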