Skip to content

Instantly share code, notes, and snippets.

@surendharreddy
Created April 9, 2023 04:41
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save surendharreddy/60d291b970133f6c34a3d5302e6a6b6f to your computer and use it in GitHub Desktop.
require 'bundler/inline'

# Install and load dependencies inline so the script is self-contained.
gemfile do
  source 'https://rubygems.org'

  gem 'pg', '1.1.4'          # PostgreSQL driver (used together with the pgvector extension)
  gem 'ruby-openai', '3.7.0' # OpenAI API client
  gem 'pry'                  # interactive debugging console
end
# Connect to Postgres. Each setting can be overridden through the standard
# libpq-style environment variables (PGHOST, PGPORT, ...); the defaults
# match a throwaway local development instance, so existing behavior is
# unchanged when no variables are set.
conn = PG.connect(
  host: ENV.fetch('PGHOST', '127.0.0.1'),
  port: ENV.fetch('PGPORT', 5432),
  dbname: ENV.fetch('PGDATABASE', 'postgres'),
  user: ENV.fetch('PGUSER', 'postgres'),
  password: ENV.fetch('PGPASSWORD', 'postgres')
)
# The user's question, taken from the command line.
input = ARGV[0]

begin
  # NOTE(review): replace the placeholder with a real key — ideally read it
  # from the environment (e.g. ENV.fetch('OPENAI_API_KEY')) rather than
  # committing it to source.
  client = OpenAI::Client.new(access_token: 'REPLACE_WITH_API_KEY')

  # Embed the question so we can search for semantically similar documents.
  response = client.embeddings(
    parameters: {
      model: 'text-embedding-ada-002',
      input: input
    }
  )
  embedding = JSON.parse(response.body)['data'][0]['embedding']

  # Inserting vectors (kept for reference):
  # conn.transaction do
  #   conn.prepare('create_embeddings', 'INSERT INTO documents (content, embedding) VALUES ($1, $2)')
  #   # pgvector accepts the text form '[0.1, 0.2, ...]', which Array#to_s produces.
  #   conn.exec_prepared('create_embeddings', [input, embedding.to_s])
  # end

  # Retrieve the nearest documents, build a prompt, and ask OpenAI.
  conn.transaction do
    # '<->' is L2 (Euclidean) distance; use '<=>' for cosine distance instead.
    conn.prepare('select_embeddings', 'SELECT * FROM documents ORDER BY embedding <-> $1 LIMIT 3')
    # Serialize the vector explicitly: pgvector parses the '[...]' text form,
    # and the pg gem expects string-encodable parameters.
    result = conn.exec_prepared('select_embeddings', [embedding.to_s])

    # Join the matched documents into plain text. (Interpolating the Array
    # directly would embed its inspect form — brackets and quotes included —
    # into the prompt.)
    context = result.map { |row| row['content'].gsub("\n", '') }.join("\n")

    # Squiggly heredoc strips the leading indentation; the original
    # """...""" form was adjacent-string concatenation, not a real
    # triple-quoted string.
    prompt = <<~PROMPT
      Context question and answers:
      #{context}
      Question:
      #{input}
      What is the answer:
    PROMPT

    # Use a separate variable so the embeddings response above isn't shadowed.
    # Keep the prompt's line breaks: stripping them glued labels and content
    # into one unreadable line.
    completion = client.completions(
      parameters: {
        model: 'text-davinci-002',
        prompt: prompt,
        max_tokens: 512,
        temperature: 0.5
      }
    )
    puts JSON.parse(completion.body)['choices'].map { |c| c['text'] }
  end
rescue PG::Error => e
  puts e.message
ensure
  # Always release the connection, even when an API or DB call raises.
  conn.close
end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment