Skip to content

Instantly share code, notes, and snippets.

@datsuns
Last active March 28, 2024 08:33
Show Gist options
  • Save datsuns/2976857b034db3b87a65f2830ce10109 to your computer and use it in GitHub Desktop.
Save datsuns/2976857b034db3b87a65f2830ce10109 to your computer and use it in GitHub Desktop.
Gemini API example written in Go
package main
import (
	"context"
	"fmt"
	"log"
	"os"
	"strings"
	"time"

	"github.com/google/generative-ai-go/genai"
	"google.golang.org/api/iterator"
	"google.golang.org/api/option"
)
var (
ApiKey = ""
ApiKey2 = ""
Times = 100
//GeminiModel = "gemini-1.0-pro"
GeminiModel = "gemini-pro"
RequestIntervalNormal = time.Second * 5
RequestIntervalLong = time.Second * 60
Client1 *genai.Client
Model1 *genai.GenerativeModel
ChatSession1 *genai.ChatSession
Client2 *genai.Client
Model2 *genai.GenerativeModel
ChatSession2 *genai.ChatSession
Context context.Context
Role1 = "ソフトウェアエンジニア"
Role2 = "ソフトウェアエンジニア"
Effect = "広く一般的過ぎる条件や制限事項、懸念点は敢えて除外し、より先鋭的な結論を目指してみましょう。"
Theme = "もっとも優れたプログラミング言語は何か?"
GeminiLimitMax = 1024 * 32
GeminiLimitMergin = 1024 * 4
GeminiLimitSize = (GeminiLimitMax - GeminiLimitMergin)
)
func load() {
var raw []byte
var err error
raw, err = os.ReadFile("apikey.txt")
if err != nil {
panic(err)
}
ApiKey = string(raw)
raw, err = os.ReadFile("apikey2.txt")
if err != nil {
panic(err)
}
ApiKey2 = string(raw)
}
type Chat struct {
Context *context.Context
Client *genai.Client
Model *genai.GenerativeModel
ChatSession *genai.ChatSession
TotalPromptSize int
TotalResponceSize int
}
func NewChat(apiKey string, ctx *context.Context) (*Chat, error) {
var err error
ret := &Chat{Context: ctx, TotalPromptSize: 0, TotalResponceSize: 0}
ret.Client, err = genai.NewClient(*ret.Context, option.WithAPIKey(apiKey))
if err != nil {
return nil, err
}
ret.Model = ret.Client.GenerativeModel(GeminiModel)
ret.ChatSession = ret.Model.StartChat()
return ret, nil
}
func (c *Chat) question(prompt string) (string, error) {
ret := ""
fmt.Printf(">>> Status[%v / %v / %v]\n", c.TotalPromptSize, c.TotalResponceSize, c.TotalPromptSize+c.TotalResponceSize)
fmt.Printf(">>> PROMPT(size:%v)[%v]\n", len(prompt), prompt)
fmt.Println(">>>>>>>>>>>>")
iter := c.ChatSession.SendMessageStream(*c.Context, genai.Text(prompt))
for {
res, err := iter.Next()
if err == iterator.Done {
break
}
if err != nil {
return "", err
}
ret += printResponse(res)
}
fmt.Println("<<<<<<<<<<<<")
c.TotalPromptSize += len(prompt)
c.TotalResponceSize += len(ret)
return ret, nil
}
func (c *Chat) TotalSize() int {
return c.TotalPromptSize + c.TotalResponceSize
}
func printResponse(resp *genai.GenerateContentResponse) string {
ret := ""
for _, cand := range resp.Candidates {
if cand.Content != nil {
for _, part := range cand.Content.Parts {
fmt.Println(part)
ret += fmt.Sprintf("%s", part)
}
}
}
return ret
}
func chatSessionSample() {
ctx := context.Background()
client, err := genai.NewClient(ctx, option.WithAPIKey(ApiKey))
if err != nil {
log.Fatal(err)
}
defer client.Close()
model := client.GenerativeModel(GeminiModel)
cs := model.StartChat()
//send := func(msg string) *genai.GenerateContentResponse {
// fmt.Printf("== Me: %s\n== Model:\n", msg)
// res, err := cs.SendMessage(ctx, genai.Text(msg))
// if err != nil {
// log.Fatal(err)
// }
// return res
//}
issueStreaming(cs, &ctx, "Can you name some brands of air fryer?")
issueStreaming(cs, &ctx, "Which one of those do you recommend?")
//fmt.Println("---- HISTORY ----")
//for i, c := range cs.History {
// log.Printf(" %d: %+v", i, c)
//}
//fmt.Println("---- HISTORY END ----")
issueStreaming(cs, &ctx, "Why do you like the Philips?")
}
func issueStreaming(cs *genai.ChatSession, ctx *context.Context, prompt string) {
fmt.Printf(">>> PROMPT[%v]\n", prompt)
fmt.Println(">>> responce")
iter := cs.SendMessageStream(*ctx, genai.Text(prompt))
for {
res, err := iter.Next()
if err == iterator.Done {
break
}
if err != nil {
log.Fatal(err)
}
printResponse(res)
}
fmt.Println("<<< responce")
}
//func issueRequest(prompt string) (string, error) {
// ctx := context.Background()
// // Access your API key as an environment variable (see "Set up your API key" above)
// client, err := genai.NewClient(ctx, option.WithAPIKey(ApiKey))
// if err != nil {
// log.Fatal(err)
// }
// defer client.Close()
//
// model := client.GenerativeModel(GeminiModel)
// iter := model.GenerateContentStream(ctx, genai.Text(prompt))
//
// for {
// resp, err := iter.Next()
// if err == iterator.Done {
// break
// }
// if err != nil {
// log.Fatal(err)
// }
//
// // ... print resp
// }
//}
func main() {
var err error
var ret string
var prompt string
load()
//chatSessionSample()
ctx := context.Background()
c1, err := NewChat(ApiKey, &ctx)
if err != nil {
panic(err)
}
c2, err := NewChat(ApiKey2, &ctx)
if err != nil {
panic(err)
}
ret, err = c1.question(
fmt.Sprintf("あなたは%vです。\n%vを議論しましょう。%v", Role1, Theme, Effect),
)
if err != nil {
panic(err)
}
ret, err = c2.question(
fmt.Sprintf("あなたは%vです。\n%vを議論しましょう。%v", Role1, Theme, Effect),
)
if err != nil {
panic(err)
}
var cur string
for i := 0; i < Times; i++ {
if c1.TotalSize() > GeminiLimitSize {
fmt.Println("break loop by max size")
break
}
fmt.Printf("--- [%d]th conversation ---\n", i)
prompt = "あなたの意見に対する以下の意見に対してどう考えますか?\n" + ret
cur, err = c2.question(prompt)
if err != nil {
fmt.Printf("err[%v]. retry\n", err.Error())
time.Sleep(RequestIntervalLong)
continue
//panic(err)
}
ret = cur
prompt = "あなたの意見に対する以下の意見に対してどう考えますか?\n" + ret
cur, err = c1.question(prompt)
if err != nil {
fmt.Printf("err[%v]. retry\n", err.Error())
time.Sleep(RequestIntervalLong)
continue
//panic(err)
}
ret = cur
time.Sleep(RequestIntervalNormal)
}
prompt = fmt.Sprintf("ここまでの議論を踏まえて「%v」を定義してください。できるだけ簡潔にしましょう", Theme)
result := []string{}
ret, err = c1.question(prompt)
if err != nil {
panic(err)
}
result = append(result, ret)
ret, err = c2.question(prompt)
if err != nil {
panic(err)
}
result = append(result, ret)
prompt = fmt.Sprintf("ここまでの議論で理解は深まったでしょうか?感想を教えてください")
ret, err = c1.question(prompt)
if err != nil {
panic(err)
}
result = append(result, ret)
ret, err = c2.question(prompt)
if err != nil {
panic(err)
}
result = append(result, ret)
fmt.Println("=============================================")
fmt.Println("client1の結論", result[0])
fmt.Println("=============================================")
fmt.Println("client2の結論", result[1])
fmt.Println("=============================================")
fmt.Println("client1の感想", result[2])
fmt.Println("=============================================")
fmt.Println("client2の感想", result[3])
}
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment