Ollama API in Elixir
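A small script that streams text completions from a local Ollama server's /api/generate endpoint, using Finch for HTTP and Jason for JSON decoding.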
# Install dependencies: Finch for HTTP, Jason for JSON.
Mix.install([
  {:finch, "~> 0.16.0"},
  {:jason, "~> 1.4"}
])
Finch.start_link(name: OllamaFinch)
defmodule Ollama do
  @api_endpoint "http://localhost:11434/api/generate"
  @model "llama2-uncensored"

  @doc """
  Streams a completion for `prompt` from the local Ollama server.

  `fun` is called with the accumulated response text each time a new chunk arrives.
  """
  def stream_response(prompt, fun) do
    payload =
      %{
        model: @model,
        prompt: prompt
      }
      |> Jason.encode!()

    # Fold over the streamed HTTP response, accumulating the generated text.
    process_fn = fn
      {:status, _status}, acc ->
        acc

      {:headers, _headers}, acc ->
        acc

      {:data, json_string}, acc ->
        # Each data chunk is assumed to decode as a single JSON object.
        data = Jason.decode!(json_string)
        process_chunk(data, acc, fun)
    end

    Finch.build(:post, @api_endpoint, [{"content-type", "application/json"}], payload)
    |> Finch.stream(OllamaFinch, "", process_fn)
  end

  # Append the newly generated text to the accumulator and pass the
  # accumulated response so far to the callback.
  defp process_chunk(%{"response" => new_content, "done" => false}, acc, fun) do
    (acc <> new_content)
    |> tap(fun)
  end

  # The final chunk carries no new text; return the full accumulated response.
  defp process_chunk(%{"done" => true}, acc, _fun) do
    acc
  end
end
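# A minimal usage sketch (not part of the original gist), assuming a local
# Ollama server is running on the default port with the "llama2-uncensored"
# model already pulled. Finch.stream/4 returns {:ok, acc} on success, so the
# final accumulator is the full generated text. The callback here just
# reports how much text has been generated so far.
{:ok, final_text} =
  Ollama.stream_response("Why is the sky blue?", fn accumulated ->
    IO.write("\rReceived #{String.length(accumulated)} characters")
  end)

IO.puts("\n\n#{final_text}")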