Skip to content

Instantly share code, notes, and snippets.

@subtleGradient
Created November 4, 2023 18:37
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save subtleGradient/dc9afee7df4552901b9b5261f972170c to your computer and use it in GitHub Desktop.
playing with ollama
/**
 * Minimal streaming client for the ollama /api/generate endpoint.
 *
 * Supports plain generation and codellama-style fill-in-the-middle ("infill"):
 * place `GenAI.INJECT` inside a tagged-template prompt and the text around it
 * is rewrapped with the <PRE>/<SUF>/<MID> control tokens the model expects.
 */
class GenAI {
  // Sentinel that marks the insertion point for fill-in-the-middle prompts.
  static INJECT = "<{([ INJECT ])}>"
  // codellama infill control tokens — the leading spaces are significant.
  static PRE = "<PRE>"
  static SUF = " <SUF>"
  static MID = " <MID>"
  static END = " <EOT>"
  #apiUrl
  #model
  /**
   * @param {string} apiUrl - generate endpoint, e.g. "http://localhost:11434/api/generate"
   * @param {string} model - model tag, e.g. "codellama:7b-code"
   */
  constructor(apiUrl, model) {
    this.#apiUrl = apiUrl
    this.#model = model
  }
  /** POST the prompt; `abort` is an optional AbortController whose signal cancels the request. */
  async #fetch(prompt, abort) {
    return await fetch(this.#apiUrl, {
      method: "POST",
      body: JSON.stringify({ model: this.#model, prompt }),
      headers: { "Content-Type": "application/json" },
      signal: abort?.signal,
    })
  }
  /**
   * Tagged-template generator: `await ai.gen`…``.
   * Non-string interpolations are JSON-stringified into the prompt. Returns an
   * object that is async-iterable over parsed response chunks and exposes
   * { prompt, prefix, suffix, response, abort }.
   * (Declared as an arrow class field so it stays bound when destructured.)
   */
  gen = async (strings, ...values) => {
    let prefix, suffix
    let prompt = String.raw(strings, ...values.map(v => (typeof v === "string" ? v : JSON.stringify(v))))
    if (prompt.includes(GenAI.INJECT)) {
      ;[prefix, suffix] = prompt.split(GenAI.INJECT)
      const { PRE, MID, SUF } = GenAI
      // prettier-ignore
      prompt = ''
        + `${PRE}${prefix}`
        + `${SUF}${suffix} `
        + MID
    }
    const abort = new AbortController()
    const response = await this.#fetch(prompt, abort)
    return {
      [Symbol.asyncIterator]: () => this.#genChunks(response.body?.getReader()),
      abort: () => abort.abort(),
      ...{ prompt, prefix, suffix, response },
    }
  }
  /** Parse one NDJSON line; strip a trailing end-of-text token from the response text. */
  static #parseChunk(line) {
    const chunk = JSON.parse(line)
    if (typeof chunk.response === "string" && chunk.response.endsWith(GenAI.END)) {
      chunk.response = chunk.response.slice(0, -GenAI.END.length)
    }
    return chunk
  }
  /**
   * Yield parsed chunks from ollama's newline-delimited JSON stream.
   * BUG FIX: the original JSON.parse'd each raw network read as one document,
   * which throws whenever a read carries several NDJSON lines or splits one
   * line across reads. We now buffer decoded text and parse per newline,
   * flushing any unterminated final line when the stream ends.
   * On abort, yields a final { done: true, reason: "aborted" } marker.
   */
  async *#genChunks(reader) {
    if (!reader) return
    const decoder = new TextDecoder()
    let buffer = ""
    try {
      while (true) {
        let done, value
        try {
          ;({ done, value } = await reader.read())
        } catch (error) {
          if (error.name !== "AbortError") throw error
          yield { done: true, reason: "aborted" }
          return // exit if the reading was aborted
        }
        if (done) break
        buffer += decoder.decode(value, { stream: true })
        let nl
        while ((nl = buffer.indexOf("\n")) !== -1) {
          const line = buffer.slice(0, nl)
          buffer = buffer.slice(nl + 1)
          if (line.trim()) yield GenAI.#parseChunk(line)
        }
      }
      // Flush decoder state and a possible unterminated trailing line.
      buffer += decoder.decode()
      if (buffer.trim()) yield GenAI.#parseChunk(buffer)
    } finally {
      reader.releaseLock()
    }
  }
}
// Usage: client for a local ollama server (default port 11434) running the codellama 7B code/infill model.
const codellama7b_code = new GenAI("http://localhost:11434/api/generate", "codellama:7b-code")
/**
 * Demo: stream a codellama infill completion into a <textarea>.
 * Browser-only (uses document/window); expects a local ollama server.
 */
async function demo() {
  // "Done" button lets the user abort the stream via window.STOP().
  const doneButton = document.createElement("button")
  doneButton.textContent = "Done"
  doneButton.onclick = () => window.STOP()
  document.body.append(doneButton)
  const textarea = document.createElement("textarea")
  textarea.cols = 80
  textarea.rows = 20
  document.body.append(textarea)
  const { gen: js } = codellama7b_code
  // GenAI.INJECT marks where the model should fill in the function body.
  const stream = await js`
/** This function does stuff that is awesome */
function doAwesomeStuff(...nums) {
${GenAI.INJECT}
}
console.assert(doAwesomeStuff(1,2,3) !== "lame stuff")
`
  console.log(`ollama run codellama:7b-code ""${JSON.stringify(stream.prompt)}""`)
  window.STOP = () => stream.abort()
  if (!stream.response.ok) {
    // BUG FIX: the original interpolated `response.status`, but no `response`
    // binding exists in this scope — it threw ReferenceError instead of the
    // intended HTTP error. The status lives on stream.response.
    throw new Error(`HTTP error! status: ${stream.response.status}`)
  }
  // Show the text before the injection point, then append chunks as they stream in.
  textarea.value = stream.prefix
  for await (const chunk of stream) {
    const { response, done, reason } = chunk
    if (done) {
      console.log(reason)
      break
    }
    textarea.value += response
    // Grow the textarea to fit the streamed content.
    textarea.rows = textarea.value.split("\n").length + 1
  }
  // Finish by appending the text after the injection point.
  textarea.value += stream.suffix
}
// Kick off the demo. FIX: the original left the promise floating, so any
// rejection (e.g. the HTTP-error throw in demo) was silently unhandled.
demo().catch(console.error)
;("demo() returned")
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment