If you have uv installed (and you should!), you can install llm globally in a uv-managed tool environment with:
uv tool install llm
If you want to use models other than OpenAI's, you'll need some plugins:
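For example, the llm-ollama plugin lets llm talk to models served by a local Ollama instance (plugin names as of this writing; llm's plugin directory lists many more):

llm install llm-ollama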
#!/usr/bin/swift
// Started from the code in https://terminalbytes.com/iphone-8-solar-powered-vision-ocr-server/
// Edited from Mistral generated code: https://chat.mistral.ai/chat/563cacdf-6def-49e4-9df6-ee8e263978c5
import AppKit
import CoreGraphics
import Foundation
import SwiftUI
import Vision

func processImageSync(imagePath: String) -> String? {
    // Load the image from disk and get a CGImage for the Vision framework
    guard let nsImage = NSImage(contentsOfFile: imagePath),
          let cgImage = nsImage.cgImage(forProposedRect: nil, context: nil, hints: nil)
    else { return nil }
    var text: String?
    let request = VNRecognizeTextRequest { req, _ in
        text = (req.results as? [VNRecognizedTextObservation])?
            .compactMap { $0.topCandidates(1).first?.string }
            .joined(separator: "\n")
    }
    request.recognitionLevel = .accurate
    // perform(_:) runs synchronously here, so `text` is populated before we return
    try? VNImageRequestHandler(cgImage: cgImage, options: [:]).perform([request])
    return text
}
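Because of the #!/usr/bin/swift shebang you can chmod +x the file and run it directly. A one-line driver such as print(processImageSync(imagePath: CommandLine.arguments[1]) ?? "") at the bottom turns it into a small command-line OCR tool; that driver is a sketch here, not part of the original server script.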
import requests
import subprocess
import yaml

def get_data_from_api():
    # Ask the local LM Studio server which models it has loaded
    base_url = "http://localhost:1234/v1"
    response = requests.get(base_url + "/models")
    if response.status_code == 200:
        json_data = response.json()
        return json_data
    return None
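A plausible way to call it, and a guess at why yaml is imported (the rest of the original script is not shown):

if __name__ == "__main__":
    models = get_data_from_api()
    if models:
        print(yaml.safe_dump(models))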
import sys
from io import StringIO

import streamlit as st  # pip install streamlit
from code_editor import code_editor  # pip install streamlit_code_editor
import ollama as ol  # pip install ollama

st.set_page_config(layout='wide')
st.title('`Offline code completion`')
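The heart of an app like this is a single Ollama call. A minimal sketch of the completion step (the model name and the button/editor wiring are assumptions, not the original app's code):

snippet = code_editor('# start typing...', lang='python')
if st.button('Complete'):
    # Send the editor contents to a local model and show its continuation
    result = ol.generate(model='codellama', prompt=snippet['text'])
    st.code(result['response'], language='python')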
""" | |
A minimal, fast example generating text with Llama 3.1 in MLX. | |
To run, install the requirements: | |
pip install -U mlx transformers fire | |
Then generate text with: | |
python l3min.py "How tall is K2?" |
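l3min.py implements the generation loop itself; if you only care about the output, the mlx-lm package wraps the same stack in two calls. A sketch (the quantized model name is an assumption, any mlx-community conversion works):

# pip install mlx-lm
from mlx_lm import load, generate

model, tokenizer = load("mlx-community/Meta-Llama-3.1-8B-Instruct-4bit")
print(generate(model, tokenizer, prompt="How tall is K2?", max_tokens=200))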
<!doctype html>
<html lang="en">
  <head>
    <meta charset="UTF-8" />
    <meta name="viewport" content="width=device-width, initial-scale=1.0" />
    <title>LM Studio Chat Interface</title>
    <style>
      body {
        font-family: -apple-system, BlinkMacSystemFont, "Segoe UI",
          Roboto, sans-serif;
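However the page's JavaScript is wired up, it ultimately posts to LM Studio's OpenAI-compatible endpoint on localhost:1234. The same request from Python, as a sketch (the model field is a placeholder; LM Studio answers with whichever model is loaded):

import requests

r = requests.post(
    "http://localhost:1234/v1/chat/completions",
    json={
        "model": "local-model",  # placeholder
        "messages": [{"role": "user", "content": "Hello!"}],
    },
)
print(r.json()["choices"][0]["message"]["content"])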
package main

import (
	"encoding/json"
	"fmt"
	"io/ioutil"
	"os"
	"os/user"
	"path/filepath"
	"strings"
)
#!/bin/bash
# Ollama Model Export Script
# Usage: bash ollama-export.sh vicuna:7b
# License: MIT (https://ncurl.xyz/s/o_o6DVqIR)
# https://gist.github.com/supersonictw/f6cf5e599377132fe5e180b3d495c553

# Interrupt if any error occurs
set -e

# Declare
# A faster, but not private, way to achieve the above
# is to define a bash function and source it at startup
function html_to_md () {
    if [[ $# -eq 2 ]]; then
        curl "https://r.jina.ai/$1" > "$2".md
        echo "Content saved to \"$2\".md"
    else
        curl "https://r.jina.ai/$1"
    fi
}
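Usage: html_to_md https://example.com/post saved-page writes saved-page.md, while a single URL argument streams the converted Markdown to stdout. r.jina.ai is Jina's Reader service, which returns a Markdown rendering of whatever URL you append to it.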
Replace natbib with biblatex to get citation links:
\usepackage[backend=bibtex, style=authoryear-comp, natbib=true, sortcites=false]{biblatex}
\addbibresource{main.bib}
% optional if you want (Smith 1776) instead of (Smith, 1776)
\renewcommand*{\nameyeardelim}{\addspace}
\begin{document}
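Because of natbib=true, existing \citet and \citep calls keep working as aliases for biblatex's \textcite and \parencite, so the document body compiles unchanged; the one other change to remember is replacing \bibliography{main} with \printbibliography before \end{document}.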