Tmux AI llama.cpp integration
#!/bin/bash
# Script: ~/bin/tmai
#
# Helper script for tmux llama.cpp integration
#
# Usage:
# - Optionally set the PROMPT_FILE environment variable to the full path of a text file containing the system prompt
# - While in an active tmux session, press: CTRL-B x
# - Enter your prompt and press Enter
# - Resize the output pane as desired
# - Press q to close the output pane
#
# Requirements:
# - llama.cpp (compiled and working)
# - tmux (must be active/running)
# - sed
# - Add ~/bin to $PATH, ensure this script is otherwise on the $PATH, or reference the full script path in the tmux keybinding
#
# Setup:
# - Add keybinding to ~/.tmux.conf :
#     bind-key x command-prompt -p "Enter query:" "split-window -p 5 'tmai \"%%\"'"
# - Update the following variables below:
#     - model/context_size/n_predict
#     - llama_path
#     - model_path
#   (Ensure your model is located in $model_path)
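#
# Example (a sketch, not part of the original setup): export PROMPT_FILE from
# your shell profile before starting tmux so the keybinding inherits it. The
# file path and prompt text here are illustrative assumptions:
#     echo "You are a concise shell assistant." > ~/.tmai_system_prompt
#     export PROMPT_FILE="$HOME/.tmai_system_prompt"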
prompt="You are a helpful AI assistant that answers questions and performs tasks as requested." | |
model=nous-hermes-2-solar-10.7b.Q8_0.4k.gguf; context_size=4096; n_predict=1000 | |
#model=dolphin-2.6-mistral-7b.Q8_0.16k.gguf; context_size=14000; n_predict=2000 | |
temperature=0.6 | |
prompt_prefix="\\n### USER: " | |
prompt_suffix="\\n### AI: " | |
output_limiter="1,/### AI: /d" | |
llama_path=~/services/llama.cpp | |
model_path=~/services/textgen/models | |
model_location="$model_path/$model" | |
# Capture the recent contents of every pane in the current tmux window,
# dropping blank lines, to provide context for the model.
function gettmuxwindow() {
  current_window=$(tmux display-message -p '#S:#I')
  tmux list-panes -a -F '#{session_name}:#{window_index}.#{pane_index}' | grep "^$current_window" | while read -r pane_id; do
    tmux capture-pane -S -100 -p -t "$pane_id" | grep -v '^\s*$'
  done
}
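# Note: "-S -100" above starts each capture 100 lines back in the pane's
# scrollback history. Increasing it (e.g. "-S -500") feeds the model more
# context at the cost of a longer prompt; the -500 figure is only an example,
# not a tested default.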
function wait_for_q() {
  echo -e "\n\n\n...Press 'q' to close..."
  while true; do
    # Read a single keypress; "<&1" takes input from the same terminal that
    # fd 1 (the output pane) points to.
    read -n 1 k <&1
    if [[ $k = q ]]; then
      break
    fi
  done
}
# Override the default system prompt if PROMPT_FILE points at a file.
if [ -n "$PROMPT_FILE" ]; then
  prompt=$(cat "$PROMPT_FILE")
fi
# Assemble the full prompt: system prompt, captured window contents, the
# user's query, and the "### AI: " cue for the model to complete.
prompt+="\\n$prompt_prefix"
prompt+="\\n$(gettmuxwindow)"
prompt+="$1"
prompt+="\\n"
prompt+=$prompt_suffix
prompt+="\\n\\n"
prompt=${prompt%\\n}
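# For reference, once -e expands the escapes the assembled prompt looks
# roughly like this (bracketed parts are placeholders, not literal text):
#   <system prompt>
#   ### USER:
#   <captured pane contents><your query>
#   ### AI: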
cd "$llama_path" || exit 1
# Run llama.cpp, then strip everything up to and including the "### AI: " cue
# so only the model's answer is shown in the pane.
./main --temp "$temperature" --n-predict "$n_predict" -e -c "$context_size" -m "$model_location" -p "$prompt" 2>/dev/null | \
  sed "$output_limiter"
wait_for_q
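A quick way to sanity-check the setup outside the keybinding (the query text
below is only an example) is to run the script directly from a shell inside an
active tmux session:

    tmai "Summarize the output visible in this window"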