-
-
Save supersonictw/f6cf5e599377132fe5e180b3d495c553 to your computer and use it in GitHub Desktop.
#!/bin/bash
# Ollama Model Export Script
# Usage: bash ollama-export.sh vicuna:7b
# SPDX-License-Identifier: MIT (https://ncurl.xyz/s/o_o6DVqIR)
# https://gist.github.com/supersonictw/f6cf5e599377132fe5e180b3d495c553

# Abort immediately if any command fails.
set -e

# Print the banner.
printf '%s\n' "Ollama Model Export Script"
printf '%s\n' "License: MIT (https://ncurl.xyz/s/o_o6DVqIR)"
printf '\n'
# OS-specific: on Linux, ollama stores its models under the "ollama"
# service account's home directory, so point HOME there when possible.
case "$OSTYPE" in
    linux*)
        OLLAMA_USER_HOME="$(echo ~ollama)"
        # If the "ollama" user does not exist, tilde expansion leaves the
        # literal string "~ollama"; keep the caller's HOME in that case
        # instead of building a bogus "~ollama/.ollama" path.
        if [ -d "$OLLAMA_USER_HOME" ]; then
            HOME="$OLLAMA_USER_HOME"
        fi
        ;;
esac

# Define variables (OLLAMA_HOME may be overridden from the environment,
# e.g. for Docker installs where it lives under /usr/share/ollama).
OLLAMA_HOME="${OLLAMA_HOME:="$HOME/.ollama"}"
BLOBS_FILE_BASE_PATH="$OLLAMA_HOME/models/blobs"
MANIFESTS_FILE_BASE_PATH="$OLLAMA_HOME/models/manifests"

# Define constants: ANSI-colored message prefixes (printed with echo -e).
SUCCESS_PREFIX="\033[1;32mSuccess:\033[0m"
FAILED_PREFIX="\033[0;31mFailed:\033[0m"
# Read arguments | |
IFS='/' read -ra NAME_ARGS <<< "${1/://}" | |
case "${#NAME_ARGS[@]}" in | |
4) | |
MANIFESTS_REGISTRY_NAME="${NAME_ARGS[0]}" | |
MANIFESTS_LIBRARY_NAME="${NAME_ARGS[1]}" | |
MANIFESTS_MODEL_NAME="${NAME_ARGS[2]}" | |
MANIFESTS_PARAMS_NAME="${NAME_ARGS[3]}" | |
;; | |
3) | |
MANIFESTS_LIBRARY_NAME="${NAME_ARGS[0]}" | |
MANIFESTS_MODEL_NAME="${NAME_ARGS[1]}" | |
MANIFESTS_PARAMS_NAME="${NAME_ARGS[2]}" | |
;; | |
2) | |
MANIFESTS_MODEL_NAME="${NAME_ARGS[0]}" | |
MANIFESTS_PARAMS_NAME="${NAME_ARGS[1]}" | |
;; | |
1) | |
MANIFESTS_MODEL_NAME="${NAME_ARGS[0]}" | |
;; | |
esac | |
# Define variables | |
MANIFESTS_REGISTRY_NAME="${MANIFESTS_REGISTRY_NAME:="registry.ollama.ai"}" | |
MANIFESTS_LIBRARY_NAME="${MANIFESTS_LIBRARY_NAME:="library"}" | |
MANIFESTS_MODEL_NAME="${MANIFESTS_MODEL_NAME:="vicuna"}" | |
MANIFESTS_PARAMS_NAME="${MANIFESTS_PARAMS_NAME:="latest"}" | |
# Echo the model full name | |
MODEL_FULL_NAME="$MANIFESTS_MODEL_NAME:$MANIFESTS_PARAMS_NAME" | |
echo "Exporting model \"$MODEL_FULL_NAME\"..." | |
echo "" | |
# Make sure the manifest for the requested model exists.
MANIFESTS_FILE_PATH="$MANIFESTS_FILE_BASE_PATH/$MANIFESTS_REGISTRY_NAME/$MANIFESTS_LIBRARY_NAME/$MANIFESTS_MODEL_NAME/$MANIFESTS_PARAMS_NAME"
if ! [ -f "$MANIFESTS_FILE_PATH" ]; then
    printf '%b\n' "$FAILED_PREFIX \"$MANIFESTS_FILE_PATH\" not exists, the model \"$MODEL_FULL_NAME\" you requested is not found."
    exit 1
fi

# Refuse to run when the destination directory already exists, so we never
# clobber a previous export.
EXPORT_DST_BASE_PATH="${EXPORT_DST_BASE_PATH:="$PWD/${MODEL_FULL_NAME/:/-}"}"
if [ -d "$EXPORT_DST_BASE_PATH" ]; then
    printf '%b\n' "$FAILED_PREFIX \"$EXPORT_DST_BASE_PATH\" already exists, exits for preventing from unexpected operations."
    exit 1
fi

# Create the destination directory and record where the model came from.
mkdir -p "$EXPORT_DST_BASE_PATH"
printf "%s" "$MANIFESTS_REGISTRY_NAME/$MANIFESTS_LIBRARY_NAME/$MANIFESTS_MODEL_NAME:$MANIFESTS_PARAMS_NAME" >"$EXPORT_DST_BASE_PATH/source.txt"
# Read the manifest and export each layer into the destination directory.
# Loop-invariant output paths, hoisted out of the loop.
EXPORT_MODEL_FILE_PATH="$EXPORT_DST_BASE_PATH/Modelfile"
EXPORT_MODEL_BIN_PATH="$EXPORT_DST_BASE_PATH/model.bin"
while read -r MEDIA_TYPE layer; do
    # Blobs are stored on disk as "sha256-<hex>" while the manifest digest
    # reads "sha256:<hex>", so swap the first ":" for "-".
    BLOB_FILE_NAME="${layer/:/-}"
    BLOB_FILE_PATH="$BLOBS_FILE_BASE_PATH/$BLOB_FILE_NAME"
    # "application/vnd.ollama.image.<type>" -> "<type>"
    BLOB_TYPE_NAME="${MEDIA_TYPE##*.}"
    case "$BLOB_TYPE_NAME" in
    model|params|template|system|license)
        ;;
    *)
        # e.g. "projector" or "adapter": binary data with no Modelfile
        # representation; dumping it as text would corrupt the Modelfile
        # (and trips "ignored null byte in input" warnings).
        echo "Skipping unsupported layer type \"$BLOB_TYPE_NAME\" ($layer)." >&2
        continue
        ;;
    esac
    # Fail with a clear message instead of a cryptic "cp: cannot stat".
    if [ ! -f "$BLOB_FILE_PATH" ]; then
        echo -e "$FAILED_PREFIX \"$BLOB_FILE_PATH\" not exists, the blob of layer \"$layer\" ($BLOB_TYPE_NAME) is missing." >&2
        exit 1
    fi
    case "$BLOB_TYPE_NAME" in
    model)
        cp "$BLOB_FILE_PATH" "$EXPORT_MODEL_BIN_PATH"
        printf "%s\n" "FROM ./model.bin" >>"$EXPORT_MODEL_FILE_PATH"
        ;;
    params)
        # One 'PARAMETER <key> "<value>"' line per value; jq terminates
        # every emitted line with a newline, so later appends start on a
        # fresh line (the old printf "%s" form dropped the final newline).
        jq -r 'keys[] as $key | .[$key][] | "PARAMETER \($key) \"\(.)\"" ' "$BLOB_FILE_PATH" >>"$EXPORT_MODEL_FILE_PATH"
        ;;
    *)
        # template / system / license: plain text wrapped in triple quotes.
        TYPE_NAME="$(echo "$BLOB_TYPE_NAME" | tr '[:lower:]' '[:upper:]')"
        FILE_CONTENT="$(cat "$BLOB_FILE_PATH")"
        printf "%s\n" "$TYPE_NAME \"\"\"$FILE_CONTENT\"\"\"" >>"$EXPORT_MODEL_FILE_PATH"
        ;;
    esac
done < <(jq -r '.layers[] | "\(.mediaType) \(.digest)"' "${MANIFESTS_FILE_PATH}")

# Echo success message
echo -e "$SUCCESS_PREFIX Model \"$MODEL_FULL_NAME\" has been exported to \"$EXPORT_DST_BASE_PATH\"!"
Problem in macos:
OK! I fixed some bugs and made the script support bash on macOS.
Thanks for using it and for reporting! 😄 Now
"-"
is default.Simple fix is
FETCH_BLOB_FILE_NAME() { echo "${1/:/-}" }
I fixed it
Oh, OK. Because I was using 0.1.29,
I didn't notice that it had already been upgraded to 0.1.42
by now.
Wow! Time flies so fast 😎
Upgraded! Thanks for reporting. 😄
@supersonictw thank you.
For Ollama 0.3.4 OLLAMA_HOME must be set to "/usr/share/ollama/.ollama"
"/usr/share/ollama/.ollama"
OK but I need to check it both on Linux and macOS.
Please specify the OLLAMA_HOME
variable on your own first, thanks.
I'm a little busy recently... Thanks for your reporting! 😃
A little problem with this script: it can't correctly handle models using the Apache 2.0 license (or any other license whose text contains %
), because the %
character in the license text is not escaped when passing to the printf
command.
solved this by escaping the license text first
LICENSE=$(cat $BLOB_FILE_PATH)
ESCAPED_LICENSE=$(echo "$LICENSE" | sed 's/%/%%/g')
printf "$TYPE_NAME \"\"\"$ESCAPED_LICENSE\"\"\"\n" >>"$EXPORT_MODEL_FILE_PATH"
;;
Running ./ollama-export qwen2:0.5 produces:
./ollama-export.sh: line 86: printf: `)': invalid format character
but the model was already exported.
try this
@AndersonALAC @cnopens
Oops! 😟
Thanks for your report — it made me realize this might be a security concern due to a non-escaped printf
format string.
The new patch already applied! Thanks for reporting again! 🥇
@sokovnich
The patch already applied!
Linux can recognize their OLLAMA_HOME
automatically. 🐧
@supersonictw thank you
I got a warning while exporting the llava:34b model:
/ollama-export.sh: line 103: warning: command substitution: ignored null byte in input
i inspected the llava model manifest file, and the warning is maybe caused by the mediaType: projector:
{
"mediaType": "application/vnd.ollama.image.projector",
"digest": "sha256:83720bd8438ccdc910deba5efbdc3340820b29258d94a7a60d1addc9a1b5f095",
"size": 699956416
}
there is no blob file corresponding to the sha256 digest under ollama models/blob folder
Hi, I can use this script to export the model that I have already downloaded using for example "ollama pull" right?
This exported model I can then use to fine tune using LoRA?
I understood that I cannot fine tune using ollama and hence have to export the model, fine tune outside, then import it back to Ollama to use it with ollama inference?
(I am totally new to AI models)
Hello! @vasanthnagkv
This exported model I can then use to fine tune using LoRA?
Yes if the model is llama model based. It's related to ggerganov/llama.cpp#6680
import it back to Ollama to use it with ollama inference
The script can be worked with ollama create
for importing, it's compatible.
https://github.com/ollama/ollama/blob/main/docs/import.md
I'm using the Docker version of Ollama, and I found that running your script has the following error. After analyzing the manifest, I found that the digest is separated by a colon : between the sha and the subsequent string sha:xxx, while in the blobs, it's sha-xxx, which causes the following error:
root@9587469fdad7:~/ollama_tool# bash ollama_export.sh qwen:1.8b
Ollama Model Export Script
License: MIT (https://ncurl.xyz/s/RD0Yl5fSg)
Exporting model "qwen:1.8b"...
cp: cannot stat '/root/.ollama/models/blobs/sha256:1296b084ed6bc4c6eaee99255d73e9c715d38e0087b6467fd1c498b908180614': No such file or directory
I modified it and now it works normally: