iandennismiller's picture
update to installed paths
17a696b
raw
history blame
5.17 kB
#!/bin/bash
# On first run, write a default config so llama_interactive always has
# values to source. An existing config is never overwritten.
LLAMA_CONF_DIR="$HOME/.config/llama"
LLAMA_CONF_FILE="$LLAMA_CONF_DIR/llama-main.conf"
if [ ! -f "$LLAMA_CONF_FILE" ]; then
  mkdir -p "$LLAMA_CONF_DIR"
  # Quoted delimiter: the settings are written literally, with no shell
  # expansion inside the heredoc.
  cat <<'EOF' > "$LLAMA_CONF_FILE"
LLAMA_TEMPERATURE=0.1
LLAMA_CONTEXT_SIZE=4096
LLAMA_REPETITION_PENALTY=1.15
LLAMA_TOP_P=0.9
LLAMA_TOP_K=20
LLAMA_TEMPLATE=chatml
LLAMA_MODEL_NAME=teknium/OpenHermes-2.5-Mistral-7B/openhermes-2.5-mistral-7b-f16.gguf
EOF
fi
# Launch an interactive llama.cpp chat session.
# Defaults come from ~/.config/llama/llama-main.conf; an optional
# 4-argument call overrides them, in order:
#   $1 - model name (path fragment under ~/.ai/models/llama)
#   $2 - template name (selects prefix/suffix strings)
#   $3 - context size
#   $4 - temperature
# Relies on sibling helpers: model_path, cache_path, get_model_prompt,
# get_model_prefix, get_model_suffix.
function llama_interactive {
  # shellcheck source=/dev/null
  source "$HOME/.config/llama/llama-main.conf"

  # All four overrides must be supplied together; otherwise config wins.
  if [[ $# -eq 4 ]]; then
    LLAMA_MODEL_NAME=$1
    LLAMA_TEMPLATE=$2
    LLAMA_CONTEXT_SIZE=$3
    LLAMA_TEMPERATURE=$4
  fi

  # Quote every expansion: model names contain '/' and may contain spaces.
  llama \
    --n-gpu-layers 1 \
    --model "$(model_path "$LLAMA_MODEL_NAME")" \
    --prompt-cache "$(cache_path "$LLAMA_MODEL_NAME")" \
    --file "$(get_model_prompt "$LLAMA_MODEL_NAME")" \
    --in-prefix "$(get_model_prefix "$LLAMA_TEMPLATE")" \
    --in-suffix "$(get_model_suffix "$LLAMA_TEMPLATE")" \
    --reverse-prompt "$(get_model_prefix "$LLAMA_TEMPLATE")" \
    --reverse-prompt "<|im_end|>" \
    --threads "6" \
    --temp "$LLAMA_TEMPERATURE" \
    --top-p "$LLAMA_TOP_P" \
    --top-k "$LLAMA_TOP_K" \
    --repeat-penalty "$LLAMA_REPETITION_PENALTY" \
    --ctx-size "$LLAMA_CONTEXT_SIZE" \
    --batch-size 1024 \
    --n-predict -1 \
    --keep -1 \
    --instruct \
    --no-mmap \
    --color \
    --escape
}
# Resolve a model name ($1) to its absolute path under ~/.ai/models/llama.
function model_path {
  MODEL_NAME=$1
  printf '%s\n' "$HOME/.ai/models/llama/$MODEL_NAME"
}
# Derive the prompt-cache file path for model name $1.
function cache_path {
  MODEL_NAME=$1
  # Flatten every '/' to '_' so the cache name is a single path component.
  printf '%s\n' "$HOME/.ai/cache/menu-${MODEL_NAME//\//_}.cache"
}
# Print the user-turn prefix for template name $1 (substring match,
# first pattern wins; no trailing newline is emitted).
function get_model_prefix {
  TEMPLATE_NAME=$1
  case "$TEMPLATE_NAME" in
    *guanaco*)   printf "### Human: " ;;
    *alpaca*)    printf "### Instruction: " ;;
    *upstage*)   printf "### Instruction: " ;;
    *airoboros*) printf "### Instruction: " ;;
    *hermes*)    printf "### Instruction:" ;;
    *vicuna*)    printf "USER: " ;;
    *based*)     printf "Human: " ;;
    *wizardlm*)  printf "USER: " ;;
    *orca*)      printf "### User: " ;;
    chatml)      printf "<|im_start|>user\\\n" ;;
    *)           printf "Input: " ;;
  esac
}
# USER: hello, who are you? ASSISTANT:
# Print the assistant-turn suffix for template name $1 (substring match,
# first pattern wins; no trailing newline is emitted).
function get_model_suffix {
  TEMPLATE_NAME=$1
  case "$TEMPLATE_NAME" in
    *guanaco*)   printf "### Assistant: " ;;
    *alpaca*)    printf "### Response: " ;;
    *airoboros*) printf "### Response: " ;;
    *upstage*)   printf "### Response: " ;;
    *hermes*)    printf "### Response: " ;;
    *vicuna*)    printf "ASSISTANT: " ;;
    *samantha*)  printf "ASSISTANT: " ;;
    *based*)     printf "Assistant: " ;;
    *wizardlm*)  printf "ASSISTANT: " ;;
    *orca*)      printf "### Response: " ;;
    chatml)      printf "<|im_end|>\n<|im_start|>assistant\\\n" ;;
    *)           printf "Output: " ;;
  esac
}
# Map model name $1 to its system-prompt file under
# ~/.local/share/llama/prompts (substring match, first pattern wins).
function get_model_prompt {
  MODEL_NAME=$1
  local prompt_dir="$HOME/.local/share/llama/prompts"
  case "$MODEL_NAME" in
    *guanaco*)                 echo "$prompt_dir/guanaco.txt" ;;
    *samantha*)                echo "$prompt_dir/samantha.txt" ;;
    *openhermes-2-mistral-7b*) echo "$prompt_dir/hermes-mistral.txt" ;;
    # These four templates all share the alpaca prompt.
    *alpaca* | *upstage* | *airoboros* | *hermes*)
                               echo "$prompt_dir/alpaca.txt" ;;
    *vicuna*)                  echo "$prompt_dir/vicuna-v11.txt" ;;
    *based*)                   echo "$prompt_dir/based.txt" ;;
    *wizardlm*)                echo "$prompt_dir/wizardlm-30b.txt" ;;
    *orca*)                    echo "$prompt_dir/orca.txt" ;;
    *)                         echo "$prompt_dir/idm-gpt-lite.txt" ;;
  esac
}
# Entry point: forward all CLI arguments verbatim. "$@" keeps each argument
# as one word; the original $* would word-split and glob-expand them.
llama_interactive "$@"