#!/bin/bash
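# llama.sh: interactive wrapper around a local llama.cpp build.
# Selects the chat template, turn prefix/suffix, and system prompt for a
# given model, then starts an interactive session.
#
# Example invocation (values are illustrative, taken from the defaults below):
#   llama.sh teknium/OpenHermes-2.5-Mistral-7B/openhermes-2.5-mistral-7b-f16.gguf chatml 4096 0.1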

if [[ $# -lt 4 ]]; then
  echo "Usage: llama.sh <model> <template> <context-size> <temperature>"
  exit 1
fi
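
# Seed a default config on first run. Only the sampling values
# (LLAMA_TOP_P, LLAMA_TOP_K, LLAMA_REPETITION_PENALTY) are read back by
# this script; the remaining entries record the preferred defaults.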
if [ ! -f "$HOME/.config/llama/llama-main.conf" ]; then
  mkdir -p "$HOME/.config/llama"
  cat <<EOF > "$HOME/.config/llama/llama-main.conf"
LLAMA_TEMPERATURE=0.1
LLAMA_CONTEXT_SIZE=4096
LLAMA_REPETITION_PENALTY=1.15
LLAMA_TOP_P=0.9
LLAMA_TOP_K=20
LLAMA_TEMPLATE=chatml
LLAMA_MODEL_NAME=teknium/OpenHermes-2.5-Mistral-7B/openhermes-2.5-mistral-7b-f16.gguf
EOF
fi

source "$HOME/.config/llama/llama-main.conf"
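
# Launch an interactive llama.cpp session.
#   $1  model path relative to ~/.ai/models/llama
#   $2  template name (chatml, alpaca, vicuna, ...)
#   $3  context size in tokens
#   $4  sampling temperature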
function llama_interactive {
  MODEL_NAME=$1
  TEMPLATE=$2
  CONTEXT_SIZE=$3
  TEMPERATURE=$4

  CMD=$HOME/.ai/bin/llama

  # --escape lets llama.cpp expand escape sequences (e.g. the literal \n
  # in the chatml prefix/suffix) into real characters.
  "$CMD" \
    --n-gpu-layers 1 \
    --model "$(model_path "$MODEL_NAME")" \
    --prompt-cache "$(cache_path "$MODEL_NAME")" \
    --file "$(get_model_prompt "$MODEL_NAME")" \
    --in-prefix "$(get_model_prefix "$TEMPLATE")" \
    --in-suffix "$(get_model_suffix "$TEMPLATE")" \
    --reverse-prompt "$(get_model_prefix "$TEMPLATE")" \
    --reverse-prompt "<|im_end|>" \
    --threads 7 \
    --temp "$TEMPERATURE" \
    --top-p "$LLAMA_TOP_P" \
    --top-k "$LLAMA_TOP_K" \
    --repeat-penalty "$LLAMA_REPETITION_PENALTY" \
    --ctx-size "$CONTEXT_SIZE" \
    --batch-size 1024 \
    --n-predict -1 \
    --keep -1 \
    --instruct \
    --no-mmap \
    --color \
    --escape
}
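
# Resolve a model name to its path under ~/.ai/models/llama.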
function model_path {
  MODEL_NAME=$1
  echo "$HOME/.ai/models/llama/${MODEL_NAME}"
}
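
# Per-model prompt-cache path; slashes in the model name become
# underscores so the cache directory stays flat.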
function cache_path {
  MODEL_NAME=$1
  echo "$HOME/.ai/cache/menu-${MODEL_NAME//\//_}.cache"
}
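
# Prefix inserted before each user turn, selected by substring match on
# the template name; chatml must match exactly.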
function get_model_prefix {
  TEMPLATE_NAME=$1

  case $TEMPLATE_NAME in
    *guanaco*)   printf "### Human: " ;;
    *alpaca*|*upstage*|*airoboros*|*hermes*)
                 printf "### Instruction: " ;;
    *vicuna*)    printf "USER: " ;;
    *based*)     printf "Human: " ;;
    *wizardlm*)  printf "USER: " ;;
    *orca*)      printf "### User: " ;;
    *samantha*)  printf "USER: " ;;
    # Emit a literal \n; llama.cpp turns it into a newline via --escape.
    chatml)      printf '%s' '<|im_start|>user\n' ;;
    *)           printf "Input: " ;;
  esac
}
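
# Matching suffix that cues the assistant's reply.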
function get_model_suffix {
  TEMPLATE_NAME=$1

  case $TEMPLATE_NAME in
    *guanaco*)            printf "### Assistant: " ;;
    *alpaca*|*airoboros*|*upstage*|*hermes*)
                          printf "### Response: " ;;
    *vicuna*|*samantha*)  printf "ASSISTANT: " ;;
    *based*)              printf "Assistant: " ;;
    *wizardlm*)           printf "ASSISTANT: " ;;
    *orca*)               printf "### Response: " ;;
    # Literal \n sequences, expanded by llama.cpp because of --escape.
    chatml)               printf '%s' '<|im_end|>\n<|im_start|>assistant\n' ;;
    *)                    printf "Output: " ;;
  esac
}
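
# System prompt file for a model; unrecognized models fall back to a
# generic prompt.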
function get_model_prompt {
  MODEL_NAME=$1

  case $MODEL_NAME in
    *guanaco*)                 echo "$HOME/.local/share/ai/prompts/guanaco.txt" ;;
    *samantha*)                echo "$HOME/.local/share/ai/prompts/samantha.txt" ;;
    *openhermes-2-mistral-7b*) echo "$HOME/.local/share/ai/prompts/hermes-mistral.txt" ;;
    *alpaca*|*upstage*|*airoboros*|*hermes*)
                               echo "$HOME/.local/share/ai/prompts/alpaca.txt" ;;
    *vicuna*)                  echo "$HOME/.local/share/ai/prompts/vicuna-v11.txt" ;;
    *based*)                   echo "$HOME/.local/share/ai/prompts/based.txt" ;;
    *wizardlm*)                echo "$HOME/.local/share/ai/prompts/wizardlm-30b.txt" ;;
    *orca*)                    echo "$HOME/.local/share/ai/prompts/orca.txt" ;;
    *)                         echo "$HOME/.local/share/ai/prompts/idm-gpt-lite.txt" ;;
  esac
}
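
# Entry point.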
llama_interactive "$1" "$2" "$3" "$4"