bashrc: add llama.cpp env
@@ -1,5 +1,16 @@
export PATH="/usr/local/bin:/home/w/.cache/ms-playwright/chromium-1129/chrome-linux:$PATH"

export GGML_CUDA=1
export LLAMA_CURL=1
export LLAMA_ARG_N_GPU_LAYERS=99
export LLAMA_ARG_FLASH_ATTN=1
export LLAMA_ARG_HOST="0.0.0.0"
export LLAMA_ARG_HF_REPO="bartowski/Phi-3-medium-128k-instruct-GGUF"
export LLAMA_ARG_HF_FILE="Phi-3-medium-128k-instruct-Q4_K_S.gguf"
export LLAMA_ARG_CTX_SIZE=65536

alias rp=runpodctl
export PATH="/usr/local/bin:/home/w/.cache/ms-playwright/chromium-1129/chrome-linux:$PATH"
export COMPOSE_STOP_GRACE_PERIOD=0
export PATH=/usr/bin:$PATH
alias prune="docker system prune -f"
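The LLAMA_ARG_* variables above are read by llama.cpp as environment defaults for the matching llama-server flags, so once this .bashrc is sourced the server can be started with no arguments. A minimal sketch, assuming llama-server is on PATH (a later export in this file adds /home/w/hub/llama.cpp) and listens on its default port 8080:

llama-server &                          # binds 0.0.0.0, fetches the Q4_K_S GGUF from the HF repo above,
                                        # offloads 99 layers and uses a 65536-token context
curl -s http://localhost:8080/health    # assumption: default port 8080; reports readiness once the model is loaded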
@@ -39,9 +50,7 @@ log() {
alias download="huggingface-cli download"
alias journal="sudo journalctl -u"

export GGML_CUDA=1
export LLAMA_CURL=1
# export LIBGL_ALWAYS_INDIRECT=1
export LIBGL_ALWAYS_INDIRECT=1

export LD_LIBRARY_PATH="/usr/local/cuda/lib64:/usr/local/lib:$LD_LIBRARY_PATH"
export PATH="/home/w/.venv/bin:/home/w/hub/llama.cpp:/usr/local/cuda/bin:$PATH"
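Unlike the LLAMA_ARG_* settings, GGML_CUDA and LLAMA_CURL are build-time switches; keeping them exported plausibly lets a llama.cpp rebuild pick them up without retyping. A sketch under that assumption, using the Makefile build (the repo path comes from the PATH export above; the cmake option names are the documented equivalents, not taken from this commit):

cd /home/w/hub/llama.cpp
make -j"$(nproc)"    # GNU make imports exported variables, so GGML_CUDA=1 and LLAMA_CURL=1 take effect here
# cmake equivalent:
# cmake -B build -DGGML_CUDA=ON -DLLAMA_CURL=ON && cmake --build build -j"$(nproc)"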
@@ -70,17 +79,6 @@ alias mac="ssh jaewooklee@192.168.12.45"
alias ip="ip -4"
alias ping="ping -c 2"

diff() {
    if [[ $# -eq 0 ]]; then
        clear
        git status
        echo
        echo
        git --no-pager diff
    else
        diff -qr "$@"
    fi
}

alias less="less -SEX"
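One note on the removed diff() wrapper: inside a function named diff, the bare `diff -qr "$@"` in the else branch resolves to the function itself (bash looks up functions before PATH), so calling it with arguments would recurse instead of running the external tool. A corrected sketch of the function, not part of the commit:

diff() {
    if [[ $# -eq 0 ]]; then
        clear
        git status
        echo
        echo
        git --no-pager diff
    else
        command diff -qr "$@"    # 'command' bypasses the shell function and runs the external diff
    fi
}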