clean bashrc
.gitignore (vendored): 3 changes
@@ -1,2 +1,3 @@
.ipynb_checkpoints
__pycache__
cpu-bench/*.png
@@ -1,43 +1,55 @@
#!/usr/bin/env python

def ensure(*packages):
    for pkg in packages:
        try: __import__(pkg)
        except ImportError: import os; os.system(f'pip install -q {pkg}')
ensure('pandas', 'tabulate')

import subprocess, multiprocessing as mp, warnings, pandas as pd
from tqdm.auto import tqdm
from tabulate import tabulate
warnings.filterwarnings("ignore")
R, G, B, W = (f'\x1b[{x}m' for x in (31, 32, 34, 0))  # ANSI red, green, blue, reset
subprocess.run('clear')
subprocess.run('neofetch')

command = lambda core: f'sysbench cpu --cpu-max-prime=20000 --time=1 --threads={core} run | grep second'

print(f'{G}$ {command("$(nproc)")}{W}', flush=True)


def speed(core):
    output = subprocess.run(
        command(core), shell=True, capture_output=True, text=True
    ).stdout.split()[-1]
    return float(output)

df = []
for core in range(1, mp.cpu_count()+1):
    s = speed(core)
    row = {
        '#Threads': core,
        'Throughput(/s)': s,
        '(per-core)': s/core
    }
    df.append(row)
df = pd.DataFrame(df)
df.to_csv('result.csv', index=False)

df.iloc[:, 0] = df.iloc[:, 0].apply(lambda s: f'{R}{s}{W}')
df.iloc[:, 1] = df.iloc[:, 1].apply(lambda s: f'{G}{int(s)}{W}')
df.iloc[:, 2] = df.iloc[:, 2].apply(lambda s: f'{B}{int(s)}{W}')
print(tabulate(df, headers='keys', tablefmt='rounded_outline', showindex=False))

FROM python:slim

RUN apt update && apt install -y sysbench fastfetch

RUN pip install tqdm pandas tabulate

# quote the delimiter so $(nproc) in the script survives to runtime instead of expanding at build time
RUN tee bench.py <<'EOF'
#!/usr/bin/env python

def ensure(*packages):
    for pkg in packages:
        try: __import__(pkg)
        except ImportError: import os; os.system(f'pip install -q {pkg}')
ensure('pandas', 'tabulate')

import subprocess, multiprocessing as mp, warnings, pandas as pd
from tqdm.auto import tqdm
from tabulate import tabulate
warnings.filterwarnings("ignore")
R, G, B, W = (f'\x1b[{x}m' for x in (31, 32, 34, 0))  # ANSI red, green, blue, reset
subprocess.run('clear')
subprocess.run('fastfetch')

command = lambda core: f'sysbench cpu --cpu-max-prime=20000 --time=1 --threads={core} run | grep second'

print(f'{G}$ {command("$(nproc)")}{W}', flush=True)


def speed(core):
    output = subprocess.run(
        command(core), shell=True, capture_output=True, text=True
    ).stdout.split()[-1]
    return float(output)

df = []
prev = 0
for core in range(1, mp.cpu_count()+1):
    s = speed(core)
    row = {
        '#Threads': core,
        'Throughput(/s)': s,
        '(per-core)': s - prev  # marginal gain from the extra thread
    }
    prev = s
    df.append(row)
df = pd.DataFrame(df)
df.to_csv('result.csv', index=False)

df.iloc[:, 0] = df.iloc[:, 0].apply(lambda s: f'{R}{s}{W}')
df.iloc[:, 1] = df.iloc[:, 1].apply(lambda s: f'{G}{int(s)}{W}')
df.iloc[:, 2] = df.iloc[:, 2].apply(lambda s: f'{B}{int(s)}{W}')
print(tabulate(df, headers='keys', tablefmt='rounded_outline', showindex=False))
EOF

ENTRYPOINT ["python", "bench.py"]
cpu-bench/Makefile (new file): 4 additions
@@ -0,0 +1,4 @@
all: build
	docker run yauk.tv/bench
build:
	sudo docker build . -t yauk.tv/bench
@@ -1,78 +1,18 @@
alias x=pnpx
export CLOUDFLARE_API_TOKEN=Pah8Q6hBUsQsKnKhaMJB2QvL-LHAWCcU8xti66Q3
dive(){ docker exec -it "$@" sh; }
alias reboot='sudo reboot'
llama-build(){
    cmake -B build -DGGML_CUDA=ON
    cmake --build build --config Release
}
alias nmap="nmap -p1-65535"
alias restart="docker restart"

export build="docker build ."
export DISPLAY=:0
export CLOUDFLARE_API_TOKEN=Pah8Q6hBUsQsKnKhaMJB2QvL-LHAWCcU8xti66Q3
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./build/bin
export PATH="$HOME/wsl:$HOME/go/bin:/usr/local/go/bin:$HOME/.zig:/Users/w/hub/wireguard-go/:/Users/w/.venv/bin:/usr/local/bin:/home/w/.cache/ms-playwright/chromium-1129/chrome-linux:$PATH"
dive(){ docker exec -it "$@" bash; }
alias dev="bun run dev --host"
alias preview="bun run build && bun run preview --host"
export PYTHONPATH="/home/w/wsl"
export HF_TOKEN=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
alias restart='docker restart'
export HOMEBREW_NO_ENV_HINTS=true
# alias caddy="docker exec -w /etc/caddy caddy caddy fmt --overwrite; docker exec -w /etc/caddy caddy caddy reload"
# alias caddy-passwd="docker exec -w /etc/caddy caddy caddy hash-password -p"
alias up="docker compose down; docker compose up -d --remove-orphans; docker compose logs -f"
alias vite="npm run dev -- --host 0.0.0.0"
alias cls="clear"
alias nginx="docker exec -it nginx bash"
export GGML_CUDA=1
export LLAMA_CURL=1
export LLAMA_ARG_N_GPU_LAYERS=99
export LLAMA_ARG_FLASH_ATTN=1
export LLAMA_ARG_HOST="0.0.0.0"
# export LLAMA_ARG_HF_REPO="bartowski/Phi-3-medium-128k-instruct-GGUF"
# export LLAMA_ARG_HF_FILE="Phi-3-medium-128k-instruct-Q4_K_S.gguf"
# export LLAMA_ARG_CTX_SIZE=65536
export HOMEBREW_NO_AUTO_UPDATE=1
export GOPATH=$HOME/go

# export LANG=C.UTF-8
# export LC_ALL=C.UTF-8


alias rerun="docker compose down && docker compose up -d --build && docker compose logs -f"
# export OLLAMA_HOST=host.docker.internal:11434

alias rp=runpodctl
export PATH="/home/w/hub/llama.cpp/build/bin:/usr/local/bin:/home/w/.cache/ms-playwright/chromium-1129/chrome-linux:$PATH"
export COMPOSE_STOP_GRACE_PERIOD=0
alias prune="docker system prune -f"
alias rc="vi ~/.bash_aliases && tail ~/.bash_aliases && source ~/.bash_aliases"
# alias up="down && docker compose up --remove-orphans"
alias down="docker compose down"
pkill() { command pkill -f "$@" ; ps ux ; }  # 'command' avoids recursing into this function
inspect() { docker image inspect "$@" | jq ; }
alias d="docker"
alias c="docker compose"
alias up="docker compose down && docker compose up -d --remove-orphans && docker compose logs -f"
alias prune="docker system prune -f"
alias i="sudo apt-get install -y"
alias run="docker build . -t temp && docker run --network host --rm temp"
alias debian="docker run -it --rm --gpus all pytorch/pytorch:2.4.0-cuda12.4-cudnn9-devel bash"
check() { curl -x "localhost:$1" -k https://httpbin.org/ip; }
redis-lastsave() { redis-cli "$@" --raw lastsave | xargs -I{} date -d @{} "+%Y-%m-%d %H:%M:%S"; }
redis-dump() { sudo rsync -a /var/lib/redis/dump.rdb ~/.backup/dump.rdb-$(date +%Y%m%d); }
alias systemctl="sudo systemctl"

# cd() { clear; echo "$(pwd)/$*"; echo; command cd "$@" && ls -lh --color=auto; echo; }
alias ip="ip -4"
alias py="python"
alias p="python"

alias my="sudo chown -R $(id -u):$(id -g)"
alias l="clear; pwd; echo; command ls -lh --color=auto"
alias ls="clear; pwd; echo; command ls -alh --color=auto"
export PATH=$PATH:$HOME/minio-binaries/
alias make="make -j"
alias cls="clear"
alias rc="vi ~/.bash_aliases && tail ~/.bash_aliases && source ~/.bash_aliases"
alias l='clear; echo -e "\x1b[92m$(pwd)\x1b[0m"; command ls -lh --color=auto'
alias ls='clear; echo -e "\x1b[92m$(pwd)\x1b[0m"; command ls -alh --color=auto'
alias ns="watch -n 0.1 nvidia-smi"
alias ping="ping -c 2"
alias rsync="rsync -avPh"
log() {
    if [ $# -eq 0 ]; then
        docker compose logs -f
@@ -80,53 +20,38 @@ log() {
        docker logs -f "$@"
    fi
}

alias download="huggingface-cli download"
alias journal="sudo journalctl -u"

export LIBGL_ALWAYS_INDIRECT=1

export LD_LIBRARY_PATH="/usr/local/cuda/lib64:/usr/local/lib:$LD_LIBRARY_PATH"
export PATH="/home/w/.venv/bin:/home/w/hub/llama.cpp:/usr/local/cuda/bin:$PATH"
alias rsync="rsync -avPh"
alias dpkg="sudo dpkg"
alias wg="sudo wg"
alias systemctl="sudo systemctl"
alias service="sudo service"
alias apt="sudo apt-get"

alias chwon="sudo chown"
alias bsah="bash" # common typing error
alias pyhton="python" # common typing error
alias soruce="source"
alias stauts="status"
alias stuats="status"
alias reboot='sudo reboot'

dash() { sudo docker run -it --rm --gpus all --entrypoint=bash "$1"; }
export HOMEBREW_NO_AUTO_UPDATE=1
export GOPATH=$HOME/go
export PATH="/Users/w/.venv/bin:$PATH:$HOME/go/bin"
export PATH="/home/w/hub/llama.cpp/build/bin:$PATH"
export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:./build/bin
export HF_TOKEN=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
export CLOUDFLARE_API_TOKEN=Pah8Q6hBUsQsKnKhaMJB2QvL-LHAWCcU8xti66Q3
dive(){ docker exec -it "$@" sh; }

alpine() {
    docker run -it --rm alpine
}

dsh() { sudo docker run -it --rm "$1" sh; }
alias nuc="ssh w@192.168.12.5"
# alias mac="ssh jaewooklee@192.168.12.45"
alias mac="ssh jaewooklee@10.0.0.2"
alias mac="ssh w@192.168.12.2"
alias ip="ip -4"
alias ping="ping -c 2"


# alias less="less -SEX"

docker() {
    if [[ "$1" == "ps" ]]; then
        command docker ps | less -SEX
    elif [[ "$1" == "images" ]]; then
        command docker images | less -SEX
    elif [[ "$1" == "rm" ]]; then
        command docker "$@" -f
    else
        command docker "$@"
    fi
}
# docker() {
#     if [[ "$1" == "ps" ]]; then
#         command docker ps | less -SEX
#     elif [[ "$1" == "images" ]]; then
#         command docker images | less -SEX
#     elif [[ "$1" == "rm" ]]; then
#         command docker "$@" -f
#     else
#         command docker "$@"
#     fi
# }

alias ..="cd .."   # go up one directory
alias ...="cd ../.."   # go up two directories
@@ -217,23 +142,3 @@ alias sudo="sudo "
alias dryblack="clear; black --check --diff --color ."
alias compose="docker compose"
alias compsoe="compose"

alias ps="ps ux"
clone() { git clone "https://git.yauk.tv/$@"; }

ign() {
    for item in "$@"; do
        if [ -d "$item" ]; then
            echo "$item/" >> .gitignore
            echo "Added directory: $item/ to .gitignore"
        elif [ -f "$item" ]; then
            echo "$item" >> .gitignore
            echo "Added file: $item to .gitignore"
        else
            echo "Warning: $item does not exist"
        fi
    done
}

alias r="redis-cli"
q() { "$@" > /dev/null 2>&1 & }
zmq-sqlite/README.md (new file): 33 additions
@@ -0,0 +1,33 @@
# Server
```python
def SQL():
    # SQLite maps filenames to SHA-1 digests; the blob bytes live on disk under .db-blob/<sha1>
    import sqlite3, hashlib, os; con = sqlite3.connect('.db', isolation_level=None)
    sql = lambda q, *p: list(con.execute(q, p))
    if not os.path.exists('.db-blob') and os.mkdir('.db-blob') is None:  # first run: create schema
        con.executescript('''PRAGMA journal_mode=WAL;
            CREATE TABLE kv(k, v, t DEFAULT CURRENT_TIMESTAMP);
            CREATE INDEX idx_kv_v ON kv(v); CREATE INDEX idx_kv_k_t ON kv(k,t DESC)''')
    def setitem(_, filename, blob):
        if not sql('SELECT 1 FROM kv WHERE v=?', sha1 := hashlib.sha1(blob).hexdigest()):
            with open(f'.db-blob/{sha1}', 'xb') as f: f.write(blob)  # unseen content: write blob once
        sql('INSERT INTO kv(k,v) VALUES(?,?)', filename, sha1)
    def getitem(_, filename):
        if sha1 := sql('SELECT v FROM kv WHERE k=? ORDER BY t DESC', filename):
            return open(f'.db-blob/{sha1[0][0]}', 'rb').read()  # newest version wins
    return type('', (), dict(__setitem__=setitem, __getitem__=getitem))()
sql = SQL()

import zmq; (socket := zmq.Context().socket(zmq.REP)).bind('tcp://*:5555')
while True:
    filename, blob = socket.recv_pyobj()
    sql[filename] = blob
    socket.send_string('OK')
```
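
The store is content-addressed: a blob is written to disk once, keyed by its SHA-1 digest, while the `kv` table maps filenames to digests, so re-uploading identical bytes only adds a row. A minimal sketch of the effect, reusing the `SQL()` accessor above (the filenames are hypothetical):

```python
store = SQL()               # same dict-like accessor the server builds
store['a.txt'] = b'hello'   # writes .db-blob/<sha1> plus one kv row
store['b.txt'] = b'hello'   # same digest: no second blob file, just another kv row
assert store['a.txt'] == store['b.txt'] == b'hello'
```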

# Client
```python
def put(filename, blob, addr='tcp://localhost:5555'):
    import zmq; (socket := zmq.Context().socket(zmq.REQ)).connect(addr)
    assert socket.send_pyobj((filename, blob)) or socket.recv_string() == 'OK'
put('hello', b'world')
```
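
Note the REP loop above only implements writes; there is no network read path yet. On the server side, though, the same accessor reads blobs back, since its `__getitem__` resolves a filename to its most recent digest:

```python
sql['hello']   # returns b'world' once the put() above has been processed
```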