2025-03-01 14:52:43 +00:00
2025-02-01 11:07:10 +00:00
2024-11-22 11:55:01 -05:00
2024-10-04 01:55:27 -04:00
2024-09-22 04:17:12 -04:00
2024-07-15 19:10:26 +09:00
2025-01-27 02:16:16 +00:00
2024-07-20 02:48:50 +09:00
2025-01-22 10:36:17 +00:00
2025-02-05 03:26:55 +00:00
2024-08-06 20:07:06 +09:00
2025-01-31 03:06:48 +00:00
2024-07-20 22:22:25 +09:00
2024-08-30 13:04:43 +09:00
2024-09-01 14:48:03 +09:00
2024-11-30 01:19:39 -05:00
2024-07-02 12:48:55 +09:00
2024-10-30 03:31:28 -04:00
2024-07-16 22:07:14 +09:00
2024-07-16 20:33:47 +09:00
2024-12-27 07:03:30 +00:00
2025-01-18 21:37:28 +00:00
2024-10-12 19:37:52 -04:00
2024-08-05 07:27:48 +09:00
2025-01-24 21:00:50 +09:00
2024-08-25 13:48:09 -04:00
2024-06-30 02:46:26 +09:00
2025-01-27 01:27:33 +00:00
2025-02-04 01:20:46 +00:00
2024-08-23 05:05:45 -04:00
2024-08-18 13:53:58 +09:00
2025-02-24 10:19:38 +00:00
2025-02-16 04:21:55 +00:00
2024-08-27 01:36:58 -04:00
2024-08-21 08:01:59 -04:00
2025-03-01 04:44:52 +00:00
2025-02-28 18:31:32 +00:00
2024-08-23 12:04:30 -04:00
2024-07-01 17:27:36 +09:00
2024-08-21 02:44:47 -04:00
2024-09-25 23:48:58 -04:00
2024-10-13 00:17:56 -04:00
2025-01-21 17:23:13 +00:00
2024-08-26 06:37:03 +09:00
2025-02-22 10:46:48 +00:00
2025-01-23 21:21:18 +00:00
2025-01-24 13:23:22 +00:00
2024-11-30 01:19:39 -05:00
2024-10-22 22:03:30 -04:00
2024-08-30 13:04:43 +09:00
2024-09-04 12:46:48 +00:00
2024-11-27 00:07:48 +09:00
2025-02-22 19:00:01 +00:00

sqlite3 [25-02-23]

from tqdm.auto import tqdm, trange; import json
def Soup(html):
    """Parse *html* into a BeautifulSoup tree using the lxml parser."""
    return __import__('bs4').BeautifulSoup(html, 'lxml')

# One-letter ANSI bright-color wrappers: R(ed), G(reen), Y(ellow), B(lue).
# The `i=k` default freezes each color index at definition time.
R, G, Y, B = [(lambda s, i=k: f'\x1b[{91+i}m{s}\x1b[0m') for k in range(4)]
def SQL(db='.db'):
    """Open (creating on first use) a tiny SQLite-backed key/value store.

    Returns an object whose dunder methods map onto SQL:
      sql(q, *p)      -> run raw SQL, return all rows
      sql[k] = v      -> insert (bytes stored raw, everything else JSON)
      sql[k]          -> newest value for k (JSON-decoded), or None
      sql == pat      -> rows whose key LIKEs pat;  sql != pat -> NOT LIKE
      del sql[rows]   -> delete exact (k, v, t) rows
      k in sql / len(sql) / iter(sql) behave like a dict over keys.
    """
    import sqlite3
    import os
    import json

    fresh = not os.path.exists(db)
    if fresh:
        os.makedirs(db)
    con = sqlite3.connect(f'{db}/sql', isolation_level=None)
    con.execute("PRAGMA busy_timeout="f'{1e9}')
    if fresh:
        # First run only: enable WAL and create the schema + indexes.
        con.executescript(
            "PRAGMA journal_mode=WAL; CREATE TABLE kv(k,v,t DEFAULT CURRENT_TIMESTAMP);"
            "CREATE INDEX idx_kv_v ON kv(v); CREATE INDEX idx_kv_k_t ON kv(k,t DESC);")

    class _KV:
        def __call__(self, q, *p):
            return [*con.execute(q, p)]

        def __setitem__(self, k, v):
            payload = v if type(v) is bytes else json.dumps(v)
            self("INSERT INTO kv(k,v) VALUES(?,?)", k, payload)

        def __getitem__(self, k):
            rows = self("SELECT v FROM kv WHERE k=? ORDER BY t DESC LIMIT 1", k)
            if not rows:
                return None
            v = rows[0][0]
            # str values were JSON-encoded on insert; bytes pass through raw.
            return json.loads(v) if type(v) is str else v

        def __eq__(self, query):
            return self("SELECT * FROM kv WHERE k LIKE ?", query)

        def __ne__(self, query):
            return self("SELECT * FROM kv WHERE k NOT LIKE ?", query)

        def __delitem__(self, rows):
            for row in rows:
                self("DELETE FROM kv WHERE k=? AND v=? AND t=?", *row)

        def __contains__(self, k):
            return bool(self("SELECT 1 FROM kv WHERE k=?", k))

        def __len__(self):
            return self("SELECT COUNT(*) FROM kv")[0][0]

        def __iter__(self):
            return iter(k[0] for k in self("SELECT k FROM kv"))

    return _KV()
sql = SQL()

tauri + svelte + shadcn

git clone https://github.com/alysonhower/tauri2-svelte5-shadcn.git
cd tauri2-svelte5-shadcn
bun i
bun run dev -- --host

syncify

def sync(coro):
    """Decorator: make an async function callable synchronously.

    Applies nest_asyncio so it also works inside an already-running
    event loop (e.g. Jupyter). Imports happen lazily at decoration time.
    """
    import asyncio
    import functools
    import nest_asyncio
    nest_asyncio.apply()

    @functools.wraps(coro)
    def runner(*args, **kwargs):
        return asyncio.run(coro(*args, **kwargs))

    return runner

@sync
async def main():
    """Demo coroutine: prints a greeting, returns a value synchronously."""
    print('hello')
    return 'world'

main()

playwright

def sync(coro):
    """Return a blocking wrapper around async *coro* (nested-loop safe)."""
    __import__('nest_asyncio').apply()
    wraps = __import__('functools').wraps
    run = __import__('asyncio').run
    return wraps(coro)(lambda *args, **kwargs: run(coro(*args, **kwargs)))

@sync
async def Page():
    """Launch headless Chromium and return a page whose async API is wrapped
    to be callable synchronously. Default timeout is disabled (0)."""
    from playwright.async_api import async_playwright
    pw = await async_playwright().start()
    browser = await pw.chromium.launch()
    context = await browser.new_context()
    context.set_default_timeout(0)
    page = await context.new_page()
    # Replace every public callable attribute with a blocking wrapper.
    for name in dir(page):
        if name[0] != '_':
            member = getattr(page, name)
            if callable(member):
                setattr(page, name, sync(member))
    page._repr_png_ = page.screenshot  # Jupyter inline rendering = screenshot
    original_goto = page.goto
    page.goto = lambda url: original_goto(url)  # positional-url-only shim
    page.soup = lambda: __import__('bs4').BeautifulSoup(page.content(), 'lxml')
    return page
# Demo (notebook cells; the bare '' lines are cell separators in this export):
''
page = Page()  # launch headless Chromium with the sync-wrapped API
page.goto('https://naver.com')  # blocking navigation (url-only shim)
page  # renders inline via _repr_png_, i.e. a screenshot
''
page.soup()  # current DOM parsed into a BeautifulSoup tree

wireguard

wg genkey|%{$_;$_|wg pubkey}
Description
No description provided
Readme Apache-2.0 979 KiB
Languages
Jupyter Notebook 72.2%
Python 16.5%
Dockerfile 3.6%
Shell 2.8%
Go 1.8%
Other 3.1%