Update chrome/README.md

This commit is contained in:
2025-02-05 03:13:13 +00:00
parent 52a000a172
commit df64161e57

View File

@@ -1,5 +1,5 @@
```python
def Chrome(headless=True, agent=None, proxy=None):
def Chrome(agent=None, proxy=None, cookies=None, headless=True):
import requests, subprocess, base64
try: requests.get(f'http://localhost:9222', timeout=.1)
except: subprocess.Popen(['chrome', f'--remote-debugging-port={9222}',
@@ -10,7 +10,7 @@ def Chrome(headless=True, agent=None, proxy=None):
'--ignore-certificate-errors', '--remote-allow-origins=*',
'--disable-backgrounding-occluded-windows',])
browser = requests.get(f'http://localhost:9222/json/version').json()['webSocketDebuggerUrl']
def new_page(self):
def Page():
def send(url, request):
import websocket, json; ws = websocket.create_connection(url)
try: ws.send(json.dumps(request)); return json.loads(ws.recv())
@@ -26,9 +26,7 @@ def Chrome(headless=True, agent=None, proxy=None):
def wait_element(self, selector, timeout=30, check_interval=0.5):
import time; start_time = time.time()
while time.time() - start_time < timeout:
if self.evaluate(
f"document.querySelector('{selector}') ? true : false"):
return
if self.evaluate(f"!!(document.querySelector('{selector}'))"): return
time.sleep(check_interval)
raise TimeoutError(f"Element '{selector}' not found within {timeout} seconds")
def cookies(self, cookies=None):
@@ -42,7 +40,8 @@ def Chrome(headless=True, agent=None, proxy=None):
self('Page.navigate', url=url if '://' in url else f'https://{url}')
while self.evaluate('document.readyState') != 'complete':
self.sleep(0.1)
return True
Soup = lambda html: __import__('bs4').BeautifulSoup(html, 'lxml')
return Soup(self.source())
return type('', (), dict(__call__=lambda _, method, **params:
send(page, {"id": 1, "method": method, "params": params})['result'],
goto=goto,
@@ -54,5 +53,12 @@ def Chrome(headless=True, agent=None, proxy=None):
screenshot=screenshot,
source=lambda self: self.evaluate('document.documentElement.outerHTML'),
))()
return type('', (), dict(new_page=new_page))()
(page := Page()).cookies(cookies)
return page
# Demo: launch Chrome, load a page, wait for content, and inspect it.
page = Chrome()
soup = page.goto('naver.com')          # navigate; returns the parsed document
print(soup.title)                      # <title>NAVER</title>
page.wait_element('img')               # block until an <img> is in the DOM
page.screenshot()
page.evaluate('window.location.href')  # 'https://www.naver.com/'
```