@@ -41,13 +41,9 @@ class CrawlClient(Protocol):
     def crawl(self, urls: List[str]): ...
 
 
-# ---- Constants ---------------------------------------------------------------
-
 DEFAULT_VIEW_TOKENS = 1024
 CAPPED_TOOL_CONTENT_LEN = 8000
 
-# ---- Helpers ----------------------------------------------------------------
-
 
 def cap_tool_content(text: str) -> str:
     if not text:
@@ -68,9 +64,6 @@ def _safe_domain(u: str) -> str:
         return u
 
 
-# ---- BrowserState ------------------------------------------------------------
-
-
 class BrowserState:
     def __init__(self, initial_state: Optional[BrowserStateData] = None):
         self._data = initial_state or BrowserStateData(view_tokens=DEFAULT_VIEW_TOKENS)
@@ -82,9 +75,6 @@ def set_data(self, data: BrowserStateData) -> None:
         self._data = data
 
 
-# ---- Browser ----------------------------------------------------------------
-
-
 class Browser:
     def __init__(
         self,
@@ -203,8 +193,6 @@ def _display_page(self, page: Page, cursor: int, loc: int, num_lines: int) -> str:
 
         return header + '\n'.join(body_lines)
 
-    # ---- page builders ----
-
     def _build_search_results_page_collection(self, query: str, results: Dict[str, Any]) -> Page:
         page = Page(
             url=f'search_results_{query}',
@@ -338,8 +326,6 @@ def _build_find_results_page(self, pattern: str, page: Page) -> Page:
         find_page.lines = self._wrap_lines(find_page.text, 80)
         return find_page
 
-    # ---- public API: search / open / find ------------------------------------
-
     def search(self, *, query: str, topn: int = 5) -> Dict[str, Any]:
         if not self._client:
             raise RuntimeError('Client not provided')