@@ -75,7 +75,7 @@ FirecrawlClient client = new FirecrawlClient(
 // Simple scraping
 FirecrawlDocument doc = client.scrapeURL("https://example.com", null);
 System.out.println(doc.getHtml());
-System.out.println(doc.getText());
+System.out.println(doc.getText());

 // Advanced scraping with options
 ScrapeParams params = new ScrapeParams();
@@ -95,15 +95,15 @@ SearchResponse resp = client.search(params);

 // Process results
 if (resp.isSuccess()) {
-for (SearchResult result : resp.getResults()) {
+    for (SearchResult result : resp.getResults()) {
         System.out.println(result.getTitle() + " - " + result.getUrl());
-}
-}
+    }
+}

 // Check for warnings
-if (resp.getWarning() != null) {
-System.err.println("Warning: " + resp.getWarning());
-}
+if (resp.getWarning() != null) {
+    System.err.println("Warning: " + resp.getWarning());
+}

```
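The `params` consumed by `client.search(params)` above is constructed before the lines shown in this hunk. As a rough sketch only: the `SearchParams` type and its setter names below are assumptions for illustration, not confirmed by this diff.

```java
// Hypothetical construction of the search parameters used above;
// SearchParams, setQuery and setLimit are assumed names, not confirmed API.
SearchParams params = new SearchParams();
params.setQuery("web scraping api"); // assumed setter
params.setLimit(5);                  // assumed setter

SearchResponse resp = client.search(params); // as in the example above
```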
### Web Crawling
@@ -122,8 +122,8 @@ System.out.println("Crawl status: " + status.getStatus());

 // Synchronous crawling (with polling)
 CrawlStatusResponse result = client.crawlURL("https://example.com", params, idempotencyKey, 5);
 if ("completed".equals(result.getStatus())) {
-FirecrawlDocument[] documents = result.getData();
-// Process crawled documents
+    FirecrawlDocument[] documents = result.getData();
+    // Process crawled documents
 }

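// For reference, a hand-rolled version of the polling that crawlURL performs
// internally. This is a sketch only: asyncCrawlURL, getId and checkCrawlStatus
// are assumed names for illustration and may not match this SDK's actual API.
CrawlStatusResponse status = client.asyncCrawlURL("https://example.com", params, idempotencyKey); // assumed method
String jobId = status.getId(); // assumed accessor
while (!"completed".equals(status.getStatus()) && !"failed".equals(status.getStatus())) {
    try {
        Thread.sleep(5000); // wait five seconds between polls
    } catch (InterruptedException e) {
        Thread.currentThread().interrupt();
        break;
    }
    status = client.checkCrawlStatus(jobId); // assumed method
}
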
// Cancel a crawl job