@@ -75,7 +75,7 @@ FirecrawlClient client = new FirecrawlClient(
 // Simple scraping
 FirecrawlDocument doc = client.scrapeURL("https://example.com", null);
 System.out.println(doc.getHtml());
-          System.out.println(doc.getText());
+System.out.println(doc.getText());
 
 // Advanced scraping with options
 ScrapeParams params = new ScrapeParams();
@@ -95,15 +95,15 @@ SearchResponse resp = client.search(params);
 
 // Process results
 if (resp.isSuccess()) {
-          for (SearchResult result : resp.getResults()) {
+    for (SearchResult result : resp.getResults()) {
         System.out.println(result.getTitle() + " - " + result.getUrl());
-          }
-          }
+    }
+}
 
 // Check for warnings
-          if (resp.getWarning() != null) {
-          System.err.println("Warning: " + resp.getWarning());
-          }
+if (resp.getWarning() != null) {
+    System.err.println("Warning: " + resp.getWarning());
+}
 ```
 
 ### Web Crawling
@@ -122,8 +122,8 @@ System.out.println("Crawl status: " + status.getStatus());
 // Synchronous crawling (with polling)
 CrawlStatusResponse result = client.crawlURL("https://example.com", 5);
 if ("completed".equals(result.getStatus())) {
-FirecrawlDocument[] documents = result.getData();
-// Process crawled documents
+    FirecrawlDocument[] documents = result.getData();
+    // Process crawled documents
 }
 
 // Cancel a crawl job