// Read a crawl log record by record, printing each record's status code
// and URL. try-with-resources guarantees the iterator is closed afterwards.
try (CrawlLogIterator log = new CrawlLogIterator(Paths.get("crawl.log"))) {
    for (CrawlDataItem item : log) {
        System.out.println(item.getStatusCode());
        System.out.println(item.getURL());
    }
}
// Alternative ways to aggregate a whole crawl log.
// NOTE(review): each call presumably consumes `log`; the iterator opened in the
// example above is closed by its try-with-resources block, so a fresh
// CrawlLogIterator is likely needed here — confirm against the library docs.
CrawlSummary.byRegisteredDomain(log);
CrawlSummary.byHost(log);
CrawlSummary.byKey(log, item -> item.getCaptureBegan().toString().substring(0, 4)); // by year
CrawlSummary.build(log).topN(10); // top 10 status codes, mime-types etc
// Describing and classifying status codes. Positive values are HTTP status
// codes; negative values denote fetch errors (e.g. -4 = "HTTP timeout") and
// are reported as errors by isError().
StatusCodes.describe(404); // "Not found"
StatusCodes.describe(-4); // "HTTP timeout"
StatusCodes.isError(-4); // true
StatusCodes.isServerError(503); // true
To output a JSON crawl summary grouped by registered domain, run:
java -jar target/*.jar -g registered-domain crawl.log
For more options:
java -jar target/*.jar --help
To build the project, install Maven and then run:
mvn package