// NOTE(review): tail of a catch block whose try begins above this view;
// the original cause `e` is correctly preserved when rethrowing.
+ e.getMessage(), e);
}
// get client configured in same way as proxy is using it
final HttpClient httpClient = createHttpClientFor(mavenProxyRepository);
// Scrape context carries the repository, the client, and the configured max depth;
// scrapers communicate their outcome back through this context (stopped/successful/message).
final ScrapeContext context = new ScrapeContext(mavenProxyRepository, httpClient, config.getRemoteScrapeDepth());
// Entry point of the scrape: the page at the remote repository root URL.
final Page rootPage = Page.getPageFor(context, remoteRepositoryRootUrl);
// Work on a private copy so sorting does not mutate the injected `scrapers` collection.
final ArrayList<Scraper> appliedScrapers = new ArrayList<Scraper>(scrapers);
// Try scrapers in priority order; the first one that "stops" the context decides the outcome.
Collections.sort(appliedScrapers, new PriorityOrderingComparator<Scraper>());
for (Scraper scraper : appliedScrapers) {
log.debug("Remote scraping {} with Scraper {}", mavenProxyRepository, scraper.getId());
scraper.scrape(context, rootPage);
if (context.isStopped()) {
if (context.isSuccessful()) {
// Scraper recognized the remote and produced a prefix source: strategy succeeded.
log.debug("Remote scraping {} with Scraper {} succeeded.", mavenProxyRepository, scraper.getId());
return new StrategyResult(context.getMessage(), context.getPrefixSource(), true);
}
else {
// Scraper stopped further processing without success (e.g. remote refused or
// is unsuitable — exact reason is in context.getMessage()): abort the strategy.
log.debug("Remote scraping {} with Scraper {} stopped execution.", mavenProxyRepository,
scraper.getId());
throw new StrategyFailedException(context.getMessage());
}
}
// Context not stopped: this scraper did not apply to the remote; fall through to the next one.
log.debug("Remote scraping {} with Scraper {} skipped.", mavenProxyRepository, scraper.getId());
}
// NOTE(review): the method continues past this view — presumably handling the case
// where no scraper stopped the context; confirm against the full source.