Merge pull request #1082 from vioao/common-downloader-status-process

Common the downloader status process and pass error information when …
pull/1083/head
Sutra Zhou 2 years ago committed by GitHub
commit ee5a0585d7

@@ -38,7 +38,7 @@ public abstract class AbstractDownloader implements Downloader {
protected void onSuccess(Request request) {
}
protected void onError(Request request) {
protected void onError(Request request, Throwable e) {
}
}
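The only API-visible change in this hunk is the extra Throwable passed to the error hook. A minimal sketch of a subclass using the new signature (LoggingDownloader and its console output are hypothetical, not part of this PR):

import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Task;
import us.codecraft.webmagic.downloader.AbstractDownloader;

// Hypothetical subclass, only to illustrate the updated onError(Request, Throwable) hook.
public class LoggingDownloader extends AbstractDownloader {

    @Override
    public Page download(Request request, Task task) {
        try {
            // a real implementation would fetch the page here and call onSuccess(request)
            throw new UnsupportedOperationException("fetch not implemented in this sketch");
        } catch (Exception e) {
            onError(request, e);            // the cause is now forwarded to the hook
            Page page = Page.fail();
            page.setRequest(request);
            return page;
        }
    }

    @Override
    public void setThread(int threadNum) {
        // no thread pool in this sketch
    }

    @Override
    protected void onError(Request request, Throwable e) {
        // subclasses can now see and report why the download failed
        System.err.println("download failed for " + request.getUrl() + ": " + e);
    }
}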

@@ -87,7 +87,7 @@ public class HttpClientDownloader extends AbstractDownloader {
return page;
} catch (IOException e) {
logger.warn("download page {} error", request.getUrl(), e);
onError(request);
onError(request, e);
return page;
} finally {
if (httpResponse != null) {

@@ -16,21 +16,17 @@ import java.io.*;
* @version 0.5.3
*/
public class PhantomJSDownloader extends AbstractDownloader {
private static Logger logger = LoggerFactory.getLogger(PhantomJSDownloader.class);
private static final Logger logger = LoggerFactory.getLogger(PhantomJSDownloader.class);
private static String crawlJsPath;
private static String phantomJsCommand = "phantomjs"; // default
private int retryNum;
private int threadNum;
public PhantomJSDownloader() {
this.initPhantomjsCrawlPath();
}
/**
* Constructor that supports a custom phantomjs command.
*
* <p>
* example:
* phantomjs.exe (for the Windows environment)
* phantomjs --ignore-ssl-errors=yes (ignore SSL errors when the crawled URL is https)
@@ -69,7 +65,7 @@ public class PhantomJSDownloader extends AbstractDownloader {
* -- crawl.js end
* </pre>
* Constructor that supports a custom crawl.js path, for projects that cannot use the crawl.js bundled in this jar.
*
* <p>
* example:
* new PhantomJSDownloader("/your/path/phantomjs", "/your/path/crawl.js");
*
@@ -82,7 +78,8 @@ public class PhantomJSDownloader extends AbstractDownloader {
}
private void initPhantomjsCrawlPath() {
PhantomJSDownloader.crawlJsPath = new File(this.getClass().getResource("/").getPath()).getPath() + System.getProperty("file.separator") + "crawl.js ";
PhantomJSDownloader.crawlJsPath = new File(this.getClass().getResource("/").getPath()).getPath()
+ System.getProperty("file.separator") + "crawl.js ";
}
@Override
@@ -90,61 +87,41 @@ public class PhantomJSDownloader extends AbstractDownloader {
if (logger.isInfoEnabled()) {
logger.info("downloading page: " + request.getUrl());
}
Page page = Page.fail();
try {
String content = getPage(request);
if (content.contains("HTTP request failed")) {
for (int i = 1; i <= getRetryNum(); i++) {
content = getPage(request);
if (!content.contains("HTTP request failed")) {
break;
}
}
if (content.contains("HTTP request failed")) {
//when failed
Page page = new Page();
page.setRequest(request);
return page;
}
}
Page page = new Page();
page.setDownloadSuccess(true);
page.setRawText(content);
page.setUrl(new PlainText(request.getUrl()));
page.setRequest(request);
page.setStatusCode(200);
}
onSuccess(request);
} catch (Exception e) {
onError(request, e);
logger.warn("download page {} error", request.getUrl(), e);
}
return page;
}
@Override
public void setThread(int threadNum) {
this.threadNum = threadNum;
// ignore
}
protected String getPage(Request request) {
try {
protected String getPage(Request request) throws Exception {
String url = request.getUrl();
Runtime runtime = Runtime.getRuntime();
Process process = runtime.exec(phantomJsCommand + " " + crawlJsPath + " " + url);
InputStream is = process.getInputStream();
BufferedReader br = new BufferedReader(new InputStreamReader(is));
StringBuffer stringBuffer = new StringBuffer();
StringBuilder builder = new StringBuilder();
String line;
while ((line = br.readLine()) != null) {
stringBuffer.append(line).append("\n");
}
return stringBuffer.toString();
} catch (IOException e) {
e.printStackTrace();
builder.append(line).append("\n");
}
return null;
}
public int getRetryNum() {
return retryNum;
}
public PhantomJSDownloader setRetryNum(int retryNum) {
this.retryNum = retryNum;
return this;
return builder.toString();
}
}
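Wiring the downloader into a crawl is unchanged by this PR; a minimal usage sketch (MyPageProcessor stands in for any user-supplied PageProcessor, and the URL is a placeholder):

import us.codecraft.webmagic.Spider;

// Hypothetical wiring of PhantomJSDownloader into a Spider.
Spider.create(new MyPageProcessor())
        .setDownloader(new PhantomJSDownloader())   // uses the default "phantomjs" command
        .addUrl("https://example.com/")
        .run();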

@@ -11,7 +11,7 @@ import us.codecraft.webmagic.Page;
import us.codecraft.webmagic.Request;
import us.codecraft.webmagic.Site;
import us.codecraft.webmagic.Task;
import us.codecraft.webmagic.downloader.Downloader;
import us.codecraft.webmagic.downloader.AbstractDownloader;
import us.codecraft.webmagic.selector.Html;
import us.codecraft.webmagic.selector.PlainText;
@@ -27,7 +27,7 @@ import java.util.Map;
* Date: 13-7-26 <br>
* Time: 1:37 <br>
*/
public class SeleniumDownloader implements Downloader, Closeable {
public class SeleniumDownloader extends AbstractDownloader implements Closeable {
private volatile WebDriverPool webDriverPool;
@@ -73,17 +73,17 @@ public class SeleniumDownloader implements Downloader, Closeable {
@Override
public Page download(Request request, Task task) {
checkInit();
WebDriver webDriver;
WebDriver webDriver = null;
Page page = Page.fail();
try {
webDriver = webDriverPool.get();
} catch (InterruptedException e) {
logger.warn("interrupted", e);
return null;
}
logger.info("downloading page " + request.getUrl());
webDriver.get(request.getUrl());
try {
if (sleepTime > 0) {
Thread.sleep(sleepTime);
}
} catch (InterruptedException e) {
e.printStackTrace();
}
@@ -106,12 +106,20 @@ public class SeleniumDownloader implements Downloader, Closeable {
WebElement webElement = webDriver.findElement(By.xpath("/html"));
String content = webElement.getAttribute("outerHTML");
Page page = new Page();
page.setDownloadSuccess(true);
page.setRawText(content);
page.setHtml(new Html(content, request.getUrl()));
page.setUrl(new PlainText(request.getUrl()));
page.setRequest(request);
onSuccess(request);
} catch (Exception e) {
logger.warn("download page {} error", request.getUrl(), e);
onError(request, e);
} finally {
if (webDriver != null) {
webDriverPool.returnToPool(webDriver);
}
}
return page;
}
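With SeleniumDownloader now extending AbstractDownloader, a failed download yields a Page.fail() result instead of null, so callers can branch on the download status. A caller-side sketch (downloader, request, and task stand for any Downloader instance and the current Request/Task; this assumes Page exposes the isDownloadSuccess() getter that pairs with the setDownloadSuccess(true) call above):

Page page = downloader.download(request, task);
if (page.isDownloadSuccess()) {
    // process page.getRawText() or page.getHtml()
} else {
    // the downloader has already invoked onError(request, e);
    // decide here whether to re-queue the request
}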
