1 |
|
2 | 'use strict';
|
3 |
|
4 | const Crawler = require('../lib/crawler');
|
5 | const nock = require('nock');
|
6 |
|
describe('Callback test', () => {
  // Make sure no interceptors from other suites leak into these tests.
  before(() => {
    nock.cleanAll();
  });

  const url = 'http://www.whatever.com';
  let crawler = null;

  // Each test gets a fresh crawler: no retries, a short timeout, and a
  // silenced logger so expected failures don't pollute the test output.
  beforeEach(() => {
    crawler = new Crawler({
      retryTimeout: 0,
      retries: 0,
      timeout: 100,
      logger: { log: () => {} },
    });
  });

  afterEach(() => {
    crawler = null;
  });

  // Shared reply shape for every interceptor in this suite:
  // 200 with a minimal HTML body and a text/html content type.
  const replyHtml = (interceptor) =>
    interceptor.reply(200, '<html></html>', {
      'Content-Type': 'text/html',
    });

  it('should end as expected without callback', (done) => {
    replyHtml(nock(url).get('/get'));

    // With no per-request callback, completion is observed via 'drain'.
    crawler.on('drain', done);
    crawler.queue(`${url}/get`);
  });

  it('should end as expected without callback when timedout', (done) => {
    // Body delayed past the crawler's 100ms timeout to force a timeout path.
    replyHtml(nock(url).get('/delay').delayBody(500));

    crawler.on('drain', done);
    crawler.queue(`${url}/delay`);
  });

  it('should end as expected without callback when encoding error', (done) => {
    replyHtml(nock(url).get('/get'));

    // Force the internal encoding step to throw so the error path still drains.
    crawler._doEncoding = () => {
      throw new Error('Error for testing.');
    };
    crawler.on('drain', done);
    crawler.queue(`${url}/get`);
  });
});
|