1 | {
|
2 | "name": "crawler",
|
3 | "version": "1.2.1",
|
4 | "description": "Crawler is a web spider written with Nodejs. It gives you the full power of jQuery on the server to parse a big number of pages as they are downloaded, asynchronously",
|
5 | "main": "./lib/crawler.js",
|
6 | "directories": {
|
7 | "test": "tests"
|
8 | },
|
9 | "scripts": {
|
10 | "hint": "eslint ./lib/*.js ./tests/*.js",
|
11 | "test": "mocha --timeout=15000 tests/*.test.js",
|
12 | "cover": "nyc --reporter=lcovonly --reporter=text --reporter=text-summary mocha --timeout=15000 --reporter spec tests/*.test.js"
|
13 | },
|
14 | "repository": {
|
15 | "type": "git",
|
16 | "url": "https://github.com/bda-research/node-crawler.git"
|
17 | },
|
18 | "engines": {
|
19 | "node": ">=4.0.0"
|
20 | },
|
21 | "dependencies": {
|
22 | "bottleneckp": "~1.1.3",
|
23 | "cheerio": "^0.22.0",
|
24 | "iconv-lite": "^0.4.8",
|
25 | "lodash": "^4.17.10",
|
26 | "request": "~2.88.0",
|
27 | "seenreq": "^3.0.0",
|
28 | "type-is": "^1.6.14"
|
29 | },
|
30 | "devDependencies": {
|
31 | "chai": "^4.2.0",
|
32 | "coveralls": "^3.0.2",
|
33 | "eslint": "^5.0.0",
|
34 | "jsdom": "^9.6.0",
|
35 | "mocha": "^6.1.0",
|
36 | "mocha-testdata": "^1.2.0",
|
37 | "nock": "^10.0.6",
|
38 | "nyc": "^13.1.0",
|
39 | "sinon": "^7.0.0",
|
40 | "whacko": "^0.19.1"
|
41 | },
|
42 | "keywords": [
|
43 | "dom",
|
44 | "javascript",
|
45 | "crawling",
|
46 | "spider",
|
47 | "scraper",
|
48 | "scraping",
|
49 | "jquery",
|
50 | "crawler",
|
51 | "nodejs"
|
52 | ],
|
53 | "license": "MIT",
|
59 | "bugs": {
|
60 | "url": "https://github.com/bda-research/node-crawler/issues"
|
61 | },
|
62 | "homepage": "https://github.com/bda-research/node-crawler"
|
63 | }
|