package.json
{
  "name": "crawler",
  "version": "1.3.0",
"description": "Crawler is a web spider written with Nodejs. It gives you the full power of jQuery on the server to parse a big number of pages as they are downloaded, asynchronously",
"main": "./lib/crawler.js",
"directories": {
"test": "tests"
},
"scripts": {
"hint": "eslint ./lib/*.js ./tests/*.js",
"test": "mocha --timeout=15000 tests/*.test.js",
"http2test": "mocha --timeout=15000 tests/http2*.test.js",
"cover": "nyc --reporter=lcovonly --reporter=text --reporter=text-summary mocha --timeout=15000 --reporter spec tests/*.test.js"
},
"repository": {
"type": "git",
"url": "https://github.com/bda-research/node-crawler.git"
},
"engine-strict": {
"node": ">=10.0.0"
},
"dependencies": {
"bottleneckp": "~1.1.3",
"cheerio": "^0.22.0",
"iconv-lite": "^0.4.8",
"lodash": "^4.17.10",
"request": "~2.88.0",
"seenreq": "^3.0.0",
"type-is": "^1.6.14"
},
"devDependencies": {
"chai": "^4.2.0",
"coveralls": "^3.0.2",
"eslint": "^5.0.0",
"jsdom": "^9.6.0",
"mocha": "^6.1.0",
"nock": "^13.0.5",
"mocha-testdata": "^1.2.0",
"nyc": "^13.1.0",
"sinon": "^7.0.0",
"whacko": "^0.19.1"
},
"keywords": [
"dom",
"javascript",
"crawling",
"spider",
"scraper",
"scraping",
"jquery",
"crawler",
"nodejs"
],
"licenses": [
{
"type": "MIT",
"url": "http://github.com/bda-research/node-crawler/blob/master/LICENSE.txt"
}
],
"bugs": {
"url": "https://github.com/bda-research/node-crawler/issues"
},
"homepage": "https://github.com/bda-research/node-crawler"
}
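
For context, here is a minimal usage sketch of the package this manifest describes, assuming the callback-style API documented for crawler 1.x; the option names and the `(error, res, done)` callback signature are taken from that documentation, not from this file:

```js
// Minimal sketch, assuming the crawler 1.x callback-style API.
const Crawler = require("crawler");

const c = new Crawler({
  maxConnections: 10, // fetch up to 10 pages concurrently
  callback: (error, res, done) => {
    if (error) {
      console.error(error);
    } else {
      // res.$ is a cheerio instance loaded with the response body,
      // which is what provides "the full power of jQuery on the server".
      const $ = res.$;
      console.log($("title").text());
    }
    done(); // mark this task finished so the queue can continue
  }
});

// Queue one or more URLs; pages are downloaded asynchronously.
c.queue("http://www.example.com");
```

The `test` and `cover` scripts above are run in the usual way, via `npm test` and `npm run cover`.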