From ef5411a6e14c9ae6cbd1488122462f12ae197ce0 Mon Sep 17 00:00:00 2001
From: Marvin Borner
Date: Sat, 10 Nov 2018 01:48:38 +0100
Subject: Improved performance

---
 crawler.js | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/crawler.js b/crawler.js
index ceacc52..0f09c4a 100644
--- a/crawler.js
+++ b/crawler.js
@@ -1,9 +1,9 @@
 const crawlService = require("crawler");
 const crypto = require("crypto");
 const database = require("./database");
-const url = require("url");
 
 const crawler = new crawlService({
+    skipDuplicates: true,
     userAgent: "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)",
     rateLimit: 100, // TODO: Dynamic rate limit setting depending on errors
     maxConnections: 1, // set to 10 (and remove the line above) for faster crawling but higher probability of rate limiting (429)
@@ -16,7 +16,6 @@ const crawler = new crawlService({
     const urlHash = crypto.createHash("sha256").update(res.request.uri.href).digest("base64");
     database.exists("crawled", "site", urlHash).then(exists => {
         if (crawler.queueSize === 0 || !exists) {
-            console.log(crawler.queue());
             console.log("\nCrawling: " + res.request.uri.href);
             database.index('crawled', 'site', [
                 {
-- 
cgit v1.2.3
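
For context on the change: skipDuplicates is a documented option of the "crawler" npm package. When set, the crawler remembers requests it has already seen and silently drops repeated URLs, which complements the application's own sha256-based duplicate check against the database. Below is a minimal, self-contained sketch of that behavior, assuming the same "crawler" package as in the patch; the example URL is only a placeholder.

    // Sketch only, not part of the commit: demonstrates node-crawler's
    // skipDuplicates option. Queueing the same URL twice fires the
    // callback only once, because the second request is dropped.
    const Crawler = require("crawler");

    const crawler = new Crawler({
        skipDuplicates: true, // remember seen requests, drop repeats
        rateLimit: 100,       // same conservative throttle as in crawler.js
        callback: (error, res, done) => {
            if (error) {
                console.error(error);
            } else {
                console.log("Fetched: " + res.request.uri.href);
            }
            done(); // signal node-crawler that this task is finished
        }
    });

    // Placeholder URL; the second queue() call is skipped as a duplicate.
    crawler.queue("https://example.com/");
    crawler.queue("https://example.com/");

With the library handling deduplication at queue time, the removed console.log(crawler.queue()) debug call, which was executed on every crawled page, is no longer needed.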