Skip to content

Instantly share code, notes, and snippets.

@WA9ACE
Last active April 27, 2016 07:16
Show Gist options
  • Save WA9ACE/8509117 to your computer and use it in GitHub Desktop.
Save WA9ACE/8509117 to your computer and use it in GitHub Desktop.
Node.js Web Crawler using Request and Cheerio
// Third-party deps: `request` for HTTP fetching, `cheerio` for jQuery-style HTML parsing.
var request = require('request');
var cheerio = require('cheerio');
var fs = require('fs');
// Append-mode stream: crawled page HTML accumulates in data.txt across runs.
var data = fs.createWriteStream('data.txt', {'flags': 'a'});
// Shared work queue of URLs discovered but not yet fetched; consumed by spider().
var urlsToCrawl = [];
/**
 * Fetches `url`, appends the page HTML to the data stream, queues any
 * absolute http(s) links found on the page, then recurses on the next
 * queued URL. Recursion happens inside the async request callback, so
 * the call stack does not grow with the number of pages.
 *
 * Fixes over the original:
 *  - stops cleanly when the queue is empty (previously recursed on undefined)
 *  - skips anchors/undefined hrefs/relative links instead of queueing them
 *  - skips URLs already queued, reducing duplicate fetches
 *  - logs errors instead of silently swallowing them
 *
 * @param {string} url - absolute URL to fetch next
 */
var spider = function(url) {
  // Queue exhausted (or a bad entry slipped through): stop crawling.
  if (!url) {
    console.log('Nothing left to crawl.');
    return;
  }
  // Remove the current url we're crawling from the list to be crawled.
  var index = urlsToCrawl.indexOf(url);
  if (index > -1) {
    urlsToCrawl.splice(index, 1);
  }
  try {
    request(url, function(error, response, body) {
      if (!error && response.statusCode == 200) {
        var $ = cheerio.load(body);
        data.write($.html());
        console.log('Data saved for url: ' + url);
        $('a').each(function(i, element) {
          var link = element.attribs.href;
          // Only queue absolute http(s) links we haven't queued already;
          // href may be missing, a page anchor, mailto:, or a relative path.
          if (link && /^https?:\/\//.test(link) && urlsToCrawl.indexOf(link) === -1) {
            urlsToCrawl.push(link);
          }
        });
      } else if (error) {
        // Surface the failure rather than swallowing it, then move on.
        console.log('Failed to fetch ' + url + ': ' + error);
      }
      return spider(urlsToCrawl[0]);
    });
  } catch(error) {
    console.log('Request threw for ' + url + ': ' + error.message);
    return spider(urlsToCrawl[0]);
  }
};
spider('https://news.ycombinator.com/');
@GeneGenie
Copy link

Hello. Your issue is that you make consecutive calls (in your recursion), and you will eventually hit this exception at some point, because the scope is never released.

Use an event emitter instead of recursion:

function spider(url){
.....
// finished parsing page
eventEmitter.emit('page.finished',result)
...
}
eventEmitter.on('page.finished',function(result){
//handle result
spider(nextUrl)
})
spider(firstUrl)

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment