-
-
Save estevanjantsk/7e2c68d62e49497bc0da8e8bca2af2c2 to your computer and use it in GitHub Desktop.
Simple example to scrape some posts and put into a CSV file using Node & Cheerio
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Scrape post previews from a sample blog and write them to post.csv
// as rows of Title,Link,Date.
const request = require('request');
const cheerio = require('cheerio');
const fs = require('fs');

const writeStream = fs.createWriteStream('post.csv');

// Quote a value for CSV (RFC 4180): wrap in double quotes and double any
// embedded quotes, so commas/quotes/newlines inside a field can't break the row.
// The original wrote fields unquoted, so a comma in a post title corrupted the CSV.
const csvField = (value) => {
  const s = value == null ? '' : String(value);
  return `"${s.replace(/"/g, '""')}"`;
};

// Write header row (no trailing space before the newline).
writeStream.write('Title,Link,Date\n');

request('http://codedemos.com/sampleblog', (error, response, html) => {
  // Report failures instead of silently exiting (the original swallowed both cases).
  if (error) {
    console.error('Request failed:', error);
    writeStream.end();
    return;
  }
  if (response.statusCode !== 200) {
    console.error(`Unexpected status code: ${response.statusCode}`);
    writeStream.end();
    return;
  }

  const $ = cheerio.load(html);
  $('.post-preview').each((i, el) => {
    // Collapse runs of whitespace left by the HTML formatting.
    const title = $(el)
      .find('.post-title')
      .text()
      .replace(/\s\s+/g, '');
    const link = $(el)
      .find('a')
      .attr('href'); // may be undefined if no anchor — csvField handles that
    const date = $(el)
      .find('.post-date')
      .text()
      .replace(/,/g, ''); // global flag: strip ALL commas (original removed only the first)
    // Write one properly escaped CSV row.
    writeStream.write(`${csvField(title)},${csvField(link)},${csvField(date)}\n`);
  });

  // Close the stream and log only once the data is flushed
  // (the original logged before the stream finished and never ended it).
  writeStream.end(() => console.log('Scraping Done...'));
});
Sign up for free to join this conversation on GitHub.
Already have an account?
Sign in to comment