Created
August 2, 2020 16:14
-
-
Save christiancost47/92f404e1813276eec75a6bc7a3519d82 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
const express = require('express'); | |
const mongodb = require('mongodb'); | |
const request = require('request'); | |
const cheerio = require('cheerio'); | |
const router = express.Router(); | |
// GET / — return every stored link document as a JSON array.
router.get('/', async (req, res) => {
  const collection = await loadlinksCollection();
  const allLinks = await collection.find({}).toArray();
  res.send(allLinks);
});
/**
 * Scrape all anchor hrefs from the page at `url`.
 *
 * @param {string} url - page to fetch and scrape.
 * @returns {Promise<string[]>} resolves with every defined `href` found in an
 *   `<a>` tag, in document order.
 *
 * Rejects on a network error or a non-200 response. (The original only
 * resolved on success and otherwise left the promise pending forever,
 * hanging any caller that awaited it.)
 */
function getLinks(url) {
  return new Promise((resolve, reject) => {
    request(url, (error, response, html) => {
      if (error) {
        // Propagate network failures instead of silently never settling.
        return reject(error);
      }
      if (response.statusCode !== 200) {
        return reject(new Error(`Unexpected status code ${response.statusCode} for ${url}`));
      }
      const $ = cheerio.load(html);
      const links = [];
      $('a').each((i, element) => {
        const link = $(element).attr('href');
        if (link !== undefined) {
          links.push(link);
        }
      });
      resolve(links);
    });
  });
}
/**
 * Insert each scraped link into the `links` collection, one document per URL.
 * Inserts run sequentially, preserving the input order.
 *
 * @param {string[]} scrapedLinks - URLs to persist.
 * @returns {Promise<boolean>} resolves true when every insert has completed.
 *
 * Rewritten as a plain async function: the previous version wrapped an async
 * executor in `new Promise(...)` (the explicit-construction anti-pattern),
 * which would leave the promise pending forever if `loadlinksCollection` or
 * an insert rejected. Two commented-out duplicate implementations removed.
 */
async function populate(scrapedLinks) {
  const links = await loadlinksCollection();
  for (const url of scrapedLinks) {
    await links.insertOne({ url });
  }
  return true;
}
// POST / — store the submitted URL, scrape its page for links, persist every
// scraped link, and return them to the client.
// (Comment previously mislabeled this route as PUT; it is registered with
// router.post.)
router.post('/', async (req, res) => {
  try {
    const links = await loadlinksCollection();
    await links.insertOne({
      url: req.body.url,
    });
    const scrapedLinks = await getLinks(req.body.url);
    await populate(scrapedLinks);
    res.send({ scrapedLinks });
  } catch (err) {
    // Without this, a scrape or database failure became an unhandled
    // promise rejection and the client request hung with no response.
    res.status(500).send({ error: err.message });
  }
});
// DELETE /:id — remove the link document whose _id matches the route param.
router.delete('/:id', async (req, res) => {
  const id = new mongodb.ObjectID(req.params.id);
  const collection = await loadlinksCollection();
  await collection.deleteOne({ _id: id });
  res.status(200).send({});
});
// Connect to MongoDB Atlas and return the `links` collection.
//
// SECURITY: the username/password were hard-coded in the connection string
// and published in this file. Read the URI from the MONGODB_URI environment
// variable instead; the original string is kept only as a backward-compatible
// fallback and those credentials should be rotated.
//
// NOTE(review): this opens a new MongoClient on every call and never closes
// it — consider caching a single connected client at module scope.
async function loadlinksCollection() {
  const uri =
    process.env.MONGODB_URI ||
    'mongodb+srv://abcd1234:abcd1234@cluster0.9jcmb.mongodb.net/Cluster0?retryWrites=true&w=majority';
  const client = await mongodb.MongoClient.connect(uri, {
    useNewUrlParser: true,
  });
  return client.db('Cluster0').collection('links');
}
module.exports = router;
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment