Skip to content

Instantly share code, notes, and snippets.

View dpbac's full-sized avatar

Danielle Paes Barretto de Arruda Camara dpbac

View GitHub Profile
# importing packages
import requests
from bs4 import BeautifulSoup
import pandas as pd
def parse_website(url):
"""
Parse content of a website
@dpbac
dpbac / retrieve_hyperlink.py
Last active February 4, 2020 17:57
Function used to retrieve hyperlinks in a 'main_url'
import requests
from bs4 import BeautifulSoup
def retrieve_hyperlinks(main_url):
""" Extract all hyperlinks in 'main_url' and return a list with these hyperlinks """
# Packages the request, send the request and catch the response: r
r = requests.get(main_url)
@dpbac
dpbac / upload_and_read_colaboratory.py
Created March 13, 2018 10:41 — forked from sagarhowal/upload_and_read_colaboratory.py
Upload Dataset on Google Colaboratory.
# Uploading the Dataset interactively in Google Colaboratory.
from google.colab import files

# files.upload() opens a browser file picker and returns a dict
# mapping {filename: file_contents_as_bytes}.
uploaded = files.upload()

# Save the uploaded file on the Virtual Machine's local filesystem.
# Thanks to user3800642 from StackOverflow.
#
# Fixes vs. the forked original:
#  - dict_keys is not subscriptable in Python 3, so
#    `uploaded.keys()[0]` raised TypeError; use next(iter(...)) to
#    take the first (and typically only) uploaded filename.
#  - files.upload() yields bytes, so the file must be opened in
#    binary mode ("wb"), not text mode ("w").
first_filename = next(iter(uploaded))
with open("breast_cancer.csv", "wb") as f:
    f.write(uploaded[first_filename])