import smtplib

def send_email(subject, msg, mail_to):
    try:
        mail_from = 'email address'
        password = 'password'
        # Connect to the SMTP server and switch to TLS before logging in
        server = smtplib.SMTP('smtp.live.com', 587)
        server.ehlo()
        server.starttls()
        server.login(mail_from, password)
        # Build the message and send it to the recipient
        message = f'Subject: {subject}\n\n{msg}'
        server.sendmail(mail_from, mail_to, message)
        server.quit()
    except smtplib.SMTPException as e:
        print(f'Email not sent: {e}')

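A call might look like this (the recipient address below is a placeholder, not from the original snippet):

send_email('Scraping finished', 'All pages were processed.', 'destination@example.com')
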
import sqlite3

# connect() already opens (or creates) my_database.db, so no USE statement is needed in SQLite
conn = sqlite3.connect('my_database.db')
cur = conn.cursor()

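For storing scraped rows, a minimal, hypothetical follow-up (the table and column names are placeholders, not from the original snippet) could use a parameterized insert:

# Hypothetical table; the inserted values stand in for scraped data
cur.execute('CREATE TABLE IF NOT EXISTS pages (col_1 TEXT, col_2 TEXT, col_3 TEXT)')
cur.execute('INSERT INTO pages VALUES (?, ?, ?)', ('value_1', 'value_2', 'value_3'))
conn.commit()
conn.close()
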
# Write the header row only on the first page, then append the scraped values
for i, page in enumerate(pages):
    '''
    Here's your scraper
    '''
    if i == 0:
        with open('my_file.csv', 'w') as file:
            file.write(f'{col_1},{col_2},{col_3}\n')
    with open('my_file.csv', 'a') as file:
        file.write(f'{value_1},{value_2},{value_3}\n')

# Append the scraped values to the CSV as each page is processed
for page in pages:
    '''
    Here's your scraper
    '''
    with open('my_file.csv', 'a') as file:
        file.write(f'{value_1},{value_2},{value_3}\n')

# Keep the file handle around and make sure it is closed even if the scraper fails
for page in pages:
    '''
    Here's your scraper
    '''
    file = open('my_file.csv', 'a')
    try:
        file.write(f'{value_1},{value_2},{value_3}\n')
    finally:
        file.close()

# Collect each page's values in a list, and gather all pages in a list of lists
list_of_lists = []
for page in pages:
    list_of_page = []
    '''
    Here's your scraper
    '''
    list_of_page.append(value_1)
    list_of_page.append(value_2)
    list_of_page.append(value_3)
    list_of_lists.append(list_of_page)

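From here, a natural follow-up (not shown in the original snippet) is to load the list of lists into a pandas DataFrame and write it out once; the column names are placeholders:

import pandas as pd

# Placeholder column names; replace them with the fields your scraper collects
df = pd.DataFrame(list_of_lists, columns=['col_1', 'col_2', 'col_3'])
df.to_csv('my_file.csv', index=False)
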
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from time import sleep
import pandas as pd

def get_currencies(currencies, start, end, export_csv=False):
    for currency in currencies:
        while True:
            try:
                # Opening the connection and grabbing the page
                my_url = f'https://br.investing.com/currencies/usd-{currency.lower()}-historical-data'
                option = Options()
                option.headless = False
                driver = webdriver.Chrome(options=option)
                driver.get(my_url)
                driver.maximize_window()
                # ... scraping of the historical data table goes here ...
                driver.quit()
                break
            except Exception:
                # Retry the same currency if the page fails to load
                continue

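The WebDriverWait, expected_conditions, and By imports suggest the omitted part waits for the historical-data table before reading it; a sketch of that step, with a guessed element id, might look like this:

# 'curr_table' is a placeholder id, not confirmed from the original code
table = WebDriverWait(driver, 10).until(
    EC.presence_of_element_located((By.ID, 'curr_table'))
)
rows = table.find_elements(By.TAG_NAME, 'tr')
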
from bs4 import BeautifulSoup
import requests
import numpy as np
import re
import time

# Seed NumPy's RNG with the current time so random draws differ between runs
np.random.seed(int(time.time()))

pages_crawled = []

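Given the time import and the seeded RNG, a likely use (assumed here, not shown in the original) is a random pause between requests:

# Wait a random 1-5 seconds before the next request to avoid hammering the server
time.sleep(np.random.uniform(1, 5))
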
from bs4 import BeautifulSoup
import requests

pages_crawled = []

def crawler(url):
    # Download the page and collect every link it contains
    page = requests.get(url)
    soup = BeautifulSoup(page.text, 'html.parser')
    links = soup.find_all('a')
    # ... following the collected links would continue from here ...

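One way the crawl might continue, assuming pages_crawled is meant to record visited URLs (the crawl_site helper and the depth limit below are illustrative additions, not from the original):

from urllib.parse import urljoin

def crawl_site(url, depth=0, max_depth=2):
    # Skip URLs we have already visited and stop at a fixed depth
    if depth > max_depth or url in pages_crawled:
        return
    pages_crawled.append(url)
    page = requests.get(url)
    soup = BeautifulSoup(page.text, 'html.parser')
    # Resolve relative links against the current URL and recurse into each one
    for link in soup.find_all('a', href=True):
        crawl_site(urljoin(url, link['href']), depth + 1, max_depth)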