import gspread
from oauth2client.service_account import ServiceAccountCredentials
from os import path
from bs4 import BeautifulSoup as Soup
import requests

DATA_DIR = 'C:/Users/xbsqu/Desktop/Python Learning/Projects/Premarket Stock Price'

# Connect to the Google Sheet.
scope = ['https://spreadsheets.google.com/feeds',
         'https://www.googleapis.com/auth/drive']
creds = ServiceAccountCredentials.from_json_keyfile_name(path.join(DATA_DIR, 'client_secret.json'), scope)
client = gspread.authorize(creds)
sheet = client.open('Stock Watcher')
worksheet = sheet.get_worksheet(0)
# Now connected to the Google Sheet.

# Rows start at 2 (row 1 is the header); column A holds the ticker symbols.
row_limit = 1 + len(worksheet.col_values(1))
for i in range(2, row_limit):
    stock_symbol = worksheet.acell(f'A{i}').value
    if not stock_symbol:
        # Skip rows with no ticker in column A.
        continue
    # Here is where we use a web scraper to grab the premarket price information.
    url_slug = 'https://finance.yahoo.com/quote/'
    stock_url = url_slug + stock_symbol
    try:
        stock_response = requests.get(stock_url)
        stock_soup = Soup(stock_response.text, 'html.parser')
        stock_pm_price = stock_soup.find('p', {'class': 'D(ib) W(45%) Pstart(10px) Va(t)'}).find('span').text
        # Scraper complete; write the scraped price to column I and repeat for the remaining rows.
        worksheet.update_cell(i, 9, stock_pm_price)
    except (requests.RequestException, AttributeError):
        # Request failed or the price element was not found; leave the cell blank.
        worksheet.update_cell(i, 9, "")
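
One note on the write step: update_cell issues a separate Sheets API call for every ticker, which can bump into Google's per-minute write quota on a long watchlist. Below is a minimal sketch of batching the writes instead; it assumes gspread 3.3 or newer (which provides Worksheet.batch_update) and reuses the worksheet object from the script above, with the hypothetical scraped list standing in for prices collected inside the loop.

# A minimal sketch, not part of the original gist: collect the scraped prices
# during the loop (e.g. scraped.append((i, stock_pm_price))) instead of calling
# update_cell each time, then write them all in one request.
scraped = []  # hypothetical list of (row, price) pairs built inside the scrape loop

body = [
    {'range': f'I{row}', 'values': [[price]]}  # column I is column 9 in the script above
    for row, price in scraped
]
if body:
    worksheet.batch_update(body)  # one API call covers every row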