Coronavirus App
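A single-file Flask app that scrapes live COVID-19 figures from Worldometers and the MoHFW state-wise table, pulls Indian health headlines from NewsAPI, and serves four views, including a Plotly stacked bar chart comparing the 15 worst-affected countries.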
from flask import Flask, render_template
import requests
from bs4 import BeautifulSoup
import dateutil.parser
import pandas as pd
import plotly
from plotly import graph_objs as go
import json

app = Flask(__name__)
def data_scrape():
    """Scrape the live country-by-country table from Worldometers."""
    base_url = 'https://www.worldometers.info/coronavirus/'
    response = requests.get(base_url, headers={
        'User-agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0'})
    soup = BeautifulSoup(response.text, 'html.parser')
    table = soup.find('table', id='main_table_countries_today')
    table_rows = table.find_all('tr')
    data = []
    for tr in table_rows:
        td = tr.find_all('td')
        # Strip whitespace and the leading "+" from the daily-change cells.
        row = [i.text.strip().replace("+", "") for i in td]
        data.append(row)
    return data
def data_cleanup():
    """Normalise the scraped rows: drop separators, fill blanks and cast counts to int."""
    data = data_scrape()
    cleaned_data = []
    for row in data:
        if not row:  # header rows contain no <td> cells, so skip the empty lists
            continue
        row_list = []
        for i in row:
            i = i.replace("+", "").replace("-", "").replace(",", "")
            if i == "":
                i = "0"
            row_list.append(i.strip())
        # Columns 1-7 (total cases ... serious cases) are numeric.
        row_list[1:8] = map(int, row_list[1:8])
        cleaned_data.append(row_list)
    return cleaned_data
def news_scrape():
    """Fetch Indian health headlines about coronavirus from NewsAPI."""
    secret = '2ab6926b9fac414faa1471562bcd2f60'  # avoid committing API keys; prefer an environment variable
    url = 'https://newsapi.org/v2/top-headlines'
    parameters = {
        'country': 'in',
        'category': 'health',
        'q': 'coronavirus',
        'pageSize': 40,
        'apiKey': secret,
    }
    response = requests.get(url, params=parameters)
    response_json = response.json()
    articles = response_json['articles']
    return articles
def india_statewise():
    """Scrape the state-wise case table from the MoHFW website."""
    base_url = 'https://www.mohfw.gov.in/'
    response = requests.get(base_url, headers={
        'User-agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:61.0) Gecko/20100101 Firefox/61.0'})
    soup = BeautifulSoup(response.text, 'html.parser')
    div = soup.find('div', {'class': 'content newtab'})
    tbody = div.find('tbody')
    state_rows = tbody.find_all('tr')
    state_num = []
    for tr in state_rows[:-2]:  # the last two rows are totals, not states
        td = tr.find_all('td')
        row = [i.text.strip() for i in td]
        row[2:5] = map(int, row[2:5])  # confirmed, recovered and death counts
        state_num.append(row)
    state_num.sort(key=lambda x: x[2], reverse=True)  # most confirmed cases first
    return state_num
@app.route('/')
def home():
    """Dashboard: India's headline numbers plus the state-wise breakdown."""
    data = data_scrape()
    for row in data:
        if 'India' in row:
            country = row[0]
            total_cases = row[1]
            new_cases = row[2]
            total_deaths = row[3]
            new_deaths = row[4]
            total_recovered = row[5]
            active_cases = row[6]
            serious_critical = row[7]
            break
    state_num = india_statewise()
    return render_template('index.html', country=country, total_cases=total_cases, new_cases=new_cases,
                           total_deaths=total_deaths, new_deaths=new_deaths,
                           active_cases=active_cases, total_recovered=total_recovered,
                           serious_critical=serious_critical,
                           state_num=state_num)
@app.route('/global_data')
def global_data():
    """Full scraped Worldometers table."""
    data = data_scrape()
    return render_template('global_data.html', data=data)
@app.route('/comparative_chart')
def comparative_chart():
    """Stacked horizontal bar chart of the 15 worst-affected countries."""
    data = data_cleanup()
    df = pd.DataFrame(data[:15], columns=['Country', 'Total Cases', 'New Cases', 'Total Deaths', 'New Deaths',
                                          'Total Recovered', 'Active Cases', 'Serious Cases', 'Total Cases/ 1mn',
                                          'Total Deaths/ 1mn', '1st Case'])
    # Keep only the columns that are plotted.
    to_drop = ['Total Cases/ 1mn', 'Total Deaths/ 1mn', '1st Case']
    df.drop(to_drop, inplace=True, axis=1)
    df.sort_values(['Total Cases'], ascending=True, inplace=True)
    marker1 = dict(color='#1B4079', line=dict(color='#1B4079', width=3))
    marker2 = dict(color='#4D7C8A', line=dict(color='#4D7C8A', width=3))
    marker3 = dict(color='#7F9C96', line=dict(color='#7F9C96', width=3))
    trace1 = go.Bar(y=df['Country'], x=df['Active Cases'], name='Active Cases', orientation='h', marker=marker1,
                    text=df['Active Cases'], texttemplate='%{text:.2s}', textposition='outside', width=0.8)
    trace2 = go.Bar(y=df['Country'], x=df['Total Recovered'], name='Total Recovered', orientation='h', marker=marker2,
                    text=df['Total Recovered'], texttemplate='%{text:.2s}', textposition='outside', width=0.8)
    trace3 = go.Bar(y=df['Country'], x=df['Total Deaths'], name='Total Deaths', orientation='h', marker=marker3,
                    text=df['Total Deaths'], texttemplate='%{text:.2s}', textposition='outside', width=0.8)
    layout = go.Layout(barmode='stack',
                       font=dict(family="Courier New, monospace", size=20, color="#1E2019"),
                       legend=dict(x=0.1, y=1.09,
                                   traceorder="normal",
                                   font=dict(family="sans-serif", size=20, color="black"),
                                   bordercolor="Black",
                                   borderwidth=1),
                       legend_orientation='h',
                       autosize=True,
                       xaxis_showgrid=False, yaxis_showgrid=False,
                       xaxis_showticklabels=False,
                       bargap=0.6, height=800,
                       margin=dict(l=150, r=50, b=100, t=100, pad=10))
    # Serialise the figure so the template can hand it to Plotly.js on the client.
    data = [trace1, trace2, trace3]
    fig = dict(data=data, layout=layout)
    graphJSON = json.dumps(fig, cls=plotly.utils.PlotlyJSONEncoder)
    return render_template('comparative_chart.html', graphJSON=graphJSON)
@app.route('/india_news')
def india_news():
    """Latest Indian coronavirus headlines with formatted publish dates."""
    articles = news_scrape()
    title = []
    description = []
    link = []
    published = []
    for ar in articles:
        title.append(ar["title"])
        description.append(ar["description"])
        d = dateutil.parser.parse(ar["publishedAt"])
        published.append(d.strftime('%d-%m-%Y %H:%M:%S'))
        link.append(ar["url"])
    return render_template('india_news.html', title=title, description=description, published=published, link=link)
if __name__ == '__main__':
    app.run()
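To run this locally, install the imported dependencies (flask, requests, beautifulsoup4, pandas, plotly, python-dateutil), place the templates referenced above (index.html, global_data.html, comparative_chart.html, india_news.html) in a templates/ folder next to this file, and start the development server with python followed by the file name. You will likely want to replace the NewsAPI key embedded in news_scrape() with your own.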