# Question 2: Which states provide the highest average salaries for data-related jobs?
## 2.1 Create a table for salary mapping
library(dplyr)
## Average pay per state for the three data-related job titles
Salary_state = SalaryData_Join %>%
  filter(JOB_TITLE_SUBGROUP %in% c("data scientist", "data analyst", "business analyst")) %>%
  group_by(WORK_STATE) %>%
  summarise(Avg_pay = mean(PAID_WAGE_PER_YEAR))
## Attach full state names by joining on the abbreviation, then drop unused columns
Salary_table = inner_join(Salary_state, States, by = c("WORK_STATE" = "Abr"))
Salary_table = Salary_table[, -c(5, 6)]
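To read the answer to Question 2 straight off this table, the states can be ranked by average pay. This is only an illustrative check against the Salary_table built above, not code from the original app:

## Illustrative only: rank states by average pay (assumes the Salary_table above)
Salary_table %>%
  arrange(desc(Avg_pay)) %>%
  head(10)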
### Treemap
library(treemap)
## Treemap indexed by WORK_STATE and modelchange; tile size = n, tile colour = salary
L = tmPlot(Cities,
           index = c("WORK_STATE", "modelchange"),
           vSize = "n",
           vColor = "salary",
           type = "categorical",
           range = c(-15, 95),
           palette = c("#fdae61", "#fee08b", "#d9ef8b", "#a6d96a", "#66bd63", "#1a9850"),
           algorithm = "pivotSize",
           sortID = "-size")
## Server Side
library(shiny)
library(googleVis)
server = shinyServer(function(input, output) {
  colfunc = colorRampPalette(c("grey", "pink"))
  ## Show the map with googleVis, filtered by the selected year and industry
  output$map = renderGvis({
    gvisGeoChart(Fuc_Map(Fortune, input$Year, input$Industry), "Country", "Count",
                 options = list(width = "auto", height = "490px",
                                colors = "['#90EE90', '#FFA500', '#FF0000']"))
  })
})
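For reference, a chart rendered with renderGvis() is placed in the UI with htmlOutput(). A minimal UI sketch, assuming Year and Industry selectors whose choices here are placeholders rather than the original app's values:

## Minimal UI sketch (assumed; choices are placeholders, not from the original app)
ui = shinyUI(fluidPage(
  selectInput("Year", "Year", choices = c("2015")),
  selectInput("Industry", "Industry", choices = c("All")),
  htmlOutput("map")
))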
### Scraping with the Python package "Beautiful Soup"
### Scrape the links for every country and store the data in a txt file called "record3.txt"
import requests
from bs4 import BeautifulSoup

f = open('record.txt', 'r')  # 'r' for read
lines3 = f.readlines()
f.close()
out = open('record3.txt', 'w')  # store the scraped country ranks here
for i in range(len(lines3)):
    url = "http://www.alexa.com/siteinfo/" + lines3[i].strip()
    text = requests.get(url).text
    soup = BeautifulSoup(text, "html.parser")
    # the country rank sits inside the span tagged data-cat="countryRank"
    TT = soup.find("span", {"data-cat": "countryRank"}).find("a").get_text()
    out.write(TT + "\n")
out.close()
## Server Side
library(shiny)
library(plotly)
server = shinyServer(function(input, output) {
  ## Show the choropleth map with plotly, filtered by the selected neutrality measure
  output$map = renderPlotly({
    p = plot_ly(Fun_neutral(Neutural_table_join, input$Neutral),
                z = indicator, text = country, locations = CODE, type = 'choropleth',
                color = indicator, marker = list(line = l),
                colorbar = list(title = 'Ranking'),
                colors = colorRampPalette(c("red", "yellow", "dark green"))(
                  length(Fun_neutral(Neutural_table_join, input$Neutral)[, 1])))
    p
  })
})
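On the UI side, renderPlotly() pairs with plotlyOutput(). A minimal sketch, assuming a single Neutral selector whose choices here are placeholders rather than the original app's values:

## Minimal UI sketch (assumed; selector choices are placeholders)
ui = shinyUI(fluidPage(
  selectInput("Neutral", "Neutrality measure", choices = c("Overall")),
  plotlyOutput("map")
))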