# Write one employee record per row. The values are written as plain strings;
# the output file is assumed to be opened with encoding='utf-8' and newline='',
# so the original .encode('utf-8') calls (a Python 2 leftover) are not needed
# and would store b'...' literals in the CSV under Python 3.
writer.writerow([
    name,
    position,
    office,
    extn,
    start_date,
    salary])
file.close()
print('CSV created')
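The writer.writerow() call above presumes a file handle and csv.writer created earlier in the script, which the gist does not show. A minimal sketch of that setup, assuming a hypothetical output filename (employees.csv) and illustrative header labels:

import csv

# Hypothetical setup for the `file` and `writer` used above; the filename and
# column labels are assumptions for illustration, not from the original code.
file = open('employees.csv', 'w', newline='', encoding='utf-8')
writer = csv.writer(file)
writer.writerow(['Name', 'Position', 'Office', 'Extn.', 'Start date', 'Salary'])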
import requests
import csv
url = 'https://datatables.net/examples/ajax/data/arrays.txt?_=1656247207356'
# Headers mirroring the browser's XHR request; the cookie value is
# session-specific, so replace it with one captured from your own browser.
headers = {
    'accept': 'application/json, text/javascript, */*; q=0.01',
    'cookie': 'PHPSESSID=196d9e692bf75bea701ea53461032689; __utmc=120757021; __utmz=120757021.1655866355.1.1.utmcsr=bing|utmccn=(organic)|utmcmd=organic|utmctr=(not provided); __utma=120757021.1861635672.1655866355.1656246692.1656255144.5'
}
# ScraperAPI proxy endpoint template:
url = 'http://api.scraperapi.com?api_key={YOUR_API_KEY}&url={TARGET_URL}'
# The same endpoint with an API key and the target feed filled in:
url = 'http://api.scraperapi.com?api_key=51e43be283e4db2a5afb62660xxxxxxx&url=https://datatables.net/examples/ajax/data/arrays.txt?_=1656247207356'
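Putting the pieces together: a sketch of fetching the feed (directly or through the ScraperAPI URL above) and writing its rows, assuming the url, headers, and writer defined earlier, and assuming the endpoint returns the standard DataTables payload, i.e. a JSON object whose "data" key holds six-element arrays:

import requests

# Fetch the table data; `url` and `headers` are the values defined above.
response = requests.get(url, headers=headers)

# Assumption: the feed returns {"data": [[name, position, office, extn,
# start_date, salary], ...]}, as in the DataTables "arrays" demo.
for name, position, office, extn, start_date, salary in response.json()['data']:
    writer.writerow([name, position, office, extn, start_date, salary])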
install.packages("rvest")
install.packages("dplyr")
http://api.scraperapi.com?api_key={YOUR_API_KEY}&url={TARGET_URL}
library(rvest)
library(dplyr)
# Fetch the page via ScraperAPI, parse all HTML tables, keep the first one
response = read_html("http://api.scraperapi.com?api_key=51e43be283e4db2a5afb62660xxxxxxx&url=https://datatables.net/examples/basic_init/multiple_tables.html")
tables = response %>% html_table()
table_one = tables[[1]]