Skip to content

Instantly share code, notes, and snippets.

#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
import requests
import json
import csv
def get_info(link):
#!/usr/bin/env python
import scraperwiki
import requests
from bs4 import BeautifulSoup
stem = "https://online.contractsfinder.businesslink.gov.uk/Search%20Contracts/Search%20Contracts%20Results.aspx?site=1000&lang=en&sc=0e73dd37-0b89-425a-9867-d7386f3c4f25&osc=7ed25e5c-ef12-442c-b1b3-b39078a50853&rb=1&ctlPageSize_pagesize=200&ctlPaging_page="
for p in range(1,2):
url = stem + str(p)
html = requests.get(url).content
@DataMinerUK
DataMinerUK / index.html
Last active August 29, 2015 14:01
azimuthal-map
<!DOCTYPE html>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html;charset=utf-8"/>
<script src="http://d3js.org/d3.v3.min.js" charset="utf-8"></script>
<script src="http://d3js.org/d3.geo.projection.v0.min.js" charset="utf-8"></script>
<script src="http://d3js.org/topojson.v1.min.js"></script>
<link type="text/css" rel="stylesheet" href="style.css"/>
<style type="text/css">
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import csv
sys.stdin = os.fdopen( sys.stdin.fileno(), "rU" )
# Open stdin as a csv
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import csv
col = int(sys.argv[1])
sys.stdin = os.fdopen( sys.stdin.fileno(), "rU" )
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import csv
sys.stdin = os.fdopen( sys.stdin.fileno(), "rU" )
# Open stdin as a csv
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import csv
#col = int(sys.argv[1])
sys.stdin = os.fdopen( sys.stdin.fileno(), "rU" )
@DataMinerUK
DataMinerUK / nazi-loot.py
Created December 9, 2013 15:35
Scraper for Nazi loot
#!/usr/bin/env python
import scraperwiki
import requests
from bs4 import BeautifulSoup
stem = "http://www.lostart.de"
site = "Webs/EN/Datenbank/KunstfundMuenchen.html?cms_param=INST_ID%3D12366%26page%3D"
site_id = "#id66922"
@DataMinerUK
DataMinerUK / violations-syria.py
Created November 28, 2013 13:27
Scraper of data on deaths in Syria collected by the Violations Documentation Center in Syria (http://www.vdc-sy.info/index.php/en/martyrs)
#!/usr/bin/env python
import scraperwiki
import requests
from bs4 import BeautifulSoup
from time import sleep
for page in range(1,798):
url = "http://www.vdc-sy.info/index.php/en/martyrs/" + str(page) + "/c29ydGJ5PWEua2lsbGVkX2RhdGV8c29ydGRpcj1ERVNDfGFwcHJvdmVkPXZpc2libGV8ZXh0cmFkaXNwbGF5PTB8"
print url
#!/usr/bin/env python
import scraperwiki
import requests
from bs4 import BeautifulSoup
# scraperwiki.sql.execute('drop table swdata')
# scraperwiki.sql.commit()
def get_next_page(offset):