Test model (dimensions and mapping):
ostool cfg.ini csvimport --model=model.json --dry-run --raise-on-error --max-lines=1 data.csv
Dry run:
ostool cfg.ini csvimport --model=model.json --dry-run data.csv
'''This is a test using the real setup with elasticsearch.
It requires you to run nginx on port 8088 with config as per
https://github.com/okfn/elastic-proxy/blob/master/elasticproxy plus,
obviously, elasticsearch on port 9200.
'''
import json | |
import paste.fixture | |
import paste.proxy |
#!/usr/bin/env python | |
import urlparse | |
import mimetypes | |
import os | |
import ConfigParser | |
import urllib2 | |
import json | |
import csv | |
import time |
import csv | |
import json | |
import geojson | |
fp = 'data/US_Rendition_FOIA.csv' | |
fpout = 'data/US_Rendition_FOIA.geojson.csv' | |
jsonout = 'data/US_Rendition_FOIA.geojson.json' | |
jsondata = [] | |
def convert(): |
<?xml version="1.0" encoding="utf-8"?> | |
<Root xmlns:wb="http://www.worldbank.org"> | |
<data> | |
<record> | |
<field name="Country or Area" key="ARB">Arab World</field> | |
<field name="Item" key="SP.POP.TOTL">Population, total</field> | |
<field name="Year">1960</field> | |
<field name="Value">96388069</field> | |
</record> | |
<record> |
// Parse a summary to extract title, tags, location and start and end | |
parseNoteSummary = function(text) { | |
var result = { | |
title: '', | |
tags: [] | |
}; | |
var ourtext = text; | |
regex = / #([\w-\.]+)/; | |
while(ourtext.search(regex)!=-1) { | |
var out = ourtext.match(regex)[1]; |
,Millions LU journeys adjusted for odd days,Millions bus journeys adjusted for odd days - new measure,Millions bus plus underground journeys adjusted for odd days - new measure,LU Average,Bus Average,LU plus bus average,LU growth,Bus growth,LU plus bus growth,LU moving average annual growth,Bus moving average annual growth,LU plus busmoving average annual growth,,,,,,,, | |
2006/2007 - 1,72.3,151.3,223.5,73.9,138.8,212.7,-5.4%,-1.5%,-2.9%,-1.6%,0.6%,-0.2%,,,,,,,, | |
2006/2007 - 2,75.6,158.9,234.5,73.8,139.1,212.9,-2.5%,3.4%,1.3%,-2.1%,0.8%,-0.2%,,,,,,,, | |
2006/2007 - 3,74.3,158.4,232.7,73.6,139.6,213.2,-2.7%,4.4%,1.9%,-2.6%,1.1%,-0.2%,,,,,,,, | |
2006/2007 - 4,77.4,161.5,238.9,74.1,140.1,214.2,8.2%,4.9%,6.0%,-1.7%,1.4%,0.3%,,,,,,,, | |
2006/2007 - 5,73.7,153.5,227.2,74.8,141.1,215.9,15.0%,9.9%,11.6%,0.2%,2.3%,1.6%,,,,,,,, | |
2006/2007 - 6,74.1,153.2,227.3,75.2,141.7,216.9,7.9%,5.8%,6.5%,1.2%,3.0%,2.3%,,,,,,,, | |
2006/2007 - 7,81.1,165.1,246.2,75.6,142.1,217.7,6.1%,4.0%,4.8%,1.8%,3.5%,2.9%,,,,,,,, | |
2006/2007 - 8,83.4,166.5,250.0,76.0 |
{ | |
"datasets": { | |
"adur_district_spending": { | |
"author": "Lucy Chambers", | |
"author_email": "", | |
"extras": { | |
"spatial-text": "Adur, West Sussex, South East England, England, United Kingdom", | |
"spatial": "{ \"type\": \"Polygon\", \"coordinates\": [ [ [-0.3715, 50.8168],[-0.3715, 50.8747], [-0.2155, 50.8747], [-0.2155, 50.8168], [-0.3715, 50.8168] ] ] }" | |
}, | |
"license": "License Not Specified", |
var jsdom = require('jsdom'); | |
var fs = require('fs'); | |
// var jquery = fs.readFileSync("./jquery-1.7.1.min.js").toString(); | |
var linklist = 'http://police.uk/data'; | |
jsdom.env({ | |
html: linklist, | |
scripts: [ | |
'http://code.jquery.com/jquery.js' |
''' Upload datawrangling handbook to wordpress site.
Copy this file to same directory as your sphinx build directory and then do
python upload.py -h
NB: You need to enable XML-RPC access to the wordpress site (via Settings -> Writing)
NB: this requires pywordpress (pip install pywordpress) and associated config
file - see https://github.com/rgrp/pywordpress