Skip to content

Embed URL

HTTPS clone URL

Subversion checkout URL

You can clone with
or
.
Download ZIP
USDACommunityConnectScoresFromFCC_NBM_APIs.py - This code creates a CSV output file with basic 2010 demographic information for each Census Place community in a state. It also uses this demographic information to calculate the value of two USDA Community Connect grant scoring criteria: rurality and economic need.
import json
import urllib2
import datetime
from time import strftime
def getEconomicScore(inMedianHouseholdIncome, stateMedianHouseholdIncome=56787):
    """Return the USDA Community Connect economic-need score (0-30).

    The score grows as the community's median household income falls
    relative to the state median, in 5-point steps, clamped to [0, 30].

    Args:
        inMedianHouseholdIncome: community median household income (dollars).
        stateMedianHouseholdIncome: state median household income (dollars).
            Default 56787 is Utah's 2010 value, taken from:
            http://www.census.gov/hhes/www/income/data/historical/household/2010/H08_2010.xls
            (This data would make a nice web service!)

    Returns:
        int score in {0, 5, 10, ..., 30}.
    """
    percentageOfStateAvg = inMedianHouseholdIncome / float(stateMedianHouseholdIncome) * 100
    # 79.9999999 (rather than 80) nudges communities sitting exactly on a
    # 5-point boundary down into the lower bracket.
    score = int((79.9999999 - percentageOfStateAvg) / 5) * 5
    if score < 5:
        score = 0   # below the minimum scoring threshold
    if score > 30:
        score = 30  # cap at the maximum economic-need score
    return score
def getRuralityScore(inCommunityPopulation):
    """Return the USDA Community Connect rurality score for a community.

    Smaller communities earn more points; a population of 20000 or more
    scores zero.
    """
    # (exclusive upper population bound, score) pairs, smallest bound first.
    brackets = (
        (500, 40),
        (1000, 35),
        (2000, 30),
        (3000, 25),
        (4000, 20),
        (5000, 15),
        (10000, 10),
        (20000, 5),
    )
    for upperBound, bracketScore in brackets:
        if inCommunityPopulation < upperBound:
            return bracketScore
    return 0
###
# SET THESE PARAMETERS
stateFIPSStr = '49' #FIPS code for state of Utah
outFileFolder = 'c:/projects/broadband/'
outFileName = outFileFolder + 'usdacommunityconnectscore_fromfccwebservices_' + strftime("%Y%m%d%H%M%S") + '.csv'
# ALSO SET stateMedianHouseholdIncome in getEconomicScore()
###
nbmGeographyLookupByGeographyTypeState_URL = r'http://www.broadbandmap.gov/broadbandmap/geography/state/'+ stateFIPSStr + '/censusplace?format=json&maxresults=2000&all'
nbmDemographicsByGeographyTypeID_URL = r'http://www.broadbandmap.gov/broadbandmap/demographic/jun2011/censusplace/ids/GEOGID?format=json'
responseGeography = urllib2.urlopen(nbmGeographyLookupByGeographyTypeState_URL)
jsonGeography = json.load(responseGeography)
jsonGeographyResults = jsonGeography["Results"]
count = 0
writeHeaderRow = True
outputStr = ""
for place in jsonGeographyResults:
count = count + 1
print str(count) + " " + place["geographyId"] + " " + place["geographyName"]
getDemographicURL = nbmDemographicsByGeographyTypeID_URL.replace("GEOGID", str(place["geographyId"]))
responseDemographic = urllib2.urlopen(getDemographicURL)
jsonDemographic = json.load(responseDemographic)
jsonDemographicResults = jsonDemographic["Results"]
currentRow = ""
for demographicResult in jsonDemographicResults:
keys = []
for key in demographicResult:
if key not in keys:
keys.append(key)
if writeHeaderRow == True:
for key in keys:
currentRow += '"' + key + '",'
#write out USDA rurality and economic need column headers
currentRow += '"usda_ruralityscore",'
currentRow += '"usda_economicscore"'
currentRow += '\n'
writeHeaderRow = False
usdaEconomicNeed = 0
usdaRurality = 0
for key in demographicResult:
currentRow += str(demographicResult[key])
currentRow += ','
if key == "population":
usdaRuralityScore = getRuralityScore(demographicResult[key])
if key == "medianIncome":
usdaEconomicScore = getEconomicScore(demographicResult[key])
#write out USDA rurality and economic need column data values
currentRow += str(usdaRuralityScore) + ","
currentRow += str(usdaEconomicScore)
currentRow += '\n'
outputStr += currentRow
#print outputStr
outFile = open(outFileName, "w")
outFile.write(outputStr)
outFile.close()
@BGranberg
Owner

The USDA Rural Development Community Connect grant application materials specify that applicants use the 2010 census community places list (or other populated communities listed in the Rand McNally Commercial Atlas and Marketing Guide) together with the year 2000 median household income statistics (perplexing...to say the least). Please note that the code above pulls in 2010 median household income.

For more information on the grant program, see: http://www.rurdev.usda.gov/utp_commconnect.html

Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment
Something went wrong with that request. Please try again.