from networkx import *
import os

os.chdir('/Where/You/Stored/TheData/File/')

# Load the Twitter network as an undirected graph with string node IDs
G = read_edgelist('your_net.edgelist', delimiter='\t', create_using=Graph(), nodetype=str)
print(info(G))

# N holds the ego's direct neighbors (friends)
N = list(G.neighbors('your_twitter_id'))

# F collects friends-of-friends: neighbors of neighbors that are neither
# the ego itself nor one of its direct neighbors
F = []
for u in N:
    D = list(G.neighbors(u))
    for i in D:
        if N.count(i) < 1 and i != 'your_twitter_id':
            F.append(i)
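# The loop above can equivalently be written with set operations; this is just a
# sketch of the same friends-of-friends computation, assuming the same graph G and
# ego ID as above. Unlike F, it drops duplicate entries.
ego = 'your_twitter_id'
friends = set(G.neighbors(ego))
friends_of_friends = set()
for u in friends:
    friends_of_friends.update(G.neighbors(u))
friends_of_friends -= friends | {ego}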
#!/usr/bin/env python
# encoding: utf-8
"""
exploded_view_3d.py
The purpose of this script is to create an 'exploded view'
of a network in 3D using hierarchical clustering of geodesic
distances in NetworkX and UbiGraph.
This script is intended as an illustrative proof of concept.
"""
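# The preview of exploded_view_3d.py cuts off above. As a rough sketch of the
# clustering step the docstring describes (not the script's actual code), geodesic
# distances can be clustered hierarchically with NetworkX and SciPy; the UbiGraph
# drawing calls are omitted, and G (a connected graph) and k (number of clusters)
# are placeholder inputs.
import networkx as nx
import numpy as np
from scipy.cluster.hierarchy import linkage, fcluster
from scipy.spatial.distance import squareform

def geodesic_clusters(G, k):
    # Pairwise shortest-path (geodesic) distances as a symmetric matrix
    nodes = list(G.nodes())
    lengths = dict(nx.all_pairs_shortest_path_length(G))
    D = np.array([[lengths[u][v] for v in nodes] for u in nodes], dtype=float)
    # Average-linkage hierarchical clustering, cut into k groups
    Z = linkage(squareform(D), method='average')
    labels = fcluster(Z, t=k, criterion='maxclust')
    return dict(zip(nodes, labels))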
#!/usr/bin/env python
# encoding: utf-8
"""
Experiments for growing fractal networks
Created by Drew Conway on 2009-05-19.
Copyright (c) 2009. All rights reserved.
"""
import sys
import urllib2
import html5lib
from html5lib import treebuilders

def parse_data(player_urls):
    # Returns a dict of player data parse trees indexed by player name
    # Create a dict indexed by player names
    player_data = dict.fromkeys(player_urls.keys())
    # html5lib integrates the easy-to-use BeautifulSoup parse tree using the treebuilders library.
    parser = html5lib.HTMLParser(tree=treebuilders.getTreeBuilder("beautifulsoup"))
    # Download player profile data and parse using html5lib
    for name in player_urls.keys():
        player_data[name] = parser.parse(urllib2.urlopen(player_urls[name]))
    return player_data
import os
import sys
import csv

def get_players(path):
    # Returns a list of player names from CSV file
    reader = csv.reader(open(path, 'U'), delimiter=',')
    players = []
    row_num = 0
    for row in reader:
        # Skip the header row; the player name is assumed to be in the first column
        if row_num > 0:
            players.append(row[0])
        row_num += 1
    return players
import urllib2

def get_player_profiles(player_list):
    # Returns a dict of player profile URLs to be used in the next step
    # Dict will hold player profile pages indexed by player name
    player_profile_urls = dict.fromkeys(player_list)
    for n in player_list:
        names = n.split(' ')
        # Search for the player names at NFL.com to get their individual player profiles, which contain the
        # data we ultimately want.
def write_data(data, path, new_path):
    # Takes data dict and writes new data to a new file
    reader = csv.reader(open(path, 'U'), delimiter=',')
    writer = csv.writer(open(new_path, "w"))
    row_num = 0
    for row in reader:
        if row_num < 1:
            # Keep the same column headers as before, so we simply
            # re-write the first row.
            writer.writerow(row)
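# Taken together, the comments above describe a pipeline: read player names from a
# CSV, look up each player's profile URL, parse the profile pages, and write the
# combined data back out. A hypothetical driver chaining these functions (file
# names are placeholders) might look like:
players = get_players('players.csv')
player_urls = get_player_profiles(players)
player_data = parse_data(player_urls)
write_data(player_data, 'players.csv', 'players_with_profiles.csv')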
# File-Name: currency_converter.R
# Date: 2009-11-17
# Author: Drew Conway
# Purpose: Convert currency data
# Data Used: vc_invests.csv
# Packages Used: foreign,XML
# Output File: vc_invests_USD.csv
# Data Output:
# Machine: Drew Conway's MacBook
# File-Name: cpi_oprobit.R
# Date: 2009-11-17
# Author: Drew Conway
# Purpose: Quick ordered probit analysis of the Corruption Perceptions Index 2009
# to check for effect of number of surveys used on CPI scores
# Data Used: corruption_index.csv
# available here: http://www.drewconway.com/zia/wp-content/uploads/2009/11/corruption_index.csv
# Packages Used: foreign,Zelig
# Output File:
# Data Output: