require 'openssl'
require 'base64'
require 'cgi'
require 'open-uri'
require 'rubygems'
require 'json'
 
# Extract the link URLs (the "uu" field) from a parsed links response
def get_link_urls_from_response(response)
  response.map { |link| link["uu"] }
end
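
# For illustration only: each element of the parsed response array is a hash,
# and "uu" holds the link's canonical URL. A single element might look like
# this (the value is a placeholder, and other metric fields are omitted):
#   { "uu" => "www.example.com/some-page", ... }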
 
# You can obtain your access ID and secret key here: http://www.seomoz.org/api/keys
ACCESS_ID = ENV["ID"]
SECRET_KEY = ENV["KEY"]
 
# Set your Expires value several minutes into the future.
# Values excessively far in the future will not be honored by the Mozscape API.
expires = Time.now.to_i + 300
 
# A linefeed is required between your AccessID and Expires.
string_to_sign = "#{ACCESS_ID}\n#{expires}"
 
# Get the "raw" or binary output of the hmac hash.
binary_signature = OpenSSL::HMAC.digest('sha1', SECRET_KEY, string_to_sign)
 
# We need to base64-encode it and then url-encode that.
URL_SAFE_SIGNATURE = CGI::escape(Base64.encode64(binary_signature).chomp)
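
# For illustration, with placeholder credentials (not real values):
#   ACCESS_ID = "member-xxxxxxxxxx", expires = 1376000000
#   string_to_sign == "member-xxxxxxxxxx\n1376000000"
# The signature sent to the API is the URL-safe, base64-encoded HMAC-SHA1
# of that string, keyed with your secret key.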
 
# Set Link Metrics request parameters
source_cols = "4"
target_cols = "4"
link_cols = "0"
scope = "page_to_subdomain"
sort = "page_authority"
filter = "external+follow"
limit = "150"
 
target_domains = []
responses = []
 
# Check for the compare-all option flag (-ca) and store it if present
option_flag = ARGV.pop if ARGV.size == 4
 
# Percent-encode each domain being compared and collect them
ARGV.each { |domain| target_domains << CGI.escape(domain) }
 
# Base URL for requesting link metrics
request_base = "http://lsapi.seomoz.com/linkscape/links/"
 
# Now put your entire request query string together
request_query_string = "?SourceCols=#{source_cols}&TargetCols=#{target_cols}&LinkCols=#{link_cols}&Scope=#{scope}&Sort=#{sort}&Filter=#{filter}&Limit=#{limit}&AccessID=#{ACCESS_ID}&Expires=#{expires}&Signature=#{URL_SAFE_SIGNATURE}"
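
# A fully assembled request URL looks roughly like this (placeholder values):
#   http://lsapi.seomoz.com/linkscape/links/example.com?SourceCols=4&TargetCols=4&LinkCols=0&Scope=page_to_subdomain&Sort=page_authority&Filter=external+follow&Limit=150&AccessID=member-xxxx&Expires=1376000000&Signature=...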
 
target_domains.each do |target_domain|
  request_url = request_base + target_domain + request_query_string

  # Fetch the URL (URI.open is provided by open-uri)
  response = URI.open(request_url).read

  # Push the parsed JSON response into the responses array
  responses << JSON.parse(response)

  # Delay the next request to stay within the API rate limit
  sleep(10)
end
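
# A minimal hardening sketch (not part of the original flow): wrapping the
# fetch in begin/rescue lets one failed domain be skipped instead of
# aborting the whole run.
#
#   begin
#     responses << JSON.parse(URI.open(request_url).read)
#   rescue OpenURI::HTTPError, JSON::ParserError => e
#     warn "Request for #{target_domain} failed: #{e.message}"
#   end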
 
# Create array to store link profiles
link_profiles = []
 
# Get URLs from each response
responses.each { |response| link_profiles << get_link_urls_from_response(response) }
 
# Find the links common to the latter two domains (the competitors)
common_links = link_profiles[1] & link_profiles[2]
 
# If the -ca flag is present, compare all three; otherwise subtract our domain's links
if option_flag == "-ca"
  common_links &= link_profiles[0]
else
  common_links -= link_profiles[0]
end
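
# For reference, Ruby's array set operations used above:
#   ["a", "b"] & ["b", "c"]  #=> ["b"]   (intersection)
#   ["a", "b"] - ["a"]       #=> ["b"]   (difference)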
 
# Output possible link opportunities
puts
puts "You found #{common_links.size} common links."
puts
unless common_links.empty?
  common_links.each { |url| puts url }
  puts

  # Write the common links to a timestamped file
  timestamp = Time.now.to_i
  file_name = "common-links-#{timestamp}.txt"
  File.open(file_name, "w") do |file|
    common_links.each { |url| file.puts url }
  end
  puts "Common links saved in working directory: #{file_name}"
  puts
end
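
# Example usage (assuming the script is saved as common_links.rb; the
# domains below are placeholders):
#   ID=member-xxxxxxxxxx KEY=your-secret-key \
#     ruby common_links.rb yourdomain.com competitor1.com competitor2.com
#
# Pass -ca as a fourth argument to keep only links that all three domains
# share:
#   ID=member-xxxxxxxxxx KEY=your-secret-key \
#     ruby common_links.rb yourdomain.com competitor1.com competitor2.com -ca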