Skip to content

Instantly share code, notes, and snippets.

@devton
Created February 8, 2015 06:39
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save devton/9ca52d6e7e2075a74899 to your computer and use it in GitHub Desktop.
app/services/crawler/web_spec.rb
# frozen_string_literal: true

require "rails_helper"

# Service spec for Crawler::Web: verifies that crawling a start page
# discovers every internal link reachable from it.
RSpec.describe Crawler::Web, type: :service do
  describe ".collect_links_from" do
    # Expected result: all four fixture pages, which link to one another.
    let(:collected_links) do
      [
        "http://www.example.com/page_link_1.html",
        "http://www.example.com/page_link_2.html",
        "http://www.example.com/page_link_3.html",
        "http://www.example.com/page_link_4.html"
      ]
    end

    before do
      # Stub each URL with its fixture body so the crawler makes no real
      # HTTP requests. (`upto` takes the block directly — no `.each` needed.)
      1.upto(4) do |i|
        FakeWeb.register_uri(
          :get,
          "http://www.example.com/page_link_#{i}.html",
          body: File.read(Rails.root + "spec/support/page_link_#{i}.html")
        )
      end
    end

    context "collect all internal url's from site" do
      subject { Crawler::Web.collect_links_from "http://www.example.com/page_link_1.html" }

      it { is_expected.to eq(collected_links) }
    end
  end
end
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment