Created November 8, 2012 20:43
Returns the episode titles of a certain American podcast series.
{
    "require": {
        "symfony/dom-crawler": "2.2.x-dev",
        "symfony/css-selector": "2.2.x-dev"
    }
}
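These requirements pull in the development branches of the Symfony DomCrawler and CssSelector components; CssSelector is needed because the script below filters nodes with CSS selectors. Assuming Composer is available, running "composer install" alongside this file should fetch both packages and generate the vendor/autoload.php file the script requires.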
<?php

require 'vendor/autoload.php';

use Symfony\Component\DomCrawler\Crawler;

$mainPage = downloadPage('http://www.thisamericanlife.org/radio-archives');
$crawler = new Crawler($mainPage);

$baseUrl = 'http://www.thisamericanlife.org';
$episodes = array();
$urlsToScrape = array();

// Read the contents of the navigation bar so we know which yearly archive pages to download.
$yearNodes = $crawler->filter('#archive-date-nav li a');
foreach ($yearNodes as $yearNode) {
    $urlsToScrape[] = $baseUrl . $yearNode->getAttribute('href');
}

// Go through each archived year and pull out the episode titles.
foreach ($urlsToScrape as $url) {
    $page = downloadPage($url);
    $crawler = new Crawler($page);
    $showNodes = $crawler->filter('.episode-archive > h3 a');

    // Read the episode title for each show; titles are of the form "123: Episode Name".
    foreach ($showNodes as $showNode) {
        $bits = explode(':', $showNode->nodeValue);
        $epNum = $bits[0];
        $episodes[$epNum] = $showNode->nodeValue;
    }
}

// Sort by episode number and print one title per line.
ksort($episodes);
foreach ($episodes as $episode) {
    echo $episode . PHP_EOL;
}

function downloadPage($url) {
    // Simple file-based caching, keyed on a hash of the URL.
    $cacheDir = 'cache';
    $cacheExpiry = 60 * 60 * 24 * 7; // One week
    $urlHash = sha1($url);
    $cacheFile = $cacheDir . '/' . $urlHash;

    if (!file_exists($cacheDir)) {
        mkdir($cacheDir);
    }

    // Serve a fresh cached copy if we have one; otherwise download and cache the page.
    if (file_exists($cacheFile) && (time() - filemtime($cacheFile)) < $cacheExpiry) {
        return file_get_contents($cacheFile);
    } else {
        $page = file_get_contents($url);
        file_put_contents($cacheFile, $page);
        return $page;
    }
}
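With the dependencies installed, the script can be run with the PHP CLI (the filename is whatever this gist file is saved as, e.g. scrape.php): it downloads each yearly archive page, caches the responses under cache/ for a week, and prints the episode titles sorted by episode number, one per line.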