This gist shows how to create a GIF screencast using only free OS X tools: QuickTime, ffmpeg, and gifsicle.
To capture the video (file size: 19 MB), use the free "QuickTime Player" application:
<?
/////////////////////
// slack2html
// by @levelsio
/////////////////////
//
/////////////////////
// WHAT DOES THIS DO?
/////////////////////
//
# Requires the silver searcher - https://github.com/ggreer/the_silver_searcher
# Reads one search term per line from valid_words.txt and reports every
# case-insensitive match under chapters/.
#
# `|| [ -n "$line" ]` keeps the final line even when the file has no trailing
# newline, so a sentinel empty line at the end is no longer required.
while IFS= read -r line || [ -n "$line" ]; do
  # Skip blank lines: `ag -i ""` would otherwise match every line of every file.
  [ -z "$line" ] && continue
  echo '-------------------------'
  echo "Checking for word(s): $line"
  ag -i "$line" chapters/
done < valid_words.txt
require 'simple_worker' | |
require 'eventmachine' | |
require 'em-http-request' | |
require 'nokogiri' | |
require 'aws' | |
require 'redis' | |
class RedEx < SimpleWorker::Base | |
merge_gem 'em-redis' |
var http = require("http"); | |
var fs = require("fs"); | |
var crawler = require("simplecrawler"); | |
var cheerio = require("cheerio"); | |
var util = require('util'); | |
var exit = require('exit'); | |
// Shorthand logger bound to the console so it can be passed around freely.
var log = console.log.bind(console);

// Render any value as a string with no depth limit, via util.inspect.
function str(a) {
  var rendered = util.inspect(a, false, null);
  return rendered;
}
var Crawler = require("crawler").Crawler; // https://github.com/sylvinus/node-crawler | |
var S = require('string'); | |
var fs = require('fs'); | |
// A list of some patterns that will show up in webcam URLs | |
var patterns = ["jpg/image.jpg\?r=" | |
, "mjpg/video.mjpg" | |
, "record/current.jpg" | |
, "cgi-bin/faststream.jpg" | |
, "oneshotimage.jpg" |
// Crawl www.geocaching.com starting from the /login/ page.
var Crawler = require("simplecrawler");
var querystring = require('querystring');

var myCrawler = new Crawler("www.geocaching.com", "/login/");

// Fetch over HTTPS and retain session cookies between requests.
myCrawler.acceptCookies = true;
myCrawler.initialProtocol = "https";

// Emit a log line every time the crawler stores a new cookie.
myCrawler.on("addcookie", function (storedCookie) {
  console.log("Cookie");
});
#!/usr/local/bin/ruby
# crawler.rb
# by: Jason Larsen
# a generic web crawler that allows the user to do whatever they want by passing blocks
# @version 0.7
# 14 Dec 2009
# 0.6 things seem to be working well
# 0.7 modified so that URLs being added to the queue have their fragments truncated;
#     this should save a lot of work
# Demonstration crawler built on PoltergeistCrawler: loads the Hacker News
# front page, follows the "More" pagination link once, then navigates back
# to the root by injecting JavaScript into the page.
class ExampleCrawler < PoltergeistCrawler
  # Runs the three-step browsing sequence; order matters, each step acts on
  # the page state left by the previous one.
  def crawl
    visit "https://news.ycombinator.com/"
    click_on "More"
    page.evaluate_script("window.location = '/'")
  end
end

ExampleCrawler.new.crawl
require 'open-uri' | |
require 'nokogiri' | |
class PublicFileCrawler | |
def initialize(params={}) | |
@call_sign = params[:call_sign] | |
@url = "https://stations.fcc.gov/station-profile/#{@call_sign}/political-files/browse-%3e2012" | |
@checked = {} | |
@found = {} |