# List all registered VirtualBox VMs as "name" {uuid} pairs to find the target.
vboxmanage list vms
# Unregister the chosen VM and delete its disk images / config from disk.
# DESTRUCTIVE and irreversible — replace <uuid> with a UUID from the list above.
vboxmanage unregistervm <uuid> --delete
# Enable recursive ** globbing so the shell expands the shard patterns below
# through arbitrarily deep subdirectories (bash >= 4).
shopt -s globstar
# Wipe all data and transaction-log files under every consumers_s*/orders_s*
# shard directory. FIX: the original lines ended in stray " | |" / " |"
# paste artifacts that made each command an invalid pipeline; also replaced
# "-exec rm -rf {} \;" with "-delete", which is equivalent for -type f and
# avoids spawning one rm process per matched file (GNU find).
find ./consumers_s*/**/data/* -type f -delete
find ./consumers_s*/**/tlog/* -type f -delete
find ./orders_s*/**/data/* -type f -delete
find ./orders_s*/**/tlog/* -type f -delete
// NOTE(review): truncated snippet — the parse() body is cut off after `pos`,
// and every line carries a stray " | |" paste artifact (wiki-table residue).
// Both are left untouched in this docs-only pass.
// Factory for a Solr-style date-math parser: parse() takes a date-math
// expression (e.g. "/DAY" or "+6MONTHS") and starts from "now" as a moment.
var DateMathParser = function() { | |
return { | |
parse: function(match) { | |
// Start from the current time; an empty expression returns it unmodified.
var dt = new moment(); | |
if (0 == match.length) { | |
return dt; | |
}; | |
// Capture groups: 1 = text before "/", 2 = optional "/" (rounding marker),
// 3 = unit name (stops at the next +/-), 4 = remaining expression —
// presumably consumed iteratively via `pos` in the missing remainder.
var splitter = new RegExp("([^/]*)([/]{0,1})([^\\-\\+]*)(.*)"); | |
var pos = 0; |
import java.io.ByteArrayOutputStream; | |
import java.io.IOException; | |
import java.io.InputStream; | |
import java.util.List; | |
import java.util.Map; | |
import java.util.Properties; | |
import java.util.concurrent.ExecutorService; | |
import java.util.concurrent.Executors; | |
import org.apache.avro.io.BinaryDecoder; |
import java.io.ByteArrayOutputStream; | |
import java.io.IOException; | |
import java.io.InputStream; | |
import java.util.List; | |
import java.util.Map; | |
import java.util.Properties; | |
import org.apache.avro.io.BinaryDecoder; | |
import org.apache.avro.io.BinaryEncoder; | |
import org.apache.avro.io.DecoderFactory; |
# Build a 100-row gzipped sample of the order-data file straight out of HDFS.
# FIX: the original lines ended in stray " | |" / " |" paste artifacts that
# broke each pipeline.
hadoop fs -cat /Work/lon_text/lon_order_data_t/cdw320_lon_order_data_t.1.txt | head -100 | gzip > test.csv.gz
# Same 100-row sample from the local copy, staged under ../../tsnyder/.
# head reads the file directly (removed the useless `cat file | head`).
head -100 cdw320_lon_order_data_t.1.txt | gzip > ../../tsnyder/cdw320_lon_order_data_t.1.txt.gz
# Verify the uploaded archive by streaming it back from HDFS and decompressing.
hadoop fs -cat /Work/tsnyder/cdw320_lon_order_data_t.1.txt.gz | gunzip
// Encrypt: jo is the input (JSON object), ENCRYPTION_KEY is the shared
// secret; cipherText receives the output. NOTE(review): fragment is
// truncated — the cipher.doFinal(...) that fills cipherText is below this
// excerpt. Stray " | |" paste artifacts removed.
byte[] input = jo.toString().getBytes("utf-8");
// Derive a 16-byte AES key by hashing the passphrase with MD5.
// SECURITY: MD5 key derivation and AES/ECB (no IV, deterministic blocks)
// are both weak — prefer PBKDF2 + AES/GCM; kept only to stay compatible
// with ciphertexts already produced by this code.
MessageDigest md = MessageDigest.getInstance("MD5");
byte[] thedigest = md.digest(ENCRYPTION_KEY.getBytes("UTF-8"));
// BUG FIX: SecretKeySpec's second argument is the key *algorithm* ("AES"),
// not a cipher transformation. Passing "AES/ECB/PKCS5Padding" makes
// cipher.init() reject the key (InvalidKeyException) under SunJCE.
SecretKeySpec skc = new SecretKeySpec(thedigest, "AES");
Cipher cipher = Cipher.getInstance("AES/ECB/PKCS5Padding");
cipher.init(Cipher.ENCRYPT_MODE, skc);
byte[] cipherText = new byte[cipher.getOutputSize(input.length)];
// NOTE(review): truncated snippet — get_page_result's body and the rest of
// the phantom.create callback are cut off; stray " | |" paste artifacts on
// each line are left untouched in this docs-only pass.
// Test harness setup: headless browser (phantom) + vows BDD runner + asserts.
var phantom = require('phantom'), | |
vows = require('vows'), | |
assert = require('assert'); | |
// nesting tests inside phantom callback so we only | |
// have to create it once | |
phantom.create(function(ph) { | |
// Helper: open `url` in a fresh page and evaluate `fn` there — presumably
// comparing against `result`; TODO confirm against the missing remainder.
var get_page_result = function(url, fn, result) { | |
ph.createPage(function(page) { |
// Smoke test: configure an async, gzip-compressed Kafka producer.
// NOTE(review): truncated — the producer construction and send loop are cut
// off after the last property; stray " | |" / " |" paste artifacts on each
// line are left untouched in this docs-only pass.
public void testKafka() throws Exception { | |
Properties props = new Properties(); | |
// Direct ZooKeeper connection plus *.serializer.class keys are the old
// Scala-producer config surface — presumably Kafka 0.7/early 0.8; verify
// against the client jar on the classpath.
props.put("zk.connect", "zookeeper.cloudfront.io:2181"); | |
// 5-minute ZK session timeout.
props.put("zk.sessiontimeout.ms", "300000"); | |
// Raw byte[] payloads with String keys.
props.put("serializer.class", "kafka.serializer.DefaultEncoder"); | |
props.put("key.serializer.class", "kafka.serializer.StringEncoder"); | |
props.put("compression.codec", "1"); // gzip | |
// Async batching: up to 248 messages per batch; enqueueTimeout of -1
// blocks the caller instead of dropping events when the queue is full.
props.put("producer.type", "async"); | |
props.put("batch.size", "248"); | |
props.put("queue.enqueueTimeout.ms", "-1"); |