@JoeyAndres
JoeyAndres / uri.js
Created March 13, 2016 11:40 — forked from jlong/uri.js
URI Parsing with JavaScript
var parser = document.createElement('a');
parser.href = "http://example.com:3000/pathname/?search=test#hash";
parser.protocol; // => "http:"
parser.hostname; // => "example.com"
parser.port; // => "3000"
parser.pathname; // => "/pathname/"
parser.search; // => "?search=test"
parser.hash; // => "#hash"
parser.host; // => "example.com:3000"
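The same fields can be cross-checked outside the browser; here is a minimal sketch using Python's urlparse module (the Python 2 name; urllib.parse in Python 3), not part of the original gist:

from urlparse import urlparse

p = urlparse('http://example.com:3000/pathname/?search=test#hash')
p.scheme    # => 'http'
p.hostname  # => 'example.com'
p.port      # => 3000
p.path      # => '/pathname/'
p.query     # => 'search=test'
p.fragment  # => 'hash'
p.netloc    # => 'example.com:3000'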
@JoeyAndres
JoeyAndres / ConcatenateArray.cpp
Created December 28, 2016 23:16
C++ code for concatenating arrays.
#include <array>      // std::array
#include <algorithm>  // std::copy
#include <cstddef>    // std::size_t
#include <memory>     // std::allocator (used by the container skeleton below)

using std::array;

/**
 * Concatenates two arrays.
 *
 * @tparam T  Type of data stored in the arrays.
 * @tparam D1 Dimension of the first array.
 * @tparam D2 Dimension of the second array.
 */
template <class T, std::size_t D1, std::size_t D2>
array<T, D1 + D2> concatArray(const array<T, D1>& a1, const array<T, D2>& a2) {
  // Assumed implementation; the gist preview is truncated after the doc comment.
  array<T, D1 + D2> result;
  std::copy(a1.begin(), a1.end(), result.begin());
  std::copy(a2.begin(), a2.end(), result.begin() + D1);
  return result;
}

template <class T, class A = std::allocator<T> >
class X {
 public:
  typedef A allocator_type;
  typedef typename A::value_type value_type;
  typedef typename A::reference reference;
  typedef typename A::const_reference const_reference;
  typedef typename A::difference_type difference_type;
};
@JoeyAndres
JoeyAndres / f1-results.py
Last active October 22, 2017 04:17
Gets all the F1 race results from 1950 to 2017. The results are flattened so that they are easily used in MapReduce jobs.
#!/usr/bin/python
import sys
import urllib2
import time
import json
def get_results(output_dir):
    for year in range(1950, 2017 + 1):
        for rnd in range(1, 50):
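            # --- Sketch of the truncated loop body (not shown in the preview) ---
            # Assumes the public Ergast API as the data source and writes one
            # flattened JSON result per line, ready to be fed to MapReduce.
            url = 'http://ergast.com/api/f1/%d/%d/results.json' % (year, rnd)
            data = json.loads(urllib2.urlopen(url).read())
            races = data['MRData']['RaceTable']['Races']
            if not races:
                break  # no such round this season; move on to the next year
            out = open('%s/%d-%02d.json' % (output_dir, year, rnd), 'w')
            for result in races[0]['Results']:
                out.write(json.dumps(result) + '\n')  # one flat record per line
            out.close()
            time.sleep(1)  # throttle requests to the API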
package HadoopF1.OverallTeamRaceResult;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import HadoopF1.ResultsRecord;
package HadoopF1.OverallTeamRaceResult;
import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;
public class OverallTeamRaceResultReducer extends Reducer<Text, IntWritable, Text, IntWritable> {
  @Override  // truncated in the preview; a per-team points sum is assumed below
  protected void reduce(Text team, Iterable<IntWritable> points, Context context) throws IOException, InterruptedException {
    int total = 0;
    for (IntWritable p : points) { total += p.get(); }
    context.write(team, new IntWritable(total));
  }
}
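For a quick local sanity check of the aggregation the Java job appears to perform (summing race points per constructor, judging by the class names), the flattened records can be totalled directly in Python; the input path and the Ergast field names (Constructor.name, points) are assumptions:

import json
import collections

totals = collections.Counter()
for line in open('results/1950-01.json'):  # hypothetical output file from f1-results.py
    record = json.loads(line)
    totals[record['Constructor']['name']] += float(record['points'])
for team, pts in totals.most_common():
    print team, pts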
@JoeyAndres
JoeyAndres / hadoop.sh
Created October 23, 2017 18:35
/etc/profile.d Hadoop environment script.
# Hadoop
export HADOOP_HOME=/opt/hadoop/hadoop-2.7.4
export PATH=$PATH:$HADOOP_HOME/bin:$HADOOP_HOME/sbin
#!/bin/bash
start() {
    source "/etc/profile.d/hadoop.sh"
    start-dfs.sh
    start-yarn.sh
}
stop() {
    source "/etc/profile.d/hadoop.sh"
    stop-yarn.sh
    stop-dfs.sh
}
# Assumed dispatcher: run the function named by $1 ("start" or "stop").
"$@"
[Unit]
Description=Hadoop start/stop

[Service]
User=hadoop
Group=hadoop
Type=oneshot
ExecStart=/opt/hadoop/hadoop-service.sh start
ExecStop=/opt/hadoop/hadoop-service.sh stop
RemainAfterExit=yes

# [Install] section assumed so the unit can be enabled at boot.
[Install]
WantedBy=multi-user.target
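Saved as, say, /etc/systemd/system/hadoop.service (path assumed), the cluster daemons can then be managed with systemctl start hadoop, systemctl stop hadoop, and systemctl enable hadoop to start them at boot.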
@JoeyAndres
JoeyAndres / interfaces
Last active October 27, 2017 20:06
/etc/network/interfaces static IP setup
auto eth0
iface eth0 inet static
    address 192.168.1.1
    netmask 255.255.0.0
    gateway 192.168.0.1
    dns-nameservers 8.8.8.8 8.8.4.4
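To apply the change, bring the interface down and up again with ifdown eth0 followed by ifup eth0 (or reboot); note that the dns-nameservers line only takes effect when the resolvconf package is installed.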