I hereby claim:
- I am cstenac on github.
- I am cstenac (https://keybase.io/cstenac) on keybase.
- I have a public key ASBp4SkWXpY9gqiw4R5Hjc0h0BCEMx7eC6k3ta5PyN51UAo
To claim this, I am signing this object:
#! /bin/sh
# Health-check a local DSS instance: probe its configuration API and record
# whether the endpoint answered (RETCODE is used by the caller / later logic).
# NOTE(review): the trailing "| |" tokens in the original were extraction
# artifacts and made every line a shell syntax error; they are removed here.

RESTART_SCRIPT=/path/to/dssrestart.sh
STACKS_BACKUP_DIR=/path/to/dir
DSS_URL=http://localhost:32000
MAX_TIME=600 # 10 minutes timeout

# -s: no progress output; -m: hard timeout so a hung DSS does not hang us.
# Variables are quoted to be safe against spaces/metacharacters in the URL.
curl -s -m "$MAX_TIME" "$DSS_URL/dip/api/get-configuration" > /dev/null
RETCODE=$?
import StringIO | |
from sklearn.tree import _tree | |
import random | |
# NOTE(review): this definition is truncated by the surrounding concatenation
# — the actual split logic is missing below L21, and the trailing "| |"
# tokens on the code lines are extraction artifacts (left byte-identical
# here). Recover the original gist before relying on this function.
def dataframe_train_test_split(size, X, Y): | |
    ## sklearn.cross_validation would not respect X / Y original index
    # `size` may be an absolute row count (int) or a fraction of len(X);
    # a fractional size is converted to an absolute count here.
    if type(size) != int: | |
        size = int (len(X.index) * size) |
Process: node [56095]
Path: /Applications/TileMill.app/Contents/Resources/node
Identifier: node
Version: ???
Code Type: X86-64 (Native)
Parent Process: node [56091]
User ID: 501
Date/Time: 2013-07-11 19:05:23.631 +0200
OS Version: Mac OS X 10.8.4 (12E55)
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; | |
import org.apache.hadoop.hive.ql.metadata.HiveException; | |
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; | |
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; | |
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; | |
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector; | |
import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector.PrimitiveCategory; | |
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory; | |
import org.apache.hadoop.io.IntWritable; |
import org.apache.hadoop.hive.ql.exec.UDFArgumentException; | |
import org.apache.hadoop.hive.ql.metadata.HiveException; | |
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF; | |
import org.apache.hadoop.hive.serde2.objectinspector.ListObjectInspector; | |
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector; | |
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector.Category; | |
/**
 * Hive generic UDF — judging by the name, presumably returns the first
 * element of an array argument (TODO confirm against the full source).
 * NOTE(review): the class body is truncated here — only the cached
 * inspector field is visible; initialize/evaluate/getDisplayString are
 * missing. The trailing "| |" tokens are extraction artifacts (left
 * byte-identical because they follow code, not comments).
 */
public class UDFArrayFirst extends GenericUDF { | |
// Inspector for the list-typed input argument — presumably cached in
// initialize(), which is not visible in this chunk; verify.
ListObjectInspector listInputObjectInspector; |
public interface GenericUDF { | |
public Object evaluate(DeferredObject[] args) throws HiveException; | |
public String getDisplayString(String[] args); | |
public ObjectInspector initialize(ObjectInspector[] args) throws UDFArgumentException; | |
} |
public class ArraySum extends UDF { | |
public double evaluate(List<Double> value) { | |
double sum = 0; | |
for (int i = 0; i < value.size(); i++) { | |
if (value.get(i) != null) { | |
sum += value.get(i); | |
} | |
} | |
return sum; | |
} |
/** A simple UDF to get the absolute value of a number */ | |
public class AbsValue extends UDF { | |
public double evaluate(double value) { | |
return Math.abs(value); | |
} | |
public long evaluate(long value) { | |
return Math.abs(value); | |
} | |
public int evaluate(int value) { | |
return Math.abs(value); |
/** A simple UDF to convert Celcius to Fahrenheit */ | |
public class ConvertToCelcius extends UDF { | |
public double evaluate(double value) { | |
return (value - 32) / 1.8; | |
} | |
} |