Skip to content

Instantly share code, notes, and snippets.

@hammer
hammer / pysl.py
Created December 7, 2008 09:13
Using the SoftLayer API via XML-RPC with Python
import xmlrpclib
from pprint import pprint
"""
A simple workout for the SoftLayer API as exposed via XML-RPC
"""
# TODO: Make this object-based instead of dictionary-based
# TODO: Read authentication information from a secure location
# TODO: Demonstrate how to call a method that requires a parameter
# Get this information from https://manage.softlayer.com/Administrative/apiKeychain
@hammer
hammer / pysoftlayer.py
Created December 18, 2008 09:07
A more usable version of my Python wrapper for the SoftLayer API
import xmlrpclib
"""
A simple Python wrapper for the SoftLayer API as exposed via XML-RPC
"""
# TODO: Make this object-based for services, data types, and methods instead of dictionary-based
# TODO: Read authentication information from a secure location
# TODO: Demonstrate how to call a method that requires a parameter
$ ./bin/start-hbase.sh
Exception in thread "main" java.lang.NoClassDefFoundError: org/apache/hadoop/hbase/HBaseConfTool
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.hbase.HBaseConfTool
at java.net.URLClassLoader$1.run(URLClassLoader.java:200)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:188)
at java.lang.ClassLoader.loadClass(ClassLoader.java:315)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:330)
at java.lang.ClassLoader.loadClass(ClassLoader.java:250)
at java.lang.ClassLoader.loadClassInternal(ClassLoader.java:398)
$ bin/hadoop org.apache.hadoop.hbase.PerformanceEvaluation sequentialWrite 4
10/06/11 03:29:15 INFO zookeeper.ZooKeeper: Client environment:zookeeper.version=3.3.1-942149, built on 05/07/2010 17:14 GMT
10/06/11 03:29:15 INFO zookeeper.ZooKeeper: Client environment:host.name=172.28.172.2
10/06/11 03:29:15 INFO zookeeper.ZooKeeper: Client environment:java.version=1.6.0_17
10/06/11 03:29:15 INFO zookeeper.ZooKeeper: Client environment:java.vendor=Apple Inc.
10/06/11 03:29:15 INFO zookeeper.ZooKeeper: Client environment:java.home=/System/Library/Frameworks/JavaVM.framework/Versions/1.6.0/Home
10/06/11 03:29:15 INFO zookeeper.ZooKeeper: Client environment:java.class.path=/Users/hammer/codebox/hadoop-0.20.2+228/bin/../conf:/System/Library/Frameworks/JavaVM.framework/Versions/1.6.0/Home/lib/tools.jar:/Users/hammer/codebox/hadoop-0.20.2+228/bin/..:/Users/hammer/codebox/hadoop-0.20.2+228/bin/../hadoop-0.20.2+228-core.jar:/Users/hammer/codebox/hadoop-0.20.2+228/bin/../lib/commons-cli-1.2.jar:/Users/hammer/codebox/hadoo
@hammer
hammer / server_side_datatable.py
Created January 21, 2014 15:58
Implementation of a server-side DataTable (cf. http://datatables.net/release-datatables/examples/data_sources/server_side.html) using Flask, Flask-RESTful, and Psycopg. You should only have to edit source_database, source_table, and source_columns to make it work. Of course you'll probably want to edit the resource name and URL as well.
from string import Template
from distutils.util import strtobool
from flask import Flask, request
# NOTE(review): the `flask.ext.*` namespace was removed in Flask 1.0; modern code
# imports `from flask_restful import Api, Resource` — confirm the target Flask version.
from flask.ext.restful import Api, Resource
import psycopg2
# Flask application object. NOTE(review): the original comment said "Flask-RESTful
# Api object", but that object (presumably `Api(app)`) is created below this
# truncated excerpt — verify against the full gist.
app = Flask(__name__)
@hammer
hammer / HelloAvro.scala
Last active October 17, 2022 04:16
Concise example of how to write an Avro record out as JSON in Scala
import java.io.{IOException, File, ByteArrayOutputStream}
import org.apache.avro.file.{DataFileReader, DataFileWriter}
import org.apache.avro.generic.{GenericDatumReader, GenericDatumWriter, GenericRecord, GenericRecordBuilder}
import org.apache.avro.io.EncoderFactory
import org.apache.avro.SchemaBuilder
import org.apache.hadoop.fs.Path
import parquet.avro.{AvroParquetReader, AvroParquetWriter}
import scala.util.control.Breaks.break
object HelloAvro {
@hammer
hammer / HelloKryo.scala
Created May 4, 2014 23:22
Demonstration of problems with chill-avro (twitter/chill #183)
import com.twitter.bijection.avro.{GenericAvroCodec, GenericAvroCodecs}
import com.twitter.bijection.Injection
import com.twitter.chill._
import org.apache.avro.generic.{GenericRecord, GenericRecordBuilder}
import org.apache.avro.SchemaBuilder
import scala.util.{Failure, Success}
object HelloKryo {
def main(args: Array[String]) {
// Build a schema
#!/bin/bash
# Poor-man's JVM profiler: take repeated jstack thread dumps of one process.
# Usage: script <pid> <nsamples> <sleeptime-seconds>
# The dumps are piped onward for aggregation; the rest of the pipeline after
# `done | \` is truncated in this excerpt.
pid=$1        # target JVM process id
nsamples=$2   # how many jstack samples to collect
sleeptime=$3  # seconds to wait between samples
for x in $(seq 1 $nsamples)
do
jstack $pid
sleep $sleeptime
done | \
(* A single 2-D observation: feature [x] paired with value [y]. *)
type sample = {
x : float;
y : float
}
(* A two-parameter model with coefficients [theta] and [beta].
   NOTE(review): their exact roles (e.g. slope/intercept of a linear fit)
   are not shown in this excerpt — confirm against the code that uses them. *)
type model = {
theta : float;
beta : float
}
@hammer
hammer / seqs2bed.py
Created January 11, 2016 22:20
Convert DeepSEA training sequences to a BED file
import h5py
# HDF5 file with two arrays: 'trainxdata' (samples) and 'traindata' (labels)
INFILE_SAMPLES = ''  # path to the HDF5 training file — fill in before running
INFILE_REFERENCE_FASTA = ''  # path to the reference-genome FASTA — fill in before running
OUTFILE_FASTA = 'deepsea_train10k.fa'  # output FASTA of extracted training sequences
OUTFILE_BED = 'deepsea_train10k.bed'  # output BED of the corresponding intervals
def onehot2base(onehot):
if onehot == [1,0,0,0]: