GitHub gists for Tupshin Harper (tupshin)
Hadoop reduce task list for job_201207011048_0009 on 10
Killed Tasks

Task:        task_201207011048_0009_r_000000
Complete:    0.00%
Start Time:  2-Jul-2012 12:26:55
Finish Time: 2-Jul-2012 12:29:32 (2mins, 37sec)
Errors:      java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) [Error getting row data with exception java.lang.ArrayIndexOutOfBoundsException: 175
// Pre-1.0 Rust TCP client: std::net::tcp rode on the libuv global event loop.
extern mod std;
use std::net::tcp::connect;
use std::net::ip::v4::parse_addr;
use std::uv::global_loop;

fn main() {
    let port: uint = 80;
    let iotask = global_loop::get();   // handle to the global libuv event loop
    let ip = parse_addr("127.0.0.1");  // IPv4 address to connect to
// Variant of the client above that also imports write to send bytes.
extern mod std;
use std::net::tcp::connect;
use std::net::tcp::write;
use std::net::ip::v4::parse_addr;
use std::uv::global_loop;

fn main() {
    let port: uint = 80;
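For contrast, a minimal sketch of the same connect-and-write flow in modern Rust, where std::net::TcpStream replaced the libuv-based std::net::tcp API at 1.0. The payload here is an assumption, not taken from the gists.

use std::io::Write;
use std::net::TcpStream;

fn main() -> std::io::Result<()> {
    // Same loopback address and port as the old snippets.
    let mut stream = TcpStream::connect(("127.0.0.1", 80))?;
    // write_all plays the role of the old std::net::tcp::write.
    stream.write_all(b"GET / HTTP/1.0\r\n\r\n")?; // payload is illustrative
    Ok(())
}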
import java.lang.Math.sqrt

object Positioning extends App {
  class Circle(val x: Double, val y: Double, val r: Double)
  class Point(val x: Double, val y: Double)

  run_test(new Circle(-1.0, -1.0, 1.5), new Circle(1.0, 1.0, 2.0))
  run_test(new Circle(236, 67, 695.075535464), new Circle(-268, 172, 409.498473746))
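The Positioning gist is cut off before run_test is defined; as a hedged sketch of the geometry such a test presumably exercises, here is the standard two-circle intersection construction (in Rust rather than the gist's Scala, and with illustrative names throughout):

// Intersection points of two circles via the classic construction:
// walk distance a along the center line, then offset by h perpendicular to it.
struct Circle { x: f64, y: f64, r: f64 }

fn intersect(c0: &Circle, c1: &Circle) -> Option<[(f64, f64); 2]> {
    let (dx, dy) = (c1.x - c0.x, c1.y - c0.y);
    let d = (dx * dx + dy * dy).sqrt();
    // No intersection when the circles are separate, contained, or coincident.
    if d > c0.r + c1.r || d < (c0.r - c1.r).abs() || d == 0.0 {
        return None;
    }
    let a = (c0.r * c0.r - c1.r * c1.r + d * d) / (2.0 * d);
    let h = (c0.r * c0.r - a * a).sqrt();
    let (mx, my) = (c0.x + a * dx / d, c0.y + a * dy / d); // chord midpoint
    Some([
        (mx + h * dy / d, my - h * dx / d),
        (mx - h * dy / d, my + h * dx / d),
    ])
}

fn main() {
    // Same inputs as the gist's first run_test call.
    let c0 = Circle { x: -1.0, y: -1.0, r: 1.5 };
    let c1 = Circle { x: 1.0, y: 1.0, r: 2.0 };
    println!("{:?}", intersect(&c0, &c1));
}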
fn send(&mut self, err: c_int, bytes: &[&[u8]]) {
    assert!(self.sender.is_some());
    // Total payload length across all slices.
    let len = bytes.iter().fold(0, |l, b| l + b.len());
    let header = fuse_out_header {
        // A FUSE reply's len field counts the header plus the payload.
        len: (mem::size_of::<fuse_out_header>() + len) as u32,
        error: -err,  // FUSE expects the negated errno
        unique: self.unique,
    };
    as_bytes(&header, |headerbytes| {
        let sender = self.sender.take_unwrap();  // old-Rust spelling of take().unwrap()
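The same framing in modern Rust, as a minimal sketch: frame_reply and its buffer-building approach are assumptions, but the header layout (len, error, unique) and the negated-errno convention match the FUSE wire protocol the gist is targeting.

use std::mem;

#[repr(C)]
struct FuseOutHeader {
    len: u32,    // total packet length, header included
    error: i32,  // 0 on success, negated errno on failure
    unique: u64, // id of the request being answered
}

fn frame_reply(err: i32, unique: u64, bytes: &[&[u8]]) -> Vec<u8> {
    let payload_len: usize = bytes.iter().map(|b| b.len()).sum();
    let header = FuseOutHeader {
        len: (mem::size_of::<FuseOutHeader>() + payload_len) as u32,
        error: -err,
        unique,
    };
    let mut out = Vec::with_capacity(header.len as usize);
    // View the #[repr(C)] header as raw bytes, then append each payload slice.
    out.extend_from_slice(unsafe {
        std::slice::from_raw_parts(
            &header as *const FuseOutHeader as *const u8,
            mem::size_of::<FuseOutHeader>(),
        )
    });
    for b in bytes {
        out.extend_from_slice(b);
    }
    out
}

fn main() {
    let packet = frame_reply(0, 42, &[b"hello"]);
    assert_eq!(packet.len(), mem::size_of::<FuseOutHeader>() + 5);
}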
import com.datastax.spark.connector.cql.CassandraConnector
import org.apache.spark.{SparkContext, SparkConf}
import org.apache.spark.sql.SQLContext
import com.databricks.spark.csv._
import com.datastax.spark.connector._

object CSV2Cassandra {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf(true).set("spark.cassandra.connection.host", "127.0.0.1")
    val sc = new SparkContext("local", "test", conf) // master and app name set here, so no setMaster on the conf
use std::io::{BufferedReader, File};
use std::collections::BinaryHeap; // latest nightlies renamed PriorityQueue to BinaryHeap

fn main() {
    let n = 3u; // keep the n longest lines
    let path = Path::new("test.txt");
    let mut file = BufferedReader::new(File::open(&path));
    let mut pq = BinaryHeap::new(); // note: max-heap (see the rename note above)
    for (i, line) in file.lines().enumerate() {
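That gist also predates Rust 1.0 (BufferedReader, the u suffix). A hedged modern sketch of the same idea, keeping the n longest lines with a size-capped min-heap built from std::cmp::Reverse (the file name is reused from the gist):

use std::cmp::Reverse;
use std::collections::BinaryHeap;
use std::fs::File;
use std::io::{BufRead, BufReader};

fn main() -> std::io::Result<()> {
    let n: usize = 3; // n longest lines, as in the gist
    let file = BufReader::new(File::open("test.txt")?);
    let mut heap = BinaryHeap::new(); // Reverse flips the max-heap into a min-heap
    for line in file.lines() {
        let line = line?;
        heap.push(Reverse((line.len(), line)));
        if heap.len() > n {
            heap.pop(); // evict the shortest candidate, keeping only n
        }
    }
    // Ascending order over Reverse values means the longest lines print first.
    for Reverse((len, line)) in heap.into_sorted_vec() {
        println!("{len}: {line}");
    }
    Ok(())
}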
#[repr(C, packed)]
pub struct CqlStringMap<'a> {
    count: u16,
    map: [u8], // unsized trailing field
}

pub type Body = [u8];
pub trait BodyBuilder {}
impl BodyBuilder for Body {}

pub struct Body2 { body: [u8] }
pub trait BodyBuilder2 {}
impl BodyBuilder2 for Body2 {}

Both fail with
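The elided error is most likely about dynamically sized types: a bare [u8], whether behind a type alias or as a struct field, has no compile-time size, and Rust of that era rejected such impls. A hedged workaround sketch, assuming the goal is simply a byte-backed body type: borrow the bytes so every type involved is sized (the repr attribute is dropped here since the payload no longer lives inline).

pub trait BodyBuilder {}

// Sized alternatives: the payload lives behind a reference, so the structs
// have a known size and the trait impls are unremarkable.
pub struct CqlStringMap<'a> {
    pub count: u16,
    pub map: &'a [u8],
}

pub struct Body<'a> {
    pub body: &'a [u8],
}

impl<'a> BodyBuilder for Body<'a> {}

fn main() {
    let raw = [1u8, 2, 3];
    let b = Body { body: &raw };
    let _builder: &dyn BodyBuilder = &b; // usable as a trait object, too
    let m = CqlStringMap { count: 1, map: &raw };
    assert_eq!(m.count, 1);
}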