@vinothchandar
vinothchandar / gist:e8837df301501f85e257
Last active March 15, 2016 20:37
Helix Skew Patch
diff --git a/helix-core/src/main/java/org/apache/helix/controller/rebalancer/AutoRebalancer.java b/helix-core/src/main/java/org/apache/helix/controller/rebalancer/AutoRebalancer.java
index a8d83a2..bfbc6d7 100644
--- a/helix-core/src/main/java/org/apache/helix/controller/rebalancer/AutoRebalancer.java
+++ b/helix-core/src/main/java/org/apache/helix/controller/rebalancer/AutoRebalancer.java
@@ -125,6 +125,7 @@ public class AutoRebalancer implements Rebalancer, MappingCalculator {
int maxPartition = currentIdealState.getMaxPartitionsPerInstance();
ReplicaPlacementScheme placementScheme = new DefaultPlacementScheme();
+ System.out.println(">>> Computing partition assignment");
placementScheme.init(_manager);
import com.uber.hacman.CountingDirectDStream
import org.apache.spark.SparkConf
import org.apache.spark.streaming.{Milliseconds, Seconds, StreamingContext}
/**
*/
object CountingDStreamTest {
def main(args: Array[String]) = {
import org.apache.spark.Logging
import org.apache.spark.rdd.RDD
import org.apache.spark.streaming.{Time, StreamingContext}
import org.apache.spark.streaming.dstream.InputDStream
import scala.collection.mutable
import scala.util.Random
/**
* DStream that just keeps generating random numbers as events
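The preview cuts off before CountingDirectDStream itself is shown. As a rough stand-in for "a stream that just keeps generating random numbers as events", the same kind of test input can be produced with Spark's stock queueStream API; the following Java sketch is hypothetical (class name, local master, 1-second batches, and batch sizes are assumptions, not taken from the gist).

import java.util.ArrayList;
import java.util.LinkedList;
import java.util.List;
import java.util.Queue;
import java.util.Random;
import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaRDD;
import org.apache.spark.streaming.Durations;
import org.apache.spark.streaming.api.java.JavaDStream;
import org.apache.spark.streaming.api.java.JavaStreamingContext;

public class RandomEventStreamTest {
  public static void main(String[] args) throws Exception {
    SparkConf conf = new SparkConf().setMaster("local[2]").setAppName("RandomEventStreamTest");
    JavaStreamingContext jssc = new JavaStreamingContext(conf, Durations.seconds(1));

    // Pre-build a few RDDs of random numbers; queueStream replays one per batch interval
    Queue<JavaRDD<Integer>> queue = new LinkedList<>();
    Random rng = new Random();
    for (int batch = 0; batch < 10; batch++) {
      List<Integer> events = new ArrayList<>();
      for (int i = 0; i < 100; i++) {
        events.add(rng.nextInt(1000));
      }
      queue.add(jssc.sparkContext().parallelize(events));
    }

    JavaDStream<Integer> events = jssc.queueStream(queue);
    events.count().print();  // print per-batch event counts to stdout

    jssc.start();
    jssc.awaitTermination();
  }
}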
@vinothchandar
vinothchandar / gist:3ad542c2028c2a20e356
Created March 3, 2015 22:20
HDFS HA Name Node Config
public static void main(String[] args) throws Exception {
  if (args.length != 2) {
    System.out.println("Usage: pgm <hdfs:///path/to/copy> </local/path/to/copy/from>");
    System.exit(1);
  }

  Configuration conf = new Configuration(false);
  conf.set("fs.defaultFS", "hdfs://nameservice1");
  conf.set("fs.default.name", conf.get("fs.defaultFS"));
  conf.set("dfs.nameservices", "nameservice1");
@vinothchandar
vinothchandar / gist:4b2a86b196285b819158
Created February 6, 2015 18:20
Asymmetric Encryption Speed
import javax.crypto.Cipher;
import java.security.*;
import java.util.Random;
/**
* Created by vinoth on 2/6/15.
*/
abstract class EncryptionProvider {
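The class body is cut off in this preview. As an illustration of what an asymmetric-encryption speed test looks like against the plain JCA API, here is a minimal, self-contained RSA timing sketch; the key size, padding, plaintext size, and iteration count are arbitrary choices, not taken from the gist.

import javax.crypto.Cipher;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.util.Random;

public class RsaSpeedTest {
  public static void main(String[] args) throws Exception {
    KeyPairGenerator kpg = KeyPairGenerator.getInstance("RSA");
    kpg.initialize(2048);
    KeyPair keyPair = kpg.generateKeyPair();

    // 100 bytes of random plaintext; RSA/PKCS1 with a 2048-bit key caps plaintext at 245 bytes
    byte[] plain = new byte[100];
    new Random().nextBytes(plain);

    Cipher cipher = Cipher.getInstance("RSA/ECB/PKCS1Padding");
    int iterations = 1000;

    long start = System.nanoTime();
    for (int i = 0; i < iterations; i++) {
      cipher.init(Cipher.ENCRYPT_MODE, keyPair.getPublic());
      cipher.doFinal(plain);
    }
    long encryptNanos = System.nanoTime() - start;

    cipher.init(Cipher.ENCRYPT_MODE, keyPair.getPublic());
    byte[] encrypted = cipher.doFinal(plain);

    start = System.nanoTime();
    for (int i = 0; i < iterations; i++) {
      cipher.init(Cipher.DECRYPT_MODE, keyPair.getPrivate());
      cipher.doFinal(encrypted);
    }
    long decryptNanos = System.nanoTime() - start;

    System.out.println("encrypt: " + encryptNanos / iterations / 1000 + " us/op");
    System.out.println("decrypt: " + decryptNanos / iterations / 1000 + " us/op");
  }
}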
@vinothchandar
vinothchandar / gist:92837ba84d75d2cb59e6
Created December 10, 2014 16:50
NonBlocking Counter
import java.util.concurrent.atomic.AtomicLong;
import voldemort.utils.Time;
/**
* A simple concurrent, non-blocking event counter that resets itself every
* interval, and provides eventRate and average event value metrics over the
* last interval
*
*/
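Only the class javadoc survives in this preview. A minimal sketch of the idea it describes, built on AtomicLong updates with a compare-and-set to roll the window, looks roughly like the following (class and method names are assumptions, and the roll-over snapshot is only approximate when events race with it).

import java.util.concurrent.atomic.AtomicLong;

public class IntervalEventCounter {
  private final long intervalMs;
  private final AtomicLong windowStartMs;
  private final AtomicLong eventCount = new AtomicLong(0);
  private final AtomicLong valueSum = new AtomicLong(0);

  // Snapshot of the last completed interval, published by the thread that rolls the window
  private volatile long lastCount = 0;
  private volatile long lastSum = 0;
  private volatile long lastDurationMs = 1;

  public IntervalEventCounter(long intervalMs) {
    this.intervalMs = intervalMs;
    this.windowStartMs = new AtomicLong(System.currentTimeMillis());
  }

  // Record one event with an associated value (e.g. a latency or payload size)
  public void record(long value) {
    maybeRoll();
    eventCount.incrementAndGet();
    valueSum.addAndGet(value);
  }

  // Events per second over the last completed interval
  public double eventRate() {
    maybeRoll();
    return lastCount * 1000.0 / lastDurationMs;
  }

  // Average event value over the last completed interval
  public double avgEventValue() {
    maybeRoll();
    return lastCount == 0 ? 0.0 : (double) lastSum / lastCount;
  }

  // Roll the window once the interval has elapsed; only the thread that wins the CAS resets
  private void maybeRoll() {
    long start = windowStartMs.get();
    long now = System.currentTimeMillis();
    if (now - start >= intervalMs && windowStartMs.compareAndSet(start, now)) {
      lastDurationMs = Math.max(1, now - start);
      lastCount = eventCount.getAndSet(0);
      lastSum = valueSum.getAndSet(0);
    }
  }
}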
@vinothchandar
vinothchandar / gist:5923088
Created July 3, 2013 21:41
GCFriendlyCleaner implementation using NIO Zero Copy
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.nio.ByteBuffer;
import java.nio.channels.FileChannel;
import java.util.Random;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
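The preview shows only the imports; the "zero copy" part presumably comes down to FileChannel.transferTo, which lets the kernel move bytes between channels without staging them in JVM heap buffers. A minimal sketch of that building block follows (not the gist's GCFriendlyCleaner itself, just the transferTo primitive).

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.channels.FileChannel;

public class ZeroCopyFileCopy {
  // Copies src to dst with FileChannel.transferTo, avoiding intermediate heap buffers
  public static void copy(File src, File dst) throws IOException {
    try (FileChannel in = new FileInputStream(src).getChannel();
         FileChannel out = new FileOutputStream(dst).getChannel()) {
      long position = 0;
      long size = in.size();
      while (position < size) {
        // transferTo may move fewer bytes than requested, so loop until done
        position += in.transferTo(position, size - position, out);
      }
    }
  }
}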
public interface LocationStoreClient<V> {

  /**
   * Gets the data item that matches the exact {lat, lon, context} values in the key
   */
  public List<Versioned<V>> get(GeoKey key);

  /**
   * Performs a bounding box search around the {lat, lon} in the key and returns all the
   * data items in the given radius
   */
@vinothchandar
vinothchandar / gist:5565054
Last active December 17, 2015 06:28
Postgres K-V store supporting radius searches
package postgres;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.List;
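The preview ends at the imports. One common way to support radius searches on plain Postgres, without PostGIS, is a coarse latitude/longitude bounding-box query (which an ordinary B-tree index can serve) followed by an exact great-circle check in the client; a minimal sketch along those lines follows, using a hypothetical kv(lat, lon, val) table that is not the gist's actual schema.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.List;

public class RadiusSearch {
  private static final double EARTH_RADIUS_KM = 6371.0;

  // Returns values of rows within radiusKm of (lat, lon); assumes non-polar latitudes
  public static List<String> search(Connection conn, double lat, double lon, double radiusKm)
      throws SQLException {
    // Coarse bounding box in degrees, used as an index-friendly pre-filter
    double latDelta = Math.toDegrees(radiusKm / EARTH_RADIUS_KM);
    double lonDelta = Math.toDegrees(radiusKm / (EARTH_RADIUS_KM * Math.cos(Math.toRadians(lat))));
    String sql = "SELECT lat, lon, val FROM kv WHERE lat BETWEEN ? AND ? AND lon BETWEEN ? AND ?";
    List<String> results = new ArrayList<>();
    try (PreparedStatement ps = conn.prepareStatement(sql)) {
      ps.setDouble(1, lat - latDelta);
      ps.setDouble(2, lat + latDelta);
      ps.setDouble(3, lon - lonDelta);
      ps.setDouble(4, lon + lonDelta);
      try (ResultSet rs = ps.executeQuery()) {
        while (rs.next()) {
          // Exact great-circle check on the bounding-box candidates
          if (haversineKm(lat, lon, rs.getDouble("lat"), rs.getDouble("lon")) <= radiusKm) {
            results.add(rs.getString("val"));
          }
        }
      }
    }
    return results;
  }

  private static double haversineKm(double lat1, double lon1, double lat2, double lon2) {
    double dLat = Math.toRadians(lat2 - lat1);
    double dLon = Math.toRadians(lon2 - lon1);
    double a = Math.sin(dLat / 2) * Math.sin(dLat / 2)
             + Math.cos(Math.toRadians(lat1)) * Math.cos(Math.toRadians(lat2))
             * Math.sin(dLon / 2) * Math.sin(dLon / 2);
    return 2 * EARTH_RADIUS_KM * Math.asin(Math.sqrt(a));
  }
}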