This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Connect to the local HDFS NameNode and open its inotify event stream so
// that namespace changes (create/append/rename/delete/...) can be polled.
import java.net.URI
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs._
import org.apache.hadoop.hdfs.client._
import org.apache.hadoop.hdfs.inotify._

// NameNode RPC endpoint; Configuration(false) skips loading *-site.xml
// defaults, so only what we set explicitly applies.
val url = new URI("hdfs://localhost:8020")
val conf = new Configuration(false)

// Admin handle bound to the cluster at `url`; the no-arg overload starts
// the event stream from the current transaction ID.
val dfs = new HdfsAdmin(url, conf)
val stream = dfs.getInotifyEventStream()
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
package systemsimulation | |
import io.gatling.core.Predef._ | |
import io.gatling.core.session._ | |
import io.gatling.http.Predef._ | |
import scala.concurrent.duration._ | |
import general._ | |
// Gatling load-test entry point. NOTE(review): the class body is truncated
// in this fragment — scenario, protocol, and setUp(...) are not visible.
class SystemSimulation extends Simulation { | |
// configure proxy |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
package org.apache.spark | |
import org.apache.spark.sql.catalyst.util._ | |
import org.apache.spark.sql.types._ | |
// Registers PointType (declared elsewhere in the project, not visible here)
// as the Spark SQL user-defined type used to (de)serialize Point values.
@SQLUserDefinedType(udt = classOf[PointType]) | |
// NOTE(review): field semantics inferred from names — presumably a MAC
// address observed over a [start, end] time span; confirm with callers.
// Class body is truncated below this fragment.
case class Point(mac: String, start: Long, end: Long) { | |
// Hash mixes only `mac` and `start`; `end` is excluded. The equals/hashCode
// contract still holds (equal Points share mac and start, hence equal
// hashes), but NOTE(review): confirm omitting `end` is intentional.
override def hashCode(): Int = { | |
31 * (31 * mac.hashCode) + start.hashCode | |
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
//////////////////////////////////////////////////////////////// | |
// == Parquet read == | |
//////////////////////////////////////////////////////////////// | |
import org.apache.hadoop.conf.Configuration | |
import org.apache.hadoop.fs.Path | |
import org.apache.hadoop.mapreduce._ | |
import org.apache.hadoop.mapreduce.lib.input.FileSplit | |
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl | |
import org.apache.parquet.hadoop.ParquetInputSplit | |
import org.apache.parquet.hadoop.ParquetRecordReader |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Builds an ExpressionEncoder for class `cls` by composing the given named
// per-field encoders. NOTE(review): the body is truncated below L54; only
// the schema-derivation prologue is visible in this fragment.
def clazz[T](cls: Class[T], encoders: Seq[(String, ExpressionEncoder[_])]): ExpressionEncoder[T] = { | |
// Every field encoder must still be unresolved before it is composed.
encoders.foreach { case (_, enc) => enc.assertUnresolved() } | |
// Derive the output schema: a "flat" encoder contributes its single
// column's type and nullability directly; a non-flat encoder nests as a
// struct that is always marked nullable.
val schema = StructType(encoders.map { | |
case (fieldName, e) => | |
val (dataType, nullable) = if (e.flat) { | |
e.schema.head.dataType -> e.schema.head.nullable | |
} else { | |
e.schema -> true | |
} | |
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
final class Example { | |
// NOTE(review): this method does not compile as Java (`b` is referenced in
// its own initializer; a String expression is assigned to `Buffer`). Given
// the adjacent java.cson grammar diff, this reads like a syntax-highlighting
// test fixture rather than runnable code — confirm before "fixing" it.
void /* test */ func() { | |
String a = "a"; | |
String b = "a" + b + "c()"; | |
Buffer buf = "test" + "new Buffer() {};"; | |
HashSet<String> test = new HashSet<String>(); | |
} | |
public int get_int() { |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
package org.apache.spark.sql.sadikovi | |
import java.io.{ObjectInputStream, ObjectOutputStream} | |
import java.util.UUID | |
import org.apache.hadoop.conf.Configuration | |
import org.apache.hadoop.fs.Path | |
import org.apache.hadoop.mapreduce.{JobContext, TaskAttemptContext} | |
import org.apache.spark.internal.io._ |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
diff --git a/grammars/java.cson b/grammars/java.cson | |
index cb9947a..399c914 100644 | |
--- a/grammars/java.cson | |
+++ b/grammars/java.cson | |
@@ -109,6 +109,9 @@ | |
{ | |
'include': '#code' | |
} | |
+ { | |
+ 'include': '#module' |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// ---------------------------------------------------------------------- | |
// Statistics updates | |
/// Mutable accumulator for per-column statistics.
/// `typed` holds the strongly-typed statistics values; `sort_order` records
/// the comparison order they were computed under.
/// NOTE(review): semantics inferred from field/type names — confirm against
/// the TypedStatistics and SortOrder definitions elsewhere in the crate.
struct MutableStatisticsBuffer<T: DataType> { | |
typed: TypedStatistics<T>, | |
sort_order: SortOrder | |
} | |
impl<T: DataType> MutableStatisticsBuffer<T> { | |
pub fn new(column_order: ColumnOrder, is_min_max_deprecated: bool) -> Self { |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
// Result of spell-checking one word: either the word is in the dictionary
// (CorrectSpelling), or it is not and a list of candidate corrections is
// attached (IncorrectSpelling).
//
// `sealed` + `final` make this a proper ADT: the compiler now verifies that
// pattern matches over Spelling are exhaustive, and no further variants can
// be added outside this file. Both subclasses live in this same file, so
// sealing does not break them.
sealed abstract class Spelling
final case class CorrectSpelling(word: String) extends Spelling
final case class IncorrectSpelling(word: String, suggestions: List[String]) extends Spelling
// Naive spell-checker backed by a word-list file.
// NOTE(review): the fragment is truncated mid-constructor-call below — the
// PriorityQueue's comparator argument and the rest of the class are not
// visible here.
case class Spellchecker(dictionary: String) { | |
// Maximum number of correction candidates returned for a misspelled word.
private val numSuggestions = 10 | |
// Presumably an edit-distance cutoff for candidates — TODO confirm once the
// suggestion logic is visible.
private val maxDistance = 5 | |
// set of valid words (replace with trie for space efficiency) | |
private val set = readDict(dictionary) | |
// Heap of (distance, word) pairs used to rank suggestions — ordering is
// supplied by the (not-visible) constructor argument; verify it is by
// distance.
private val heap = new java.util.PriorityQueue[(Int, String)](