Skip to content

Instantly share code, notes, and snippets.

@eyalgo
eyalgo / Stripe.diff
Created July 8, 2017 08:28
NI Traktor Kontrol S5 - Add Minutes Markers to Stripe
29c29,30
<
---
> AppProperty { id: trackBPM; path: "app.traktor.decks." + (deckId+1) + ".tempo.base_bpm" }
>
184a186,213
> Repeater {
> readonly property int minutes: numberOfMinutes(trackLength.value)
> readonly property real minuteDelta: (stripe.width * 60.0) / trackLength.value
>
@eyalgo
eyalgo / BeatgridView.diff
Created July 1, 2017 19:04
Diff files for Traktor Kontrol S5
165c165
< color: colors.colorWhite09
---
> color: colors.colorWhite31
import CSI 1.0
import QtQuick 2.0
//--------------------------------------------------------------------------------------------------------------------
// DECK HEADER TEXT
//--------------------------------------------------------------------------------------------------------------------
Text {
id: header_text
import CSI 1.0
import QtQuick 2.0
import QtGraphicalEffects 1.0
import './../Definitions' as Definitions
import './../Widgets' as Widgets
//--------------------------------------------------------------------------------------------------------------------
// DECK HEADER
//--------------------------------------------------------------------------------------------------------------------
import QtQuick 2.0
QtObject {
// Build a normalised Qt colour from 0-255 channel values. Each channel
// (including alpha, also given on the 0-255 scale) is first passed through
// neutralizer() before being scaled into the [0, 1] range Qt.rgba expects.
function rgba(r, g, b, a) {
    var scale = 255.;
    return Qt.rgba(neutralizer(r) / scale,
                   neutralizer(g) / scale,
                   neutralizer(b) / scale,
                   neutralizer(a) / scale);
}
// this categorizes any rgb value to multiples of 8 for each channel to avoid unbalanced colors on the display (r5-g6-b5 bit)
// function neutralizer(value) { if(value%8 > 4) { return value - value%8 + 8} else { return value - value%8 }}
// Identity pass-through: the r5-g6-b5 channel quantisation (see the
// commented-out variant above) is currently disabled, so every channel
// value is returned unchanged.
function neutralizer(value) {
    return value;
}
java.sql.SQLException: No suitable driver found for jdbc:mysql://localhost/example_db
at java.sql.DriverManager.getConnection(DriverManager.java:689)
at java.sql.DriverManager.getConnection(DriverManager.java:247)
at org.apache.commons.dbcp.DriverManagerConnectionFactory.createConnection(DriverManagerConnectionFactory.java:75)
at org.apache.commons.dbcp.PoolableConnectionFactory.makeObject(PoolableConnectionFactory.java:582)
at org.apache.commons.pool.impl.GenericObjectPool.borrowObject(GenericObjectPool.java:1148)
at org.apache.commons.dbcp.PoolingDataSource.getConnection(PoolingDataSource.java:106)
at scalikejdbc.CommonsConnectionPool.borrow(CommonsConnectionPool.scala:43)
at scalikejdbc.NamedDB.db$lzycompute(NamedDB.scala:33)
at scalikejdbc.NamedDB.db(NamedDB.scala:31)
package org.apache.spark.sql
import org.apache.spark.sql.execution.datasources.jdbc.DriverRegistry
object DriverRegistryWrapper {

  /** Registers a JDBC driver class through Spark's internal [[DriverRegistry]].
    *
    * This wrapper lives in the `org.apache.spark.sql` package solely to gain
    * access to the package-private `DriverRegistry`; callers outside Spark's
    * packages use this object instead.
    *
    * @param className fully-qualified name of the JDBC driver class to register
    */
  def register(className: String): Unit = DriverRegistry.register(className)
}
// Thin pass-through: delegates record-writer creation to the wrapped `format`,
// which is declared outside this snippet. NOTE(review): `format` is presumably
// a Hadoop OutputFormat given the TaskAttemptContext parameter — confirm
// against the enclosing class before relying on this.
def getRecordWriter(job: TaskAttemptContext) =
format.getRecordWriter(job)
package org.apache.spark.util
import org.apache.spark.util.ChildFirstURLClassLoader
import java.net.URL
class WhiteListClassLoader(urls: Array[URL], parent: ClassLoader, whiteList: List[String])
extends ChildFirstURLClassLoader(urls, parent) {
override def loadClass(name: String) = {
if (whiteList.exists(name.startsWith)) {
super.loadClass(name)
val sc = SparkContext.getOrCreate()
val cl = new UserListClassLoader(sc.jars.map(new URL(_)).toArray,
Thread.currentThread.getContextClassLoader, List(
"org.apache.parquet.hadoop.OurCustomParquetOutputFormat",
"org.apache.parquet.hadoop.CodecFactory",
"org.apache.parquet.hadoop.ParquetFileWriter",
"org.apache.parquet.hadoop.ParquetRecordWriter",
"org.apache.parquet.hadoop.InternalParquetRecordWriter",
"org.apache.parquet.hadoop.ColumnChunkPageWriteStore",
"org.apache.parquet.hadoop.MemoryManager"