Skip to content

Instantly share code, notes, and snippets.

@aorjoa
aorjoa / gist:5fe0c46920f164990020
Created October 22, 2015 19:30
Gdisk for fixing the MBR boot on Mac OS (rEFIt)
$ sudo gdisk /dev/disk0
Password:
GPT fdisk (gdisk) version 1.0.1
Warning: Devices opened with shared lock will not have their
partition table automatically reloaded!
Partition table scan:
MBR: hybrid
BSD: not present
APM: not present
@aorjoa
aorjoa / helloworld.json
Last active January 19, 2019 02:06
Simple Spark Task
{"paragraphs":[{"text":"%spark\nsc.version","user":"anonymous","dateUpdated":"2019-01-19T02:05:18+0000","config":{"editorSetting":{"language":"scala","editOnDblClick":false,"completionKey":"TAB","completionSupport":true},"colWidth":12,"editorMode":"ace/mode/scala","fontSize":9,"results":{},"enabled":true},"settings":{"params":{},"forms":{}},"apps":[],"jobName":"paragraph_1547863518098_-1385869494","id":"20190117-204650_193106845","dateCreated":"2019-01-19T02:05:18+0000","status":"READY","errorMessage":"","progressUpdateIntervalMs":500,"focus":true,"$$hashKey":"object:959"},{"text":"%spark\nimport org.apache.commons.io.IOUtils\nimport java.net.URL\nimport java.nio.charset.Charset\n\n// Word count with RDD\nval fileFromURL = IOUtils.toString(\n new URL(\"https://raw.githubusercontent.com/apache/spark/master/README.md\"),\n Charset.forName(\"utf8\")).split(\"\\n\")\nval textFile = sc.parallelize(fileFromURL)\nval counts = textFile.flatMap(line => line.split(\" \")).map(word => (word, 1)).reduceByKe
package oot.lab8;
public class Calculator implements MathCalculator {
private double operand1;
private double operand2;
/**
 * Creates a calculator over the two given operands.
 *
 * @param operand1 first operand used by the calculator's operations
 * @param operand2 second operand used by the calculator's operations
 */
public Calculator(double operand1, double operand2) {
    // Assignments are independent; order is irrelevant.
    this.operand2 = operand2;
    this.operand1 = operand1;
}
public double add() {
return operand1 + operand2;
import java.io.*;
import java.math.BigInteger;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.HashMap;
/**
* Created by Bhuridech Sudsee.
*/
import java.io.*;
import java.math.BigInteger;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.HashMap;
/**
* Created by Bhuridech Sudsee.
*/
import java.io.*;
import java.math.BigInteger;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.HashMap;
/**
* Created by Bhuridech Sudsee.
*/
@aorjoa
aorjoa / .gitignore
Created December 24, 2017 08:57
Git ignore file for SE 60
build/
node_modules/
src/main/resources/static/built/
*.class
# Package Files #
*.jar
*.war
*.ear
@aorjoa
aorjoa / owncoin.sol
Last active October 29, 2017 03:52
This original source code comes from https://ethereum.org/token
pragma solidity ^0.4.16;
interface tokenRecipient { function receiveApproval(address _from, uint256 _value, address _token, bytes _extraData) public; }
contract TokenERC20 {
// Public variables of the token
string public name;
string public symbol;
uint8 public decimals = 0;
// 18 decimals is the strongly suggested default, avoid changing it
pragma solidity ^0.4.15;
// -----------------------------------------
// Mongkol 99, M99C
// (c) Bhuridech Sudsee, Under MIT License
// Thanks BokkyPooBah for original version.
// -----------------------------------------
contract ERC20Interface {
function balanceOf(address _owner) constant returns (uint256 balance);
#!/bin/bash
echo "rdd-WordCount-sparktest-sha256-java-checkpoint-with-cache-in-one-test"
testname="rdd-WordCount-sparktest-sha256-java-checkpoint-with-cache-in-one-test"
echo -e '''
package th.ac.sut.aiyara.sparktest
import org.apache.spark.cprdd.{ImplicitDatasetCheckpoint, ImplicitRDDCheckpoint}
import com.bloomberg.sparkflow
import org.scalatest.FunSuite
import th.ac.sut.aiyara.sparktest.utils.{ConfigSpark, MathFunc}