Dmytro Mitin (DmytroMitin) — GitHub gists
dmitin@dmitin-HP-Pavilion-Laptop:/media/data/Projects2/scala3demo2/target/scala-3.3.3/classes$ cd org/public_domain/
dmitin@dmitin-HP-Pavilion-Laptop:/media/data/Projects2/scala3demo2/target/scala-3.3.3/classes/org/public_domain$ ls
main.class Main.class Main$.class main.tasty Main.tasty
dmitin@dmitin-HP-Pavilion-Laptop:/media/data/Projects2/scala3demo2/target/scala-3.3.3/classes/org/public_domain$ cd ..
dmitin@dmitin-HP-Pavilion-Laptop:/media/data/Projects2/scala3demo2/target/scala-3.3.3/classes/org$ cd ..
dmitin@dmitin-HP-Pavilion-Laptop:/media/data/Projects2/scala3demo2/target/scala-3.3.3/classes$ javap org.public_domain.Main
Compiled from "Main.scala"
public final class org.public_domain.Main {
public static void main();
}
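The listing above shows main.class and main.tasty next to Main.class, Main$.class and Main.tasty, and javap reports only a parameterless static main() on org.public_domain.Main. That is consistent with (though not proven by the transcript alone) a Scala 3 source roughly like the sketch below, where an @main method named main sits inside object Main: the @main annotation generates the lowercase main class carrying the JVM entry point public static void main(String[]), while the Main mirror class only gets the no-argument static forwarder seen in javap.

package org.public_domain

// hypothetical source reconstructed from the class/tasty files above:
// - `main.class` / `main.tasty` come from the class generated for the @main method
// - `Main.class` / `Main$.class` / `Main.tasty` come from the object itself
object Main {
  @main def main(): Unit =
    println("Hello, world!")
}

If that is indeed the source, the runnable entry point is org.public_domain.main (the generated lowercase class) rather than org.public_domain.Main, whose parameterless main() the JVM will not accept as a program entry point.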
{
  "ExportSchema" : [
    {
      "Application" : "fuzzitDwh",
      "EntityType" : "4",
      "CountryCode" : "AT",
      "ExportType" : "DIFF",
      "ExportUtcDateTime" : "2021-12-15T01:45:01",
      "LastExportUtcDateTime" : "2021-12-14T10:19:37",
      "FileName" : "UL_DataLake_fuzzitDwh_MaterialExternal_AT_DIFF_20211215_014501.json"
    }
  ]
}
import org.apache.spark.SparkContext

// variant: the reduce call in the object body is commented out
object Main {
  val sc = new SparkContext("local[1]", "SimpleProg")
  val nums = sc.parallelize(List(1, 2, 3, 4))
  //println(nums.reduce((a, b) => a - b))
  def main(args: Array[String]): Unit = {
    // ...
  }
}
import org.apache.spark.SparkContext

// variant: the reduce runs during object initialization, before main executes
object Main {
  val sc = new SparkContext("local[1]", "SimpleProg")
  val nums = sc.parallelize(List(1, 2, 3, 4))
  println(nums.reduce((a, b) => a - b))
  def main(args: Array[String]): Unit = {
    // ...
  }
}
import com.fasterxml.jackson.core.Version
import com.fasterxml.jackson.databind.{Module, ObjectMapper}
import org.apache.spark.sql.SparkSession

object App {
  val spark = SparkSession.builder
    .master("local")
    .appName("Spark app")
    .getOrCreate()

  // ...
}
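The imports above bring in Jackson's Version and Module alongside SparkSession, which suggests the App object goes on to inspect or register Jackson components on Spark's classpath (mismatched jackson-databind versions are a common source of "Scala module ... requires Jackson Databind version ..." errors). A hedged sketch of such a version check; the object name JacksonVersionCheck is invented here:

import com.fasterxml.jackson.core.Version
import com.fasterxml.jackson.databind.ObjectMapper

object JacksonVersionCheck {
  def main(args: Array[String]): Unit = {
    // ObjectMapper implements Versioned, so this reports the jackson-databind
    // version actually loaded on the classpath next to Spark's own copy
    val v: Version = new ObjectMapper().version()
    println(v.toFullString)
  }
}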
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession

// variant: the code lives in the body of an object extending App
object App38 extends App {
  val sc = new SparkContext("local[1]", "SimpleProg")
  val nums = sc.parallelize(List(1, 2, 3, 4))
  println(nums.reduce((a, b) => a - b))
  // ...
}
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession

// variant: extends App commented out, reduce call commented out
object App38 /*extends App*/ {
  val sc = new SparkContext("local[1]", "SimpleProg")
  val nums = sc.parallelize(List(1, 2, 3, 4))
  // println(nums.reduce((a, b) => a - b))
  // ...
}
import org.apache.spark.SparkContext
import org.apache.spark.sql.SparkSession

// variant: extends App commented out, reduce runs during object initialization
object App38 /*extends App*/ {
  val sc = new SparkContext("local[1]", "SimpleProg")
  val nums = sc.parallelize(List(1, 2, 3, 4))
  println(nums.reduce((a, b) => a - b))
  // ...
}
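The three App38 variants above differ only in whether the object extends App and whether the reduce call is commented out. Spark's documentation advises defining an explicit main method instead of extending scala.App (whose Scala 2 implementation relies on DelayedInit), since subclasses of App may not work correctly. A minimal sketch of that shape; the object name App38Main is made up here:

import org.apache.spark.SparkContext

// same computation as above, but inside an explicit main instead of an App body
object App38Main {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local[1]", "SimpleProg")
    val nums = sc.parallelize(List(1, 2, 3, 4))
    // with a single partition (local[1]) this non-associative reduce folds
    // left-to-right: ((1 - 2) - 3) - 4 = -8; with more partitions it can differ
    println(nums.reduce((a, b) => a - b))
    sc.stop()
  }
}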
my-project
├─ subproject1
│  └─ src/main/scala
│     └─ com.foo.bar
│        ├─ foo1
│        │  └─ (...classes)
└─ subproject2
   └─ src/main/scala
      └─ com.foo.bar
         └─ foo2
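A layout like the one above, where subproject1 and subproject2 both contribute classes to the same com.foo.bar package, is normally wired together with an sbt multi-project build. A rough sketch; the subproject names come from the tree, everything else (Scala version, the dependsOn edge) is assumed:

// build.sbt (sketch)
ThisBuild / scalaVersion := "2.13.12"

lazy val root = (project in file("."))
  .aggregate(subproject1, subproject2)

lazy val subproject1 = project
  .in(file("subproject1"))

// assumed: foo2 in subproject2 uses foo1 classes from subproject1
lazy val subproject2 = project
  .in(file("subproject2"))
  .dependsOn(subproject1)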