Created
November 19, 2016 16:39
-
-
Save cmeiklejohn/fccbefdbc949c874b0c9a8729121844b to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
16/11/19 17:39:01 INFO SparkContext: Created broadcast 0 from textFile at SimpleApp.scala:49
Creating pairs
Exception in thread "main" java.io.IOException: No FileSystem for scheme: s3a
	at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2385)
	at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2392)
	at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:89)
	at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2431)
	at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2413)
	at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:368)
	at org.apache.hadoop.fs.Path.getFileSystem(Path.java:296)
	at org.apache.hadoop.mapred.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:256)
	at org.apache.hadoop.mapred.FileInputFormat.listStatus(FileInputFormat.java:228)
	at org.apache.hadoop.mapred.FileInputFormat.getSplits(FileInputFormat.java:304)
	at org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:207)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217)
	at scala.Option.getOrElse(Option.scala:120)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:217)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:32)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217)
	at scala.Option.getOrElse(Option.scala:120)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:217)
	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:32)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:219)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:217)
	at scala.Option.getOrElse(Option.scala:120)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:217)
	at org.apache.spark.Partitioner$.defaultPartitioner(Partitioner.scala:65)
	at org.apache.spark.rdd.PairRDDFunctions$$anonfun$reduceByKey$3.apply(PairRDDFunctions.scala:290)
	at org.apache.spark.rdd.PairRDDFunctions$$anonfun$reduceByKey$3.apply(PairRDDFunctions.scala:290)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:148)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:109)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:286)
	at org.apache.spark.rdd.PairRDDFunctions.reduceByKey(PairRDDFunctions.scala:289)
	at SimpleApp$.create_pairs(SimpleApp.scala:74)
	at SimpleApp$.main(SimpleApp.scala:52)
	at SimpleApp.main(SimpleApp.scala)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:483)
	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:664)
	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:169)
	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:192)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:111)
	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
16/11/19 17:39:01 INFO SparkContext: Invoking stop() from shutdown hook
16/11/19 17:39:01 INFO SparkUI: Stopped Spark web UI at http://192.168.0.13:4040
16/11/19 17:39:01 INFO DAGScheduler: Stopping DAGScheduler
16/11/19 17:39:01 INFO MapOutputTrackerMasterEndpoint: MapOutputTrackerMasterEndpoint stopped!
16/11/19 17:39:01 INFO Utils: path = /private/var/folders/dm/mdzr55c97898h4232_14lkdr0000gn/T/spark-006d1f46-7bf1-4f08-9fb0-c7cb09aadc15/blockmgr-5fc8faa6-f308-4cad-9bde-f3252d9f977b, already present as root for deletion.
16/11/19 17:39:01 INFO MemoryStore: MemoryStore cleared
16/11/19 17:39:01 INFO BlockManager: BlockManager stopped
16/11/19 17:39:01 INFO BlockManagerMaster: BlockManagerMaster stopped
16/11/19 17:39:01 INFO SparkContext: Successfully stopped SparkContext
16/11/19 17:39:01 INFO Utils: Shutdown hook called
16/11/19 17:39:01 INFO OutputCommitCoordinator$OutputCommitCoordinatorEndpoint: OutputCommitCoordinator stopped!
16/11/19 17:39:01 INFO Utils: Deleting directory /private/var/folders/dm/mdzr55c97898h4232_14lkdr0000gn/T/spark-006d1f46-7bf1-4f08-9fb0-c7cb09aadc15
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.