Created August 3, 2017 23:31
-
-
Save Qwedir/e39255bf5606dbc4daf81b852fc38f20 to your computer and use it in GitHub Desktop.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
ERROR [2017-08-03 19:18:16,934] ({cluster6-nio-worker-2} Logging.scala[logError]:72) - Failed to execute: com.datastax.spark.connector.writer.RichBoundStatement@4746499f
com.datastax.driver.core.exceptions.UnavailableException: Not enough replicas available for query at consistency LOCAL_QUORUM (2 required but only 1 alive)
	at com.datastax.driver.core.exceptions.UnavailableException.copy(UnavailableException.java:128)
	at com.datastax.driver.core.Responses$Error.asException(Responses.java:114)
	at com.datastax.driver.core.RequestHandler$SpeculativeExecution.onSet(RequestHandler.java:506)
	at com.datastax.driver.core.Connection$Dispatcher.channelRead0(Connection.java:1070)
	at com.datastax.driver.core.Connection$Dispatcher.channelRead0(Connection.java:993)
	at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:318)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:304)
	at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:266)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:318)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:304)
	at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:318)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:304)
	at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:276)
	at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:263)
	at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:318)
	at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:304)
	at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
	at io.netty.channel.epoll.AbstractEpollStreamChannel$EpollStreamUnsafe.epollInReady(AbstractEpollStreamChannel.java:823)
	at io.netty.channel.epoll.EpollEventLoop.processReady(EpollEventLoop.java:339)
	at io.netty.channel.epoll.EpollEventLoop.run(EpollEventLoop.java:255)
	at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:112)
	at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
	at java.lang.Thread.run(Thread.java:745)
Caused by: com.datastax.driver.core.exceptions.UnavailableException: Not enough replicas available for query at consistency LOCAL_QUORUM (2 required but only 1 alive)
	at com.datastax.driver.core.Responses$Error$1.decode(Responses.java:50)
	at com.datastax.driver.core.Responses$Error$1.decode(Responses.java:37)
	at com.datastax.driver.core.Message$ProtocolDecoder.decode(Message.java:289)
	at com.datastax.driver.core.Message$ProtocolDecoder.decode(Message.java:269)
	at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:89)
	... 13 more
INFO [2017-08-03 19:18:16,936] ({Executor task launch worker-14} Logging.scala[logInfo]:54) - Finished task 6.0 in stage 6.0 (TID 41). 1304 bytes result sent to driver
ERROR [2017-08-03 19:18:16,937] ({Executor task launch worker-15} Logging.scala[logError]:91) - Exception in task 7.0 in stage 6.0 (TID 42)
java.io.IOException: Failed to write statements to events_prod.ctr.
	at com.datastax.spark.connector.writer.TableWriter$$anonfun$writeInternal$1.apply(TableWriter.scala:207)
	at com.datastax.spark.connector.writer.TableWriter$$anonfun$writeInternal$1.apply(TableWriter.scala:175)
	at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:112)
	at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:111)
	at com.datastax.spark.connector.cql.CassandraConnector.closeResourceAfterUse(CassandraConnector.scala:145)
	at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:111)
	at com.datastax.spark.connector.writer.TableWriter.writeInternal(TableWriter.scala:175)
	at com.datastax.spark.connector.writer.TableWriter.insert(TableWriter.scala:162)
	at com.datastax.spark.connector.writer.TableWriter.write(TableWriter.scala:149)
	at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:36)
	at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:36)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
	at org.apache.spark.scheduler.Task.run(Task.scala:99)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
INFO [2017-08-03 19:18:16,939] ({task-result-getter-1} Logging.scala[logInfo]:54) - Finished task 6.0 in stage 6.0 (TID 41) in 163 ms on localhost (executor driver) (7/8)
WARN [2017-08-03 19:18:16,945] ({task-result-getter-2} Logging.scala[logWarning]:66) - Lost task 7.0 in stage 6.0 (TID 42, localhost, executor driver): java.io.IOException: Failed to write statements to events_prod.ctr.
	at com.datastax.spark.connector.writer.TableWriter$$anonfun$writeInternal$1.apply(TableWriter.scala:207)
	at com.datastax.spark.connector.writer.TableWriter$$anonfun$writeInternal$1.apply(TableWriter.scala:175)
	at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:112)
	at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:111)
	at com.datastax.spark.connector.cql.CassandraConnector.closeResourceAfterUse(CassandraConnector.scala:145)
	at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:111)
	at com.datastax.spark.connector.writer.TableWriter.writeInternal(TableWriter.scala:175)
	at com.datastax.spark.connector.writer.TableWriter.insert(TableWriter.scala:162)
	at com.datastax.spark.connector.writer.TableWriter.write(TableWriter.scala:149)
	at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:36)
	at com.datastax.spark.connector.RDDFunctions$$anonfun$saveToCassandra$1.apply(RDDFunctions.scala:36)
	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:87)
	at org.apache.spark.scheduler.Task.run(Task.scala:99)
	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:282)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment.