@ivanursul
Created April 10, 2017 08:07
kafka-streams.log
/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/bin/java -Didea.launcher.port=7537 "-Didea.launcher.bin.path=/Applications/IntelliJ IDEA CE.app/Contents/bin" -Dfile.encoding=UTF-8 -classpath "/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/charsets.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/deploy.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/ext/cldrdata.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/ext/dnsns.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/ext/jaccess.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/ext/jfxrt.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/ext/localedata.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/ext/nashorn.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/ext/sunec.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/ext/sunjce_provider.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/ext/sunpkcs11.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/ext/zipfs.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/javaws.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/jce.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/jfr.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/jfxswt.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/jsse.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/management-agent.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/plugin.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/resources.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/jre/lib/rt.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/lib/ant-javafx.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/lib/dt.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/lib/javafx-mx.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/lib/jconsole.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/lib/packager.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/lib/sa-jdi.jar:/Users/ivanursul/development/java/jdk1.8.0_66.jdk/Contents/Home/lib/tools.jar:/Users/ivanursul/git/examples/kafkastreamshelloworld/target/classes:/Users/ivanursul/.m2/repository/org/apache/kafka/kafka-streams/0.10.2.0-cp1/kafka-streams-0.10.2.0-cp1.jar:/Users/ivanursul/.m2/repository/org/apache/kafka/kafka-clients/0.10.2.0-cp1/kafka-clients-0.10.2.0-cp1.jar:/Users/ivanursul/.m2/repository/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar:/Users/ivanursul/.m2/repository/org/xerial/snappy/snappy-java/1.1.2.6/snappy-java-1.1.2.6.jar:/Users/ivanursul/.m2/repository/org/apache/kafka/connect-json/0.10.2.0-cp1/connect-json-0.10.2.0-cp1.jar:/Users/ivanursul/.m2/repository/org/apache/kafka/connect-api/0.10.2.0-cp1/connect-api-0.10.2.0-cp1.jar:/Users/ivanursul/.m2/repository/com/fasterxml/jackson/core/jackson-databind/2.8.5/jackson-databind-2.8.5.jar:/Users/ivanursul/.m2/repository/com/fasterxml/jackson/core/jackson-annotations/2.8.0/jackson-annotations-2.8.0.jar:/Users/ivanursul/.m2/repository/com/fasterxml/jackson/core/jackson-core/2.8.5/jackson-core-2.8.5.jar:/Users/ivanursul/.m2/repository/org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.jar:/Users/ivanursul/.m2/repository/org/rocksdb/rocksdbjni/5.0.1/rocksdbjni-5.0.1.jar:/Users/ivanursul/.m2/repository/org/slf4j/slf4j-log4j12/1.7.21/slf4j-log4j12-1.7.21.jar:/Users/ivanursul/.m2/repository/log4j/log4j/1.2.17/log4j-1.2.17.jar:/Applications/IntelliJ IDEA CE.app/Contents/lib/idea_rt.jar" com.intellij.rt.execution.application.AppMain org.kafka.examples.Main
0 [main] INFO org.apache.kafka.streams.StreamsConfig - StreamsConfig values:
application.id = wordcount-lambda-example
application.server =
bootstrap.servers = [localhost:9092]
buffered.records.per.partition = 1000
cache.max.bytes.buffering = 10485760
client.id =
commit.interval.ms = 30000
connections.max.idle.ms = 540000
key.serde = class org.apache.kafka.common.serialization.Serdes$StringSerde
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
num.standby.replicas = 0
num.stream.threads = 1
partition.grouper = class org.apache.kafka.streams.processor.DefaultPartitionGrouper
poll.ms = 100
receive.buffer.bytes = 32768
reconnect.backoff.ms = 50
replication.factor = 1
request.timeout.ms = 40000
retry.backoff.ms = 100
rocksdb.config.setter = null
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
state.cleanup.delay.ms = 60000
state.dir = /tmp/kafka-streams
timestamp.extractor = class org.apache.kafka.streams.processor.FailOnInvalidTimestamp
value.serde = class org.apache.kafka.common.serialization.Serdes$StringSerde
windowstore.changelog.additional.retention.ms = 86400000
zookeeper.connect =
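
[Editor's note] For reference, a StreamsConfig like the one dumped above is normally assembled from a java.util.Properties object before the application is constructed. A minimal sketch, assuming a hypothetical WordCountConfig helper (class and method names are illustrative, not taken from this log; only the four non-default values shown are set, everything else falls back to the defaults listed above):

import java.util.Properties;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsConfig;

public class WordCountConfig {
    // Builds the properties that would produce the StreamsConfig dump above
    // (Kafka Streams 0.10.2 config keys).
    public static Properties streamsProperties() {
        Properties props = new Properties();
        props.put(StreamsConfig.APPLICATION_ID_CONFIG, "wordcount-lambda-example");
        props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        // key.serde and value.serde both show Serdes$StringSerde in the dump.
        props.put(StreamsConfig.KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        props.put(StreamsConfig.VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        return props;
    }
}
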
97 [main] INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values:
auto.commit.interval.ms = 5000
auto.offset.reset = earliest
bootstrap.servers = [localhost:9092]
check.crcs = true
client.id = wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-global-restore-consumer
connections.max.idle.ms = 540000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id =
heartbeat.interval.ms = 3000
interceptor.classes = null
key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 1000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor]
receive.buffer.bytes = 65536
reconnect.backoff.ms = 50
request.timeout.ms = 305000
retry.backoff.ms = 100
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
session.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
97 [main] DEBUG org.apache.kafka.clients.consumer.KafkaConsumer - Starting the Kafka consumer
98 [main] INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values:
auto.commit.interval.ms = 5000
auto.offset.reset = earliest
bootstrap.servers = [localhost:9092]
check.crcs = true
client.id = wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-global-restore-consumer
connections.max.idle.ms = 540000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id =
heartbeat.interval.ms = 3000
interceptor.classes = null
key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 1000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor]
receive.buffer.bytes = 65536
reconnect.backoff.ms = 50
request.timeout.ms = 305000
retry.backoff.ms = 100
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
session.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
107 [main] DEBUG org.apache.kafka.clients.Metadata - Updated cluster metadata version 1 to Cluster(id = null, nodes = [localhost:9092 (id: -1 rack: null)], partitions = [])
121 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name connections-closed:
122 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name connections-created:
123 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-sent-received:
123 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-sent:
124 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-received:
124 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name select-time:
124 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name io-time:
140 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name heartbeat-latency
140 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name join-latency
141 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name sync-latency
142 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name commit-latency
149 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-fetched
149 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name records-fetched
150 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name fetch-latency
150 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name records-lag
150 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name fetch-throttle-time
154 [main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version : 0.10.2.0-cp1
154 [main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId : a7edc1a290639753
155 [main] DEBUG org.apache.kafka.clients.consumer.KafkaConsumer - Kafka consumer created
165 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name thread.wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1.commit-latency
165 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name thread.wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1.poll-latency
165 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name thread.wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1.process-latency
165 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name thread.wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1.punctuate-latency
166 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name thread.wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1.task-created
166 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name thread.wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1.task-closed
166 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name thread.wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1.skipped-records
166 [main] INFO org.apache.kafka.streams.processor.internals.StreamThread - stream-thread [StreamThread-1] Creating producer client
171 [main] INFO org.apache.kafka.clients.producer.ProducerConfig - ProducerConfig values:
acks = 1
batch.size = 16384
block.on.buffer.full = false
bootstrap.servers = [localhost:9092]
buffer.memory = 33554432
client.id = wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1-producer
compression.type = none
connections.max.idle.ms = 540000
interceptor.classes = null
key.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer
linger.ms = 100
max.block.ms = 60000
max.in.flight.requests.per.connection = 5
max.request.size = 1048576
metadata.fetch.timeout.ms = 60000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.sample.window.ms = 30000
partitioner.class = class org.apache.kafka.clients.producer.internals.DefaultPartitioner
receive.buffer.bytes = 32768
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retries = 0
retry.backoff.ms = 100
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
timeout.ms = 30000
value.serializer = class org.apache.kafka.common.serialization.ByteArraySerializer
175 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bufferpool-wait-time
177 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name buffer-exhausted-records
177 [main] DEBUG org.apache.kafka.clients.Metadata - Updated cluster metadata version 1 to Cluster(id = null, nodes = [localhost:9092 (id: -1 rack: null)], partitions = [])
177 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name connections-closed:
177 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name connections-created:
177 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-sent-received:
178 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-sent:
178 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-received:
178 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name select-time:
179 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name io-time:
182 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name batch-size
182 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name compression-rate
183 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name queue-time
183 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name request-time
183 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name produce-throttle-time
183 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name records-per-request
183 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name record-retries
184 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name errors
184 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name record-size-max
185 [main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version : 0.10.2.0-cp1
185 [kafka-producer-network-thread | wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1-producer] DEBUG org.apache.kafka.clients.producer.internals.Sender - Starting Kafka producer I/O thread.
185 [main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId : a7edc1a290639753
185 [main] DEBUG org.apache.kafka.clients.producer.KafkaProducer - Kafka producer started
185 [main] INFO org.apache.kafka.streams.processor.internals.StreamThread - stream-thread [StreamThread-1] Creating consumer client
187 [main] INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values:
auto.commit.interval.ms = 5000
auto.offset.reset = earliest
bootstrap.servers = [localhost:9092]
check.crcs = true
client.id = wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1-consumer
connections.max.idle.ms = 540000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id = wordcount-lambda-example
heartbeat.interval.ms = 3000
interceptor.classes = null
key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 1000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [org.apache.kafka.streams.processor.internals.StreamPartitionAssignor]
receive.buffer.bytes = 65536
reconnect.backoff.ms = 50
request.timeout.ms = 305000
retry.backoff.ms = 100
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
session.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
187 [main] DEBUG org.apache.kafka.clients.consumer.KafkaConsumer - Starting the Kafka consumer
187 [main] INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values:
auto.commit.interval.ms = 5000
auto.offset.reset = earliest
bootstrap.servers = [localhost:9092]
check.crcs = true
client.id = wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1-consumer
connections.max.idle.ms = 540000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id = wordcount-lambda-example
heartbeat.interval.ms = 3000
interceptor.classes = null
key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 1000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [org.apache.kafka.streams.processor.internals.StreamPartitionAssignor]
receive.buffer.bytes = 65536
reconnect.backoff.ms = 50
request.timeout.ms = 305000
retry.backoff.ms = 100
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
session.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
188 [main] DEBUG org.apache.kafka.clients.Metadata - Updated cluster metadata version 1 to Cluster(id = null, nodes = [localhost:9092 (id: -1 rack: null)], partitions = [])
188 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name connections-closed:
188 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name connections-created:
188 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-sent-received:
188 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-sent:
188 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-received:
189 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name select-time:
189 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name io-time:
192 [main] DEBUG org.apache.kafka.clients.Metadata - Updated cluster metadata version 1 to Cluster(id = null, nodes = [localhost:9092 (id: -1 rack: null)], partitions = [])
193 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name connections-closed:
193 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name connections-created:
193 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-sent-received:
193 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-sent:
193 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-received:
193 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name select-time:
194 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name io-time:
195 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name heartbeat-latency
195 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name join-latency
195 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name sync-latency
196 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name commit-latency
196 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-fetched
196 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name records-fetched
197 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name fetch-latency
197 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name records-lag
197 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name fetch-throttle-time
197 [main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version : 0.10.2.0-cp1
197 [main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId : a7edc1a290639753
197 [main] DEBUG org.apache.kafka.clients.consumer.KafkaConsumer - Kafka consumer created
197 [main] INFO org.apache.kafka.streams.processor.internals.StreamThread - stream-thread [StreamThread-1] Creating restore consumer client
198 [main] INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values:
auto.commit.interval.ms = 5000
auto.offset.reset = earliest
bootstrap.servers = [localhost:9092]
check.crcs = true
client.id = wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1-restore-consumer
connections.max.idle.ms = 540000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id =
heartbeat.interval.ms = 3000
interceptor.classes = null
key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 1000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor]
receive.buffer.bytes = 65536
reconnect.backoff.ms = 50
request.timeout.ms = 305000
retry.backoff.ms = 100
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
session.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
198 [main] DEBUG org.apache.kafka.clients.consumer.KafkaConsumer - Starting the Kafka consumer
199 [main] INFO org.apache.kafka.clients.consumer.ConsumerConfig - ConsumerConfig values:
auto.commit.interval.ms = 5000
auto.offset.reset = earliest
bootstrap.servers = [localhost:9092]
check.crcs = true
client.id = wordcount-lambda-example-de558669-c828-40ff-b19e-0dbb3b1c0f9c-StreamThread-1-restore-consumer
connections.max.idle.ms = 540000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id =
heartbeat.interval.ms = 3000
interceptor.classes = null
key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 1000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor]
receive.buffer.bytes = 65536
reconnect.backoff.ms = 50
request.timeout.ms = 305000
retry.backoff.ms = 100
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
session.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = null
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
199 [main] DEBUG org.apache.kafka.clients.Metadata - Updated cluster metadata version 1 to Cluster(id = null, nodes = [localhost:9092 (id: -1 rack: null)], partitions = [])
199 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name connections-closed:
199 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name connections-created:
199 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-sent-received:
199 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-sent:
200 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-received:
200 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name select-time:
200 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name io-time:
201 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name heartbeat-latency
201 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name join-latency
201 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name sync-latency
201 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name commit-latency
202 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-fetched
202 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name records-fetched
202 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name fetch-latency
202 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name records-lag
203 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name fetch-throttle-time
203 [main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka version : 0.10.2.0-cp1
203 [main] INFO org.apache.kafka.common.utils.AppInfoParser - Kafka commitId : a7edc1a290639753
203 [main] DEBUG org.apache.kafka.clients.consumer.KafkaConsumer - Kafka consumer created
205 [main] DEBUG org.apache.kafka.streams.KafkaStreams - Starting Kafka Stream process.
206 [main] DEBUG org.apache.kafka.clients.Metadata - Updated cluster metadata version 1 to Cluster(id = null, nodes = [localhost:9092 (id: -1 rack: null)], partitions = [])
206 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name connections-closed:
206 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name connections-created:
206 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-sent-received:
206 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-sent:
206 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name bytes-received:
206 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name select-time:
207 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name io-time:
207 [main] DEBUG org.apache.kafka.clients.Metadata - Updated cluster metadata version 1 to Cluster(id = null, nodes = [localhost:9092 (id: -1 rack: null)], partitions = [])
207 [main] DEBUG org.apache.kafka.clients.NetworkClient - Initiating connection to node -1 at localhost:9092.
225 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name node--1.bytes-sent
226 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name node--1.bytes-received
227 [main] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name node--1.latency
230 [main] DEBUG org.apache.kafka.common.network.Selector - Created socket with SO_RCVBUF = 326640, SO_SNDBUF = 146988, SO_TIMEOUT = 0 to node -1
230 [main] DEBUG org.apache.kafka.clients.NetworkClient - Completed connection to node -1. Fetching API versions.
231 [main] DEBUG org.apache.kafka.clients.NetworkClient - Initiating API versions fetch from node -1.
296 [main] DEBUG org.apache.kafka.clients.NetworkClient - Recorded API versions for node -1: (Produce(0): 0 to 2 [usable: 2], Fetch(1): 0 to 3 [usable: 3], Offsets(2): 0 to 1 [usable: 1], Metadata(3): 0 to 2 [usable: 2], LeaderAndIsr(4): 0 [usable: 0], StopReplica(5): 0 [usable: 0], UpdateMetadata(6): 0 to 3 [usable: 3], ControlledShutdown(7): 1 [usable: 1], OffsetCommit(8): 0 to 2 [usable: 2], OffsetFetch(9): 0 to 2 [usable: 2], GroupCoordinator(10): 0 [usable: 0], JoinGroup(11): 0 to 1 [usable: 1], Heartbeat(12): 0 [usable: 0], LeaveGroup(13): 0 [usable: 0], SyncGroup(14): 0 [usable: 0], DescribeGroups(15): 0 [usable: 0], ListGroups(16): 0 [usable: 0], SaslHandshake(17): 0 [usable: 0], ApiVersions(18): 0 [usable: 0], CreateTopics(19): 0 to 1 [usable: 1], DeleteTopics(20): 0 [usable: 0])
413 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name put
413 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name WordCounts-put
413 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name put-if-absent
414 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name WordCounts-put-if-absent
414 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name get
415 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name WordCounts-get
415 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name delete
416 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name WordCounts-delete
417 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name put-all
417 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name WordCounts-put-all
418 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name all
418 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name WordCounts-all
419 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name range
420 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name WordCounts-range
421 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name flush
421 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name WordCounts-flush
421 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name restore
422 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name WordCounts-restore
523 [GlobalStreamThread] INFO org.apache.kafka.streams.processor.internals.GlobalStateManagerImpl - restoring state for global store WordCounts
527 [GlobalStreamThread] DEBUG org.apache.kafka.clients.NetworkClient - Initiating connection to node -1 at localhost:9092.
529 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name node--1.bytes-sent
529 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name node--1.bytes-received
530 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name node--1.latency
530 [GlobalStreamThread] DEBUG org.apache.kafka.common.network.Selector - Created socket with SO_RCVBUF = 342972, SO_SNDBUF = 146988, SO_TIMEOUT = 0 to node -1
530 [GlobalStreamThread] DEBUG org.apache.kafka.clients.NetworkClient - Completed connection to node -1. Fetching API versions.
530 [GlobalStreamThread] DEBUG org.apache.kafka.clients.NetworkClient - Initiating API versions fetch from node -1.
532 [GlobalStreamThread] DEBUG org.apache.kafka.clients.NetworkClient - Recorded API versions for node -1: (Produce(0): 0 to 2 [usable: 2], Fetch(1): 0 to 3 [usable: 3], Offsets(2): 0 to 1 [usable: 1], Metadata(3): 0 to 2 [usable: 2], LeaderAndIsr(4): 0 [usable: 0], StopReplica(5): 0 [usable: 0], UpdateMetadata(6): 0 to 3 [usable: 3], ControlledShutdown(7): 1 [usable: 1], OffsetCommit(8): 0 to 2 [usable: 2], OffsetFetch(9): 0 to 2 [usable: 2], GroupCoordinator(10): 0 [usable: 0], JoinGroup(11): 0 to 1 [usable: 1], Heartbeat(12): 0 [usable: 0], LeaveGroup(13): 0 [usable: 0], SyncGroup(14): 0 [usable: 0], DescribeGroups(15): 0 [usable: 0], ListGroups(16): 0 [usable: 0], SaslHandshake(17): 0 [usable: 0], ApiVersions(18): 0 [usable: 0], CreateTopics(19): 0 to 1 [usable: 1], DeleteTopics(20): 0 [usable: 0])
532 [GlobalStreamThread] DEBUG org.apache.kafka.clients.NetworkClient - Sending metadata request (type=MetadataRequest, topics=) to node -1
536 [GlobalStreamThread] DEBUG org.apache.kafka.clients.Metadata - Updated cluster metadata version 2 to Cluster(id = up9fDzsERi-9mxsaTZd44A, nodes = [192.168.1.5:9092 (id: 0 rack: null)], partitions = [])
542 [GlobalStreamThread] DEBUG org.apache.kafka.clients.consumer.KafkaConsumer - Subscribed to partition(s): WordsWithCountsTopic-0
543 [GlobalStreamThread] DEBUG org.apache.kafka.clients.consumer.internals.Fetcher - Partition WordsWithCountsTopic-0 is unknown for fetching offset, wait for metadata refresh
543 [GlobalStreamThread] DEBUG org.apache.kafka.clients.NetworkClient - Initialize connection to node 0 for sending metadata request
543 [GlobalStreamThread] DEBUG org.apache.kafka.clients.NetworkClient - Initiating connection to node 0 at 192.168.1.5:9092.
544 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name node-0.bytes-sent
545 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name node-0.bytes-received
545 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name node-0.latency
546 [GlobalStreamThread] DEBUG org.apache.kafka.common.network.Selector - Created socket with SO_RCVBUF = 342972, SO_SNDBUF = 146988, SO_TIMEOUT = 0 to node 0
546 [GlobalStreamThread] DEBUG org.apache.kafka.clients.NetworkClient - Completed connection to node 0. Fetching API versions.
546 [GlobalStreamThread] DEBUG org.apache.kafka.clients.NetworkClient - Initiating API versions fetch from node 0.
548 [GlobalStreamThread] DEBUG org.apache.kafka.clients.NetworkClient - Recorded API versions for node 0: (Produce(0): 0 to 2 [usable: 2], Fetch(1): 0 to 3 [usable: 3], Offsets(2): 0 to 1 [usable: 1], Metadata(3): 0 to 2 [usable: 2], LeaderAndIsr(4): 0 [usable: 0], StopReplica(5): 0 [usable: 0], UpdateMetadata(6): 0 to 3 [usable: 3], ControlledShutdown(7): 1 [usable: 1], OffsetCommit(8): 0 to 2 [usable: 2], OffsetFetch(9): 0 to 2 [usable: 2], GroupCoordinator(10): 0 [usable: 0], JoinGroup(11): 0 to 1 [usable: 1], Heartbeat(12): 0 [usable: 0], LeaveGroup(13): 0 [usable: 0], SyncGroup(14): 0 [usable: 0], DescribeGroups(15): 0 [usable: 0], ListGroups(16): 0 [usable: 0], SaslHandshake(17): 0 [usable: 0], ApiVersions(18): 0 [usable: 0], CreateTopics(19): 0 to 1 [usable: 1], DeleteTopics(20): 0 [usable: 0])
548 [GlobalStreamThread] DEBUG org.apache.kafka.clients.NetworkClient - Sending metadata request (type=MetadataRequest, topics=WordsWithCountsTopic) to node 0
550 [GlobalStreamThread] DEBUG org.apache.kafka.clients.Metadata - Updated cluster metadata version 3 to Cluster(id = up9fDzsERi-9mxsaTZd44A, nodes = [192.168.1.5:9092 (id: 0 rack: null)], partitions = [Partition(topic = WordsWithCountsTopic, partition = 0, leader = 0, replicas = [0], isr = [0])])
554 [GlobalStreamThread] DEBUG org.apache.kafka.clients.consumer.internals.Fetcher - Handling ListOffsetResponse response for WordsWithCountsTopic-0. Fetched offset 181, timestamp -1
555 [GlobalStreamThread] DEBUG org.apache.kafka.clients.consumer.KafkaConsumer - Subscribed to partition(s): WordsWithCountsTopic-0
555 [GlobalStreamThread] DEBUG org.apache.kafka.clients.consumer.KafkaConsumer - Seeking to beginning of partition WordsWithCountsTopic-0
555 [GlobalStreamThread] DEBUG org.apache.kafka.clients.consumer.internals.Fetcher - Resetting offset for partition WordsWithCountsTopic-0 to earliest offset.
556 [GlobalStreamThread] DEBUG org.apache.kafka.clients.consumer.internals.Fetcher - Handling ListOffsetResponse response for WordsWithCountsTopic-0. Fetched offset 0, timestamp -1
557 [GlobalStreamThread] DEBUG org.apache.kafka.clients.consumer.internals.Fetcher - Sending fetch for partitions [WordsWithCountsTopic-0] to broker 192.168.1.5:9092 (id: 0 rack: null)
579 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name topic.WordsWithCountsTopic.bytes-fetched
580 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name topic.WordsWithCountsTopic.records-fetched
581 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Added sensor with name WordsWithCountsTopic-0.records-lag
582 [GlobalStreamThread] DEBUG org.apache.kafka.clients.consumer.internals.Fetcher - Ignoring fetched records for WordsWithCountsTopic-0 at offset 0 since the current position is 181
582 [GlobalStreamThread] DEBUG org.apache.kafka.clients.consumer.internals.Fetcher - Sending fetch for partitions [WordsWithCountsTopic-0] to broker 192.168.1.5:9092 (id: 0 rack: null)
Exception in thread "main" org.apache.kafka.streams.errors.StreamsException: Exception caught during initialization of GlobalStreamThread
586 [GlobalStreamThread] DEBUG org.apache.kafka.clients.consumer.KafkaConsumer - Unsubscribed all topics or patterns and assigned partitions
at org.apache.kafka.streams.processor.internals.GlobalStreamThread.initialize(GlobalStreamThread.java:173)
586 [GlobalStreamThread] DEBUG org.apache.kafka.common.metrics.Metrics - Removed sensor with name WordsWithCountsTopic-0.records-lag
at org.apache.kafka.streams.processor.internals.GlobalStreamThread.run(GlobalStreamThread.java:133)
Caused by: java.lang.NullPointerException
at org.rocksdb.RocksDB.put(RocksDB.java:488)
at org.apache.kafka.streams.state.internals.RocksDBStore.putInternal(RocksDBStore.java:246)
at org.apache.kafka.streams.state.internals.RocksDBStore.access$000(RocksDBStore.java:65)
at org.apache.kafka.streams.state.internals.RocksDBStore$1.restore(RocksDBStore.java:156)
at org.apache.kafka.streams.processor.internals.GlobalStateManagerImpl.restoreState(GlobalStateManagerImpl.java:179)
at org.apache.kafka.streams.processor.internals.GlobalStateManagerImpl.register(GlobalStateManagerImpl.java:138)
at org.apache.kafka.streams.processor.internals.AbstractProcessorContext.register(AbstractProcessorContext.java:99)
at org.apache.kafka.streams.state.internals.RocksDBStore.init(RocksDBStore.java:152)
at org.apache.kafka.streams.state.internals.MeteredKeyValueStore$7.run(MeteredKeyValueStore.java:100)
at org.apache.kafka.streams.processor.internals.StreamsMetricsImpl.measureLatencyNs(StreamsMetricsImpl.java:188)
at org.apache.kafka.streams.state.internals.MeteredKeyValueStore.init(MeteredKeyValueStore.java:131)
at org.apache.kafka.streams.state.internals.CachingKeyValueStore.init(CachingKeyValueStore.java:62)
at org.apache.kafka.streams.processor.internals.GlobalStateManagerImpl.initialize(GlobalStateManagerImpl.java:98)
at org.apache.kafka.streams.processor.internals.GlobalStateUpdateTask.initialize(GlobalStateUpdateTask.java:61)
at org.apache.kafka.streams.processor.internals.GlobalStreamThread$StateConsumer.initialize(GlobalStreamThread.java:97)
at org.apache.kafka.streams.processor.internals.GlobalStreamThread.initialize(GlobalStreamThread.java:167)
... 1 more
Process finished with exit code 1
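
[Editor's note] The root cause sits at the bottom of the trace: while restoring the global store WordCounts from WordsWithCountsTopic, the RocksDBStore restore callback forwards each changelog record to org.rocksdb.RocksDB.put, which throws a NullPointerException. RocksDB's put rejects null arguments, so a plausible trigger is a record with a null value (a delete/tombstone) in the restore topic, which this Kafka Streams version (0.10.2.0-cp1) does not appear to tolerate on the restore path. For context, a minimal topology that exercises exactly this restore step, inferred from the store and topic names in the log (the builder structure, serde choices, and main class body are assumptions, and it reuses the hypothetical WordCountConfig.streamsProperties() sketched earlier):

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.kstream.GlobalKTable;
import org.apache.kafka.streams.kstream.KStreamBuilder;

public class Main {
    public static void main(String[] args) {
        KStreamBuilder builder = new KStreamBuilder();

        // Declaring a global table makes the GlobalStreamThread restore the
        // "WordCounts" store from "WordsWithCountsTopic" on startup -- the
        // step that fails with the NullPointerException above.
        GlobalKTable<String, Long> wordCounts = builder.globalTable(
                Serdes.String(), Serdes.Long(),
                "WordsWithCountsTopic", "WordCounts");

        KafkaStreams streams = new KafkaStreams(builder, WordCountConfig.streamsProperties());
        streams.start();
    }
}

Wiping state.dir (/tmp/kafka-streams, per the config above) forces a fresh restore but replays the same changelog, so the failure recurs. If a tombstone is indeed the trigger, removing null-valued records from the topic or moving to a newer Kafka Streams release are the directions to investigate.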