Skip to content

Instantly share code, notes, and snippets.

@smccarthy-ie
Created March 3, 2020 15:41
Show Gist options
  • Save smccarthy-ie/185cabaa1a84585698174b1cf132c429 to your computer and use it in GitHub Desktop.
Service registry log file from apicurio/apicurio-registry-streams image
$ oc logs service-registry-1-8vxsn
exec java -Dquarkus.http.host=0.0.0.0 -Djava.util.logging.manager=org.jboss.logmanager.LogManager -javaagent:/opt/agent-bond/agent-bond.jar=jmx_exporter{{9779:/opt/agent-bond/jmx_exporter_config.yml}} -Xmx650m -XX:ParallelGCThreads=1 -XX:ConcGCThreads=1 -Djava.util.concurrent.ForkJoinPool.common.parallelism=1 -XX:CICompilerCount=2 -XX:+UseParallelGC -XX:GCTimeRatio=4 -XX:AdaptiveSizePolicyWeight=90 -XX:MinHeapFreeRatio=20 -XX:MaxHeapFreeRatio=40 -XX:+ExitOnOutOfMemoryError -cp . -jar /deployments/apicurio-registry-storage-streams-1.1.2-SNAPSHOT-runner.jar
2020-03-03 15:15:18,256 WARN [io.qua.config] (main) Unrecognized configuration key "quarkus.datasource.username" was provided; it will be ignored
2020-03-03 15:15:18,256 WARN [io.qua.config] (main) Unrecognized configuration key "quarkus.datasource.driver" was provided; it will be ignored
2020-03-03 15:15:18,257 WARN [io.qua.config] (main) Unrecognized configuration key "quarkus.datasource.url" was provided; it will be ignored
2020-03-03 15:15:18,257 WARN [io.qua.config] (main) Unrecognized configuration key "quarkus.datasource.password" was provided; it will be ignored
2020-03-03 15:15:18,257 WARN [io.qua.config] (main) Unrecognized configuration key "quarkus.hibernate-orm.database.generation" was provided; it will be ignored
2020-03-03 15:15:18,903 INFO [org.apa.kaf.con.jso.JsonConverterConfig] (main) JsonConverterConfig values:
converter.type = key
schemas.cache.size = 0
schemas.enable = true
2020-03-03 15:15:20,880 INFO [io.api.reg.str.StreamsRegistryConfiguration] (main) Application server gRPC: 'localhost:9000'
2020-03-03 15:15:21,002 INFO [org.apa.kaf.str.StreamsConfig] (main) StreamsConfig values:
application.id = my-kafka-streams-app
application.server = localhost:9000
bootstrap.servers = [my-cluster-kafka-bootstrap:9092]
buffered.records.per.partition = 1000
cache.max.bytes.buffering = 10485760
client.id =
commit.interval.ms = 100
connections.max.idle.ms = 540000
default.deserialization.exception.handler = class org.apache.kafka.streams.errors.LogAndFailExceptionHandler
default.key.serde = class org.apache.kafka.common.serialization.Serdes$ByteArraySerde
default.production.exception.handler = class org.apache.kafka.streams.errors.DefaultProductionExceptionHandler
default.timestamp.extractor = class org.apache.kafka.streams.processor.FailOnInvalidTimestamp
default.value.serde = class org.apache.kafka.common.serialization.Serdes$ByteArraySerde
max.task.idle.ms = 0
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
num.standby.replicas = 1
num.stream.threads = 2
partition.grouper = class org.apache.kafka.streams.processor.DefaultPartitionGrouper
poll.ms = 100
processing.guarantee = exactly_once
receive.buffer.bytes = 32768
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
replication.factor = 1
request.timeout.ms = 40000
retries = 0
retry.backoff.ms = 100
rocksdb.config.setter = null
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
state.cleanup.delay.ms = 600000
state.dir = /tmp/kafka-streams
topology.optimization = none
upgrade.from = null
windowstore.changelog.additional.retention.ms = 86400000
2020-03-03 15:15:21,152 INFO [org.apa.kaf.cli.adm.AdminClientConfig] (main) AdminClientConfig values:
bootstrap.servers = [my-cluster-kafka-bootstrap:9092]
client.dns.lookup = default
client.id = my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-admin
connections.max.idle.ms = 300000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
receive.buffer.bytes = 65536
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 120000
retries = 5
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = https
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
2020-03-03 15:15:21,201 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka version: 2.3.1
2020-03-03 15:15:21,201 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka commitId: 18a913733fb71c01
2020-03-03 15:15:21,201 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka startTimeMs: 1583248521200
2020-03-03 15:15:21,205 INFO [org.apa.kaf.str.pro.int.StreamThread] (main) stream-thread [my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-1] Creating restore consumer client
2020-03-03 15:15:21,210 INFO [org.apa.kaf.cli.con.ConsumerConfig] (main) ConsumerConfig values:
allow.auto.create.topics = true
auto.commit.interval.ms = 5000
auto.offset.reset = none
bootstrap.servers = [my-cluster-kafka-bootstrap:9092]
check.crcs = true
client.dns.lookup = default
client.id = my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-1-restore-consumer
client.rack =
connections.max.idle.ms = 540000
default.api.timeout.ms = 60000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id = null
group.instance.id = null
heartbeat.interval.ms = 3000
interceptor.classes = []
internal.leave.group.on.close = false
isolation.level = read_committed
key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 1000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor]
receive.buffer.bytes = 65536
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
session.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = https
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
2020-03-03 15:15:21,255 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka version: 2.3.1
2020-03-03 15:15:21,255 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka commitId: 18a913733fb71c01
2020-03-03 15:15:21,255 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka startTimeMs: 1583248521255
2020-03-03 15:15:21,266 INFO [org.apa.kaf.str.pro.int.StreamThread] (main) stream-thread [my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-1] Creating consumer client
2020-03-03 15:15:21,267 INFO [org.apa.kaf.cli.con.ConsumerConfig] (main) ConsumerConfig values:
allow.auto.create.topics = true
auto.commit.interval.ms = 5000
auto.offset.reset = earliest
bootstrap.servers = [my-cluster-kafka-bootstrap:9092]
check.crcs = true
client.dns.lookup = default
client.id = my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-1-consumer
client.rack =
connections.max.idle.ms = 540000
default.api.timeout.ms = 60000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id = my-kafka-streams-app
group.instance.id = null
heartbeat.interval.ms = 3000
interceptor.classes = []
internal.leave.group.on.close = false
isolation.level = read_committed
key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 1000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [org.apache.kafka.streams.processor.internals.StreamsPartitionAssignor]
receive.buffer.bytes = 65536
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
session.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = https
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
2020-03-03 15:15:21,290 WARN [org.apa.kaf.cli.con.ConsumerConfig] (main) The configuration 'admin.retries' was supplied but isn't a known config.
2020-03-03 15:15:21,290 WARN [org.apa.kaf.cli.con.ConsumerConfig] (main) The configuration 'admin.retry.backoff.ms' was supplied but isn't a known config.
2020-03-03 15:15:21,290 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka version: 2.3.1
2020-03-03 15:15:21,290 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka commitId: 18a913733fb71c01
2020-03-03 15:15:21,290 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka startTimeMs: 1583248521290
2020-03-03 15:15:21,293 INFO [org.apa.kaf.str.pro.int.StreamThread] (main) stream-thread [my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-2] Creating restore consumer client
2020-03-03 15:15:21,293 INFO [org.apa.kaf.cli.con.ConsumerConfig] (main) ConsumerConfig values:
allow.auto.create.topics = true
auto.commit.interval.ms = 5000
auto.offset.reset = none
bootstrap.servers = [my-cluster-kafka-bootstrap:9092]
check.crcs = true
client.dns.lookup = default
client.id = my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-2-restore-consumer
client.rack =
connections.max.idle.ms = 540000
default.api.timeout.ms = 60000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id = null
group.instance.id = null
heartbeat.interval.ms = 3000
interceptor.classes = []
internal.leave.group.on.close = false
isolation.level = read_committed
key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 1000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [class org.apache.kafka.clients.consumer.RangeAssignor]
receive.buffer.bytes = 65536
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
session.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = https
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
2020-03-03 15:15:21,297 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka version: 2.3.1
2020-03-03 15:15:21,297 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka commitId: 18a913733fb71c01
2020-03-03 15:15:21,297 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka startTimeMs: 1583248521297
2020-03-03 15:15:21,299 INFO [org.apa.kaf.str.pro.int.StreamThread] (main) stream-thread [my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-2] Creating consumer client
2020-03-03 15:15:21,299 INFO [org.apa.kaf.cli.con.ConsumerConfig] (main) ConsumerConfig values:
allow.auto.create.topics = true
auto.commit.interval.ms = 5000
auto.offset.reset = earliest
bootstrap.servers = [my-cluster-kafka-bootstrap:9092]
check.crcs = true
client.dns.lookup = default
client.id = my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-2-consumer
client.rack =
connections.max.idle.ms = 540000
default.api.timeout.ms = 60000
enable.auto.commit = false
exclude.internal.topics = true
fetch.max.bytes = 52428800
fetch.max.wait.ms = 500
fetch.min.bytes = 1
group.id = my-kafka-streams-app
group.instance.id = null
heartbeat.interval.ms = 3000
interceptor.classes = []
internal.leave.group.on.close = false
isolation.level = read_committed
key.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
max.partition.fetch.bytes = 1048576
max.poll.interval.ms = 300000
max.poll.records = 1000
metadata.max.age.ms = 300000
metric.reporters = []
metrics.num.samples = 2
metrics.recording.level = INFO
metrics.sample.window.ms = 30000
partition.assignment.strategy = [org.apache.kafka.streams.processor.internals.StreamsPartitionAssignor]
receive.buffer.bytes = 65536
reconnect.backoff.max.ms = 1000
reconnect.backoff.ms = 50
request.timeout.ms = 30000
retry.backoff.ms = 100
sasl.client.callback.handler.class = null
sasl.jaas.config = null
sasl.kerberos.kinit.cmd = /usr/bin/kinit
sasl.kerberos.min.time.before.relogin = 60000
sasl.kerberos.service.name = null
sasl.kerberos.ticket.renew.jitter = 0.05
sasl.kerberos.ticket.renew.window.factor = 0.8
sasl.login.callback.handler.class = null
sasl.login.class = null
sasl.login.refresh.buffer.seconds = 300
sasl.login.refresh.min.period.seconds = 60
sasl.login.refresh.window.factor = 0.8
sasl.login.refresh.window.jitter = 0.05
sasl.mechanism = GSSAPI
security.protocol = PLAINTEXT
send.buffer.bytes = 131072
session.timeout.ms = 10000
ssl.cipher.suites = null
ssl.enabled.protocols = [TLSv1.2, TLSv1.1, TLSv1]
ssl.endpoint.identification.algorithm = https
ssl.key.password = null
ssl.keymanager.algorithm = SunX509
ssl.keystore.location = null
ssl.keystore.password = null
ssl.keystore.type = JKS
ssl.protocol = TLS
ssl.provider = null
ssl.secure.random.implementation = null
ssl.trustmanager.algorithm = PKIX
ssl.truststore.location = null
ssl.truststore.password = null
ssl.truststore.type = JKS
value.deserializer = class org.apache.kafka.common.serialization.ByteArrayDeserializer
2020-03-03 15:15:21,304 WARN [org.apa.kaf.cli.con.ConsumerConfig] (main) The configuration 'admin.retries' was supplied but isn't a known config.
2020-03-03 15:15:21,332 WARN [org.apa.kaf.cli.con.ConsumerConfig] (main) The configuration 'admin.retry.backoff.ms' was supplied but isn't a known config.
2020-03-03 15:15:21,332 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka version: 2.3.1
2020-03-03 15:15:21,332 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka commitId: 18a913733fb71c01
2020-03-03 15:15:21,332 INFO [org.apa.kaf.com.uti.AppInfoParser] (main) Kafka startTimeMs: 1583248521332
2020-03-03 15:15:22,195 INFO [org.apa.kaf.str.KafkaStreams] (main) stream-client [my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac] State transition from CREATED to REBALANCING
2020-03-03 15:15:22,235 INFO [org.apa.kaf.str.pro.int.StreamThread] (my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-1) stream-thread [my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-1] Starting
2020-03-03 15:15:22,236 INFO [org.apa.kaf.str.pro.int.StreamThread] (my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-1) stream-thread [my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-1] State transition from CREATED to STARTING
2020-03-03 15:15:22,236 INFO [org.apa.kaf.cli.con.KafkaConsumer] (my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-1) [Consumer clientId=my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-1-consumer, groupId=my-kafka-streams-app] Subscribed to pattern: 'global-id-topic|storage-topic'
2020-03-03 15:15:22,245 INFO [org.apa.kaf.str.pro.int.StreamThread] (my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-2) stream-thread [my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-2] Starting
2020-03-03 15:15:22,245 INFO [org.apa.kaf.str.pro.int.StreamThread] (my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-2) stream-thread [my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-2] State transition from CREATED to STARTING
2020-03-03 15:15:22,245 INFO [org.apa.kaf.cli.con.KafkaConsumer] (my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-2) [Consumer clientId=my-kafka-streams-app-41b77ff2-0a41-434e-8099-58deab9a1cac-StreamThread-2-consumer, groupId=my-kafka-streams-app] Subscribed to pattern: 'global-id-topic|storage-topic'
2020-03-03 15:15:22,259 INFO [io.quarkus] (main) apicurio-registry-storage-streams 1.1.2-SNAPSHOT (running on Quarkus 1.2.1.Final) started in 4.574s. Listening on: http://0.0.0.0:8080
2020-03-03 15:15:22,259 INFO [io.quarkus] (main) Profile prod activated.
2020-03-03 15:15:22,259 INFO [io.quarkus] (main) Installed features: [cdi, resteasy, resteasy-jackson, servlet, smallrye-health, smallrye-metrics]
2020-03-03 15:15:28,961 INFO [io.api.reg.sto.RegistryStorageProducer] (vert.x-worker-thread-1) Using RegistryStore: io.apicurio.registry.streams.StreamsRegistryStorage_ClientProxy
org.apache.kafka.streams.errors.InvalidStateStoreException: StreamsMetadata is currently unavailable. This can occur during rebalance operations.
at io.apicurio.registry.streams.diservice.DistributedService.allServices(DistributedService.java:141)
at io.apicurio.registry.streams.diservice.DistributedService.allServicesStream(DistributedService.java:154)
at io.apicurio.registry.streams.diservice.DistributedAsyncBiFunctionService.apply(DistributedAsyncBiFunctionService.java:42)
at io.apicurio.registry.streams.StreamsRegistryConfiguration_ProducerMethod_stateService_2ec690603bbb29d1b2064b16d42e48a389d2f96d_ClientProxy.apply(StreamsRegistryConfiguration_ProducerMethod_stateService_2ec690603bbb29d1b2064b16d42e48a389d2f96d_ClientProxy.zig:44)
at io.apicurio.registry.streams.StreamsRegistryStorage.isReady(StreamsRegistryStorage.java:203)
at io.apicurio.registry.streams.StreamsRegistryStorage_Subclass.isReady$$superaccessor31(StreamsRegistryStorage_Subclass.zig:14247)
at io.apicurio.registry.streams.StreamsRegistryStorage_Subclass$$function$$34.apply(StreamsRegistryStorage_Subclass$$function$$34.zig:47)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:54)
at io.smallrye.metrics.interceptors.TimedInterceptor.timedCallable(TimedInterceptor.java:95)
at io.smallrye.metrics.interceptors.TimedInterceptor.timedMethod(TimedInterceptor.java:70)
at io.smallrye.metrics.interceptors.TimedInterceptor_Bean.intercept(TimedInterceptor_Bean.zig:259)
at io.quarkus.arc.impl.InitializedInterceptor.intercept(InitializedInterceptor.java:79)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:50)
at io.smallrye.metrics.interceptors.ConcurrentGaugeInterceptor.concurrentCallable(ConcurrentGaugeInterceptor.java:96)
at io.smallrye.metrics.interceptors.ConcurrentGaugeInterceptor.countedMethod(ConcurrentGaugeInterceptor.java:69)
at io.smallrye.metrics.interceptors.ConcurrentGaugeInterceptor_Bean.intercept(ConcurrentGaugeInterceptor_Bean.zig:215)
at io.quarkus.arc.impl.InitializedInterceptor.intercept(InitializedInterceptor.java:79)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:50)
at io.smallrye.metrics.interceptors.CountedInterceptor.countedCallable(CountedInterceptor.java:95)
at io.smallrye.metrics.interceptors.CountedInterceptor.countedMethod(CountedInterceptor.java:70)
at io.smallrye.metrics.interceptors.CountedInterceptor_Bean.intercept(CountedInterceptor_Bean.zig:325)
at io.quarkus.arc.impl.InitializedInterceptor.intercept(InitializedInterceptor.java:79)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:50)
at io.apicurio.registry.metrics.PersistenceTimeoutReadinessInterceptor.intercept(PersistenceTimeoutReadinessInterceptor.java:27)
at io.apicurio.registry.metrics.PersistenceTimeoutReadinessInterceptor_Bean.intercept(PersistenceTimeoutReadinessInterceptor_Bean.zig:189)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:50)
at io.apicurio.registry.metrics.PersistenceExceptionLivenessInterceptor.intercept(PersistenceExceptionLivenessInterceptor.java:48)
at io.apicurio.registry.metrics.PersistenceExceptionLivenessInterceptor_Bean.intercept(PersistenceExceptionLivenessInterceptor_Bean.zig:190)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.perform(AroundInvokeInvocationContext.java:41)
at io.quarkus.arc.impl.InvocationContexts.performAroundInvoke(InvocationContexts.java:32)
at io.apicurio.registry.streams.StreamsRegistryStorage_Subclass.isReady(StreamsRegistryStorage_Subclass.zig:16534)
at io.apicurio.registry.streams.StreamsRegistryStorage_ClientProxy.isReady(StreamsRegistryStorage_ClientProxy.zig:1034)
at io.apicurio.registry.storage.RegistryStorageProducer_ProducerMethod_realImpl_cf1c876861dd1c25dca504d30a12bfedeafd47bd_ClientProxy.isReady(RegistryStorageProducer_ProducerMethod_realImpl_cf1c876861dd1c25dca504d30a12bfedeafd47bd_ClientProxy.zig:745)
at io.apicurio.registry.metrics.PersistenceSimpleReadinessCheck.test(PersistenceSimpleReadinessCheck.java:33)
at io.apicurio.registry.metrics.PersistenceSimpleReadinessCheck.call(PersistenceSimpleReadinessCheck.java:44)
at io.apicurio.registry.metrics.PersistenceSimpleReadinessCheck_ClientProxy.call(PersistenceSimpleReadinessCheck_ClientProxy.zig:176)
at io.smallrye.health.SmallRyeHealthReporter.jsonObject(SmallRyeHealthReporter.java:154)
at io.smallrye.health.SmallRyeHealthReporter.fillCheck(SmallRyeHealthReporter.java:141)
at io.smallrye.health.SmallRyeHealthReporter.processChecks(SmallRyeHealthReporter.java:129)
at io.smallrye.health.SmallRyeHealthReporter.getHealth(SmallRyeHealthReporter.java:107)
at io.smallrye.health.SmallRyeHealthReporter.getReadiness(SmallRyeHealthReporter.java:97)
at io.smallrye.health.SmallRyeHealthReporter_ClientProxy.getReadiness(SmallRyeHealthReporter_ClientProxy.zig:101)
at io.quarkus.smallrye.health.runtime.SmallRyeReadinessHandler.handle(SmallRyeReadinessHandler.java:40)
at io.quarkus.smallrye.health.runtime.SmallRyeReadinessHandler.handle(SmallRyeReadinessHandler.java:32)
at io.quarkus.vertx.http.runtime.ResumeHandler.handle(ResumeHandler.java:19)
at io.quarkus.vertx.http.runtime.ResumeHandler.handle(ResumeHandler.java:6)
at io.vertx.ext.web.impl.BlockingHandlerDecorator.lambda$handle$0(BlockingHandlerDecorator.java:48)
at io.vertx.core.impl.ContextImpl.lambda$executeBlocking$2(ContextImpl.java:316)
at io.vertx.core.impl.TaskQueue.run(TaskQueue.java:76)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:748)
org.apache.kafka.streams.errors.InvalidStateStoreException: StreamsMetadata is currently unavailable. This can occur during rebalance operations.
at io.apicurio.registry.streams.diservice.DistributedService.allServices(DistributedService.java:141)
at io.apicurio.registry.streams.diservice.DistributedService.allServicesStream(DistributedService.java:154)
at io.apicurio.registry.streams.diservice.DistributedAsyncBiFunctionService.apply(DistributedAsyncBiFunctionService.java:42)
at io.apicurio.registry.streams.StreamsRegistryConfiguration_ProducerMethod_stateService_2ec690603bbb29d1b2064b16d42e48a389d2f96d_ClientProxy.apply(StreamsRegistryConfiguration_ProducerMethod_stateService_2ec690603bbb29d1b2064b16d42e48a389d2f96d_ClientProxy.zig:44)
at io.apicurio.registry.streams.StreamsRegistryStorage.isReady(StreamsRegistryStorage.java:203)
at io.apicurio.registry.streams.StreamsRegistryStorage_Subclass.isReady$$superaccessor31(StreamsRegistryStorage_Subclass.zig:14247)
at io.apicurio.registry.streams.StreamsRegistryStorage_Subclass$$function$$34.apply(StreamsRegistryStorage_Subclass$$function$$34.zig:47)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:54)
at io.smallrye.metrics.interceptors.TimedInterceptor.timedCallable(TimedInterceptor.java:95)
at io.smallrye.metrics.interceptors.TimedInterceptor.timedMethod(TimedInterceptor.java:70)
at io.smallrye.metrics.interceptors.TimedInterceptor_Bean.intercept(TimedInterceptor_Bean.zig:259)
at io.quarkus.arc.impl.InitializedInterceptor.intercept(InitializedInterceptor.java:79)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:50)
at io.smallrye.metrics.interceptors.ConcurrentGaugeInterceptor.concurrentCallable(ConcurrentGaugeInterceptor.java:96)
at io.smallrye.metrics.interceptors.ConcurrentGaugeInterceptor.countedMethod(ConcurrentGaugeInterceptor.java:69)
at io.smallrye.metrics.interceptors.ConcurrentGaugeInterceptor_Bean.intercept(ConcurrentGaugeInterceptor_Bean.zig:215)
at io.quarkus.arc.impl.InitializedInterceptor.intercept(InitializedInterceptor.java:79)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:50)
at io.smallrye.metrics.interceptors.CountedInterceptor.countedCallable(CountedInterceptor.java:95)
at io.smallrye.metrics.interceptors.CountedInterceptor.countedMethod(CountedInterceptor.java:70)
at io.smallrye.metrics.interceptors.CountedInterceptor_Bean.intercept(CountedInterceptor_Bean.zig:325)
at io.quarkus.arc.impl.InitializedInterceptor.intercept(InitializedInterceptor.java:79)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:50)
at io.apicurio.registry.metrics.PersistenceTimeoutReadinessInterceptor.intercept(PersistenceTimeoutReadinessInterceptor.java:27)
at io.apicurio.registry.metrics.PersistenceTimeoutReadinessInterceptor_Bean.intercept(PersistenceTimeoutReadinessInterceptor_Bean.zig:189)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:50)
at io.apicurio.registry.metrics.PersistenceExceptionLivenessInterceptor.intercept(PersistenceExceptionLivenessInterceptor.java:48)
at io.apicurio.registry.metrics.PersistenceExceptionLivenessInterceptor_Bean.intercept(PersistenceExceptionLivenessInterceptor_Bean.zig:190)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.perform(AroundInvokeInvocationContext.java:41)
at io.quarkus.arc.impl.InvocationContexts.performAroundInvoke(InvocationContexts.java:32)
at io.apicurio.registry.streams.StreamsRegistryStorage_Subclass.isReady(StreamsRegistryStorage_Subclass.zig:16534)
at io.apicurio.registry.streams.StreamsRegistryStorage_ClientProxy.isReady(StreamsRegistryStorage_ClientProxy.zig:1034)
at io.apicurio.registry.storage.RegistryStorageProducer_ProducerMethod_realImpl_cf1c876861dd1c25dca504d30a12bfedeafd47bd_ClientProxy.isReady(RegistryStorageProducer_ProducerMethod_realImpl_cf1c876861dd1c25dca504d30a12bfedeafd47bd_ClientProxy.zig:745)
at io.apicurio.registry.metrics.PersistenceSimpleReadinessCheck.test(PersistenceSimpleReadinessCheck.java:33)
at io.apicurio.registry.metrics.PersistenceSimpleReadinessCheck.call(PersistenceSimpleReadinessCheck.java:44)
at io.apicurio.registry.metrics.PersistenceSimpleReadinessCheck_ClientProxy.call(PersistenceSimpleReadinessCheck_ClientProxy.zig:176)
at io.smallrye.health.SmallRyeHealthReporter.jsonObject(SmallRyeHealthReporter.java:154)
at io.smallrye.health.SmallRyeHealthReporter.fillCheck(SmallRyeHealthReporter.java:141)
at io.smallrye.health.SmallRyeHealthReporter.processChecks(SmallRyeHealthReporter.java:129)
at io.smallrye.health.SmallRyeHealthReporter.getHealth(SmallRyeHealthReporter.java:107)
at io.smallrye.health.SmallRyeHealthReporter.getReadiness(SmallRyeHealthReporter.java:97)
at io.smallrye.health.SmallRyeHealthReporter_ClientProxy.getReadiness(SmallRyeHealthReporter_ClientProxy.zig:101)
at io.quarkus.smallrye.health.runtime.SmallRyeReadinessHandler.handle(SmallRyeReadinessHandler.java:40)
at io.quarkus.smallrye.health.runtime.SmallRyeReadinessHandler.handle(SmallRyeReadinessHandler.java:32)
at io.quarkus.vertx.http.runtime.ResumeHandler.handle(ResumeHandler.java:19)
at io.quarkus.vertx.http.runtime.ResumeHandler.handle(ResumeHandler.java:6)
at io.vertx.ext.web.impl.BlockingHandlerDecorator.lambda$handle$0(BlockingHandlerDecorator.java:48)
at io.vertx.core.impl.ContextImpl.lambda$executeBlocking$2(ContextImpl.java:316)
at io.vertx.core.impl.TaskQueue.run(TaskQueue.java:76)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:748)
org.apache.kafka.streams.errors.InvalidStateStoreException: StreamsMetadata is currently unavailable. This can occur during rebalance operations.
at io.apicurio.registry.streams.diservice.DistributedService.allServices(DistributedService.java:141)
at io.apicurio.registry.streams.diservice.DistributedService.allServicesStream(DistributedService.java:154)
at io.apicurio.registry.streams.diservice.DistributedAsyncBiFunctionService.apply(DistributedAsyncBiFunctionService.java:42)
at io.apicurio.registry.streams.StreamsRegistryConfiguration_ProducerMethod_stateService_2ec690603bbb29d1b2064b16d42e48a389d2f96d_ClientProxy.apply(StreamsRegistryConfiguration_ProducerMethod_stateService_2ec690603bbb29d1b2064b16d42e48a389d2f96d_ClientProxy.zig:44)
at io.apicurio.registry.streams.StreamsRegistryStorage.isReady(StreamsRegistryStorage.java:203)
at io.apicurio.registry.streams.StreamsRegistryStorage_Subclass.isReady$$superaccessor31(StreamsRegistryStorage_Subclass.zig:14247)
at io.apicurio.registry.streams.StreamsRegistryStorage_Subclass$$function$$34.apply(StreamsRegistryStorage_Subclass$$function$$34.zig:47)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:54)
at io.smallrye.metrics.interceptors.TimedInterceptor.timedCallable(TimedInterceptor.java:95)
at io.smallrye.metrics.interceptors.TimedInterceptor.timedMethod(TimedInterceptor.java:70)
at io.smallrye.metrics.interceptors.TimedInterceptor_Bean.intercept(TimedInterceptor_Bean.zig:259)
at io.quarkus.arc.impl.InitializedInterceptor.intercept(InitializedInterceptor.java:79)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:50)
at io.smallrye.metrics.interceptors.ConcurrentGaugeInterceptor.concurrentCallable(ConcurrentGaugeInterceptor.java:96)
at io.smallrye.metrics.interceptors.ConcurrentGaugeInterceptor.countedMethod(ConcurrentGaugeInterceptor.java:69)
at io.smallrye.metrics.interceptors.ConcurrentGaugeInterceptor_Bean.intercept(ConcurrentGaugeInterceptor_Bean.zig:215)
at io.quarkus.arc.impl.InitializedInterceptor.intercept(InitializedInterceptor.java:79)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:50)
at io.smallrye.metrics.interceptors.CountedInterceptor.countedCallable(CountedInterceptor.java:95)
at io.smallrye.metrics.interceptors.CountedInterceptor.countedMethod(CountedInterceptor.java:70)
at io.smallrye.metrics.interceptors.CountedInterceptor_Bean.intercept(CountedInterceptor_Bean.zig:325)
at io.quarkus.arc.impl.InitializedInterceptor.intercept(InitializedInterceptor.java:79)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:50)
at io.apicurio.registry.metrics.PersistenceTimeoutReadinessInterceptor.intercept(PersistenceTimeoutReadinessInterceptor.java:27)
at io.apicurio.registry.metrics.PersistenceTimeoutReadinessInterceptor_Bean.intercept(PersistenceTimeoutReadinessInterceptor_Bean.zig:189)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.proceed(AroundInvokeInvocationContext.java:50)
at io.apicurio.registry.metrics.PersistenceExceptionLivenessInterceptor.intercept(PersistenceExceptionLivenessInterceptor.java:48)
at io.apicurio.registry.metrics.PersistenceExceptionLivenessInterceptor_Bean.intercept(PersistenceExceptionLivenessInterceptor_Bean.zig:190)
at io.quarkus.arc.impl.InterceptorInvocation.invoke(InterceptorInvocation.java:41)
at io.quarkus.arc.impl.AroundInvokeInvocationContext.perform(AroundInvokeInvocationContext.java:41)
at io.quarkus.arc.impl.InvocationContexts.performAroundInvoke(InvocationContexts.java:32)
at io.apicurio.registry.streams.StreamsRegistryStorage_Subclass.isReady(StreamsRegistryStorage_Subclass.zig:16534)
at io.apicurio.registry.streams.StreamsRegistryStorage_ClientProxy.isReady(StreamsRegistryStorage_ClientProxy.zig:1034)
at io.apicurio.registry.storage.RegistryStorageProducer_ProducerMethod_realImpl_cf1c876861dd1c25dca504d30a12bfedeafd47bd_ClientProxy.isReady(RegistryStorageProducer_ProducerMethod_realImpl_cf1c876861dd1c25dca504d30a12bfedeafd47bd_ClientProxy.zig:745)
at io.apicurio.registry.metrics.PersistenceSimpleReadinessCheck.test(PersistenceSimpleReadinessCheck.java:33)
at io.apicurio.registry.metrics.PersistenceSimpleReadinessCheck.call(PersistenceSimpleReadinessCheck.java:44)
at io.apicurio.registry.metrics.PersistenceSimpleReadinessCheck_ClientProxy.call(PersistenceSimpleReadinessCheck_ClientProxy.zig:176)
at io.smallrye.health.SmallRyeHealthReporter.jsonObject(SmallRyeHealthReporter.java:154)
at io.smallrye.health.SmallRyeHealthReporter.fillCheck(SmallRyeHealthReporter.java:141)
at io.smallrye.health.SmallRyeHealthReporter.processChecks(SmallRyeHealthReporter.java:129)
at io.smallrye.health.SmallRyeHealthReporter.getHealth(SmallRyeHealthReporter.java:107)
at io.smallrye.health.SmallRyeHealthReporter.getReadiness(SmallRyeHealthReporter.java:97)
at io.smallrye.health.SmallRyeHealthReporter_ClientProxy.getReadiness(SmallRyeHealthReporter_ClientProxy.zig:101)
at io.quarkus.smallrye.health.runtime.SmallRyeReadinessHandler.handle(SmallRyeReadinessHandler.java:40)
at io.quarkus.smallrye.health.runtime.SmallRyeReadinessHandler.handle(SmallRyeReadinessHandler.java:32)
at io.quarkus.vertx.http.runtime.ResumeHandler.handle(ResumeHandler.java:19)
at io.quarkus.vertx.http.runtime.ResumeHandler.handle(ResumeHandler.java:6)
at io.vertx.ext.web.impl.BlockingHandlerDecorator.lambda$handle$0(BlockingHandlerDecorator.java:48)
at io.vertx.core.impl.ContextImpl.lambda$executeBlocking$2(ContextImpl.java:316)
at io.vertx.core.impl.TaskQueue.run(TaskQueue.java:76)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:748)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment