KnativeKafka override producer and consumer config
apiVersion: operator.serverless.openshift.io/v1alpha1
kind: KnativeKafka
metadata:
  name: knative-kafka
  namespace: knative-eventing
spec:
  # ... other spec fields omitted ...
  # Change the config-kafka-broker-data-plane ConfigMap.
  # We *MUST* specify all values, otherwise some values will differ: configs are not merged.
  config:
    config-kafka-broker-data-plane:
      config-kafka-broker-producer.properties: |
        key.serializer=org.apache.kafka.common.serialization.StringSerializer
        value.serializer=io.cloudevents.kafka.CloudEventSerializer
        acks=all
        buffer.memory=33554432
        # compression.type=snappy
        retries=2147483647
        batch.size=16384
        client.dns.lookup=use_all_dns_ips
        connections.max.idle.ms=600000
        delivery.timeout.ms=120000
        linger.ms=0
        max.block.ms=60000
        max.request.size=1048576
        partitioner.class=org.apache.kafka.clients.producer.internals.DefaultPartitioner
        receive.buffer.bytes=-1
        request.timeout.ms=2000
        enable.idempotence=false
        max.in.flight.requests.per.connection=5
        metadata.max.age.ms=300000
        # metric.reporters=""
        metrics.num.samples=2
        metrics.recording.level=INFO
        metrics.sample.window.ms=30000
        reconnect.backoff.max.ms=1000
        reconnect.backoff.ms=50
        retry.backoff.ms=100
        # transaction.timeout.ms=60000
        # transactional.id=null
      config-kafka-broker-consumer.properties: |
        key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
        value.deserializer=io.cloudevents.kafka.CloudEventDeserializer
        fetch.min.bytes=1
        heartbeat.interval.ms=3000
        max.partition.fetch.bytes=65536
        session.timeout.ms=10000
        # ssl.key.password=
        # ssl.keystore.location=
        # ssl.keystore.password=
        # ssl.truststore.location=
        # ssl.truststore.password=
        allow.auto.create.topics=true
        auto.offset.reset=latest
        client.dns.lookup=use_all_dns_ips
        connections.max.idle.ms=540000
        default.api.timeout.ms=2000
        enable.auto.commit=false
        exclude.internal.topics=true
        fetch.max.bytes=52428800
        isolation.level=read_uncommitted
        max.poll.interval.ms=300000
        max.poll.records=50
        partition.assignment.strategy=org.apache.kafka.clients.consumer.StickyAssignor
        receive.buffer.bytes=65536
        request.timeout.ms=2000
        # sasl.client.callback.handler.class=
        # sasl.jaas.config=
        # sasl.kerberos.service.name=
        # sasl.login.callback.handler.class
        # sasl.login.class
        # sasl.mechanism
        security.protocol=PLAINTEXT
        send.buffer.bytes=131072
        # ssl.enabled.protocols=
        # ssl.keystore.type=
        # ssl.protocol=
        # ssl.provider=
        auto.commit.interval.ms=5000
        check.crcs=true
        # client.rack=
        fetch.max.wait.ms=500
        # interceptor.classes=
        metadata.max.age.ms=600000
        # metric.reporters=
        # metrics.num.samples=
        # metrics.recording.level=INFO
        # metrics.sample.window.ms=
        reconnect.backoff.max.ms=1000
        retry.backoff.ms=100
        # sasl.kerberos.kinit.cmd=
        # sasl.kerberos.min.time.before.relogin=
        # sasl.kerberos.ticket.renew.jitter=
        # sasl.login.refresh.buffer.seconds=
        # sasl.login.refresh.min.period.seconds=
        # sasl.login.refresh.window.factor
        # sasl.login.refresh.window.jitter
        # security.providers
        # ssl.cipher.suites
        # ssl.endpoint.identification.algorithm
        # ssl.keymanager.algorithm
        # ssl.secure.random.implementation
        # ssl.trustmanager.algorithm
      config-kafka-broker-webclient.properties: |
        idleTimeout=10000
        maxPoolSize=100
      config-kafka-broker-httpserver.properties: |
        idleTimeout=0
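
For reference, here is a minimal sketch of what the config-kafka-broker-data-plane ConfigMap in knative-eventing could look like once the override above is reconciled. This sketch is an illustration, not part of the manifest to apply: only the ConfigMap name, namespace, and data keys come from the spec above, and the exact rendered shape is an assumption. Inspecting this ConfigMap is a quick way to confirm the override took effect.

# Illustrative sketch (assumed shape), not meant to be applied directly.
apiVersion: v1
kind: ConfigMap
metadata:
  name: config-kafka-broker-data-plane
  namespace: knative-eventing
data:
  config-kafka-broker-producer.properties: |
    key.serializer=org.apache.kafka.common.serialization.StringSerializer
    value.serializer=io.cloudevents.kafka.CloudEventSerializer
    acks=all
    # ... remaining producer keys exactly as listed in the KnativeKafka spec above ...
  config-kafka-broker-consumer.properties: |
    key.deserializer=org.apache.kafka.common.serialization.StringDeserializer
    value.deserializer=io.cloudevents.kafka.CloudEventDeserializer
    # ... remaining consumer keys exactly as listed in the KnativeKafka spec above ...
  config-kafka-broker-webclient.properties: |
    idleTimeout=10000
    maxPoolSize=100
  config-kafka-broker-httpserver.properties: |
    idleTimeout=0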