Skip to content

Instantly share code, notes, and snippets.

@lucas-dclrcq
Created October 19, 2021 07:59
Show Gist options
  • Star 0 You must be signed in to star a gist
  • Fork 0 You must be signed in to fork a gist
  • Save lucas-dclrcq/21ac903f47f3831ccbfacd61bacbc6a5 to your computer and use it in GitHub Desktop.
Spring cloud stream dlq configuration
---
# Spring Boot application configuration (indentation reconstructed — the
# original paste was flattened to column 0, which is not valid YAML).
server:
  # HTTP listen port; overridable via SERVER_PORT.
  port: ${SERVER_PORT:8083}
logging:
  # Logger levels keyed by logger name (package hierarchy).
  level:
    io:
      confluent:
        kafka: ${KAFKA_LOGGING_LEVEL:INFO}
    org:
      apache:
        kafka: ${KAFKA_LOGGING_LEVEL:INFO}
      # NOTE(review): the flattened original makes this logger's parent
      # ambiguous — placed here as "org.kafka"; confirm it was not meant to
      # be a top-level "kafka" logger under logging.level.
      kafka: ${KAFKA_LOGGING_LEVEL:INFO}
      # "org.mongodb.driver" is the MongoDB Java driver's logger name.
      mongodb:
        driver: ${MONGODB_LOGGING_LEVEL:INFO}
    # Default level for all loggers without an explicit entry.
    root: ${LOGGING_LEVEL:INFO}
  pattern:
    # Quoted: keeps the comma-separated timezone suffix a single string.
    dateformat: "yyyy-MM-dd HH:mm:ss.SSS,Europe/Paris"
management:
  endpoint:
    health:
      # Expose full health component details on the actuator health endpoint.
      show-details: always
mongock:
  # Package scanned by Mongock for MongoDB change-log (migration) classes.
  change-logs-scan-package: com.adeo.toa.backend.core.db.orders
spring:
  data:
    mongodb:
      database: ${DATABASE_NAME_ORDERS:toa-order-db}
      # NOTE(review): default database here is "toa-order-db" but the default
      # URI below targets "toa-event-db" — confirm which database is intended.
      uri: ${MONGODB_URI_ORDERS:mongodb://localhost:27017/toa-event-db}
  jackson:
    # Omit null fields when serializing JSON responses.
    default-property-inclusion: NON_NULL
  main:
    allow-bean-definition-overriding: true
  cloud:
    function:
      # NOTE(review): this function name does not match the binding names
      # below (processTrackingAmontEvents-*) — with spring.cloud.function
      # binding, the binding names are normally derived from the function
      # name. Confirm which identifier is correct.
      definition: processInternationalOrderEvent
    stream:
      # Core (binder-agnostic) binding properties: destination, group,
      # content type, native encoding/decoding.
      bindings:
        processTrackingAmontEvents-in-0:
          destination: ${TOA_EVENTS_TOPIC:adeo-uat1-europe-west1-INTERNAL-TOA-AS-FR-P1-C2-TRACKING-EVENTS-EXPORT.tracking-event-db.tracking_events}
          content-type: application/*+avro
          group: ${TOA_EXPORTER_GROUP_ID:toa--exporter-dev}
          consumer:
            # Use the Kafka deserializers directly (Avro), bypassing the
            # framework's message conversion.
            useNativeDecoding: true
        processTrackingAmontEvents-out-0:
          destination: ${ALERTS_TOPIC:adeo-uat1-europe-west1-INTERNAL-TOA-AS-FR-P1-C2-ALERT-EVENTS}
          content-type: application/*+avro
          producer:
            useNativeEncoding: true
      kafka:
        binder:
          brokers: ${BROKER_URL:localhost:9092}
          # Raw Kafka consumer client properties passed through the binder.
          consumerProperties:
            specific.avro.reader: true
            schema.registry.url: ${SCHEMA_REGISTRY_URL:http://localhost:9081}
            schema.registry.basic.auth.user.info: ${KAFKA_SCHEMA_REGISTRY_API_KEY}:${KAFKA_SCHEMA_REGISTRY_API_SECRET}
            basic.auth.credentials.source: ${AUTH_CREDENTIALS_SOURCE:#{null}}
            sasl.mechanism: ${SASL_MECHANISM:#{null}}
            security.protocol: ${SSL_PROTOCOL:#{null}}
            ssl.endpoint.identification.algorithm: ${SSL_IDENTIFICATION_ALGORITHM:#{null}}
            sasl.jaas.config: ${SASL_JAAS_CONFIG:#{null}}
            default.value.serde: io.confluent.kafka.streams.serdes.avro.SpecificAvroSerde
            value.subject.name.strategy: io.confluent.kafka.serializers.subject.RecordNameStrategy
            auto.register.schemas: ${AUTO_REGISTER_SCHEMAS:true}
            key.deserializer: org.apache.kafka.common.serialization.StringDeserializer
            value.deserializer: io.confluent.kafka.serializers.KafkaAvroDeserializer
          # Raw Kafka producer client properties passed through the binder.
          producerProperties:
            specific.avro.reader: true
            schema.registry.url: ${SCHEMA_REGISTRY_URL:http://localhost:9081}
            schema.registry.basic.auth.user.info: ${KAFKA_SCHEMA_REGISTRY_API_KEY}:${KAFKA_SCHEMA_REGISTRY_API_SECRET}
            basic.auth.credentials.source: ${AUTH_CREDENTIALS_SOURCE:#{null}}
            sasl.mechanism: ${SASL_MECHANISM:#{null}}
            security.protocol: ${SSL_PROTOCOL:#{null}}
            ssl.endpoint.identification.algorithm: ${SSL_IDENTIFICATION_ALGORITHM:#{null}}
            sasl.jaas.config: ${SASL_JAAS_CONFIG:#{null}}
            value.subject.name.strategy: io.confluent.kafka.serializers.subject.RecordNameStrategy
            auto.register.schemas: ${AUTO_REGISTER_SCHEMAS:true}
            key.serializer: org.apache.kafka.common.serialization.StringSerializer
            value.serializer: io.confluent.kafka.serializers.KafkaAvroSerializer
          # Properties applied to both consumers and producers.
          configuration:
            sasl.mechanism: ${SASL_MECHANISM:#{null}}
            security.protocol: ${SSL_PROTOCOL:#{null}}
            ssl.endpoint.identification.algorithm: ${SSL_IDENTIFICATION_ALGORITHM:#{null}}
            sasl.jaas.config: ${SASL_JAAS_CONFIG:#{null}}
            auto.offset.reset: ${KAFKA_AUTO_OFFSET_RESET:earliest}
        # Kafka-binder-specific binding properties. The DLQ settings MUST
        # live here (spring.cloud.stream.kafka.bindings.*) — under the core
        # spring.cloud.stream.bindings.* section they are silently ignored.
        bindings:
          processTrackingAmontEvents-in-0:
            consumer:
              enableDlq: true
              dlqName: ${TOA_EVENTS_DLQ_TOPIC:adeo-uat1-europe-west1-INTERNAL-TOA-AS-FR-P1-C2-TRACKING-EVENTS-EXPORT-DLQ.tracking-event-db.tracking_events}
              # Serializers for records republished to the DLQ topic.
              dlqProducerProperties:
                configuration:
                  key.serializer: org.apache.kafka.common.serialization.StringSerializer
                  value.serializer: io.confluent.kafka.serializers.KafkaAvroSerializer