Skip to content

Instantly share code, notes, and snippets.

@fragaLY
Last active July 2, 2021 12:04
Show Gist options
  • Save fragaLY/6b9afd690e9fd515ed77bb4bf0887d59 to your computer and use it in GitHub Desktop.
Spring Boot Zipkin Kafka Integration [ for 'prod' profile - config server in use ]
# Default profile: local development against a 3-broker Kafka cluster.
# Traces are shipped to Zipkin over the Kafka sender (topic "zipkin").
spring:
  application:
    name: producer-service
  cloud:
    config:
      enabled: false  # no config server locally; the prod document below re-enables it
  main:
    lazy-initialization: false
    web-application-type: servlet
    # Quoted: bare `off` is a YAML 1.1 boolean (parses as false), not the
    # Banner.Mode value "off" that Spring expects.
    banner-mode: "off"
  jackson:
    time-zone: UTC
    locale: en_US
  mvc:
    format:
      date: iso
      date-time: iso
  lifecycle:
    timeout-per-shutdown-phase: 60s  # grace period for graceful shutdown (see server.shutdown)
  zipkin:
    kafka:
      topic: zipkin
    sender:
      type: KAFKA  # send spans via Kafka instead of HTTP
  kafka:
    producer:
      # Kafka bootstrap servers are plain host:port pairs -- the protocol is
      # Kafka's own wire protocol, never http://.
      bootstrap-servers: localhost:9092, localhost:9093, localhost:9094
      key-serializer: org.apache.kafka.common.serialization.LongSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
      acks: all     # wait for all in-sync replicas (see min.insync.replicas below)
      retries: 5
      transaction-id-prefix: kafka-producer  # enables transactional sends
      properties:
        enable.idempotence: true
        compression.type: none
        max.in.flight.requests.per.connection: 1  # preserves ordering on retry
        auto.create.topics.enable: false
    template:
      default-topic: notification-event
    properties:
      min.insync.replicas: 2
    listener:
      ack-mode: manual
      missing-topics-fatal: false
server:
  port: 8083
  shutdown: graceful
  servlet:
    session:
      timeout: 60m
      cookie:
        http-only: true
    application-display-name: kafka-producer
  compression:
    enabled: true
    mime-types: "text/html, text/xml, text/plain, text/css, text/javascript, application/javascript, application/json, application/xml, image/jpeg, image/png, application/octet-stream"
  error:
    whitelabel:
      enabled: false
logging:
  level:
    root: info
    org:
      springframework:
        boot: info
        kafka: info
---
# Prod profile: pull the remaining configuration from the config server
# (fail-fast with bounded exponential-backoff retries).
spring:
  application:
    name: producer-service
  main:
    # Quoted: bare `off` parses as boolean false in YAML 1.1.
    banner-mode: "off"
  cloud:
    config:
      enabled: true
  config:
    activate:
      on-profile: prod
    # Quoted: the value contains ": " and "&", both YAML-sensitive in plain scalars.
    import: "configserver:http://config-server-prod:8088?fail-fast=true&max-attempts=3&max-interval=1500&multiplier=1.2&initial-interval=1100"
import org.jetbrains.kotlin.gradle.tasks.KotlinCompile
import org.jetbrains.kotlin.util.parseSpaceSeparatedArgs
// Build plugins: Spring Boot app packaged with Jib, written in Kotlin.
plugins {
    application
    id("org.springframework.boot") version "2.5.2"
    id("io.spring.dependency-management") version "1.0.11.RELEASE"
    kotlin("jvm") version "1.5.20"
    // Opens Spring-annotated classes for proxying (all-open compiler plugin).
    kotlin("plugin.spring") version "1.5.20"
    // Builds the container image without a Docker daemon.
    id("com.google.cloud.tools.jib") version "3.1.1"
}
// Embed build metadata (version, time) for the /actuator/info endpoint.
springBoot {
    buildInfo()
}

group = "by.vk"
version = "0.0.1"
java.sourceCompatibility = JavaVersion.VERSION_11

application {
    // Kotlin top-level main in NotificationProducer.kt compiles to this class.
    mainClass.set("com.kafka.producer.demo.NotificationProducerKt")
    applicationName = "kafka-producer"
}

repositories {
    mavenCentral()
}

// Pinned Spring Cloud BOM version, consumed by dependencyManagement below.
extra["springCloudVersion"] = "2020.0.3"

configurations.all {
    // Tomcat excluded globally: Undertow is the servlet container (see dependencies).
    exclude(group = "org.springframework.boot", module ="spring-boot-starter-tomcat")
    // Force JUnit 5 only; keep the vintage (JUnit 4) engine off the test classpath.
    exclude(group = "org.junit.vintage", module = "junit-vintage-engine")
}
dependencies {
    // Web MVC stack, running on Undertow (Tomcat excluded in configurations.all).
    implementation("org.springframework.boot:spring-boot-starter-web")
    implementation("com.fasterxml.jackson.module:jackson-module-kotlin")
    implementation("org.springframework.boot:spring-boot-starter-undertow")
    // Config server client plus Sleuth/Zipkin tracing shipped over Kafka.
    implementation("org.springframework.cloud:spring-cloud-config-client")
    implementation("org.springframework.cloud:spring-cloud-starter-sleuth")
    implementation("org.springframework.cloud:spring-cloud-sleuth-zipkin")
    implementation("org.springframework.cloud:spring-cloud-stream-binder-kafka")
    implementation("org.springframework.boot:spring-boot-starter-validation")
    implementation("org.jetbrains.kotlin:kotlin-reflect")
    implementation("org.jetbrains.kotlin:kotlin-stdlib-jdk8")
    implementation("org.springframework.kafka:spring-kafka")
    // JSON-formatted logging output.
    implementation("ch.qos.logback.contrib:logback-json-classic:0.1.5")
    implementation("ch.qos.logback.contrib:logback-jackson:0.1.5")
    testImplementation("org.springframework.boot:spring-boot-starter-test")
    // Embedded Kafka broker for integration tests.
    testImplementation("org.springframework.kafka:spring-kafka-test")
}

// Spring Cloud BOM aligns all spring-cloud-* artifact versions above.
dependencyManagement {
    imports {
        mavenBom("org.springframework.cloud:spring-cloud-dependencies:${property("springCloudVersion")}")
    }
}
// Run tests on the JUnit 5 platform (vintage engine is excluded above).
tasks.withType<Test> {
    useJUnitPlatform()
}

tasks.withType<KotlinCompile> {
    kotlinOptions {
        // Strict null-checks for JSR-305 annotations (Spring API nullability).
        freeCompilerArgs = listOf("-Xjsr305=strict")
        jvmTarget = "11"
    }
}

kotlin {
    // Tests are split into unit and integration trees under one test source set.
    sourceSets["test"].apply {
        kotlin.srcDirs("src/test/kotlin/unit", "src/test/kotlin/integration")
    }
}
// Container image constants consumed by the jib block below.
object DockerProps {
    const val BASE_IMAGE = "gcr.io/distroless/java:11"
    const val APP_PORT = "8083"   // matches server.port in application.yml
    const val DEBUG_PORT = "5083"
    const val JMX_PORT = "38083"
}

// JVM sizing for the container; referenced from the jib jvmFlags string.
object JVMProps {
    const val XMX = "512m"
    const val XMS = "128m"
    const val MAX_METASPACE_SIZE = "128m"
    const val MAX_DIRECT_MEMORY_SIZE = "256m"
    const val HEAPDUMP_PATH = "/opt/tmp/heapdump.bin"
}
// Jib image build: distroless Java 11 base, prod profile, JMX exposed
// without auth/SSL (bound to localhost via java.rmi.server.hostname).
jib {
    from {
        image = DockerProps.BASE_IMAGE
    }
    container {
        // NOTE(review): -noverify is deprecated since JDK 13 -- harmless on this
        // java-11 base image, but should be dropped when the base image is bumped.
        // NOTE(review): -Xmx/-Xms are set alongside MaxRAMPercentage/InitialRAMPercentage;
        // the explicit -Xmx/-Xms win -- confirm which sizing policy is intended.
        jvmFlags = parseSpaceSeparatedArgs("-noverify -XX:+UseContainerSupport -XX:MaxRAMPercentage=75.0 -XX:InitialRAMPercentage=50.0 -XX:+OptimizeStringConcat -XX:+UseStringDeduplication -XX:+ExitOnOutOfMemoryError -XX:+AlwaysActAsServerClassMachine -Xlog:gc -Xmx${JVMProps.XMX} -Xms${JVMProps.XMS} -XX:MaxMetaspaceSize=${JVMProps.MAX_METASPACE_SIZE} -XX:MaxDirectMemorySize=${JVMProps.MAX_DIRECT_MEMORY_SIZE} -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=${JVMProps.HEAPDUMP_PATH} -Djava.rmi.server.hostname=localhost -Dcom.sun.management.jmxremote.authenticate=false -Dcom.sun.management.jmxremote.ssl=false -Dcom.sun.management.jmxremote -Dcom.sun.management.jmxremote.port=${DockerProps.JMX_PORT} -Dcom.sun.management.jmxremote.rmi.port=${DockerProps.JMX_PORT} -Dspring.profiles.active=prod")
        ports = listOf(DockerProps.APP_PORT, DockerProps.DEBUG_PORT, DockerProps.JMX_PORT)
        labels.set(mapOf("maintainer" to "Vadzim Kavalkou <vadzim.kavalkou@gmail.com>",
            "app-name" to application.applicationName,
            "service-version" to version.toString()))
        // Fixed timestamps make image layers reproducible; USE_CURRENT_TIMESTAMP
        // deliberately trades that for a real build time.
        creationTime = "USE_CURRENT_TIMESTAMP"
    }
}
# Prod stack: config server, Postgres, a 3-broker Kafka cluster behind one
# ZooKeeper, schema registry, topic bootstrap job, Kafdrop UI, app services,
# and Zipkin backed by Elasticsearch.
version: '3.7'

services:
  config-server-prod:
    image: fragaly/config-server
    restart: on-failure

  postgres-prod:
    image: postgres:13.3-alpine
    restart: on-failure
    environment:
      - POSTGRES_USER=user
      # NOTE(review): plaintext credential committed to VCS -- move to a secret store / env file.
      - POSTGRES_PASSWORD=P@55w0rd
      # NOTE(review): dev-named database in the prod stack -- confirm intended.
      - POSTGRES_DB=notification-dev

  zookeeper-prod:
    image: confluentinc/cp-zookeeper:6.2.0
    healthcheck:
      # "stat" four-letter-word probe against the client port.
      test: echo stat | nc localhost 2181
      interval: 2s
      timeout: 2s
      retries: 15
    ports:
      - "2181:2181"  # quoted: port mappings are strings, not YAML ints
    environment:
      ZOOKEEPER_SERVER_ID: 1
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000

  kafka-prod-1:
    image: confluentinc/cp-kafka:6.2.0
    healthcheck:
      # [S]upportedKafka trick keeps egrep from matching its own process line.
      test: ps augwwx | egrep "[S]upportedKafka"
    depends_on:
      - zookeeper-prod
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper-prod:2181'
      KAFKA_LISTENERS: INTERNAL://kafka-prod-1:29092
      KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka-prod-1:29092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 3
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"

  kafka-prod-2:
    image: confluentinc/cp-kafka:6.2.0
    healthcheck:
      test: ps augwwx | egrep "[S]upportedKafka"
    depends_on:
      - zookeeper-prod
    environment:
      KAFKA_BROKER_ID: 2
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper-prod:2181'
      KAFKA_LISTENERS: INTERNAL://kafka-prod-2:29092
      KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka-prod-2:29092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 3
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"

  kafka-prod-3:
    image: confluentinc/cp-kafka:6.2.0
    healthcheck:
      test: ps augwwx | egrep "[S]upportedKafka"
    depends_on:
      - zookeeper-prod
    environment:
      KAFKA_BROKER_ID: 3
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper-prod:2181'
      KAFKA_LISTENERS: INTERNAL://kafka-prod-3:29092
      KAFKA_ADVERTISED_LISTENERS: INTERNAL://kafka-prod-3:29092
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: INTERNAL:PLAINTEXT
      KAFKA_INTER_BROKER_LISTENER_NAME: INTERNAL
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 3
      KAFKA_AUTO_CREATE_TOPICS_ENABLE: "false"

  schema-registry-prod:
    image: confluentinc/cp-schema-registry:6.2.0
    restart: always
    depends_on:
      - kafka-prod-1
    environment:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry-prod
      SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'kafka-prod-1:29092'

  # One-shot job: waits for the cluster, then pre-creates the application topics.
  # NOTE(review): topics get replication-factor 2 while min.insync.replicas=2 on
  # the producer -- losing a single replica blocks acks=all writes; confirm intended.
  kafka-setup:
    image: confluentinc/cp-kafka:6.2.0
    depends_on:
      - kafka-prod-1
      - kafka-prod-2
      - kafka-prod-3
    command: "bash -c 'echo Waiting for Kafka to be ready... && \
              cub kafka-ready -b kafka-prod-1:29092 1 20 && \
              kafka-topics --create --if-not-exists --zookeeper zookeeper-prod:2181 --partitions 3 --replication-factor 2 --topic notification-event && \
              kafka-topics --create --if-not-exists --zookeeper zookeeper-prod:2181 --partitions 3 --replication-factor 2 --topic zipkin'"
    environment:
      # Required by the image entrypoint but unused by this one-shot command.
      KAFKA_BROKER_ID: ignored
      KAFKA_ZOOKEEPER_CONNECT: ignored

  kafka-ui-prod:
    image: obsidiandynamics/kafdrop:3.27.0
    restart: on-failure
    depends_on:
      - kafka-prod-1
      - kafka-prod-2
      - kafka-prod-3
    environment:
      # Kafka brokers are host:port pairs -- no http:// scheme.
      KAFKA_BROKERCONNECT: kafka-prod-1:29092,kafka-prod-2:29092,kafka-prod-3:29092
      SERVER_SERVLET_CONTEXTPATH: /
      SCHEMAREGISTRY_CONNECT: http://schema-registry-prod:8081
      JVM_OPTS: "-Xms32M -Xmx64M"
    ports:
      - "9000:9000"

  gateway-prod:
    image: fragaly/gateway
    restart: on-failure
    depends_on:
      - config-server-prod

  producer-prod:
    image: fragaly/producer
    restart: on-failure
    depends_on:
      - schema-registry-prod
      - gateway-prod

  consumer-prod:
    image: fragaly/consumer
    restart: on-failure
    depends_on:
      - schema-registry-prod
      - gateway-prod

  web-prod:
    image: fragaly/web
    restart: on-failure
    ports:
      - "80:80"

  elasticsearch:
    image: openzipkin/zipkin-elasticsearch7:2.22.2
    container_name: elasticsearch

  zipkin:
    image: openzipkin/zipkin:2.23.2
    container_name: zipkin
    ports:
      - "9411:9411"
    environment:
      - STORAGE_TYPE=elasticsearch
      - ES_HOSTS=elasticsearch:9200
      # Kafka brokers are host:port pairs -- no http:// scheme.
      - KAFKA_BOOTSTRAP_SERVERS=kafka-prod-1:29092,kafka-prod-2:29092,kafka-prod-3:29092
    depends_on:
      - elasticsearch
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment