Created
August 10, 2017 13:58
-
-
Save rmoff/c6b3e1e61339817116c9ab714a7b9744 to your computer and use it in GitHub Desktop.
Kafka Connect Elasticsearch Sink Connector example
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
{
  "name": "es-sink-mysql-foobar-01",
  "config": {
    "_comment_converters": "-- standard converter stuff -- this can actually go in the worker config globally --",
    "connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
    "value.converter": "io.confluent.connect.avro.AvroConverter",
    "key.converter": "io.confluent.connect.avro.AvroConverter",
    "key.converter.schema.registry.url": "http://localhost:8081",
    "value.converter.schema.registry.url": "http://localhost:8081",
    "_comment_elasticsearch": "--- Elasticsearch-specific config ---",
    "_comment_connection_url": "Elasticsearch server address",
    "connection.url": "http://localhost:9200",
    "_comment_type_name": "Elasticsearch mapping name. Gets created automatically if it doesn't exist",
    "type.name": "kafka-connect",
    "_comment_topics": "Which topic to stream data from into Elasticsearch",
    "topics": "mysql-foobar",
    "_comment_key_ignore": "If the Kafka message doesn't have a key (as is the case with JDBC source) you need to specify key.ignore=true. If you don't, you'll get an error from the Connect task: 'ConnectException: Key is used as document id and can not be null.'",
    "key.ignore": "true"
  }
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment