/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.streams;

import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.processor.AbstractProcessor;
import org.apache.kafka.streams.processor.To;

import java.util.Properties;

public class ProcessorAPIExample {

    public static void main(String[] args) {
        final Properties properties = new Properties();
        properties.setProperty(StreamsConfig.APPLICATION_ID_CONFIG, "test-application");
        properties.setProperty(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        properties.setProperty(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
        properties.setProperty(StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());

        // Wire up the processor topology: source -> filter -> mapValues -> sink
        final Topology topology = new Topology();
        topology.addSource("SourceTopicProcessor", "input");
        topology.addProcessor("FilteringProcessor", FilterProcessor::new, "SourceTopicProcessor");
        topology.addProcessor("MappingProcessor", MapValuesProcessor::new, "FilteringProcessor");
        topology.addSink("SinkProcessor", "output", "MappingProcessor");
        System.out.println(topology.describe());

        final KafkaStreams streams = new KafkaStreams(topology, properties);
        streams.start();
    }
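
    // Note (assumption, not part of the original gist): a production application
    // would typically also close the streams instance on shutdown, e.g. by adding
    //   Runtime.getRuntime().addShutdownHook(new Thread(streams::close));
    // inside main() after the KafkaStreams instance is created.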

    // Forwards only records whose value ends with "FOO"; everything else is dropped.
    static class FilterProcessor extends AbstractProcessor<String, String> {
        @Override
        public void process(String key, String value) {
            if (value.endsWith("FOO")) {
                context().forward(key, value);
            }
        }
    }

    // Truncates each value to its first three characters before forwarding.
    static class MapValuesProcessor extends AbstractProcessor<String, String> {
        @Override
        public void process(String key, String value) {
            context().forward(key, value.substring(0, 3));
            // The Processor API also allows forwarding key-value pairs
            // to arbitrary child nodes, e.g.:
            //context().forward(key, value.substring(0, 3), To.child("some node"));
        }
    }
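
    // A minimal illustrative sketch (not part of the original gist): when a node
    // has several named children, To.child(...) routes a record to one specific
    // downstream node instead of broadcasting to all of them. The child name
    // "SinkProcessor" is only an assumption here; it must match a child node that
    // is actually wired to this processor in the topology.
    static class RoutingProcessor extends AbstractProcessor<String, String> {
        @Override
        public void process(String key, String value) {
            context().forward(key, value, To.child("SinkProcessor"));
        }
    }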
}
martin-g commented May 1, 2019

Is this a working example?
The builder variable is not constructed anywhere before it is used in lines 45-46.

nicodom commented Sep 23, 2019

System.out.println(topology.describe());
final KafkaStreams streams = new KafkaStreams(topology, properties);
