Last active
January 18, 2022 05:16
-
-
Save javier/fc4b28718dc447ea95fe00796adb8f1d to your computer and use it in GitHub Desktop.
A tale of two streams workshop
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* | |
* Licensed to the Apache Software Foundation (ASF) under one | |
* or more contributor license agreements. See the NOTICE file | |
* distributed with this work for additional information | |
* regarding copyright ownership. The ASF licenses this file | |
* to you under the Apache License, Version 2.0 (the | |
* "License"); you may not use this file except in compliance | |
* with the License. You may obtain a copy of the License at | |
* | |
* http://www.apache.org/licenses/LICENSE-2.0 | |
* | |
* Unless required by applicable law or agreed to in writing, software | |
* distributed under the License is distributed on an "AS IS" BASIS, | |
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
* See the License for the specific language governing permissions and | |
* limitations under the License. | |
*/ | |
package org.myorg.quickstart; | |
import org.apache.flink.api.java.ExecutionEnvironment; | |
import org.apache.flink.api.java.operators.DataSource; | |
/** | |
* Skeleton for a Flink Batch Job. | |
* | |
* <p>For a tutorial how to write a Flink batch application, check the | |
* tutorials and examples on the <a href="https://flink.apache.org/docs/stable/">Flink Website</a>. | |
* | |
* <p>To package your application into a JAR file for execution, | |
* change the main class in the POM.xml file to this class (simply search for 'mainClass') | |
* and run 'mvn clean package' on the command line. | |
*/ | |
public class BatchJob { | |
public static void main(String[] args) throws Exception { | |
// set up the batch execution environment | |
final ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment(); | |
/* | |
* Here, you can start creating your execution plan for Flink. | |
* | |
* Start with getting some data from the environment, like | |
* env.readTextFile(textPath); | |
* | |
* then, transform the resulting DataSet<String> using operations | |
* like | |
* .filter() | |
* .flatMap() | |
* .join() | |
* .coGroup() | |
* | |
* and many more. | |
* Have a look at the programming guide for the Java API: | |
* | |
* https://flink.apache.org/docs/latest/apis/batch/index.html | |
* | |
* and the examples | |
* | |
* https://flink.apache.org/docs/latest/apis/batch/examples.html | |
* | |
*/ | |
DataSource<String> words = env.readTextFile("test.txt"); | |
words.writeAsText("output"); | |
// execute program | |
env.execute("Flink Batch Java API Skeleton"); | |
} | |
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* | |
* Licensed to the Apache Software Foundation (ASF) under one | |
* or more contributor license agreements. See the NOTICE file | |
* distributed with this work for additional information | |
* regarding copyright ownership. The ASF licenses this file | |
* to you under the Apache License, Version 2.0 (the | |
* "License"); you may not use this file except in compliance | |
* with the License. You may obtain a copy of the License at | |
* | |
* http://www.apache.org/licenses/LICENSE-2.0 | |
* | |
* Unless required by applicable law or agreed to in writing, software | |
* distributed under the License is distributed on an "AS IS" BASIS, | |
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
* See the License for the specific language governing permissions and | |
* limitations under the License. | |
*/ | |
package org.myorg.quickstart; | |
import org.apache.flink.api.common.functions.FlatMapFunction; | |
import org.apache.flink.api.common.serialization.SimpleStringSchema; | |
import org.apache.flink.api.java.tuple.Tuple2; | |
import org.apache.flink.streaming.api.datastream.DataStream; | |
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; | |
import org.apache.flink.streaming.connectors.kinesis.FlinkKinesisConsumer; | |
import org.apache.flink.streaming.connectors.kinesis.FlinkKinesisProducer; | |
import org.apache.flink.streaming.connectors.kinesis.config.ConsumerConfigConstants; | |
import org.apache.flink.util.Collector; | |
import java.util.Properties; | |
/** | |
* Skeleton for a Flink Streaming Job. | |
* | |
* <p>For a tutorial how to write a Flink streaming application, check the | |
* tutorials and examples on the <a href="https://flink.apache.org/docs/stable/">Flink Website</a>. | |
* | |
* <p>To package your application into a JAR file for execution, run | |
* 'mvn clean package' on the command line. | |
* | |
* <p>If you change the name of the main class (with the public static void main(String[] args)) | |
* method, change the respective entry in the POM.xml file (simply search for 'mainClass'). | |
*/ | |
public class KinesisStreamingJob { | |
public static void main(String[] args) throws Exception { | |
// set up the streaming execution environment | |
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); | |
Properties props = new Properties(); | |
props.setProperty(ConsumerConfigConstants.AWS_REGION, "eu-west-1"); | |
props.setProperty(ConsumerConfigConstants.STREAM_INITIAL_POSITION, "LATEST"); | |
DataStream<String> stream = env | |
.addSource(new FlinkKinesisConsumer<String>("flink_input", new SimpleStringSchema(), props)); | |
props = new Properties(); | |
props.setProperty(ConsumerConfigConstants.AWS_REGION, "eu-west-1"); | |
props.setProperty("AggregationEnabled", "false"); | |
FlinkKinesisProducer<String> myProducer = new FlinkKinesisProducer<>(new SimpleStringSchema(), props); | |
myProducer.setDefaultStream("flink_output"); | |
myProducer.setDefaultPartition("0"); | |
DataStream<Tuple2<String, Integer>> counts = | |
// split up the lines in pairs (2-tuples) containing: (word,1) | |
stream.flatMap(new Tokenizer()) | |
// group by the tuple field "0" and sum up tuple field "1" | |
.keyBy(0).sum(1); | |
DataStream<String> strCounts = counts.map(stringIntegerTuple2 -> stringIntegerTuple2.toString()); | |
strCounts.addSink(myProducer); | |
// execute program | |
env.execute("Flink Streaming Java API Skeleton"); | |
} | |
// ************************************************************************* | |
// USER FUNCTIONS | |
// ************************************************************************* | |
/** | |
* Implements the string tokenizer that splits sentences into words as a | |
* user-defined FlatMapFunction. The function takes a line (String) and | |
* splits it into multiple pairs in the form of "(word,1)" ({@code Tuple2<String, | |
* Integer>}). | |
*/ | |
public static final class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> { | |
@Override | |
public void flatMap(String value, Collector<Tuple2<String, Integer>> out) { | |
// normalize and split the line | |
String[] tokens = value.toLowerCase().split("\\W+"); | |
// emit the pairs | |
for (String token : tokens) { | |
if (token.length() > 0) { | |
out.collect(new Tuple2<>(token, 1)); | |
} | |
} | |
} | |
} | |
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<!-- | |
Licensed to the Apache Software Foundation (ASF) under one | |
or more contributor license agreements. See the NOTICE file | |
distributed with this work for additional information | |
regarding copyright ownership. The ASF licenses this file | |
to you under the Apache License, Version 2.0 (the | |
"License"); you may not use this file except in compliance | |
with the License. You may obtain a copy of the License at | |
http://www.apache.org/licenses/LICENSE-2.0 | |
Unless required by applicable law or agreed to in writing, | |
software distributed under the License is distributed on an | |
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | |
KIND, either express or implied. See the License for the | |
specific language governing permissions and limitations | |
under the License. | |
--> | |
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | |
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | |
<modelVersion>4.0.0</modelVersion> | |
<groupId>org.myorg.quickstart</groupId> | |
<artifactId>quickstart</artifactId> | |
<version>0.1</version> | |
<packaging>jar</packaging> | |
<name>Flink Quickstart Job</name> | |
<url>http://www.myorganization.org</url> | |
<properties> | |
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> | |
<flink.version>1.10-SNAPSHOT</flink.version> | |
<java.version>1.8</java.version> | |
<scala.binary.version>2.11</scala.binary.version> | |
<maven.compiler.source>${java.version}</maven.compiler.source> | |
<maven.compiler.target>${java.version}</maven.compiler.target> | |
</properties> | |
<repositories> | |
<repository> | |
<id>apache.snapshots</id> | |
<name>Apache Development Snapshot Repository</name> | |
<url>https://repository.apache.org/content/repositories/snapshots/</url> | |
<releases> | |
<enabled>false</enabled> | |
</releases> | |
<snapshots> | |
<enabled>true</enabled> | |
</snapshots> | |
</repository> | |
</repositories> | |
<dependencies> | |
<!-- Apache Flink dependencies --> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-java</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<!-- Add connector dependencies here. They must be in the default scope (compile). --> | |
<!-- Example: | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-connector-kafka-0.10_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
</dependency> | |
--> | |
<!-- Add logging framework, to produce console output when running in the IDE. --> | |
<!-- These dependencies are excluded from the application JAR by default. --> | |
<dependency> | |
<groupId>org.slf4j</groupId> | |
<artifactId>slf4j-log4j12</artifactId> | |
<version>1.7.7</version> | |
<scope>runtime</scope> | |
</dependency> | |
<dependency> | |
<groupId>log4j</groupId> | |
<artifactId>log4j</artifactId> | |
<version>1.2.17</version> | |
<scope>runtime</scope> | |
</dependency> | |
</dependencies> | |
<build> | |
<plugins> | |
<!-- Java Compiler --> | |
<plugin> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-compiler-plugin</artifactId> | |
<version>3.1</version> | |
<configuration> | |
<source>${java.version}</source> | |
<target>${java.version}</target> | |
</configuration> | |
</plugin> | |
<!-- We use the maven-shade plugin to create a fat jar that contains all necessary dependencies. --> | |
<!-- Change the value of <mainClass>...</mainClass> if your program entry point changes. --> | |
<plugin> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-shade-plugin</artifactId> | |
<version>3.0.0</version> | |
<executions> | |
<!-- Run shade goal on package phase --> | |
<execution> | |
<phase>package</phase> | |
<goals> | |
<goal>shade</goal> | |
</goals> | |
<configuration> | |
<artifactSet> | |
<excludes> | |
<exclude>org.apache.flink:force-shading</exclude> | |
<exclude>com.google.code.findbugs:jsr305</exclude> | |
<exclude>org.slf4j:*</exclude> | |
<exclude>log4j:*</exclude> | |
</excludes> | |
</artifactSet> | |
<filters> | |
<filter> | |
<!-- Do not copy the signatures in the META-INF folder. | |
Otherwise, this might cause SecurityExceptions when using the JAR. --> | |
<artifact>*:*</artifact> | |
<excludes> | |
<exclude>META-INF/*.SF</exclude> | |
<exclude>META-INF/*.DSA</exclude> | |
<exclude>META-INF/*.RSA</exclude> | |
</excludes> | |
</filter> | |
</filters> | |
<transformers> | |
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> | |
<mainClass>org.myorg.quickstart.StreamingJob</mainClass> | |
</transformer> | |
</transformers> | |
</configuration> | |
</execution> | |
</executions> | |
</plugin> | |
</plugins> | |
<pluginManagement> | |
<plugins> | |
<!-- This improves the out-of-the-box experience in Eclipse by resolving some warnings. --> | |
<plugin> | |
<groupId>org.eclipse.m2e</groupId> | |
<artifactId>lifecycle-mapping</artifactId> | |
<version>1.0.0</version> | |
<configuration> | |
<lifecycleMappingMetadata> | |
<pluginExecutions> | |
<pluginExecution> | |
<pluginExecutionFilter> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-shade-plugin</artifactId> | |
<versionRange>[3.0.0,)</versionRange> | |
<goals> | |
<goal>shade</goal> | |
</goals> | |
</pluginExecutionFilter> | |
<action> | |
<ignore/> | |
</action> | |
</pluginExecution> | |
<pluginExecution> | |
<pluginExecutionFilter> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-compiler-plugin</artifactId> | |
<versionRange>[3.1,)</versionRange> | |
<goals> | |
<goal>testCompile</goal> | |
<goal>compile</goal> | |
</goals> | |
</pluginExecutionFilter> | |
<action> | |
<ignore/> | |
</action> | |
</pluginExecution> | |
</pluginExecutions> | |
</lifecycleMappingMetadata> | |
</configuration> | |
</plugin> | |
</plugins> | |
</pluginManagement> | |
</build> | |
<!-- This profile helps to make things run out of the box in IntelliJ --> | |
<!-- It adds Flink's core classes to the runtime class path. --> | |
<!-- Otherwise they are missing in IntelliJ, because the dependency is 'provided' --> | |
<profiles> | |
<profile> | |
<id>add-dependencies-for-IDEA</id> | |
<activation> | |
<property> | |
<name>idea.version</name> | |
</property> | |
</activation> | |
<dependencies> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-java</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
</dependencies> | |
</profile> | |
</profiles> | |
</project> |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<!-- | |
Licensed to the Apache Software Foundation (ASF) under one | |
or more contributor license agreements. See the NOTICE file | |
distributed with this work for additional information | |
regarding copyright ownership. The ASF licenses this file | |
to you under the Apache License, Version 2.0 (the | |
"License"); you may not use this file except in compliance | |
with the License. You may obtain a copy of the License at | |
http://www.apache.org/licenses/LICENSE-2.0 | |
Unless required by applicable law or agreed to in writing, | |
software distributed under the License is distributed on an | |
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | |
KIND, either express or implied. See the License for the | |
specific language governing permissions and limitations | |
under the License. | |
--> | |
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | |
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | |
<modelVersion>4.0.0</modelVersion> | |
<groupId>org.myorg.quickstart</groupId> | |
<artifactId>quickstart</artifactId> | |
<version>0.1</version> | |
<packaging>jar</packaging> | |
<name>Flink Quickstart Job</name> | |
<url>http://www.myorganization.org</url> | |
<properties> | |
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> | |
<flink.version>1.6.2</flink.version> | |
<java.version>1.8</java.version> | |
<scala.binary.version>2.11</scala.binary.version> | |
<maven.compiler.source>${java.version}</maven.compiler.source> | |
<maven.compiler.target>${java.version}</maven.compiler.target> | |
</properties> | |
<repositories> | |
<repository> | |
<id>apache.snapshots</id> | |
<name>Apache Development Snapshot Repository</name> | |
<url>https://repository.apache.org/content/repositories/snapshots/</url> | |
<releases> | |
<enabled>false</enabled> | |
</releases> | |
<snapshots> | |
<enabled>true</enabled> | |
</snapshots> | |
</repository> | |
</repositories> | |
<dependencies> | |
<!-- Apache Flink dependencies --> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-java</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<!-- Add connector dependencies here. They must be in the default scope (compile). --> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-connector-kafka-0.11_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<!-- Add logging framework, to produce console output when running in the IDE. --> | |
<!-- These dependencies are excluded from the application JAR by default. --> | |
<dependency> | |
<groupId>org.slf4j</groupId> | |
<artifactId>slf4j-log4j12</artifactId> | |
<version>1.7.7</version> | |
<scope>runtime</scope> | |
</dependency> | |
<dependency> | |
<groupId>log4j</groupId> | |
<artifactId>log4j</artifactId> | |
<version>1.2.17</version> | |
<scope>runtime</scope> | |
</dependency> | |
</dependencies> | |
<build> | |
<plugins> | |
<!-- Java Compiler --> | |
<plugin> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-compiler-plugin</artifactId> | |
<version>3.1</version> | |
<configuration> | |
<source>${java.version}</source> | |
<target>${java.version}</target> | |
</configuration> | |
</plugin> | |
<!-- We use the maven-shade plugin to create a fat jar that contains all necessary dependencies. --> | |
<!-- Change the value of <mainClass>...</mainClass> if your program entry point changes. --> | |
<plugin> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-shade-plugin</artifactId> | |
<version>3.0.0</version> | |
<executions> | |
<!-- Run shade goal on package phase --> | |
<execution> | |
<phase>package</phase> | |
<goals> | |
<goal>shade</goal> | |
</goals> | |
<configuration> | |
<artifactSet> | |
<excludes> | |
<exclude>org.apache.flink:force-shading</exclude> | |
<exclude>com.google.code.findbugs:jsr305</exclude> | |
<exclude>org.slf4j:*</exclude> | |
<exclude>log4j:*</exclude> | |
</excludes> | |
</artifactSet> | |
<filters> | |
<filter> | |
<!-- Do not copy the signatures in the META-INF folder. | |
Otherwise, this might cause SecurityExceptions when using the JAR. --> | |
<artifact>*:*</artifact> | |
<excludes> | |
<exclude>META-INF/*.SF</exclude> | |
<exclude>META-INF/*.DSA</exclude> | |
<exclude>META-INF/*.RSA</exclude> | |
</excludes> | |
</filter> | |
</filters> | |
<transformers> | |
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> | |
<mainClass>org.myorg.quickstart.StreamingJob</mainClass> | |
</transformer> | |
</transformers> | |
</configuration> | |
</execution> | |
</executions> | |
</plugin> | |
</plugins> | |
<pluginManagement> | |
<plugins> | |
<!-- This improves the out-of-the-box experience in Eclipse by resolving some warnings. --> | |
<plugin> | |
<groupId>org.eclipse.m2e</groupId> | |
<artifactId>lifecycle-mapping</artifactId> | |
<version>1.0.0</version> | |
<configuration> | |
<lifecycleMappingMetadata> | |
<pluginExecutions> | |
<pluginExecution> | |
<pluginExecutionFilter> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-shade-plugin</artifactId> | |
<versionRange>[3.0.0,)</versionRange> | |
<goals> | |
<goal>shade</goal> | |
</goals> | |
</pluginExecutionFilter> | |
<action> | |
<ignore/> | |
</action> | |
</pluginExecution> | |
<pluginExecution> | |
<pluginExecutionFilter> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-compiler-plugin</artifactId> | |
<versionRange>[3.1,)</versionRange> | |
<goals> | |
<goal>testCompile</goal> | |
<goal>compile</goal> | |
</goals> | |
</pluginExecutionFilter> | |
<action> | |
<ignore/> | |
</action> | |
</pluginExecution> | |
</pluginExecutions> | |
</lifecycleMappingMetadata> | |
</configuration> | |
</plugin> | |
</plugins> | |
</pluginManagement> | |
</build> | |
<!-- This profile helps to make things run out of the box in IntelliJ --> | |
<!-- It adds Flink's core classes to the runtime class path. --> | |
<!-- Otherwise they are missing in IntelliJ, because the dependency is 'provided' --> | |
<profiles> | |
<profile> | |
<id>add-dependencies-for-IDEA</id> | |
<activation> | |
<property> | |
<name>idea.version</name> | |
</property> | |
</activation> | |
<dependencies> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-java</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
</dependencies> | |
</profile> | |
</profiles> | |
</project> |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<!-- | |
Licensed to the Apache Software Foundation (ASF) under one | |
or more contributor license agreements. See the NOTICE file | |
distributed with this work for additional information | |
regarding copyright ownership. The ASF licenses this file | |
to you under the Apache License, Version 2.0 (the | |
"License"); you may not use this file except in compliance | |
with the License. You may obtain a copy of the License at | |
http://www.apache.org/licenses/LICENSE-2.0 | |
Unless required by applicable law or agreed to in writing, | |
software distributed under the License is distributed on an | |
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | |
KIND, either express or implied. See the License for the | |
specific language governing permissions and limitations | |
under the License. | |
--> | |
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | |
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | |
<modelVersion>4.0.0</modelVersion> | |
<groupId>org.myorg.quickstart</groupId> | |
<artifactId>quickstart</artifactId> | |
<version>0.1</version> | |
<packaging>jar</packaging> | |
<name>Flink Quickstart Job</name> | |
<url>http://www.myorganization.org</url> | |
<properties> | |
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> | |
<flink.version>1.6.2</flink.version> | |
<java.version>1.8</java.version> | |
<scala.binary.version>2.11</scala.binary.version> | |
<maven.compiler.source>${java.version}</maven.compiler.source> | |
<maven.compiler.target>${java.version}</maven.compiler.target> | |
</properties> | |
<repositories> | |
<repository> | |
<id>apache.snapshots</id> | |
<name>Apache Development Snapshot Repository</name> | |
<url>https://repository.apache.org/content/repositories/snapshots/</url> | |
<releases> | |
<enabled>false</enabled> | |
</releases> | |
<snapshots> | |
<enabled>true</enabled> | |
</snapshots> | |
</repository> | |
</repositories> | |
<dependencies> | |
<!-- Apache Flink dependencies --> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-java</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<!-- Add connector dependencies here. They must be in the default scope (compile). --> | |
<dependency> | |
<groupId>com.amazonaws</groupId> | |
<artifactId>aws-kinesisanalytics-runtime</artifactId> | |
<version>1.0.1</version> | |
</dependency> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-connector-kafka-0.11_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-connector-kinesis_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
</dependency> | |
<!-- Add logging framework, to produce console output when running in the IDE. --> | |
<!-- These dependencies are excluded from the application JAR by default. --> | |
<dependency> | |
<groupId>org.slf4j</groupId> | |
<artifactId>slf4j-log4j12</artifactId> | |
<version>1.7.7</version> | |
<scope>runtime</scope> | |
</dependency> | |
<dependency> | |
<groupId>log4j</groupId> | |
<artifactId>log4j</artifactId> | |
<version>1.2.17</version> | |
<scope>runtime</scope> | |
</dependency> | |
</dependencies> | |
<build> | |
<plugins> | |
<!-- Java Compiler --> | |
<plugin> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-compiler-plugin</artifactId> | |
<version>3.1</version> | |
<configuration> | |
<source>${java.version}</source> | |
<target>${java.version}</target> | |
</configuration> | |
</plugin> | |
<!-- We use the maven-shade plugin to create a fat jar that contains all necessary dependencies. --> | |
<!-- Change the value of <mainClass>...</mainClass> if your program entry point changes. --> | |
<plugin> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-shade-plugin</artifactId> | |
<version>3.0.0</version> | |
<executions> | |
<!-- Run shade goal on package phase --> | |
<execution> | |
<phase>package</phase> | |
<goals> | |
<goal>shade</goal> | |
</goals> | |
<configuration> | |
<artifactSet> | |
<excludes> | |
<exclude>org.apache.flink:force-shading</exclude> | |
<exclude>com.google.code.findbugs:jsr305</exclude> | |
<exclude>org.slf4j:*</exclude> | |
<exclude>log4j:*</exclude> | |
</excludes> | |
</artifactSet> | |
<filters> | |
<filter> | |
<!-- Do not copy the signatures in the META-INF folder. | |
Otherwise, this might cause SecurityExceptions when using the JAR. --> | |
<artifact>*:*</artifact> | |
<excludes> | |
<exclude>META-INF/*.SF</exclude> | |
<exclude>META-INF/*.DSA</exclude> | |
<exclude>META-INF/*.RSA</exclude> | |
</excludes> | |
</filter> | |
</filters> | |
<transformers> | |
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> | |
<mainClass>org.myorg.quickstart.KinesisStreamingJob</mainClass> | |
</transformer> | |
</transformers> | |
</configuration> | |
</execution> | |
</executions> | |
</plugin> | |
</plugins> | |
<pluginManagement> | |
<plugins> | |
<!-- This improves the out-of-the-box experience in Eclipse by resolving some warnings. --> | |
<plugin> | |
<groupId>org.eclipse.m2e</groupId> | |
<artifactId>lifecycle-mapping</artifactId> | |
<version>1.0.0</version> | |
<configuration> | |
<lifecycleMappingMetadata> | |
<pluginExecutions> | |
<pluginExecution> | |
<pluginExecutionFilter> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-shade-plugin</artifactId> | |
<versionRange>[3.0.0,)</versionRange> | |
<goals> | |
<goal>shade</goal> | |
</goals> | |
</pluginExecutionFilter> | |
<action> | |
<ignore/> | |
</action> | |
</pluginExecution> | |
<pluginExecution> | |
<pluginExecutionFilter> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-compiler-plugin</artifactId> | |
<versionRange>[3.1,)</versionRange> | |
<goals> | |
<goal>testCompile</goal> | |
<goal>compile</goal> | |
</goals> | |
</pluginExecutionFilter> | |
<action> | |
<ignore/> | |
</action> | |
</pluginExecution> | |
</pluginExecutions> | |
</lifecycleMappingMetadata> | |
</configuration> | |
</plugin> | |
</plugins> | |
</pluginManagement> | |
</build> | |
<!-- This profile helps to make things run out of the box in IntelliJ --> | |
<!-- It adds Flink's core classes to the runtime class path. --> | |
<!-- Otherwise they are missing in IntelliJ, because the dependency is 'provided' --> | |
<profiles> | |
<profile> | |
<id>add-dependencies-for-IDEA</id> | |
<activation> | |
<property> | |
<name>idea.version</name> | |
</property> | |
</activation> | |
<dependencies> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-java</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
</dependencies> | |
</profile> | |
</profiles> | |
</project> |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
<!-- | |
Licensed to the Apache Software Foundation (ASF) under one | |
or more contributor license agreements. See the NOTICE file | |
distributed with this work for additional information | |
regarding copyright ownership. The ASF licenses this file | |
to you under the Apache License, Version 2.0 (the | |
"License"); you may not use this file except in compliance | |
with the License. You may obtain a copy of the License at | |
http://www.apache.org/licenses/LICENSE-2.0 | |
Unless required by applicable law or agreed to in writing, | |
software distributed under the License is distributed on an | |
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY | |
KIND, either express or implied. See the License for the | |
specific language governing permissions and limitations | |
under the License. | |
--> | |
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" | |
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> | |
<!-- Build for the "tale of two streams" workshop: packages the Flink streaming | |
jobs in this gist (StreamingJob / StreamingJobWithCount) as a fat jar. --> | |
<modelVersion>4.0.0</modelVersion> | |
<groupId>org.myorg.quickstart</groupId> | |
<artifactId>quickstart</artifactId> | |
<version>0.1</version> | |
<packaging>jar</packaging> | |
<name>Flink Quickstart Job</name> | |
<url>http://www.myorganization.org</url> | |
<!-- Centralized version pins; referenced below via ${...} placeholders. --> | |
<properties> | |
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> | |
<!-- NOTE(review): Flink 1.6.x is long end-of-life; the Kafka 0.11 connector below is tied to this generation. --> | |
<flink.version>1.6.2</flink.version> | |
<java.version>1.8</java.version> | |
<scala.binary.version>2.11</scala.binary.version> | |
<maven.compiler.source>${java.version}</maven.compiler.source> | |
<maven.compiler.target>${java.version}</maven.compiler.target> | |
</properties> | |
<!-- Apache snapshot repo: enabled for snapshots only, never for releases. --> | |
<repositories> | |
<repository> | |
<id>apache.snapshots</id> | |
<name>Apache Development Snapshot Repository</name> | |
<url>https://repository.apache.org/content/repositories/snapshots/</url> | |
<releases> | |
<enabled>false</enabled> | |
</releases> | |
<snapshots> | |
<enabled>true</enabled> | |
</snapshots> | |
</repository> | |
</repositories> | |
<dependencies> | |
<!-- Apache Flink dependencies --> | |
<!-- NOTE(review): the stock quickstart declares these as 'provided'; here they are | |
'compile', so they end up in the fat jar. Confirm that is intentional. --> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-java</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<!-- Add connector dependencies here. They must be in the default scope (compile). --> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-connector-kafka-0.11_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<!-- NOTE(review): for Flink releases of this era, flink-connector-kinesis was not | |
published to Maven Central (licensing of its AWS dependencies) and typically had | |
to be built and installed locally — verify this artifact resolves. --> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-connector-kinesis_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
</dependency> | |
<!-- Add logging framework, to produce console output when running in the IDE. --> | |
<!-- These dependencies are excluded from the application JAR by default. --> | |
<dependency> | |
<groupId>org.slf4j</groupId> | |
<artifactId>slf4j-log4j12</artifactId> | |
<version>1.7.7</version> | |
<scope>runtime</scope> | |
</dependency> | |
<dependency> | |
<groupId>log4j</groupId> | |
<artifactId>log4j</artifactId> | |
<version>1.2.17</version> | |
<scope>runtime</scope> | |
</dependency> | |
</dependencies> | |
<build> | |
<plugins> | |
<!-- Java Compiler --> | |
<plugin> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-compiler-plugin</artifactId> | |
<version>3.1</version> | |
<configuration> | |
<source>${java.version}</source> | |
<target>${java.version}</target> | |
</configuration> | |
</plugin> | |
<!-- We use the maven-shade plugin to create a fat jar that contains all necessary dependencies. --> | |
<!-- Change the value of <mainClass>...</mainClass> if your program entry point changes. --> | |
<plugin> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-shade-plugin</artifactId> | |
<version>3.0.0</version> | |
<executions> | |
<!-- Run shade goal on package phase --> | |
<execution> | |
<phase>package</phase> | |
<goals> | |
<goal>shade</goal> | |
</goals> | |
<configuration> | |
<!-- Keep build-time-only and logging artifacts out of the shaded jar. --> | |
<artifactSet> | |
<excludes> | |
<exclude>org.apache.flink:force-shading</exclude> | |
<exclude>com.google.code.findbugs:jsr305</exclude> | |
<exclude>org.slf4j:*</exclude> | |
<exclude>log4j:*</exclude> | |
</excludes> | |
</artifactSet> | |
<filters> | |
<filter> | |
<!-- Do not copy the signatures in the META-INF folder. | |
Otherwise, this might cause SecurityExceptions when using the JAR. --> | |
<artifact>*:*</artifact> | |
<excludes> | |
<exclude>META-INF/*.SF</exclude> | |
<exclude>META-INF/*.DSA</exclude> | |
<exclude>META-INF/*.RSA</exclude> | |
</excludes> | |
</filter> | |
</filters> | |
<transformers> | |
<transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer"> | |
<!-- NOTE(review): entry point is the pass-through job; switch to | |
org.myorg.quickstart.StreamingJobWithCount to ship the word-count job. --> | |
<mainClass>org.myorg.quickstart.StreamingJob</mainClass> | |
</transformer> | |
</transformers> | |
</configuration> | |
</execution> | |
</executions> | |
</plugin> | |
</plugins> | |
<pluginManagement> | |
<plugins> | |
<!-- This improves the out-of-the-box experience in Eclipse by resolving some warnings. --> | |
<plugin> | |
<groupId>org.eclipse.m2e</groupId> | |
<artifactId>lifecycle-mapping</artifactId> | |
<version>1.0.0</version> | |
<configuration> | |
<lifecycleMappingMetadata> | |
<pluginExecutions> | |
<pluginExecution> | |
<pluginExecutionFilter> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-shade-plugin</artifactId> | |
<versionRange>[3.0.0,)</versionRange> | |
<goals> | |
<goal>shade</goal> | |
</goals> | |
</pluginExecutionFilter> | |
<action> | |
<ignore/> | |
</action> | |
</pluginExecution> | |
<pluginExecution> | |
<pluginExecutionFilter> | |
<groupId>org.apache.maven.plugins</groupId> | |
<artifactId>maven-compiler-plugin</artifactId> | |
<versionRange>[3.1,)</versionRange> | |
<goals> | |
<goal>testCompile</goal> | |
<goal>compile</goal> | |
</goals> | |
</pluginExecutionFilter> | |
<action> | |
<ignore/> | |
</action> | |
</pluginExecution> | |
</pluginExecutions> | |
</lifecycleMappingMetadata> | |
</configuration> | |
</plugin> | |
</plugins> | |
</pluginManagement> | |
</build> | |
<!-- This profile helps to make things run out of the box in IntelliJ. --> | |
<!-- It adds Flink's core classes to the runtime class path, which would otherwise --> | |
<!-- be missing in IntelliJ when the Flink dependencies are scoped 'provided'. --> | |
<!-- NOTE(review): the Flink dependencies above are currently scoped 'compile', so this | |
profile is redundant unless those scopes are switched back to 'provided'. --> | |
<profiles> | |
<profile> | |
<id>add-dependencies-for-IDEA</id> | |
<activation> | |
<property> | |
<name>idea.version</name> | |
</property> | |
</activation> | |
<dependencies> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-java</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
<dependency> | |
<groupId>org.apache.flink</groupId> | |
<artifactId>flink-streaming-java_${scala.binary.version}</artifactId> | |
<version>${flink.version}</version> | |
<scope>compile</scope> | |
</dependency> | |
</dependencies> | |
</profile> | |
</profiles> | |
</project> |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* | |
* Licensed to the Apache Software Foundation (ASF) under one | |
* or more contributor license agreements. See the NOTICE file | |
* distributed with this work for additional information | |
* regarding copyright ownership. The ASF licenses this file | |
* to you under the Apache License, Version 2.0 (the | |
* "License"); you may not use this file except in compliance | |
* with the License. You may obtain a copy of the License at | |
* | |
* http://www.apache.org/licenses/LICENSE-2.0 | |
* | |
* Unless required by applicable law or agreed to in writing, software | |
* distributed under the License is distributed on an "AS IS" BASIS, | |
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
* See the License for the specific language governing permissions and | |
* limitations under the License. | |
*/ | |
package org.myorg.quickstart; | |
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; | |
import org.apache.flink.api.common.serialization.SimpleStringSchema; | |
import org.apache.flink.streaming.api.datastream.DataStream; | |
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; | |
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011; | |
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011; | |
import java.util.Properties; | |
/** | |
* Skeleton for a Flink Streaming Job. | |
* | |
* <p>For a tutorial how to write a Flink streaming application, check the | |
* tutorials and examples on the <a href="https://flink.apache.org/docs/stable/">Flink Website</a>. | |
* | |
* <p>To package your application into a JAR file for execution, run | |
* 'mvn clean package' on the command line. | |
* | |
* <p>If you change the name of the main class (with the public static void main(String[] args)) | |
* method, change the respective entry in the POM.xml file (simply search for 'mainClass'). | |
*/ | |
public class StreamingJob { | |
    /** | |
     * Entry point: consumes String records from the Kafka topic "flink_input" | |
     * and forwards them unchanged to the Kafka topic "flink_output". | |
     * | |
     * @param args command-line arguments (unused) | |
     * @throws Exception if the Flink job cannot be built or executed | |
     */ | |
    public static void main(String[] args) throws Exception { | |
        // set up the streaming execution environment | |
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); | |
        // Kafka consumer configuration (local single-broker setup). | |
        final Properties consumerConfig = new Properties(); | |
        consumerConfig.setProperty("bootstrap.servers", "localhost:9092"); | |
        // only required for Kafka 0.8 | |
        consumerConfig.setProperty("zookeeper.connect", "localhost:2181"); | |
        consumerConfig.setProperty("group.id", "test"); | |
        // Source: raw strings from the input topic. | |
        final FlinkKafkaConsumer011<String> consumer = | |
                new FlinkKafkaConsumer011<>("flink_input", new SimpleStringSchema(), consumerConfig); | |
        final DataStream<String> records = env.addSource(consumer); | |
        // Sink: write each string back out to the output topic. | |
        final FlinkKafkaProducer011<String> producer = new FlinkKafkaProducer011<String>( | |
                "localhost:9092",          // broker list | |
                "flink_output",            // target topic | |
                new SimpleStringSchema()); // serialization schema | |
        // versions 0.10+ allow attaching the records' event timestamp when writing them to Kafka; | |
        // this method is not available for earlier Kafka versions | |
        producer.setWriteTimestampToKafka(true); | |
        records.addSink(producer); | |
        // execute program | |
        env.execute("Flink Streaming Java API Skeleton"); | |
    } | |
} |
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
/* | |
* Licensed to the Apache Software Foundation (ASF) under one | |
* or more contributor license agreements. See the NOTICE file | |
* distributed with this work for additional information | |
* regarding copyright ownership. The ASF licenses this file | |
* to you under the Apache License, Version 2.0 (the | |
* "License"); you may not use this file except in compliance | |
* with the License. You may obtain a copy of the License at | |
* | |
* http://www.apache.org/licenses/LICENSE-2.0 | |
* | |
* Unless required by applicable law or agreed to in writing, software | |
* distributed under the License is distributed on an "AS IS" BASIS, | |
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
* See the License for the specific language governing permissions and | |
* limitations under the License. | |
*/ | |
package org.myorg.quickstart; | |
import org.apache.flink.api.common.functions.FlatMapFunction; | |
import org.apache.flink.api.common.serialization.SimpleStringSchema; | |
import org.apache.flink.api.java.tuple.Tuple2; | |
import org.apache.flink.api.java.utils.ParameterTool; | |
import org.apache.flink.streaming.api.datastream.DataStream; | |
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; | |
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer011; | |
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer011; | |
import org.apache.flink.util.Collector; | |
import java.util.Properties; | |
/** | |
* Skeleton for a Flink Streaming Job. | |
* | |
* <p>For a tutorial how to write a Flink streaming application, check the | |
* tutorials and examples on the <a href="https://flink.apache.org/docs/stable/">Flink Website</a>. | |
* | |
* <p>To package your application into a JAR file for execution, run | |
* 'mvn clean package' on the command line. | |
* | |
* <p>If you change the name of the main class (with the public static void main(String[] args)) | |
* method, change the respective entry in the POM.xml file (simply search for 'mainClass'). | |
*/ | |
public class StreamingJobWithCount { | |
    /** | |
     * Entry point: consumes lines from the Kafka topic "flink_input", maintains a | |
     * running per-word count, and writes the stringified (word, count) tuples to | |
     * the Kafka topic "flink_output". | |
     * | |
     * @param args command-line arguments (unused) | |
     * @throws Exception if the Flink job cannot be built or executed | |
     */ | |
    public static void main(String[] args) throws Exception { | |
        // set up the streaming execution environment | |
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); | |
        // Kafka consumer configuration (local single-broker setup). | |
        final Properties kafkaConfig = new Properties(); | |
        kafkaConfig.setProperty("bootstrap.servers", "localhost:9092"); | |
        // only required for Kafka 0.8 | |
        kafkaConfig.setProperty("zookeeper.connect", "localhost:2181"); | |
        kafkaConfig.setProperty("group.id", "test"); | |
        // Source: one String element per Kafka record on the input topic. | |
        final DataStream<String> lines = env | |
                .addSource(new FlinkKafkaConsumer011<>("flink_input", new SimpleStringSchema(), kafkaConfig)); | |
        // Sink: stringified counts back to Kafka. | |
        final FlinkKafkaProducer011<String> sink = new FlinkKafkaProducer011<String>( | |
                "localhost:9092",          // broker list | |
                "flink_output",            // target topic | |
                new SimpleStringSchema()); // serialization schema | |
        // versions 0.10+ allow attaching the records' event timestamp when writing them to Kafka; | |
        // this method is not available for earlier Kafka versions | |
        sink.setWriteTimestampToKafka(true); | |
        // Tokenize into (word, 1) pairs, key by the word (field 0), keep a running sum of field 1. | |
        final DataStream<Tuple2<String, Integer>> wordCounts = lines | |
                .flatMap(new Tokenizer()) | |
                .keyBy(0) | |
                .sum(1); | |
        // Render each tuple as text so it fits the String serialization schema of the sink. | |
        final DataStream<String> rendered = wordCounts.map(tuple -> tuple.toString()); | |
        rendered.addSink(sink); | |
        // execute program | |
        env.execute("Flink Streaming Java API Skeleton"); | |
    } | |
    // ************************************************************************* | |
    // USER FUNCTIONS | |
    // ************************************************************************* | |
    /** | |
     * Splits each incoming line into lower-case word tokens and emits a | |
     * {@code Tuple2<String, Integer>} of (word, 1) for every non-empty token. | |
     */ | |
    public static final class Tokenizer implements FlatMapFunction<String, Tuple2<String, Integer>> { | |
        @Override | |
        public void flatMap(String value, Collector<Tuple2<String, Integer>> out) { | |
            // normalize to lower case and split on runs of non-word characters | |
            for (String word : value.toLowerCase().split("\\W+")) { | |
                if (!word.isEmpty()) { | |
                    out.collect(new Tuple2<>(word, 1)); | |
                } | |
            } | |
        } | |
    } | |
} |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment