[Sample] Add Flink Connector Sample Code (apache#11120)

Co-authored-by: wudi <>
JNSimba authored Jul 26, 2022
1 parent 03d466f commit f5479fa
Showing 5 changed files with 149 additions and 3 deletions.
1 change: 1 addition & 0 deletions samples/doris-demo/flink-demo-v1.1/pom.xml
@@ -34,6 +34,7 @@ under the License.
        <fastjson.version>1.2.62</fastjson.version>
        <hadoop.version>2.8.3</hadoop.version>
        <scope.mode>compile</scope.mode>
+       <slf4j.version>1.7.30</slf4j.version>
    </properties>
    <dependencies>
        <dependency>
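The new slf4j.version property pins SLF4J to 1.7.30 for the flink-demo module. As a hedged illustration only (the class below is hypothetical and not part of this commit), sample code in this module would typically log through the SLF4J API like so:

package org.apache.doris.demo.flink;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

// Hypothetical sketch, not part of this commit: shows the SLF4J usage pattern
// that the pinned slf4j.version property is assumed to support.
public class LoggingSketch {

    private static final Logger LOG = LoggerFactory.getLogger(LoggingSketch.class);

    public static void main(String[] args) {
        LOG.info("flink-demo sample starting, slf4j pinned to {}", "1.7.30");
    }
}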
@@ -0,0 +1,68 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.demo.flink;

import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;

public class Cdc2DorisSQLDemo {

    public static void main(String[] args) throws Exception {

        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(10000);
        env.setParallelism(1);
        final StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
        // register the MySQL CDC source table in the catalog
        tEnv.executeSql(
                "CREATE TABLE cdc_test_source (\n" +
                        " id INT,\n" +
                        " name STRING\n" +
                        ") WITH (\n" +
                        " 'connector' = 'mysql-cdc',\n" +
                        " 'hostname' = '127.0.0.1',\n" +
                        " 'port' = '3306',\n" +
                        " 'username' = 'root',\n" +
                        " 'password' = '',\n" +
                        " 'database-name' = 'db',\n" +
                        " 'table-name' = 'test_source'\n" +
                        ")");
        // Doris sink table
        tEnv.executeSql(
                "CREATE TABLE doris_test_sink (" +
                        "id INT," +
                        "name STRING" +
                        ") " +
                        "WITH (\n" +
                        " 'connector' = 'doris',\n" +
                        " 'fenodes' = '127.0.0.1:8030',\n" +
                        " 'table.identifier' = 'db.test_sink',\n" +
                        " 'username' = 'root',\n" +
                        " 'password' = '',\n" +
                        /* Doris stream load label. In the exactly-once scenario the label must be
                           globally unique, and the job has to be restarted from the latest checkpoint
                           on restart. Exactly-once semantics can be turned off via sink.enable-2pc. */
                        " 'sink.label-prefix' = 'doris_label',\n" +
                        " 'sink.properties.format' = 'json',\n" + // JSON data format
                        " 'sink.properties.read_json_by_line' = 'true'\n" +
                        ")");

        // insert data from the MySQL CDC source table into the Doris sink table;
        // executeSql already submits the INSERT job, so no separate env.execute() call is needed
        tEnv.executeSql("INSERT INTO doris_test_sink SELECT id, name FROM cdc_test_source");
    }
}
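As the in-code comment above notes, exactly-once delivery can be switched off through sink.enable-2pc. A minimal hedged variant of the doris_test_sink DDL with two-phase commit disabled is sketched below; it reuses the tEnv from the class above, the option name comes from that comment, and defaults may differ between connector versions:

        // Hypothetical variant, not part of this commit: same Doris sink, two-phase commit disabled.
        tEnv.executeSql(
                "CREATE TABLE doris_test_sink (" +
                        "id INT," +
                        "name STRING" +
                        ") WITH (\n" +
                        " 'connector' = 'doris',\n" +
                        " 'fenodes' = '127.0.0.1:8030',\n" +
                        " 'table.identifier' = 'db.test_sink',\n" +
                        " 'username' = 'root',\n" +
                        " 'password' = '',\n" +
                        // with 2pc off, the label no longer has to survive restarts for exactly-once
                        " 'sink.enable-2pc' = 'false',\n" +
                        " 'sink.label-prefix' = 'doris_label',\n" +
                        " 'sink.properties.format' = 'json',\n" +
                        " 'sink.properties.read_json_by_line' = 'true'\n" +
                        ")");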
@@ -61,7 +61,7 @@ public static void main(String[] args) throws Exception {
        DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder();
        executionBuilder
                .setStreamLoadProp(pro)
-               .setLabelPrefix("doris_test")
+               .setLabelPrefix("doris_test");


        builder.setDorisReadOptions(readOptionBuilder.build())
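The one-line change above adds the missing semicolon that terminates the builder chain. A hedged, self-contained reconstruction of the corrected usage (the class name and the stream-load properties are illustrative; the rest of the original file is outside this diff):

package org.apache.doris.demo.flink;

import org.apache.doris.flink.cfg.DorisExecutionOptions;

import java.util.Properties;

// Hypothetical sketch, not part of this commit: reconstructs the corrected
// DorisExecutionOptions builder chain from the hunk above.
public class ExecutionOptionsFixSketch {

    static DorisExecutionOptions buildExecutionOptions() {
        // assumed stream-load properties, mirroring the JSON settings used in the other demos
        Properties pro = new Properties();
        pro.setProperty("format", "json");
        pro.setProperty("read_json_by_line", "true");

        DorisExecutionOptions.Builder executionBuilder = DorisExecutionOptions.builder();
        // the fix: the chained calls now end with a semicolon, so the statement compiles
        executionBuilder
                .setStreamLoadProp(pro)
                .setLabelPrefix("doris_test");
        return executionBuilder.build();
    }
}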
@@ -0,0 +1,77 @@
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package org.apache.doris.demo.flink;

import org.apache.doris.flink.cfg.DorisExecutionOptions;
import org.apache.doris.flink.cfg.DorisOptions;
import org.apache.doris.flink.cfg.DorisReadOptions;
import org.apache.doris.flink.sink.DorisSink;
import org.apache.doris.flink.sink.writer.SimpleStringSerializer;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.CheckpointConfig;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;

public class Kafka2DorisDataStreamDemo {

    public static void main(String[] args) throws Exception {

        Properties props = new Properties();
        props.put("bootstrap.servers", "127.0.0.1:9092");
        props.put("group.id", "group");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "earliest");

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(10000);
        env.getCheckpointConfig().enableExternalizedCheckpoints(CheckpointConfig.ExternalizedCheckpointCleanup.RETAIN_ON_CANCELLATION);

        // source config
        FlinkKafkaConsumer<String> flinkKafkaConsumer = new FlinkKafkaConsumer<>("test-topic", new SimpleStringSchema(), props);

        // sink config
        DorisSink.Builder<String> builder = DorisSink.builder();
        DorisOptions.Builder dorisBuilder = DorisOptions.builder();
        dorisBuilder.setFenodes("127.0.0.1:8030")
                .setTableIdentifier("db.table")
                .setUsername("root")
                .setPassword("password");

        Properties pro = new Properties();
        // JSON data format
        pro.setProperty("format", "json");
        pro.setProperty("read_json_by_line", "true");
        DorisExecutionOptions executionOptions = DorisExecutionOptions.builder()
                .setLabelPrefix("label-doris") // stream load label prefix
                .setStreamLoadProp(pro).build();

        builder.setDorisReadOptions(DorisReadOptions.builder().build())
                .setDorisExecutionOptions(executionOptions)
                .setSerializer(new SimpleStringSerializer()) // serialize records as plain strings
                .setDorisOptions(dorisBuilder.build());

        // build stream
        DataStreamSource<String> dataStreamSource = env.addSource(flinkKafkaConsumer);
        dataStreamSource.sinkTo(builder.build());

        env.execute("flink kafka to doris by datastream");
    }
}
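A possible usage variant of the demo above, sketched here and not part of this commit: the same Kafka-to-Doris pipeline, but with endpoints and table names supplied as command-line arguments through Flink's ParameterTool instead of being hard-coded (the argument names are illustrative):

package org.apache.doris.demo.flink;

import org.apache.doris.flink.cfg.DorisExecutionOptions;
import org.apache.doris.flink.cfg.DorisOptions;
import org.apache.doris.flink.cfg.DorisReadOptions;
import org.apache.doris.flink.sink.DorisSink;
import org.apache.doris.flink.sink.writer.SimpleStringSerializer;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;

import java.util.Properties;

// Hypothetical sketch, not part of this commit: parameterized variant of
// Kafka2DorisDataStreamDemo driven by --brokers, --topic, --fenodes and --table.
public class Kafka2DorisParameterizedSketch {

    public static void main(String[] args) throws Exception {
        ParameterTool params = ParameterTool.fromArgs(args);

        Properties props = new Properties();
        props.put("bootstrap.servers", params.get("brokers", "127.0.0.1:9092"));
        props.put("group.id", params.get("group.id", "group"));
        props.put("auto.offset.reset", "earliest");

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.enableCheckpointing(10000);

        FlinkKafkaConsumer<String> source =
                new FlinkKafkaConsumer<>(params.get("topic", "test-topic"), new SimpleStringSchema(), props);

        DorisOptions.Builder dorisBuilder = DorisOptions.builder();
        dorisBuilder.setFenodes(params.get("fenodes", "127.0.0.1:8030"))
                .setTableIdentifier(params.get("table", "db.table"))
                .setUsername(params.get("username", "root"))
                .setPassword(params.get("password", ""));

        Properties pro = new Properties();
        pro.setProperty("format", "json");
        pro.setProperty("read_json_by_line", "true");
        DorisExecutionOptions executionOptions = DorisExecutionOptions.builder()
                .setLabelPrefix(params.get("label-prefix", "label-doris"))
                .setStreamLoadProp(pro)
                .build();

        DorisSink.Builder<String> builder = DorisSink.builder();
        builder.setDorisReadOptions(DorisReadOptions.builder().build())
                .setDorisExecutionOptions(executionOptions)
                .setSerializer(new SimpleStringSerializer())
                .setDorisOptions(dorisBuilder.build());

        env.addSource(source).sinkTo(builder.build());
        env.execute("flink kafka to doris (parameterized sketch)");
    }
}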
4 changes: 2 additions & 2 deletions samples/doris-demo/spark-demo/pom.xml
@@ -37,8 +37,8 @@ under the License.
        <!-- doris spark -->
        <dependency>
            <groupId>org.apache.doris</groupId>
-           <artifactId>doris-spark</artifactId>
-           <version>1.0.0-SNAPSHOT</version>
+           <artifactId>spark-doris-connector-2.3_2.11</artifactId>
+           <version>1.0.1</version>
        </dependency>
        <!-- spark -->
        <dependency>
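The spark-demo module now depends on the published spark-doris-connector artifact instead of a local doris-spark snapshot. As a hedged illustration of how that connector is typically consumed (the option keys follow the connector's documented naming and are not taken from this commit; endpoints and credentials are placeholders), a minimal read looks like:

import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

// Hypothetical sketch, not part of this commit: reads a Doris table through the
// spark-doris-connector declared in the pom above.
public class SparkDorisReadSketch {

    public static void main(String[] args) {
        SparkSession spark = SparkSession.builder()
                .appName("spark doris connector read sketch")
                .master("local[*]")
                .getOrCreate();

        Dataset<Row> df = spark.read()
                .format("doris")
                .option("doris.table.identifier", "db.table")
                .option("doris.fenodes", "127.0.0.1:8030")
                .option("user", "root")
                .option("password", "")
                .load();

        df.show();
        spark.stop();
    }
}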
