Skip to content

Commit 8a10878

Browse files
committed
Merge branch '1.10_test_4.0.x' into feat_1.10_impalaSinkKerberos_mergedTest4.0
# Conflicts: # core/src/main/java/com/dtstack/flink/sql/util/KrbUtils.java
2 parents 0083555 + fa54ba3 commit 8a10878

File tree

92 files changed

+3166
-1272
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

92 files changed

+3166
-1272
lines changed

.gitignore

+3-1
Original file line numberDiff line numberDiff line change
@@ -14,4 +14,6 @@ lib/
1414
.DS_Store
1515
bin/nohup.out
1616
.DS_Store
17-
bin/sideSql.txt
17+
bin/sideSql.txt
18+
*.keytab
19+
krb5.conf

cassandra/cassandra-side/cassandra-all-side/src/main/java/com/dtstack/flink/sql/side/cassandra/CassandraAllReqRow.java

+12-25
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,18 @@
1818

1919
package com.dtstack.flink.sql.side.cassandra;
2020

21-
import com.datastax.driver.core.*;
21+
import org.apache.flink.api.java.typeutils.RowTypeInfo;
22+
import org.apache.flink.types.Row;
23+
import org.apache.flink.util.Collector;
24+
25+
import com.datastax.driver.core.Cluster;
26+
import com.datastax.driver.core.ConsistencyLevel;
27+
import com.datastax.driver.core.HostDistance;
28+
import com.datastax.driver.core.PoolingOptions;
29+
import com.datastax.driver.core.QueryOptions;
30+
import com.datastax.driver.core.ResultSet;
31+
import com.datastax.driver.core.Session;
32+
import com.datastax.driver.core.SocketOptions;
2233
import com.datastax.driver.core.policies.DowngradingConsistencyRetryPolicy;
2334
import com.datastax.driver.core.policies.RetryPolicy;
2435
import com.dtstack.flink.sql.side.AbstractSideTableInfo;
@@ -32,10 +43,7 @@
3243
import org.apache.calcite.sql.JoinType;
3344
import org.apache.commons.collections.CollectionUtils;
3445
import org.apache.commons.lang3.StringUtils;
35-
import org.apache.flink.api.java.typeutils.RowTypeInfo;
3646
import org.apache.flink.table.dataformat.BaseRow;
37-
import org.apache.flink.types.Row;
38-
import org.apache.flink.util.Collector;
3947
import org.slf4j.Logger;
4048
import org.slf4j.LoggerFactory;
4149

@@ -72,27 +80,6 @@ public CassandraAllReqRow(RowTypeInfo rowTypeInfo, JoinInfo joinInfo, List<Field
7280
super(new com.dtstack.flink.sql.side.cassandra.CassandraAllSideInfo(rowTypeInfo, joinInfo, outFieldInfoList, sideTableInfo));
7381
}
7482

75-
@Override
76-
public Row fillData(Row input, Object sideInput) {
77-
Map<String, Object> cacheInfo = (Map<String, Object>) sideInput;
78-
Row row = new Row(sideInfo.getOutFieldInfoList().size());
79-
for (Map.Entry<Integer, Integer> entry : sideInfo.getInFieldIndex().entrySet()) {
80-
Object obj = input.getField(entry.getValue());
81-
obj = convertTimeIndictorTypeInfo(entry.getValue(), obj);
82-
row.setField(entry.getKey(), obj);
83-
}
84-
85-
for (Map.Entry<Integer, String> entry : sideInfo.getSideFieldNameIndex().entrySet()) {
86-
if (cacheInfo == null) {
87-
row.setField(entry.getKey(), null);
88-
} else {
89-
row.setField(entry.getKey(), cacheInfo.get(entry.getValue()));
90-
}
91-
}
92-
93-
return row;
94-
}
95-
9683
@Override
9784
protected void initCache() throws SQLException {
9885
Map<String, List<Map<String, Object>>> newCache = Maps.newConcurrentMap();

cassandra/cassandra-side/cassandra-async-side/src/main/java/com/dtstack/flink/sql/side/cassandra/CassandraAsyncSideInfo.java

-53
Original file line numberDiff line numberDiff line change
@@ -72,57 +72,4 @@ public void buildEqualInfo(JoinInfo joinInfo, AbstractSideTableInfo sideTableInf
7272
LOG.info("---------side_exe_sql-----\n{}" + sqlCondition);
7373
}
7474

75-
76-
@Override
77-
public void dealOneEqualCon(SqlNode sqlNode, String sideTableName) {
78-
if (sqlNode.getKind() != SqlKind.EQUALS) {
79-
throw new RuntimeException("not equal operator.");
80-
}
81-
82-
SqlIdentifier left = (SqlIdentifier) ((SqlBasicCall) sqlNode).getOperands()[0];
83-
SqlIdentifier right = (SqlIdentifier) ((SqlBasicCall) sqlNode).getOperands()[1];
84-
85-
String leftTableName = left.getComponent(0).getSimple();
86-
String leftField = left.getComponent(1).getSimple();
87-
88-
String rightTableName = right.getComponent(0).getSimple();
89-
String rightField = right.getComponent(1).getSimple();
90-
91-
if (leftTableName.equalsIgnoreCase(sideTableName)) {
92-
equalFieldList.add(leftField);
93-
int equalFieldIndex = -1;
94-
for (int i = 0; i < getFieldNames().length; i++) {
95-
String fieldName = getFieldNames()[i];
96-
if (fieldName.equalsIgnoreCase(rightField)) {
97-
equalFieldIndex = i;
98-
}
99-
}
100-
if (equalFieldIndex == -1) {
101-
throw new RuntimeException("can't deal equal field: " + sqlNode);
102-
}
103-
104-
equalValIndex.add(equalFieldIndex);
105-
106-
} else if (rightTableName.equalsIgnoreCase(sideTableName)) {
107-
108-
equalFieldList.add(rightField);
109-
int equalFieldIndex = -1;
110-
for (int i = 0; i < getFieldNames().length; i++) {
111-
String fieldName = getFieldNames()[i];
112-
if (fieldName.equalsIgnoreCase(leftField)) {
113-
equalFieldIndex = i;
114-
}
115-
}
116-
if (equalFieldIndex == -1) {
117-
throw new RuntimeException("can't deal equal field: " + sqlNode.toString());
118-
}
119-
120-
equalValIndex.add(equalFieldIndex);
121-
122-
} else {
123-
throw new RuntimeException("resolve equalFieldList error:" + sqlNode.toString());
124-
}
125-
126-
}
127-
12875
}
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package com.dtstack.flink.sql.constant;
20+
21+
/**
22+
* @program: flinkStreamSQL
23+
* @author: wuren
24+
* @create: 2020/09/15
25+
**/
26+
public class PluginParamConsts {
27+
public static final String PRINCIPAL = "principal";
28+
public static final String KEYTAB = "keytab";
29+
public static final String KRB5_CONF = "krb5conf";
30+
}

core/src/main/java/com/dtstack/flink/sql/format/dtnest/DtNestRowDeserializationSchema.java

+32-4
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@
4242
import java.util.Iterator;
4343
import java.util.List;
4444
import java.util.Map;
45+
import java.util.regex.Pattern;
4546

4647
/**
4748
* source data parse to json format
@@ -64,6 +65,9 @@ public class DtNestRowDeserializationSchema extends AbstractDeserializationSchem
6465
private final List<AbstractTableInfo.FieldExtraInfo> fieldExtraInfos;
6566
private final String charsetName;
6667

68+
private static final Pattern TIMESTAMP_PATTERN = Pattern.compile("^\\d+$");
69+
private static final Pattern TIME_FORMAT_PATTERN = Pattern.compile("\\w+\\d+:\\d+:\\d+");
70+
6771
public DtNestRowDeserializationSchema(TypeInformation<Row> typeInfo, Map<String, String> rowAndFieldMapping,
6872
List<AbstractTableInfo.FieldExtraInfo> fieldExtraInfos,
6973
String charsetName) {
@@ -146,11 +150,11 @@ private Object convert(JsonNode node, TypeInformation<?> info) {
146150
return Date.valueOf(node.asText());
147151
} else if (info.getTypeClass().equals(Types.SQL_TIME.getTypeClass())) {
148152
// local zone
149-
return Time.valueOf(node.asText());
153+
return convertToTime(node.asText());
150154
} else if (info.getTypeClass().equals(Types.SQL_TIMESTAMP.getTypeClass())) {
151155
// local zone
152-
return Timestamp.valueOf(node.asText());
153-
} else if (info instanceof RowTypeInfo) {
156+
return convertToTimestamp(node.asText());
157+
} else if (info instanceof RowTypeInfo) {
154158
return convertRow(node, (RowTypeInfo) info);
155159
} else if (info instanceof ObjectArrayTypeInfo) {
156160
return convertObjectArray(node, ((ObjectArrayTypeInfo) info).getComponentInfo());
@@ -165,6 +169,29 @@ private Object convert(JsonNode node, TypeInformation<?> info) {
165169
}
166170
}
167171

172+
/**
173+
* 将 2020-09-07 14:49:10.0 和 1598446699685 两种格式都转化为 Timestamp
174+
*/
175+
private Timestamp convertToTimestamp(String timestamp) {
176+
if (TIMESTAMP_PATTERN.matcher(timestamp).find()) {
177+
return new Timestamp(Long.parseLong(timestamp));
178+
}
179+
if (TIME_FORMAT_PATTERN.matcher(timestamp).find()) {
180+
return Timestamp.valueOf(timestamp);
181+
}
182+
throw new IllegalArgumentException("Incorrect time format of timestamp");
183+
}
184+
185+
private Time convertToTime(String timestamp) {
186+
if (TIMESTAMP_PATTERN.matcher(timestamp).find()) {
187+
return new Time(Long.parseLong(timestamp));
188+
}
189+
if (TIME_FORMAT_PATTERN.matcher(timestamp).find()) {
190+
return Time.valueOf(timestamp);
191+
}
192+
throw new IllegalArgumentException("Incorrect time format of time");
193+
}
194+
168195
private Row convertTopRow() {
169196
Row row = new Row(fieldNames.length);
170197
try {
@@ -175,7 +202,7 @@ private Row convertTopRow() {
175202
if (node == null) {
176203
if (fieldExtraInfo != null && fieldExtraInfo.getNotNull()) {
177204
throw new IllegalStateException("Failed to find field with name '"
178-
+ fieldNames[i] + "'.");
205+
+ fieldNames[i] + "'.");
179206
} else {
180207
row.setField(i, null);
181208
}
@@ -216,6 +243,7 @@ private Object convertObjectArray(JsonNode node, TypeInformation<?> elementType)
216243
}
217244
return array;
218245
}
246+
219247
@Override
220248
public TypeInformation<Row> getProducedType() {
221249
return typeInfo;
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
1+
/*
2+
* Licensed to the Apache Software Foundation (ASF) under one
3+
* or more contributor license agreements. See the NOTICE file
4+
* distributed with this work for additional information
5+
* regarding copyright ownership. The ASF licenses this file
6+
* to you under the Apache License, Version 2.0 (the
7+
* "License"); you may not use this file except in compliance
8+
* with the License. You may obtain a copy of the License at
9+
*
10+
* http://www.apache.org/licenses/LICENSE-2.0
11+
*
12+
* Unless required by applicable law or agreed to in writing, software
13+
* distributed under the License is distributed on an "AS IS" BASIS,
14+
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15+
* See the License for the specific language governing permissions and
16+
* limitations under the License.
17+
*/
18+
19+
package com.dtstack.flink.sql.krb;
20+
21+
import com.google.common.base.Strings;
22+
23+
/**
24+
* @program: flinkStreamSQL
25+
* @author: wuren
26+
* @create: 2020/09/15
27+
**/
28+
public interface KerberosTable {
29+
30+
String getPrincipal();
31+
32+
void setPrincipal(String principal);
33+
34+
String getKeytab();
35+
36+
void setKeytab(String keytab);
37+
38+
String getKrb5conf();
39+
40+
void setKrb5conf(String krb5conf);
41+
42+
boolean isEnableKrb();
43+
44+
void setEnableKrb(boolean enableKrb);
45+
46+
default void judgeKrbEnable() {
47+
boolean allSet =
48+
!Strings.isNullOrEmpty(getPrincipal()) &&
49+
!Strings.isNullOrEmpty(getKeytab()) &&
50+
!Strings.isNullOrEmpty(getKrb5conf());
51+
52+
boolean allNotSet =
53+
Strings.isNullOrEmpty(getPrincipal()) &&
54+
Strings.isNullOrEmpty(getKeytab()) &&
55+
Strings.isNullOrEmpty(getKrb5conf());
56+
57+
if (allSet) {
58+
setEnableKrb(true);
59+
} else if (allNotSet) {
60+
setEnableKrb(false);
61+
} else {
62+
throw new RuntimeException("Missing kerberos parameter! all kerberos params must be set, or all kerberos params are not set");
63+
}
64+
}
65+
}

core/src/main/java/com/dtstack/flink/sql/parser/SqlParser.java

+20
Original file line numberDiff line numberDiff line change
@@ -28,8 +28,11 @@
2828
import com.google.common.collect.Lists;
2929
import com.google.common.base.Strings;
3030

31+
import java.util.ArrayList;
3132
import java.util.List;
3233
import java.util.Set;
34+
import java.util.regex.Matcher;
35+
import java.util.regex.Pattern;
3336

3437
/**
3538
* Reason:
@@ -51,6 +54,8 @@ public static void setLocalSqlPluginRoot(String localSqlPluginRoot){
5154
LOCAL_SQL_PLUGIN_ROOT = localSqlPluginRoot;
5255
}
5356

57+
private static final Pattern ADD_FIlE_PATTERN = Pattern.compile("(?i).*add\\s+file\\s+.+");
58+
5459
/**
5560
* flink support sql syntax
5661
* CREATE TABLE sls_stream() with ();
@@ -70,6 +75,7 @@ public static SqlTree parseSql(String sql, String pluginLoadMode) throws Excepti
7075
.replace("\t", " ").trim();
7176

7277
List<String> sqlArr = DtStringUtil.splitIgnoreQuota(sql, SQL_DELIMITER);
78+
sqlArr = removeAddFileStmt(sqlArr);
7379
SqlTree sqlTree = new SqlTree();
7480
AbstractTableInfoParser tableInfoParser = new AbstractTableInfoParser();
7581
for(String childSql : sqlArr){
@@ -150,4 +156,18 @@ public static SqlTree parseSql(String sql, String pluginLoadMode) throws Excepti
150156

151157
return sqlTree;
152158
}
159+
160+
/**
161+
* remove "add file" statements, e.g. add file /etc/krb5.conf;
162+
*/
163+
private static List<String> removeAddFileStmt(List<String> stmts) {
164+
List<String> cleanedStmts = new ArrayList<>();
165+
for (String stmt : stmts) {
166+
Matcher matcher = ADD_FIlE_PATTERN.matcher(stmt);
167+
if(!matcher.matches()) {
168+
cleanedStmts.add(stmt);
169+
}
170+
}
171+
return cleanedStmts;
172+
}
153173
}

0 commit comments

Comments
 (0)