From 04d5e7c9d879e2e10c73f66826a809522811e9de Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E8=B0=A2=E5=B8=AE=E6=A1=82?=
<97835322+xiebanggui777@users.noreply.github.com>
Date: Thu, 20 Jan 2022 20:17:12 +0800
Subject: [PATCH 01/11] Update dlink.sql
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Changed the input character limit on the description and fill-value fields of Registration Center > Document Management > Create Document so that long text can be submitted, and added several quick syntax completions for commonly used settings.
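For a database already initialized with the old schema, the equivalent in-place change is a single ALTER TABLE. A minimal migration sketch, not part of this patch and assuming the default `dlink` database name:

USE dlink;
-- Widen both columns from varchar(255) to LONGTEXT so long
-- documentation text and fill values can be stored.
ALTER TABLE `dlink_flink_document`
    MODIFY COLUMN `description` LONGTEXT CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '描述',
    MODIFY COLUMN `fill_value` LONGTEXT CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '填充值';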
---
dlink-doc/sql/dlink.sql | 26 ++++++++++++++++++++++++--
1 file changed, 24 insertions(+), 2 deletions(-)
diff --git a/dlink-doc/sql/dlink.sql b/dlink-doc/sql/dlink.sql
index 06e82fa5a2..38835962c4 100644
--- a/dlink-doc/sql/dlink.sql
+++ b/dlink-doc/sql/dlink.sql
@@ -114,8 +114,8 @@ CREATE TABLE `dlink_flink_document` (
`type` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '类型',
`subtype` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '子类型',
`name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '信息',
- `description` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '描述',
- `fill_value` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '填充值',
+ `description` LONGTEXT CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '描述',
+ `fill_value` LONGTEXT CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '填充值',
`version` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL COMMENT '版本号',
`like_num` int(255) NULL DEFAULT 0 COMMENT '喜爱值',
`enabled` tinyint(1) NOT NULL DEFAULT 0 COMMENT '是否启用',
@@ -124,6 +124,28 @@ CREATE TABLE `dlink_flink_document` (
PRIMARY KEY (`id`) USING BTREE
) ENGINE = InnoDB AUTO_INCREMENT = 264 CHARACTER SET = utf8 COLLATE = utf8_general_ci COMMENT = '文档管理' ROW_FORMAT = Dynamic;
+INSERT INTO `dlink_flink_document` VALUES (1, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.async-lookup.buffer-capacity', '异步查找连接可以触发的最大异步操作的操作数。 \nThe max number of async i/o operation that the async lookup join can trigger.', 'Set \'table.exec.async-lookup.buffer-capacity\'=\'100\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00');
+INSERT INTO `dlink_flink_document` VALUES (2, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.async-lookup.timeout', '异步操作完成的超时时间。 \nThe async timeout for the asynchronous operation to complete.', 'Set \'table.exec.async-lookup.timeout\'=\'3 min\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00');
+INSERT INTO `dlink_flink_document` VALUES (3, 'Variable', '优化参数', 'Batch', 'set table.exec.disabled-operators', '禁用指定operators,用逗号分隔 \nMainly for testing. A comma-separated list of operator names, each name represents a kind of disabled operator. Operators that can be disabled include \"NestedLoopJoin\", \"ShuffleHashJoin\", \"BroadcastHashJoin\", \"SortMergeJoin\", \"HashAgg\", \"SortAgg\". By default no operator is disabled.', 'Set \'table.exec.disabled-operators\'=\'SortMergeJoin\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00');
+INSERT INTO `dlink_flink_document` VALUES (4, 'Variable', '优化参数', 'Streaming', 'set table.exec.mini-batch.allow-latency', '最大等待时间可用于MiniBatch缓冲输入记录。 MiniBatch是用于缓冲输入记录以减少状态访问的优化。MiniBatch以允许的等待时间间隔以及达到最大缓冲记录数触发。注意:如果将table.exec.mini-batch.enabled设置为true,则其值必须大于零.', 'Set \'table.exec.mini-batch.allow-latency\'=\'-1 ms\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00');
+INSERT INTO `dlink_flink_document` VALUES (5, 'Variable', '优化参数', 'Streaming', 'set table.exec.mini-batch.enabled', '指定是否启用MiniBatch优化。 MiniBatch是用于缓冲输入记录以减少状态访问的优化。默认情况下禁用此功能。 要启用此功能,用户应将此配置设置为true。注意:如果启用了mini batch 处理,则必须设置“ table.exec.mini-batch.allow-latency”和“ table.exec.mini-batch.size”.', 'Set \'table.exec.mini-batch.enabled\'=\'false\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00');
+INSERT INTO `dlink_flink_document` VALUES (6, 'Variable', '优化参数', 'Streaming', 'set table.exec.mini-batch.size', '可以为MiniBatch缓冲最大输入记录数。 MiniBatch是用于缓冲输入记录以减少状态访问的优化。MiniBatch以允许的等待时间间隔以及达到最大缓冲记录数触发。 注意:MiniBatch当前仅适用于非窗口聚合。如果将table.exec.mini-batch.enabled设置为true,则其值必须为正.', 'Set \'table.exec.mini-batch.size\'=\'-1\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00');
+INSERT INTO `dlink_flink_document` VALUES (7, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.resource.default-parallelism', '设置所有Operator的默认并行度。 \nSets default parallelism for all operators (such as aggregate, join, filter) to run with parallel instances. This config has a higher priority than parallelism of StreamExecutionEnvironment (actually, this config overrides the parallelism of StreamExecutionEnvironment). A value of -1 indicates that no default parallelism is set, then it will fallback to use the parallelism of StreamExecutionEnvironment.', 'Set \'table.exec.resource.default-parallelism\'=\'1\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00');
+INSERT INTO `dlink_flink_document` VALUES (8, 'Variable', '优化参数', 'Batch/Streaming', 'set table.exec.sink.not-null-enforcer', '对表的NOT NULL列约束强制执行不能将空值插入到表中。Flink支持“error”(默认)和“drop”强制行为 \nThe NOT NULL column constraint on a table enforces that null values can\'t be inserted into the table. Flink supports \'error\' (default) and \'drop\' enforcement behavior. By default, Flink will check values and throw runtime exception when null values writing into NOT NULL columns. Users can change the behavior to \'drop\' to silently drop such records without throwing exception.\nPossible values:\n\"ERROR\" \n\"DROP\"', 'Set \'table.exec.sink.not-null-enforcer\'=\'ERROR\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00');
+INSERT INTO `dlink_flink_document` VALUES (9, 'Variable', '优化参数', 'Streaming', 'set table.exec.sink.upsert-materialize', '由于分布式系统中 Shuffle 导致 ChangeLog 数据混乱,Sink 接收到的数据可能不是全局 upsert 的顺序。因此,在 upsert sink 之前添加 upsert materialize 运算符。它接收上游的变更日志记录并为下游生成一个 upsert 视图。默认情况下,当唯一键出现分布式无序时,会添加具体化操作符。您也可以选择不实现(NONE)或强制实现(FORCE)。\nPossible values:\n\"NONE\" \n\"FORCE\" \n\"AUTO\"', 'Set \'table.exec.sink.upsert-materialize\'=\'AUTO\';', '1.14', 0, 1, '2022-01-20 15:00:00', '2022-01-20 15:00:00');
+INSERT INTO `dlink_flink_document` VALUES (10, 'Module', '建表语句', NULL, 'create.table.kafka', 'kafka快速建表格式', 'CREATE TABLE Kafka_Table (\n `event_time` TIMESTAMP(3) METADATA FROM \'timestamp\',\n `partition` BIGINT METADATA VIRTUAL,\n `offset` BIGINT METADATA VIRTUAL,\n `user_id` BIGINT,\n `item_id` BIGINT,\n `behavior` STRING\n) WITH (\n \'connector\' = \'kafka\',\n \'topic\' = \'user_behavior\',\n \'properties.bootstrap.servers\' = \'localhost:9092\',\n \'properties.group.id\' = \'testGroup\',\n \'scan.startup.mode\' = \'earliest-offset\',\n \'format\' = \'csv\'\n);\n--可选: \'value.fields-include\' = \'ALL\',\n--可选: \'json.ignore-parse-errors\' = \'true\',\n--可选: \'key.fields-prefix\' = \'k_\',', '1.14', 0, 1, '2022-01-20 16:59:18', '2022-01-20 17:57:32');
+INSERT INTO `dlink_flink_document` VALUES (11, 'Module', '建表语句', NULL, 'create.table.doris', 'Doris快速建表', 'CREATE TABLE doris_table (\n cid INT,\n sid INT,\n name STRING,\n cls STRING,\n score INT,\n PRIMARY KEY (cid) NOT ENFORCED\n) WITH ( \n\'connector\' = \'doris\',\n\'fenodes\' = \'127.0.0.1:8030\' ,\n\'table.identifier\' = \'test.scoreinfo\',\n\'username\' = \'root\',\n\'password\'=\'\'\n);', '1.14', 0, 1, '2022-01-20 17:08:00', '2022-01-20 17:57:26');
+INSERT INTO `dlink_flink_document` VALUES (12, 'Module', '建表语句', NULL, 'create.table.jdbc', 'JDBC建表语句', 'CREATE TABLE JDBC_table (\n id BIGINT,\n name STRING,\n age INT,\n status BOOLEAN,\n PRIMARY KEY (id) NOT ENFORCED\n) WITH (\n \'connector\' = \'jdbc\',\n \'url\' = \'jdbc:mysql://localhost:3306/mydatabase\',\n \'table-name\' = \'users\',\n \'username\' = \'root\',\n \'password\' = \'123456\'\n);\n--可选: \'sink.parallelism\'=\'1\',\n--可选: \'lookup.cache.ttl\'=\'1000s\',', '1.14', 0, 1, '2022-01-20 17:15:26', '2022-01-20 17:57:20');
+INSERT INTO `dlink_flink_document` VALUES (13, 'Module', '创建catalog模块', NULL, 'create.catalog.hive', '创建HIVE的catalog', 'CREATE CATALOG hive WITH ( \n \'type\' = \'hive\',\n \'default-database\' = \'default\',\n \'hive-conf-dir\' = \'/app/wwwroot/MBDC/hive/conf/\', --hive配置文件\n \'hadoop-conf-dir\'=\'/app/wwwroot/MBDC/hadoop/etc/hadoop/\' --hadoop配置文件,配了环境变量则不需要。\n);', '1.14', 0, 1, '2022-01-20 17:18:54', '2022-01-20 17:18:54');
+INSERT INTO `dlink_flink_document` VALUES (14, 'Operator', '', NULL, 'use.catalog.hive', '使用hive的catalog', 'USE CATALOG hive;', '1.14', 0, 1, '2022-01-20 17:22:53', '2022-01-20 17:22:53');
+INSERT INTO `dlink_flink_document` VALUES (15, 'Operator', NULL, NULL, 'use.catalog.default', '使用default的catalog', 'USE CATALOG default_catalog; \n', '1.14', 0, 1, '2022-01-20 17:23:48', '2022-01-20 17:24:23');
+INSERT INTO `dlink_flink_document` VALUES (16, 'Variable', '设置参数', NULL, 'set dialect.hive', '使用hive方言', 'Set table.sql-dialect=hive;', '1.14', 0, 1, '2022-01-20 17:25:37', '2022-01-20 17:27:23');
+INSERT INTO `dlink_flink_document` VALUES (17, 'Variable', '设置参数', NULL, 'set dialect.default', '使用default方言', 'Set table.sql-dialect=default;', '1.14', 0, 1, '2022-01-20 17:26:19', '2022-01-20 17:27:20');
+INSERT INTO `dlink_flink_document` VALUES (18, 'Module', '建表语句', NULL, 'create.stream.table.hive', '创建流式HIVE表', 'CREATE CATALOG hive WITH ( --创建hive的catalog\n \'type\' = \'hive\',\n \'hive-conf-dir\' = \'/app/wwwroot/MBDC/hive/conf/\',\n \'hadoop-conf-dir\'=\'/app/wwwroot/MBDC/hadoop/etc/hadoop/\'\n);\n\nUSE CATALOG hive; \nUSE offline_db; --选择库\nset table.sql-dialect=hive; --设置方言\n\nCREATE TABLE hive_stream_table (\n user_id STRING,\n order_amount DOUBLE\n) PARTITIONED BY (dt STRING, hr STRING) STORED AS parquet TBLPROPERTIES (\n \'partition.time-extractor.timestamp-pattern\'=\'$dt $hr:00:00\',\n \'sink.partition-commit.trigger\'=\'partition-time\',\n \'sink.partition-commit.delay\'=\'1min\',\n \'sink.semantic\' = \'exactly-once\',\n \'sink.rolling-policy.rollover-interval\' =\'1min\',\n \'sink.rolling-policy.check-interval\'=\'1min\',\n \'sink.partition-commit.policy.kind\'=\'metastore,success-file\'\n);', '1.14', 0, 1, '2022-01-20 17:34:06', '2022-01-20 17:46:41');
+INSERT INTO `dlink_flink_document` VALUES (19, 'Module', '建表语句', NULL, 'create.table.mysql_cdc', '创建Mysql_CDC表', 'CREATE TABLE mysql_cdc_table(\n cid INT,\n sid INT,\n cls STRING,\n score INT,\n PRIMARY KEY (cid) NOT ENFORCED\n) WITH (\n\'connector\' = \'mysql-cdc\',\n\'hostname\' = \'127.0.0.1\',\n\'port\' = \'3306\',\n\'username\' = \'test\',\n\'password\' = \'123456\',\n\'database-name\' = \'test\',\n\'server-time-zone\' = \'UTC\',\n\'scan.incremental.snapshot.enabled\' = \'true\',\n\'debezium.snapshot.mode\'=\'latest-offset\' ,-- 或者key是scan.startup.mode,initial表示要历史数据,latest-offset表示不要历史数据\n\'debezium.datetime.format.date\'=\'yyyy-MM-dd\',\n\'debezium.datetime.format.time\'=\'HH-mm-ss\',\n\'debezium.datetime.format.datetime\'=\'yyyy-MM-dd HH-mm-ss\',\n\'debezium.datetime.format.timestamp\'=\'yyyy-MM-dd HH-mm-ss\',\n\'debezium.datetime.format.timestamp.zone\'=\'UTC+8\',\n\'table-name\' = \'mysql_cdc_table\');', '1.14', 0, 1, '2022-01-20 17:49:14', '2022-01-20 17:52:20');
+INSERT INTO `dlink_flink_document` VALUES (20, 'Module', '建表语句', NULL, 'create.table.hudi', '创建hudi表', 'CREATE TABLE hudi_table\n(\n `goods_order_id` bigint COMMENT \'自增主键id\',\n `goods_order_uid` string COMMENT \'订单uid\',\n `customer_uid` string COMMENT \'客户uid\',\n `customer_name` string COMMENT \'客户name\',\n `create_time` timestamp(3) COMMENT \'创建时间\',\n `update_time` timestamp(3) COMMENT \'更新时间\',\n `create_by` string COMMENT \'创建人uid(唯一标识)\',\n `update_by` string COMMENT \'更新人uid(唯一标识)\',\n PRIMARY KEY (goods_order_id) NOT ENFORCED\n) COMMENT \'hudi_table\'\nWITH (\n\'connector\' = \'hudi\',\n\'path\' = \'hdfs://cluster1/data/bizdata/cdc/mysql/order/goods_order\', -- 路径会自动创建\n\'hoodie.datasource.write.recordkey.field\' = \'goods_order_id\', -- 主键\n\'write.precombine.field\' = \'update_time\', -- 相同的键值时,取此字段最大值,默认ts字段\n\'read.streaming.skip_compaction\' = \'true\', -- 避免重复消费问题\n\'write.bucket_assign.tasks\' = \'2\', -- 并发写的 bucekt 数\n\'write.tasks\' = \'2\',\n\'compaction.tasks\' = \'1\',\n\'write.operation\' = \'upsert\', -- UPSERT(插入更新)\\INSERT(插入)\\BULK_INSERT(批插入)(upsert性能会低些,不适合埋点上报)\n\'write.rate.limit\' = \'20000\', -- 限制每秒多少条\n\'table.type\' = \'COPY_ON_WRITE\', -- 默认COPY_ON_WRITE ,\n\'compaction.async.enabled\' = \'true\', -- 在线压缩\n\'compaction.trigger.strategy\' = \'num_or_time\', -- 按次数压缩\n\'compaction.delta_commits\' = \'20\', -- 默认为5\n\'compaction.delta_seconds\' = \'60\', -- 默认为1小时\n\'hive_sync.enable\' = \'true\', -- 启用hive同步\n\'hive_sync.mode\' = \'hms\', -- 启用hive hms同步,默认jdbc\n\'hive_sync.metastore.uris\' = \'thrift://cdh2.vision.com:9083\', -- required, metastore的端口\n\'hive_sync.jdbc_url\' = \'jdbc:hive2://cdh1.vision.com:10000\', -- required, hiveServer地址\n\'hive_sync.table\' = \'order_mysql_goods_order\', -- required, hive 新建的表名 会自动同步hudi的表结构和数据到hive\n\'hive_sync.db\' = \'cdc_ods\', -- required, hive 新建的数据库名\n\'hive_sync.username\' = \'hive\', -- required, HMS 用户名\n\'hive_sync.password\' = \'123456\', -- required, HMS 密码\n\'hive_sync.skip_ro_suffix\' = \'true\' -- 去除ro后缀\n);', '1.14', 0, 1, '2022-01-20 17:56:50', '2022-01-20 17:56:50');
+
+
-- ----------------------------
-- Table structure for dlink_history
-- ----------------------------
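The mini-batch entries above (rows 4 to 6) are meant to be used together: per their descriptions, enabling mini-batch requires a positive allow-latency and batch size. A usage sketch with illustrative values (the shipped fill values keep Flink's defaults of 'false', '-1 ms', and '-1'):

-- Buffer input records to reduce state access; the values below are examples only.
SET 'table.exec.mini-batch.enabled' = 'true';
SET 'table.exec.mini-batch.allow-latency' = '5 s';
SET 'table.exec.mini-batch.size' = '5000';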
From a5052c9120c2acc09109fc6bc4e9f35c6391c5ff Mon Sep 17 00:00:00 2001
From: wenmo <32723967+wenmo@users.noreply.github.com>
Date: Fri, 21 Jan 2022 00:27:40 +0800
Subject: [PATCH 02/11] F2 fullscreen development
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
.../components/Studio/StudioHome/index.tsx | 3 ++-
.../components/Studio/StudioMenu/index.tsx | 24 +++++++++++++++++--
.../components/Studio/StudioTabs/index.tsx | 6 ++---
dlink-web/src/components/Studio/index.tsx | 6 ++---
dlink-web/src/pages/Welcome.tsx | 3 +++
5 files changed, 33 insertions(+), 9 deletions(-)
diff --git a/dlink-web/src/components/Studio/StudioHome/index.tsx b/dlink-web/src/components/Studio/StudioHome/index.tsx
index e81bb38c63..50bf6e4fa8 100644
--- a/dlink-web/src/components/Studio/StudioHome/index.tsx
+++ b/dlink-web/src/components/Studio/StudioHome/index.tsx
@@ -22,7 +22,8 @@ const StudioHome = (props: any) => {
Alt + 1 保存
Alt + 2 校验
Alt + 3 美化
- Esc 关闭弹框
+ F2 全屏
+ Esc 关闭弹框及全屏
F1 更多快捷键
diff --git a/dlink-web/src/components/Studio/StudioMenu/index.tsx b/dlink-web/src/components/Studio/StudioMenu/index.tsx
index cece7c536b..826659a8d0 100644
--- a/dlink-web/src/components/Studio/StudioMenu/index.tsx
+++ b/dlink-web/src/components/Studio/StudioMenu/index.tsx
@@ -16,7 +16,7 @@ import {executeSql, getJobPlan} from "@/pages/FlinkSqlStudio/service";
import StudioHelp from "./StudioHelp";
import StudioGraph from "./StudioGraph";
import {showCluster, showTables} from "@/components/Studio/StudioEvent/DDL";
-import {useCallback, useEffect, useState} from "react";
+import React, {useCallback, useEffect, useState} from "react";
import StudioExplain from "../StudioConsole/StudioExplain";
import {DIALECT, isOnline, isSql} from "@/components/Studio/conf";
import {
@@ -24,6 +24,7 @@ import {
} from '@ant-design/pro-form';
import SqlExport from "@/pages/FlinkSqlStudio/SqlExport";
import {Dispatch} from "@@/plugin-dva/connect";
+import StudioTabs from "@/components/Studio/StudioTabs";
const menu = (
diff --git a/dlink-web/src/pages/Welcome.tsx b/dlink-web/src/pages/Welcome.tsx
--- a/dlink-web/src/pages/Welcome.tsx
+++ b/dlink-web/src/pages/Welcome.tsx
-0.5.1 2022-01-23
+0.5.1 2022-01-??
@@ -619,6 +619,9 @@ export default (): React.ReactNode => {
 优化 作业配置查看及全屏开发按钮
+新增 K8S集群配置
From 39ef72f1e799623eb2ea7391c82a9ff650922d0f Mon Sep 17 00:00:00 2001
From: wenmo <32723967+wenmo@users.noreply.github.com>
Date: Mon, 24 Jan 2022 18:43:06 +0800
Subject: [PATCH 10/11] Fix packaging bug
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
dlink-admin/pom.xml | 4 ++--
dlink-client/dlink-client-1.14/pom.xml | 2 ++
2 files changed, 4 insertions(+), 2 deletions(-)
diff --git a/dlink-admin/pom.xml b/dlink-admin/pom.xml
index f0b23e3e7b..eacf4d017f 100644
--- a/dlink-admin/pom.xml
+++ b/dlink-admin/pom.xml
@@ -121,11 +121,11 @@
 <groupId>com.dlink</groupId>
 <artifactId>dlink-gateway</artifactId>
-
+