diff --git a/dlink-admin/src/main/java/com/dlink/configure/SaTokenConfigure.java b/dlink-admin/src/main/java/com/dlink/configure/SaTokenConfigure.java index b738b70c80..fa3092c94c 100644 --- a/dlink-admin/src/main/java/com/dlink/configure/SaTokenConfigure.java +++ b/dlink-admin/src/main/java/com/dlink/configure/SaTokenConfigure.java @@ -19,6 +19,7 @@ public void addInterceptors(InterceptorRegistry registry) { // 注册Sa-Token的路由拦截器 registry.addInterceptor(new SaRouteInterceptor()) .addPathPatterns("/**") - .excludePathPatterns("/api/login"); + .excludePathPatterns("/api/login") + .excludePathPatterns("/openapi/**"); } } diff --git a/dlink-admin/src/main/java/com/dlink/controller/APIController.java b/dlink-admin/src/main/java/com/dlink/controller/APIController.java new file mode 100644 index 0000000000..ccee6fbb06 --- /dev/null +++ b/dlink-admin/src/main/java/com/dlink/controller/APIController.java @@ -0,0 +1,32 @@ +package com.dlink.controller; + +import com.dlink.common.result.Result; +import com.dlink.dto.APIExecuteSqlDTO; +import com.dlink.job.JobResult; +import com.dlink.service.APIService; +import lombok.extern.slf4j.Slf4j; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestBody; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RestController; + +/** + * APIController + * + * @author wenmo + * @since 2021/12/11 21:44 + */ +@Slf4j +@RestController +@RequestMapping("/openapi") +public class APIController { + + @Autowired + private APIService apiService; + @PostMapping("/executeSql") + + public Result executeSql(@RequestBody APIExecuteSqlDTO apiExecuteSqlDTO) { + return Result.succeed(apiService.executeSql(apiExecuteSqlDTO),"执行成功"); + } +} diff --git a/dlink-admin/src/main/java/com/dlink/dto/APIExecuteSqlDTO.java b/dlink-admin/src/main/java/com/dlink/dto/APIExecuteSqlDTO.java 
new file mode 100644 index 0000000000..a238868154 --- /dev/null +++ b/dlink-admin/src/main/java/com/dlink/dto/APIExecuteSqlDTO.java @@ -0,0 +1,51 @@ +package com.dlink.dto; + +import com.dlink.assertion.Asserts; +import com.dlink.gateway.config.GatewayConfig; +import com.dlink.job.JobConfig; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import lombok.Getter; +import lombok.Setter; + +import java.util.HashMap; +import java.util.Map; + +/** + * APIExecuteSqlDTO + * + * @author wenmo + * @since 2021/12/11 21:50 + */ +@Getter +@Setter +public class APIExecuteSqlDTO { + // RUN_MODE + private String type; + private boolean useResult = false; + private boolean useStatementSet = false; + private String address; + private boolean fragment = false; + private String statement; + private String jobName; + private Integer maxRowNum = 100; + private Integer checkPoint = 0; + private Integer parallelism; + private String savePointPath; + private Map configuration; + private GatewayConfig gatewayConfig; + + private static final ObjectMapper mapper = new ObjectMapper(); + + public JobConfig getJobConfig() { + Integer savePointStrategy = 0; + if (Asserts.isNotNullString(savePointPath)) { + savePointStrategy = 3; + } + return new JobConfig( + type, useResult, false, null, true, address, jobName, + fragment, useStatementSet, maxRowNum, checkPoint, parallelism, savePointStrategy, + savePointPath, configuration, gatewayConfig); + } +} diff --git a/dlink-admin/src/main/java/com/dlink/result/APIJobResult.java b/dlink-admin/src/main/java/com/dlink/result/APIJobResult.java new file mode 100644 index 0000000000..2b3eb21f80 --- /dev/null +++ b/dlink-admin/src/main/java/com/dlink/result/APIJobResult.java @@ -0,0 +1,41 @@ +package com.dlink.result; + +import com.dlink.job.Job; +import com.dlink.job.JobResult; +import lombok.Getter; +import lombok.Setter; + +import 
java.time.LocalDateTime; + +/** + * APIJobResult + * + * @author wenmo + * @since 2021/12/11 22:49 + */ +@Getter +@Setter +public class APIJobResult { + private String jobManagerAddress; + private Job.JobStatus status; + private boolean success; + private String jobId; + private String error; + private LocalDateTime startTime; + private LocalDateTime endTime; + + public APIJobResult(String jobManagerAddress, Job.JobStatus status, boolean success, String jobId, String error, LocalDateTime startTime, LocalDateTime endTime) { + this.jobManagerAddress = jobManagerAddress; + this.status = status; + this.success = success; + this.jobId = jobId; + this.error = error; + this.startTime = startTime; + this.endTime = endTime; + } + + public static APIJobResult build(JobResult jobResult){ + return new APIJobResult(jobResult.getJobManagerAddress(),jobResult.getStatus(),jobResult.isSuccess(), + jobResult.getJobId(),jobResult.getError(),jobResult.getStartTime(),jobResult.getEndTime()); + } +} diff --git a/dlink-admin/src/main/java/com/dlink/service/APIService.java b/dlink-admin/src/main/java/com/dlink/service/APIService.java new file mode 100644 index 0000000000..3477d39531 --- /dev/null +++ b/dlink-admin/src/main/java/com/dlink/service/APIService.java @@ -0,0 +1,15 @@ +package com.dlink.service; + +import com.dlink.dto.APIExecuteSqlDTO; +import com.dlink.result.APIJobResult; + +/** + * APIService + * + * @author wenmo + * @since 2021/12/11 21:45 + */ +public interface APIService { + + APIJobResult executeSql(APIExecuteSqlDTO apiExecuteSqlDTO); +} diff --git a/dlink-admin/src/main/java/com/dlink/service/impl/APIServiceImpl.java b/dlink-admin/src/main/java/com/dlink/service/impl/APIServiceImpl.java new file mode 100644 index 0000000000..2f507b6d01 --- /dev/null +++ b/dlink-admin/src/main/java/com/dlink/service/impl/APIServiceImpl.java @@ -0,0 +1,30 @@ +package com.dlink.service.impl; + +import com.dlink.dto.APIExecuteSqlDTO; +import com.dlink.job.JobConfig; +import 
com.dlink.job.JobManager; +import com.dlink.job.JobResult; +import com.dlink.result.APIJobResult; +import com.dlink.service.APIService; +import com.dlink.utils.RunTimeUtil; +import org.springframework.stereotype.Service; + +/** + * APIServiceImpl + * + * @author wenmo + * @since 2021/12/11 21:46 + */ +@Service +public class APIServiceImpl implements APIService { + + @Override + public APIJobResult executeSql(APIExecuteSqlDTO apiExecuteSqlDTO) { + JobConfig config = apiExecuteSqlDTO.getJobConfig(); + JobManager jobManager = JobManager.build(config); + JobResult jobResult = jobManager.executeSql(apiExecuteSqlDTO.getStatement()); + APIJobResult apiJobResult = APIJobResult.build(jobResult); + RunTimeUtil.recovery(jobManager); + return apiJobResult; + } +} diff --git a/dlink-admin/src/main/java/com/dlink/service/impl/StudioServiceImpl.java b/dlink-admin/src/main/java/com/dlink/service/impl/StudioServiceImpl.java index 4bd9dd9ed4..585d9d9e40 100644 --- a/dlink-admin/src/main/java/com/dlink/service/impl/StudioServiceImpl.java +++ b/dlink-admin/src/main/java/com/dlink/service/impl/StudioServiceImpl.java @@ -27,6 +27,7 @@ import com.dlink.session.SessionConfig; import com.dlink.session.SessionInfo; import com.dlink.session.SessionPool; +import com.dlink.utils.RunTimeUtil; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; @@ -67,7 +68,9 @@ public JobResult executeSql(StudioExecuteDTO studioExecuteDTO) { config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), studioExecuteDTO.getClusterId())); } JobManager jobManager = JobManager.build(config); - return jobManager.executeSql(studioExecuteDTO.getStatement()); + JobResult jobResult = jobManager.executeSql(studioExecuteDTO.getStatement()); + RunTimeUtil.recovery(jobManager); + return jobResult; } @Override @@ -86,7 +89,7 @@ public List explainSql(StudioExecuteDTO studioExecuteDTO) { 
if(!config.isUseSession()) { config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), studioExecuteDTO.getClusterId())); } - JobManager jobManager = JobManager.build(config); + JobManager jobManager = JobManager.buildPlanMode(config); return jobManager.explainSql(studioExecuteDTO.getStatement()); } @@ -97,7 +100,7 @@ public ObjectNode getStreamGraph(StudioExecuteDTO studioExecuteDTO) { if(!config.isUseSession()) { config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), studioExecuteDTO.getClusterId())); } - JobManager jobManager = JobManager.build(config); + JobManager jobManager = JobManager.buildPlanMode(config); return jobManager.getStreamGraph(studioExecuteDTO.getStatement()); } @@ -108,7 +111,7 @@ public ObjectNode getJobPlan(StudioExecuteDTO studioExecuteDTO) { if(!config.isUseSession()) { config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), studioExecuteDTO.getClusterId())); } - JobManager jobManager = JobManager.build(config); + JobManager jobManager = JobManager.buildPlanMode(config); String planJson = jobManager.getJobPlanJson(studioExecuteDTO.getStatement()); ObjectMapper mapper = new ObjectMapper(); ObjectNode objectNode =mapper.createObjectNode(); diff --git a/dlink-admin/src/main/resources/json/openapi_executesql_perjob.json b/dlink-admin/src/main/resources/json/openapi_executesql_perjob.json new file mode 100644 index 0000000000..bdf98848d3 --- /dev/null +++ b/dlink-admin/src/main/resources/json/openapi_executesql_perjob.json @@ -0,0 +1,34 @@ +/* http://127.0.0.1:8888/openapi/executeSql */ +{ + /* required-start */ + "type":"yarn-per-job", + "statement":"CREATE TABLE Orders (\r\n order_number INT,\r\n price DECIMAL(32,2),\r\n order_time TIMESTAMP(3)\r\n) WITH (\r\n 'connector' = 'datagen',\r\n 'rows-per-second' = '1',\r\n 'fields.order_number.kind' = 'sequence',\r\n 'fields.order_number.start' = '1',\r\n 'fields.order_number.end' = '1000'\r\n);\r\nCREATE TABLE pt (\r\nordertotal 
INT,\r\nnumtotal INT\r\n) WITH (\r\n 'connector' = 'print'\r\n);\r\ninsert into pt select 1 as ordertotal ,sum(order_number)*2 as numtotal from Orders", + "gatewayConfig":{ + "clusterConfig":{ + "flinkConfigPath":"/opt/src/flink-1.13.3_conf/conf", + "flinkLibPath":"hdfs:///flink13/lib/flinklib", + "yarnConfigPath":"/usr/local/hadoop/hadoop-2.7.7/etc/hadoop" + }, + "flinkConfig": { + "configuration":{ + "parallelism.default": 1 + } + } + }, + /* required-end */ + /* default-start */ + "useResult":false, + "useStatementSet":false, + "fragment":false, + "maxRowNum":100, + "checkPoint":0, + "parallelism":1, + /* default-end */ + /* custom-start */ + "jobName":"openapitest", + "savePointPath":"hdfs://ns/flink/savepoints/savepoint-5f4b8c-4326844a6843", + "configuration":{ + "table.exec.resource.default-parallelism":2 + } + /* custom-end */ +} \ No newline at end of file diff --git a/dlink-admin/src/main/resources/json/openapi_executesql_yarnsession.json b/dlink-admin/src/main/resources/json/openapi_executesql_yarnsession.json new file mode 100644 index 0000000000..360638af69 --- /dev/null +++ b/dlink-admin/src/main/resources/json/openapi_executesql_yarnsession.json @@ -0,0 +1,23 @@ +/* http://127.0.0.1:8888/openapi/executeSql */ +{ + /* required-start */ + "type":"yarn-session", + "address":"10.1.51.24:8081", + "statement":"CREATE TABLE Orders (\r\n order_number INT,\r\n price DECIMAL(32,2),\r\n order_time TIMESTAMP(3)\r\n) WITH (\r\n 'connector' = 'datagen',\r\n 'rows-per-second' = '1',\r\n 'fields.order_number.kind' = 'sequence',\r\n 'fields.order_number.start' = '1',\r\n 'fields.order_number.end' = '1000'\r\n);\r\nCREATE TABLE pt (\r\nordertotal INT,\r\nnumtotal INT\r\n) WITH (\r\n 'connector' = 'print'\r\n);\r\ninsert into pt select 1 as ordertotal ,sum(order_number)*2 as numtotal from Orders", + /* required-end */ + /* default-start */ + "useResult":false, + "useStatementSet":false, + "fragment":false, + "maxRowNum":100, + "checkPoint":0, + "parallelism":1, + /* 
default-end */ + /* custom-start */ + "jobName":"openapitest", + "savePointPath":"hdfs://ns/flink/savepoints/savepoint-5f4b8c-4326844a6843", + "configuration":{ + "table.exec.resource.default-parallelism":2 + } + /* custom-end */ +} \ No newline at end of file diff --git a/dlink-core/src/main/java/com/dlink/job/Job.java b/dlink-core/src/main/java/com/dlink/job/Job.java index ded0323ba9..91d02abca2 100644 --- a/dlink-core/src/main/java/com/dlink/job/Job.java +++ b/dlink-core/src/main/java/com/dlink/job/Job.java @@ -35,7 +35,7 @@ public class Job { private Executor executor; private boolean useGateway; - enum JobStatus { + public enum JobStatus { INITIALIZE, RUNNING, SUCCESS, diff --git a/dlink-core/src/main/java/com/dlink/job/JobConfig.java b/dlink-core/src/main/java/com/dlink/job/JobConfig.java index f75741ef82..81c2686783 100644 --- a/dlink-core/src/main/java/com/dlink/job/JobConfig.java +++ b/dlink-core/src/main/java/com/dlink/job/JobConfig.java @@ -22,6 +22,7 @@ @Setter public class JobConfig { + // flink run mode private String type; private boolean useResult; private boolean useSession; @@ -50,8 +51,8 @@ public JobConfig() { public JobConfig(String type, boolean useResult, boolean useSession, String session, boolean useRemote, Integer clusterId, Integer clusterConfigurationId,Integer jarId, Integer taskId, String jobName, boolean useSqlFragment, - boolean useStatementSet, Integer maxRowNum, Integer checkpoint, - Integer parallelism, Integer savePointStrategyValue, String savePointPath, Map config) { + boolean useStatementSet, Integer maxRowNum, Integer checkpoint, Integer parallelism, + Integer savePointStrategyValue, String savePointPath, Map config) { this.type = type; this.useResult = useResult; this.useSession = useSession; @@ -72,6 +73,28 @@ public JobConfig(String type, boolean useResult, boolean useSession, String sess this.config = config; } + public JobConfig(String type, boolean useResult, boolean useSession, String session, boolean useRemote, 
String address, + String jobName, boolean useSqlFragment, + boolean useStatementSet, Integer maxRowNum, Integer checkpoint, Integer parallelism, + Integer savePointStrategyValue, String savePointPath, Map config, GatewayConfig gatewayConfig) { + this.type = type; + this.useResult = useResult; + this.useSession = useSession; + this.session = session; + this.useRemote = useRemote; + this.address = address; + this.jobName = jobName; + this.useSqlFragment = useSqlFragment; + this.useStatementSet = useStatementSet; + this.maxRowNum = maxRowNum; + this.checkpoint = checkpoint; + this.parallelism = parallelism; + this.savePointStrategy = SavePointStrategy.get(savePointStrategyValue); + this.savePointPath = savePointPath; + this.config = config; + this.gatewayConfig = gatewayConfig; + } + public JobConfig(String type,boolean useResult, boolean useSession, String session, boolean useRemote, Integer clusterId) { this.type = type; this.useResult = useResult; diff --git a/dlink-core/src/main/java/com/dlink/utils/DateFormatUtil.java b/dlink-core/src/main/java/com/dlink/utils/DateFormatUtil.java deleted file mode 100644 index 30620a780d..0000000000 --- a/dlink-core/src/main/java/com/dlink/utils/DateFormatUtil.java +++ /dev/null @@ -1,37 +0,0 @@ -package com.dlink.utils; - -import com.dlink.constant.FlinkHistoryConstant; - -import java.util.Date; -import java.util.TimeZone; - -public class DateFormatUtil { - /** - * 获取一个日期的0:00:00 时间戳 日期必须大于00:00:00否则返回上一天 - * - * @param date - * @return - */ - public static long getZeroTimeStamp(Date date) { - return getZeroTimeStamp(date.getTime()); - } - - public static long getZeroTimeStamp(Long timestamp) { - timestamp += TimeZone.getDefault().getRawOffset(); - return timestamp / FlinkHistoryConstant.ONE_DAY * FlinkHistoryConstant.ONE_DAY - TimeZone.getDefault().getRawOffset(); - } - - /** - * 获取指定时间 当天的最后一秒 23:59:59 日期必须大于00:00:00 否则返回上一天 - * @param date - * @return - */ - public static long getLastTimeStampOfOneday(Date date) { - return 
getLastTimeStampOfOneday(date.getTime()); - } - - public static long getLastTimeStampOfOneday(Long timestamp) { - timestamp += TimeZone.getDefault().getRawOffset(); - return ( timestamp / FlinkHistoryConstant.ONE_DAY * FlinkHistoryConstant.ONE_DAY + FlinkHistoryConstant.ONE_DAY - 100)- TimeZone.getDefault().getRawOffset(); - } -} diff --git a/dlink-gateway/src/main/java/com/dlink/gateway/config/FlinkConfig.java b/dlink-gateway/src/main/java/com/dlink/gateway/config/FlinkConfig.java index 5409fa50e8..3ba458b1c7 100644 --- a/dlink-gateway/src/main/java/com/dlink/gateway/config/FlinkConfig.java +++ b/dlink-gateway/src/main/java/com/dlink/gateway/config/FlinkConfig.java @@ -8,6 +8,7 @@ import lombok.Setter; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; import java.util.Map; @@ -25,7 +26,8 @@ public class FlinkConfig { private ActionType action; private SavePointType savePointType; private String savePoint; - private List configParas; +// private List configParas; + private Map configuration; private static final ObjectMapper mapper = new ObjectMapper(); @@ -33,29 +35,30 @@ public class FlinkConfig { public FlinkConfig() { } - public FlinkConfig(List configParas) { - this.configParas = configParas; + public FlinkConfig(Map configuration) { + this.configuration = configuration; } - public FlinkConfig(String jobName, String jobId, ActionType action, SavePointType savePointType, String savePoint, List configParas) { + public FlinkConfig(String jobName, String jobId, ActionType action, SavePointType savePointType, String savePoint, Map configuration) { this.jobName = jobName; this.jobId = jobId; this.action = action; this.savePointType = savePointType; this.savePoint = savePoint; - this.configParas = configParas; + this.configuration = configuration; } public static FlinkConfig build(Map paras){ - List configParasList = new ArrayList<>(); + /*List configParasList = new ArrayList<>(); for (Map.Entry entry : paras.entrySet()) { 
configParasList.add(new ConfigPara(entry.getKey(),entry.getValue())); - } - return new FlinkConfig(configParasList); + }*/ + return new FlinkConfig(paras); } public static FlinkConfig build(String jobName, String jobId, String actionStr, String savePointTypeStr, String savePoint, String configParasStr){ - List configParasList = new ArrayList<>(); +// List configParasList = new ArrayList<>(); + Map configMap = new HashMap<>(); JsonNode paras = null; if(Asserts.isNotNullString(configParasStr)) { try { @@ -64,11 +67,12 @@ public static FlinkConfig build(String jobName, String jobId, String actionStr, e.printStackTrace(); } paras.forEach((JsonNode node) -> { - configParasList.add(new ConfigPara(node.get("key").asText(), node.get("value").asText())); + configMap.put(node.get("key").asText(),node.get("value").asText()); +// configParasList.add(new ConfigPara(node.get("key").asText(), node.get("value").asText())); } ); } - return new FlinkConfig(jobName,jobId,ActionType.get(actionStr),SavePointType.get(savePointTypeStr),savePoint,configParasList); + return new FlinkConfig(jobName,jobId,ActionType.get(actionStr),SavePointType.get(savePointTypeStr),savePoint,configMap); } public static FlinkConfig build(String jobId, String actionStr, String savePointTypeStr, String savePoint){ diff --git a/dlink-gateway/src/main/java/com/dlink/gateway/config/GatewayConfig.java b/dlink-gateway/src/main/java/com/dlink/gateway/config/GatewayConfig.java index bf52c95fa4..d51eda174c 100644 --- a/dlink-gateway/src/main/java/com/dlink/gateway/config/GatewayConfig.java +++ b/dlink-gateway/src/main/java/com/dlink/gateway/config/GatewayConfig.java @@ -8,7 +8,9 @@ import lombok.Setter; import java.util.ArrayList; +import java.util.HashMap; import java.util.List; +import java.util.Map; /** * SubmitConfig @@ -66,13 +68,13 @@ public static GatewayConfig build(JsonNode para){ } if(para.has("configParas")) { try { - List configParas = new ArrayList<>(); + Map configMap = new HashMap<>(); JsonNode paras = 
mapper.readTree(para.get("configParas").asText()); paras.forEach((JsonNode node)-> { - configParas.add(new ConfigPara(node.get("key").asText(),node.get("value").asText())); + configMap.put(node.get("key").asText(),node.get("value").asText()); } ); - config.getFlinkConfig().setConfigParas(configParas); + config.getFlinkConfig().setConfiguration(configMap); } catch (JsonProcessingException e) { e.printStackTrace(); } diff --git a/dlink-gateway/src/main/java/com/dlink/gateway/yarn/YarnGateway.java b/dlink-gateway/src/main/java/com/dlink/gateway/yarn/YarnGateway.java index 9a378c2127..e4b5c076ac 100644 --- a/dlink-gateway/src/main/java/com/dlink/gateway/yarn/YarnGateway.java +++ b/dlink-gateway/src/main/java/com/dlink/gateway/yarn/YarnGateway.java @@ -32,10 +32,7 @@ import java.io.IOException; import java.net.URI; -import java.util.ArrayList; -import java.util.Collection; -import java.util.Collections; -import java.util.List; +import java.util.*; import java.util.concurrent.CompletableFuture; /** @@ -63,8 +60,8 @@ public void init(){ private void initConfig(){ configuration = GlobalConfiguration.loadConfiguration(config.getClusterConfig().getFlinkConfigPath()); - if(Asserts.isNotNull(config.getFlinkConfig().getConfigParas())) { - addConfigParas(config.getFlinkConfig().getConfigParas()); + if(Asserts.isNotNull(config.getFlinkConfig().getConfiguration())) { + addConfigParas(config.getFlinkConfig().getConfiguration()); } configuration.set(DeploymentOptions.TARGET, getType().getLongValue()); if(Asserts.isNotNullString(config.getFlinkConfig().getSavePoint())) { @@ -87,10 +84,10 @@ private void initYarnClient(){ yarnClient.start(); } - private void addConfigParas(List configParas){ - if(Asserts.isNotNull(configParas)) { - for (ConfigPara configPara : configParas) { - configuration.setString(configPara.getKey(), configPara.getValue()); + private void addConfigParas(Map configMap){ + if(Asserts.isNotNull(configMap)) { + for (Map.Entry entry : configMap.entrySet()) { + 
this.configuration.setString(entry.getKey(), entry.getValue()); } } } diff --git a/dlink-web/src/app.tsx b/dlink-web/src/app.tsx index ce04a43bde..66a7a688a6 100644 --- a/dlink-web/src/app.tsx +++ b/dlink-web/src/app.tsx @@ -28,7 +28,7 @@ export async function getInitialState(): Promise<{ const fetchUserInfo = async () => { try { const result = await queryCurrentUser(); - const currentUser:API.CurrentUser = { + const currentUser: API.CurrentUser = { name: result.datas.nickname, avatar: result.datas.avatar?result.datas.avatar:'https://gw.alipayobjects.com/zos/antfincdn/XAosXuNZyF/BiazfanxmamNRoxxVxka.png', userid: result.datas.username, @@ -86,7 +86,7 @@ export const request: RequestConfig = { errorHandler: (error: ResponseError) => { const { messages } = getIntl(getLocale()); const { request,response } = error; - const writeUrl = ['/api-user/users/current','/api-uaa/oauth/token']; + const writeUrl = ['/api/current']; if(writeUrl.indexOf(request.originUrl)>-1){ return; }else { diff --git a/dlink-web/src/pages/Welcome.tsx b/dlink-web/src/pages/Welcome.tsx index 4f65dd72c8..568d5ce887 100644 --- a/dlink-web/src/pages/Welcome.tsx +++ b/dlink-web/src/pages/Welcome.tsx @@ -463,6 +463,18 @@ export default (): React.ReactNode => {
  • 新增 Local 的运行模式选择并优化 JobManager
  • +
  • + 修复登录页报错弹框 +
  • +
  • + 优化所有模式的所有功能的执行逻辑 +
  • +
  • + 新增 trigger 的 restAPI 实现 +
  • +
  • + 新增 OpenAPI 的执行 sql 接口 +