Skip to content

Commit

Permalink
优化执行逻辑,openAPI提供
Browse files Browse the repository at this point in the history
  • Loading branch information
aiwenmo committed Dec 11, 2021
1 parent fdbab0f commit a5ba689
Show file tree
Hide file tree
Showing 17 changed files with 302 additions and 71 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@ public void addInterceptors(InterceptorRegistry registry) {
// 注册Sa-Token的路由拦截器
registry.addInterceptor(new SaRouteInterceptor())
.addPathPatterns("/**")
.excludePathPatterns("/api/login");
.excludePathPatterns("/api/login")
.excludePathPatterns("/openapi/**");
}
}
32 changes: 32 additions & 0 deletions dlink-admin/src/main/java/com/dlink/controller/APIController.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
package com.dlink.controller;

import com.dlink.common.result.Result;
import com.dlink.dto.APIExecuteSqlDTO;
import com.dlink.job.JobResult;
import com.dlink.service.APIService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

/**
* APIController
*
* @author wenmo
* @since 2021/12/11 21:44
*/
@Slf4j
@RestController
@RequestMapping("/openapi")
public class APIController {

    @Autowired
    private APIService apiService;

    /**
     * Executes the FlinkSQL statement carried in the request body via the open API.
     *
     * @param apiExecuteSqlDTO job configuration plus the SQL statement to run
     * @return a success {@code Result} wrapping the job execution outcome
     */
    @PostMapping("/executeSql")
    public Result executeSql(@RequestBody APIExecuteSqlDTO apiExecuteSqlDTO) {
        return Result.succeed(apiService.executeSql(apiExecuteSqlDTO),"执行成功");
    }
}
51 changes: 51 additions & 0 deletions dlink-admin/src/main/java/com/dlink/dto/APIExecuteSqlDTO.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,51 @@
package com.dlink.dto;

import com.dlink.assertion.Asserts;
import com.dlink.gateway.config.GatewayConfig;
import com.dlink.job.JobConfig;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import lombok.Getter;
import lombok.Setter;

import java.util.HashMap;
import java.util.Map;

/**
* APIExecuteSqlDTO
*
* @author wenmo
* @since 2021/12/11 21:50
*/
@Getter
@Setter
public class APIExecuteSqlDTO {

    // Flink RUN_MODE, e.g. "yarn-per-job" or "yarn-session" (see sample JSON requests)
    private String type;
    private boolean useResult = false;
    private boolean useStatementSet = false;
    // Cluster address used when submitting to an existing session cluster
    private String address;
    private boolean fragment = false;
    // The FlinkSQL statement(s) to execute
    private String statement;
    private String jobName;
    private Integer maxRowNum = 100;
    private Integer checkPoint = 0;
    private Integer parallelism;
    private String savePointPath;
    private Map<String, String> configuration;
    private GatewayConfig gatewayConfig;

    /**
     * Converts this DTO into the {@link JobConfig} consumed by the job manager.
     * Session mode is disabled and remote mode enabled for open-API submissions.
     *
     * @return a JobConfig populated from this request
     */
    public JobConfig getJobConfig() {
        // Presumably 3 selects the "custom save point" strategy and 0 means none —
        // TODO confirm against SavePointStrategy's declared values.
        Integer savePointStrategy = 0;
        if (Asserts.isNotNullString(savePointPath)) {
            savePointStrategy = 3;
        }
        return new JobConfig(
                type, useResult, false, null, true, address, jobName,
                fragment, useStatementSet, maxRowNum, checkPoint, parallelism, savePointStrategy,
                savePointPath, configuration, gatewayConfig);
    }
}
41 changes: 41 additions & 0 deletions dlink-admin/src/main/java/com/dlink/result/APIJobResult.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
package com.dlink.result;

import com.dlink.job.Job;
import com.dlink.job.JobResult;
import lombok.Getter;
import lombok.Setter;

import java.time.LocalDateTime;

/**
* APIJobResult
*
* @author wenmo
* @since 2021/12/11 22:49
*/
@Getter
@Setter
public class APIJobResult {

    // Condensed, API-facing view of a JobResult.
    private String jobManagerAddress;
    private Job.JobStatus status;
    private boolean success;
    private String jobId;
    private String error;
    private LocalDateTime startTime;
    private LocalDateTime endTime;

    /**
     * Creates a result snapshot with every field supplied explicitly.
     */
    public APIJobResult(String jobManagerAddress, Job.JobStatus status, boolean success,
                        String jobId, String error, LocalDateTime startTime, LocalDateTime endTime) {
        this.jobManagerAddress = jobManagerAddress;
        this.status = status;
        this.success = success;
        this.jobId = jobId;
        this.error = error;
        this.startTime = startTime;
        this.endTime = endTime;
    }

    /**
     * Copies the externally relevant fields of a {@link JobResult} into an APIJobResult.
     *
     * @param jobResult the internal job result to condense
     * @return a new APIJobResult mirroring the given JobResult
     */
    public static APIJobResult build(JobResult jobResult) {
        return new APIJobResult(
                jobResult.getJobManagerAddress(),
                jobResult.getStatus(),
                jobResult.isSuccess(),
                jobResult.getJobId(),
                jobResult.getError(),
                jobResult.getStartTime(),
                jobResult.getEndTime());
    }
}
15 changes: 15 additions & 0 deletions dlink-admin/src/main/java/com/dlink/service/APIService.java
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
package com.dlink.service;

import com.dlink.dto.APIExecuteSqlDTO;
import com.dlink.result.APIJobResult;

/**
* APIService
*
* @author wenmo
* @since 2021/12/11 21:45
*/
public interface APIService {

/**
 * Executes the FlinkSQL contained in the DTO and returns a condensed job result.
 *
 * @param apiExecuteSqlDTO job configuration plus the SQL statement to execute
 * @return the outcome of the job submission
 */
APIJobResult executeSql(APIExecuteSqlDTO apiExecuteSqlDTO);
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
package com.dlink.service.impl;

import com.dlink.dto.APIExecuteSqlDTO;
import com.dlink.job.JobConfig;
import com.dlink.job.JobManager;
import com.dlink.job.JobResult;
import com.dlink.result.APIJobResult;
import com.dlink.service.APIService;
import com.dlink.utils.RunTimeUtil;
import org.springframework.stereotype.Service;

/**
* APIServiceImpl
*
* @author wenmo
* @since 2021/12/11 21:46
*/
@Service
public class APIServiceImpl implements APIService {

    /**
     * Builds a JobManager from the DTO's configuration, executes the SQL,
     * and condenses the internal JobResult into an API-facing result.
     *
     * @param apiExecuteSqlDTO job configuration plus the SQL statement to run
     * @return the condensed job result
     */
    @Override
    public APIJobResult executeSql(APIExecuteSqlDTO apiExecuteSqlDTO) {
        JobConfig config = apiExecuteSqlDTO.getJobConfig();
        JobManager jobManager = JobManager.build(config);
        try {
            JobResult jobResult = jobManager.executeSql(apiExecuteSqlDTO.getStatement());
            return APIJobResult.build(jobResult);
        } finally {
            // Release the runtime even when executeSql throws; previously recovery
            // was skipped on failure. NOTE(review): assumes recovery is safe to call
            // after a failed execution — confirm RunTimeUtil semantics.
            RunTimeUtil.recovery(jobManager);
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
import com.dlink.session.SessionConfig;
import com.dlink.session.SessionInfo;
import com.dlink.session.SessionPool;
import com.dlink.utils.RunTimeUtil;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
Expand Down Expand Up @@ -67,7 +68,9 @@ public JobResult executeSql(StudioExecuteDTO studioExecuteDTO) {
config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), studioExecuteDTO.getClusterId()));
}
JobManager jobManager = JobManager.build(config);
return jobManager.executeSql(studioExecuteDTO.getStatement());
JobResult jobResult = jobManager.executeSql(studioExecuteDTO.getStatement());
RunTimeUtil.recovery(jobManager);
return jobResult;
}

@Override
Expand All @@ -86,7 +89,7 @@ public List<SqlExplainResult> explainSql(StudioExecuteDTO studioExecuteDTO) {
if(!config.isUseSession()) {
config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), studioExecuteDTO.getClusterId()));
}
JobManager jobManager = JobManager.build(config);
JobManager jobManager = JobManager.buildPlanMode(config);
return jobManager.explainSql(studioExecuteDTO.getStatement());
}

Expand All @@ -97,7 +100,7 @@ public ObjectNode getStreamGraph(StudioExecuteDTO studioExecuteDTO) {
if(!config.isUseSession()) {
config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), studioExecuteDTO.getClusterId()));
}
JobManager jobManager = JobManager.build(config);
JobManager jobManager = JobManager.buildPlanMode(config);
return jobManager.getStreamGraph(studioExecuteDTO.getStatement());
}

Expand All @@ -108,7 +111,7 @@ public ObjectNode getJobPlan(StudioExecuteDTO studioExecuteDTO) {
if(!config.isUseSession()) {
config.setAddress(clusterService.buildEnvironmentAddress(config.isUseRemote(), studioExecuteDTO.getClusterId()));
}
JobManager jobManager = JobManager.build(config);
JobManager jobManager = JobManager.buildPlanMode(config);
String planJson = jobManager.getJobPlanJson(studioExecuteDTO.getStatement());
ObjectMapper mapper = new ObjectMapper();
ObjectNode objectNode =mapper.createObjectNode();
Expand Down
34 changes: 34 additions & 0 deletions dlink-admin/src/main/resources/json/openapi_executesql_perjob.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
/* http://127.0.0.1:8888/openapi/executeSql */
{
/* required-start */
"type":"yarn-per-job",
"statement":"CREATE TABLE Orders (\r\n order_number INT,\r\n price DECIMAL(32,2),\r\n order_time TIMESTAMP(3)\r\n) WITH (\r\n 'connector' = 'datagen',\r\n 'rows-per-second' = '1',\r\n 'fields.order_number.kind' = 'sequence',\r\n 'fields.order_number.start' = '1',\r\n 'fields.order_number.end' = '1000'\r\n);\r\nCREATE TABLE pt (\r\nordertotal INT,\r\nnumtotal INT\r\n) WITH (\r\n 'connector' = 'print'\r\n);\r\ninsert into pt select 1 as ordertotal ,sum(order_number)*2 as numtotal from Orders",
"gatewayConfig":{
"clusterConfig":{
"flinkConfigPath":"/opt/src/flink-1.13.3_conf/conf",
"flinkLibPath":"hdfs:///flink13/lib/flinklib",
"yarnConfigPath":"/usr/local/hadoop/hadoop-2.7.7/etc/hadoop"
},
"flinkConfig": {
"configuration":{
"parallelism.default": 1
}
}
},
/* required-end */
/* default-start */
"useResult":false,
"useStatementSet":false,
"fragment":false,
"maxRowNum":100,
"checkPoint":0,
"parallelism":1,
/* default-end */
/* custom-start */
"jobName":"openapitest",
"savePointPath":"hdfs://ns/flink/savepoints/savepoint-5f4b8c-4326844a6843",
"configuration":{
"table.exec.resource.default-parallelism":2
}
/* custom-end */
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
/* http://127.0.0.1:8888/openapi/executeSql */
{
/* required-start */
"type":"yarn-session",
"address":"10.1.51.24:8081",
"statement":"CREATE TABLE Orders (\r\n order_number INT,\r\n price DECIMAL(32,2),\r\n order_time TIMESTAMP(3)\r\n) WITH (\r\n 'connector' = 'datagen',\r\n 'rows-per-second' = '1',\r\n 'fields.order_number.kind' = 'sequence',\r\n 'fields.order_number.start' = '1',\r\n 'fields.order_number.end' = '1000'\r\n);\r\nCREATE TABLE pt (\r\nordertotal INT,\r\nnumtotal INT\r\n) WITH (\r\n 'connector' = 'print'\r\n);\r\ninsert into pt select 1 as ordertotal ,sum(order_number)*2 as numtotal from Orders",
/* required-end */
/* default-start */
"useResult":false,
"useStatementSet":false,
"fragment":false,
"maxRowNum":100,
"checkPoint":0,
"parallelism":1,
/* default-end */
/* custom-start */
"jobName":"openapitest",
"savePointPath":"hdfs://ns/flink/savepoints/savepoint-5f4b8c-4326844a6843",
"configuration":{
"table.exec.resource.default-parallelism":2
}
/* custom-end */
}
2 changes: 1 addition & 1 deletion dlink-core/src/main/java/com/dlink/job/Job.java
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ public class Job {
private Executor executor;
private boolean useGateway;

enum JobStatus {
public enum JobStatus {
INITIALIZE,
RUNNING,
SUCCESS,
Expand Down
27 changes: 25 additions & 2 deletions dlink-core/src/main/java/com/dlink/job/JobConfig.java
Original file line number Diff line number Diff line change
Expand Up @@ -22,6 +22,7 @@
@Setter
public class JobConfig {

// flink run mode
private String type;
private boolean useResult;
private boolean useSession;
Expand Down Expand Up @@ -50,8 +51,8 @@ public JobConfig() {

public JobConfig(String type, boolean useResult, boolean useSession, String session, boolean useRemote, Integer clusterId,
Integer clusterConfigurationId,Integer jarId, Integer taskId, String jobName, boolean useSqlFragment,
boolean useStatementSet, Integer maxRowNum, Integer checkpoint,
Integer parallelism, Integer savePointStrategyValue, String savePointPath, Map<String,String> config) {
boolean useStatementSet, Integer maxRowNum, Integer checkpoint, Integer parallelism,
Integer savePointStrategyValue, String savePointPath, Map<String,String> config) {
this.type = type;
this.useResult = useResult;
this.useSession = useSession;
Expand All @@ -72,6 +73,28 @@ public JobConfig(String type, boolean useResult, boolean useSession, String sess
this.config = config;
}

/**
 * Constructor for open-API submissions: targets an explicit cluster {@code address}
 * and an optional {@code gatewayConfig} instead of persisted cluster/task ids.
 *
 * @param savePointStrategyValue raw strategy code, translated via {@code SavePointStrategy.get}
 */
public JobConfig(String type, boolean useResult, boolean useSession, String session, boolean useRemote, String address,
String jobName, boolean useSqlFragment,
boolean useStatementSet, Integer maxRowNum, Integer checkpoint, Integer parallelism,
Integer savePointStrategyValue, String savePointPath, Map<String,String> config, GatewayConfig gatewayConfig) {
this.type = type;
this.useResult = useResult;
this.useSession = useSession;
this.session = session;
this.useRemote = useRemote;
this.address = address;
this.jobName = jobName;
this.useSqlFragment = useSqlFragment;
this.useStatementSet = useStatementSet;
this.maxRowNum = maxRowNum;
this.checkpoint = checkpoint;
this.parallelism = parallelism;
// Convert the raw integer code into the SavePointStrategy enum.
this.savePointStrategy = SavePointStrategy.get(savePointStrategyValue);
this.savePointPath = savePointPath;
this.config = config;
this.gatewayConfig = gatewayConfig;
}

public JobConfig(String type,boolean useResult, boolean useSession, String session, boolean useRemote, Integer clusterId) {
this.type = type;
this.useResult = useResult;
Expand Down
37 changes: 0 additions & 37 deletions dlink-core/src/main/java/com/dlink/utils/DateFormatUtil.java

This file was deleted.

Loading

0 comments on commit a5ba689

Please sign in to comment.