diff --git a/streampark-console/streampark-console-service/src/main/resources/db/data-h2.sql b/streampark-console/streampark-console-service/src/main/resources/db/data-h2.sql index 446daec1cd..b909b5df59 100644 --- a/streampark-console/streampark-console-service/src/main/resources/db/data-h2.sql +++ b/streampark-console/streampark-console-service/src/main/resources/db/data-h2.sql @@ -82,6 +82,9 @@ insert into `t_menu` values (110302, 110300, 'cluster edit', '/flink/edit_cluste insert into `t_menu` values (120100, 120000, 'spark.application', '/spark/app', 'spark/app/index', null, null, '0', 1, 2, now(), now()); insert into `t_menu` values (120200, 120000, 'spark.sparkHome', '/spark/home', 'spark/home/index', null, null, '0', 1, 3, now(), now()); +insert into `t_menu` values (120300, 120000, 'spark.createApplication', '/spark/app/create', 'spark/app/create', 'app:create', '', '0', 0, null, now(), now()); +insert into `t_menu` values (120400, 120000, 'spark.updateApplication', '/spark/app/edit', 'spark/app/edit', 'app:update', '', '0', 0, null, now(), now()); +insert into `t_menu` values (120500, 120000, 'spark.applicationDetail', '/spark/app/detail', 'spark/app/detail', 'app:detail', '', '0', 0, null, now(), now()); insert into `t_menu` values (130100, 130000, 'resource.project', '/resource/project', 'resource/project/View', null, 'github', '0', 1, 2, now(), now()); insert into `t_menu` values (130200, 130000, 'resource.variable', '/resource/variable', 'resource/variable/View', null, null, '0', 1, 3, now(), now()); diff --git a/streampark-console/streampark-console-webapp/src/api/spark/app.ts b/streampark-console/streampark-console-webapp/src/api/spark/app.ts index 76b57a746a..f4f639effb 100644 --- a/streampark-console/streampark-console-webapp/src/api/spark/app.ts +++ b/streampark-console/streampark-console-webapp/src/api/spark/app.ts @@ -119,7 +119,7 @@ export function fetchSparkYarn() { /** * check spark name */ -export function fetchCheckSparkName(data: { id?: string; jobName: string }) { +export function fetchCheckSparkName(data: { id?: string; appName: string }) { return defHttp.post({ url: `${apiPrefix}/check/name`, data }); } diff --git a/streampark-console/streampark-console-webapp/src/api/spark/app.type.ts b/streampark-console/streampark-console-webapp/src/api/spark/app.type.ts index 1bdd20e23c..71ca0ac474 100644 --- a/streampark-console/streampark-console-webapp/src/api/spark/app.type.ts +++ b/streampark-console/streampark-console-webapp/src/api/spark/app.type.ts @@ -14,138 +14,99 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ + +import type { AppStateEnum, OptionStateEnum } from '/@/enums/sparkEnum'; + // dashboard export interface DashboardResponse { - totalTM: number; - task: Task; - availableSlot: number; - totalSlot: number; - runningJob: number; - tmMemory: number; - jmMemory: number; + numTasks: number; + numStages: number; + usedMemory: number; + usedVCores: number; + numCompletedTasks: number; + runningApplication: number; + numCompletedStages: number; } -interface Task { - total: number; - created: number; - scheduled: number; - deploying: number; - running: number; - finished: number; - canceling: number; - canceled: number; - failed: number; - reconciling: number; -} // The list of data export interface AppListResponse { total: string; records: SparkApplication[]; } + export interface SparkApplication { + createTime?: string; + modifyTime?: string; id?: string; teamId?: string; jobType?: number; appType?: number; - versionId?: number; + versionId?: string; appName?: string; executionMode?: number; resourceFrom?: number; - module?: any; + projectId?: number; + module?: string; mainClass?: string; jar?: string; - jarCheckSum?: string; + jarCheckSum?: number; appProperties?: string; appArgs?: string; appId?: string; - yarnQueue?: any; - - projectId?: any; - tags?: any; - userId?: string; - jobName?: string; - jobId?: string; - clusterId?: string; - flinkImage?: string; + yarnQueue?: string; + yarnQueueName?: string; + yarnQueueLabel?: string; + k8sMasterUrl?: string; + k8sContainerImage?: string; + k8sImagePullPolicy?: number; + k8sServiceAccount?: number; k8sNamespace?: string; - state?: number; - release?: number; - build?: boolean; + hadoopUser?: string; restartSize?: number; restartCount?: number; - optionState?: number; - alertId?: any; - args?: string; + state?: AppStateEnum; options?: string; - hotParams?: string; - resolveOrder?: number; - dynamicProperties?: string; + optionState?: OptionStateEnum; + optionTime?: string; + userId?: string; + description?: string; tracking?: number; - + release?: number; + build?: boolean; + alertId?: number; startTime?: string; endTime?: string; - duration?: string; - cpMaxFailureInterval?: any; - cpFailureRateInterval?: any; - cpFailureAction?: any; - totalTM?: any; - totalSlot?: any; - availableSlot?: any; - jmMemory?: number; - tmMemory?: number; - totalTask?: number; - flinkClusterId?: any; - description?: string; - createTime?: string; - optionTime?: string; - modifyTime?: string; - k8sRestExposedType?: any; - k8sPodTemplate?: any; - k8sJmPodTemplate?: any; - k8sTmPodTemplate?: any; - ingressTemplate?: any; - defaultModeIngress?: any; - k8sHadoopIntegration?: boolean; - overview?: any; - teamResource?: any; - dependency?: any; - sqlId?: any; - flinkSql?: any; - stateArray?: any; - jobTypeArray?: any; + duration?: number; + tags?: string; + driverCores?: string; + driverMemory?: string; + executorCores?: string; + executorMemory?: string; + executorMaxNums?: string; + numTasks?: number; + numCompletedTasks?: number; + numStages?: number; + numCompletedStages?: number; + usedMemory?: number; + usedVCores?: number; + teamResource?: number; + dependency?: string; + sqlId?: number; + sparkSql?: string; backUp?: boolean; restart?: boolean; - userName?: string; - nickName?: string; - config?: any; - configId?: any; - flinkVersion?: string; - confPath?: any; - format?: any; - savepointPath?: any; - restoreOrTriggerSavepoint?: boolean; - drain?: boolean; - nativeFormat?: boolean; - allowNonRestored?: boolean; - socketId?: any; - projectName?: any; - 
createTimeFrom?: any;
-  createTimeTo?: any;
-  backUpDescription?: any;
-  teamIdList?: any;
-  teamName?: string;
-  flinkRestUrl?: any;
+  config?: string;
+  configId?: number;
+  sparkVersion?: string;
+  confPath?: string;
+  format?: string;
+  backUpDescription?: string;
+  sparkRestUrl?: string;
   buildStatus?: number;
   appControl?: AppControl;
-  fsOperator?: any;
-  workspace?: any;
-  k8sPodTemplates?: {
-    empty?: boolean;
-  };
+  canBeStart?: boolean;
   streamParkJob?: boolean;
-  hadoopUser?: string;
 }
-
 interface AppControl {
   allowStart: boolean;
   allowStop: boolean;
diff --git a/streampark-console/streampark-console-webapp/src/api/spark/build.ts b/streampark-console/streampark-console-webapp/src/api/spark/build.ts
new file mode 100644
index 0000000000..ff0889b0b3
--- /dev/null
+++ b/streampark-console/streampark-console-webapp/src/api/spark/build.ts
@@ -0,0 +1,48 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+import type { Result } from '/#/axios';
+import { defHttp } from '/@/utils/http/axios';
+
+const apiPrefix = `/spark/pipe`;
+
+/**
+ * Release the application build pipeline.
+ *
+ * @param appId application id
+ * @param forceBuild whether to force-start the pipeline
+ * @return whether the pipeline was successfully started
+ */
+export function fetchBuildSparkApp(data: { appId: string; forceBuild: boolean }) {
+  return defHttp.post<Result<boolean>>(
+    { url: `${apiPrefix}/build`, data },
+    { isTransformResponse: false },
+  );
+}
+
+/**
+ * Get application build pipeline progress detail.
+ *
+ * @param appId application id
+ * @return "pipeline" -> pipeline details, "docker" -> docker resolved snapshot
+ */
+export function fetchBuildProgressDetail(appId: number) {
+  return defHttp.post<{
+    pipeline: any;
+    docker: any;
+  }>({ url: `${apiPrefix}/detail`, data: { appId } });
+}
diff --git a/streampark-console/streampark-console-webapp/src/api/spark/conf.ts b/streampark-console/streampark-console-webapp/src/api/spark/conf.ts
index e4e0ccb595..6df880b3ce 100644
--- a/streampark-console/streampark-console-webapp/src/api/spark/conf.ts
+++ b/streampark-console/streampark-console-webapp/src/api/spark/conf.ts
@@ -14,18 +14,45 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
+import type { SparkApplication } from './app.type';
+import type { SparkApplicationConfig } from './conf.type';
 import { defHttp } from '/@/utils/http/axios';

 const apiPrefix = '/flink/conf';

 export function fetchGetSparkConf(data: { id: string }) {
-  return defHttp.post({ url: `${apiPrefix}/get`, data });
+  return defHttp.post<SparkApplicationConfig>({ url: `${apiPrefix}/get`, data });
 }
-export function handleSparkConfTemplate() {
+
+export function fetchSparkConfTemplate() {
   return defHttp.post({
     url: `${apiPrefix}/template`,
   });
 }
+
+export function fetchSparkConfList(data: Recordable) {
+  return defHttp.post<{
+    total: number;
+    records: SparkApplicationConfig[];
+  }>({
+    url: `${apiPrefix}/list`,
+    data,
+  });
+}
+
+export function fetchSparkConfHistory(data: SparkApplication) {
+  return defHttp.post({
+    url: `${apiPrefix}/history`,
+    data,
+  });
+}
+export function fetchSparkConfRemove(data: { id: number }) {
+  return defHttp.post({
+    url: `${apiPrefix}/delete`,
+    data,
+  });
+}
+
 export function fetchSysHadoopConf() {
   return defHttp.post({
     url: `${apiPrefix}/sysHadoopConf`,
diff --git a/streampark-console/streampark-console-webapp/src/api/spark/conf.type.ts b/streampark-console/streampark-console-webapp/src/api/spark/conf.type.ts
new file mode 100644
index 0000000000..542f8cbf78
--- /dev/null
+++ b/streampark-console/streampark-console-webapp/src/api/spark/conf.type.ts
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+export interface SparkApplicationConfig {
+  id?: number;
+  appId?: string;
+  /**
+   * 1)yaml
+ * 2)prop
+ * 3)hocon + */ + format?: 1 | 2 | 3; + content?: string; + version?: number; + latest?: boolean; + createTime: string; + effective: boolean; +} diff --git a/streampark-console/streampark-console-webapp/src/api/spark/sql.ts b/streampark-console/streampark-console-webapp/src/api/spark/sql.ts index 8d3ada3442..cee95f2b22 100644 --- a/streampark-console/streampark-console-webapp/src/api/spark/sql.ts +++ b/streampark-console/streampark-console-webapp/src/api/spark/sql.ts @@ -15,16 +15,46 @@ * limitations under the License. */ +import type { SparkSql } from './sql.type'; import { defHttp } from '/@/utils/http/axios'; -const apiPrefix = '/flink/conf'; +const apiPrefix = '/spark/sql'; export function fetchSparkSqlVerify(data: Recordable) { return defHttp.post({ url: `${apiPrefix}/verify`, data }, { isTransformResponse: false }); } +export function fetchSparkSqlList(data: Recordable) { + return defHttp.post<{ + total: number; + records: SparkSql[]; + }>({ + url: `${apiPrefix}/list`, + data, + }); +} + +export function fetchSparkSqlRemove(data: SparkSql) { + return defHttp.post({ + url: `${apiPrefix}/delete`, + data, + }); +} + export function fetchSparkSql(data: Recordable) { return defHttp.post({ url: `${apiPrefix}/get`, data, }); } +export function fetchSparkSqlHistory(data: Recordable) { + return defHttp.post({ + url: `${apiPrefix}/history`, + data, + }); +} +export function fetchSparkSqlComplete(data: Recordable) { + return defHttp.post({ + url: `${apiPrefix}/sqlComplete`, + data, + }); +} diff --git a/streampark-console/streampark-console-webapp/src/api/spark/sql.type.ts b/streampark-console/streampark-console-webapp/src/api/spark/sql.type.ts new file mode 100644 index 0000000000..0a45780bb4 --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/api/spark/sql.type.ts @@ -0,0 +1,34 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +export interface SparkSql { + id?: number; + appId?: string; + sql?: string; + teamResource?: string; + dependency?: string; + version?: number; + /** + * candidate number: 0: none candidate
+ * 1: newly added record becomes a candidate
+ * 2: specific history becomes a candidate
+   */
+  candidate?: number;
+  createTime?: string;
+  effective?: boolean;
+  sqlDifference?: boolean;
+  dependencyDifference?: boolean;
+}
diff --git a/streampark-console/streampark-console-webapp/src/enums/sparkEnum.ts b/streampark-console/streampark-console-webapp/src/enums/sparkEnum.ts
index a1d4fbd20e..cf40d222ef 100644
--- a/streampark-console/streampark-console-webapp/src/enums/sparkEnum.ts
+++ b/streampark-console/streampark-console-webapp/src/enums/sparkEnum.ts
@@ -11,6 +11,16 @@ export enum JobTypeEnum {
   SQL = 2,
   PYSPARK = 3,
 }
+export enum OptionStateEnum {
+  /** Application with no operation currently in progress. */
+  NONE = 0,
+  /** Application which is currently releasing. */
+  RELEASING = 1,
+  /** Application which is currently starting. */
+  STARTING = 2,
+  /** Application which is currently stopping. */
+  STOPPING = 3,
+}

 /* ExecutionMode */
 export enum ExecModeEnum {
@@ -38,3 +48,53 @@ export enum AppExistsStateEnum {
   /** job name invalid because of special utf-8 character */
   INVALID,
 }
+
+export enum AppStateEnum {
+  /** Newly added job, persisted to the database. */
+  ADDED = 0,
+
+  /** (From Yarn) Application which was just created. */
+  NEW = 1,
+
+  /** (From Yarn) Application which is being saved. */
+  NEW_SAVING = 2,
+
+  /** Application which is currently starting. */
+  STARTING = 3,
+
+  /** (From Yarn) Application which has been submitted. */
+  SUBMITTED = 4,
+
+  /** (From Yarn) Application which has been accepted by the scheduler. */
+  ACCEPTED = 5,
+
+  /** (From Yarn) Application which is currently running. */
+  RUNNING = 6,
+
+  /** (From Yarn) Application which finished successfully. */
+  FINISHED = 7,
+
+  /** (From Yarn) Application which failed. */
+  FAILED = 8,
+
+  /** Loss of mapping. */
+  LOST = 9,
+
+  /** Mapping. */
+  MAPPING = 10,
+
+  /** Other statuses. */
+  OTHER = 11,
+
+  /** Has been rolled back. */
+  REVOKED = 12,
+
+  /** Spark job is being cancelled (killed) by StreamPark. */
+  STOPPING = 13,
+
+  /** Job SUCCEEDED on Yarn. */
+  SUCCEEDED = 14,
+
+  /** Killed on Yarn. */
+  KILLED = -9,
+}
diff --git a/streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts b/streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts
index ee876c06db..0238263aaf 100644
--- a/streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts
+++ b/streampark-console/streampark-console-webapp/src/locales/lang/en/menu.ts
@@ -36,7 +36,10 @@ export default {
   },
   spark: {
     application: 'Applications',
-    sparkHome: 'Spark Home'
+    sparkHome: 'Spark Home',
+    createApplication: 'Create Application',
+    updateApplication: 'Update Application',
+    applicationDetail: 'Application Detail',
   },
   setting: {
     menu: 'Settings',
diff --git a/streampark-console/streampark-console-webapp/src/locales/lang/en/spark/app.ts b/streampark-console/streampark-console-webapp/src/locales/lang/en/spark/app.ts
new file mode 100644
index 0000000000..762d9d0f2e
--- /dev/null
+++ b/streampark-console/streampark-console-webapp/src/locales/lang/en/spark/app.ts
@@ -0,0 +1,315 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.
+ * You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+export default {
+  dashboard: {
+    runningTasks: 'Number of Running Applications',
+    totalTask: 'Total Tasks',
+    totalStage: 'Total Stages',
+    completedTask: 'Total Completed Tasks',
+    completedStage: 'Total Completed Stages',
+    memory: 'Total Memory Used',
+    VCore: 'Total VCores Used',
+  },
+  runState: {
+    added: 'New Applications',
+    saving: 'Saving',
+    created: 'Creating',
+    starting: 'Starting',
+    submitted: 'Submitted',
+    accept: 'Accepted',
+    running: 'Running',
+    finished: 'Completed',
+    failed: 'Job Failed',
+    lost: 'Job Lost',
+    mapping: 'Mapping',
+    other: 'Other',
+    revoked: 'Revoked',
+    stopping: 'Stopping',
+    killed: 'Terminated',
+  },
+  releaseState: {
+    releasing: 'Releasing',
+    failed: 'Release Failed',
+    success: 'Release Successful',
+    waiting: 'Waiting to Release',
+    pending: 'Pending Rollback',
+  },
+  id: 'Application ID',
+  appName: 'Application Name',
+  searchName: 'Application Name',
+  tags: 'Application Tags',
+  owner: 'Creator',
+  sparkVersion: 'Spark Version',
+  duration: 'Runtime',
+  modifiedTime: 'Modification Time',
+  runStatus: 'Run Status',
+  releaseBuild: 'Release Status',
+  jobType: 'Job Type',
+  developmentMode: 'Development Mode',
+  executionMode: 'Execution Mode',
+  historyVersion: 'History Version',
+  resource: 'Resource',
+  resourcePlaceHolder: 'Select Resource',
+  selectAppPlaceHolder: 'Select Application',
+  dependency: 'Job Dependency',
+  appConf: 'Application Configuration',
+  resolveOrder: 'Class Loading Order',
+  parallelism: 'Parallelism',
+  restartSize: '(On Failure) Restart Count',
+  faultAlertTemplate: 'Alert Template',
+  checkPointFailureOptions: 'Checkpoint Alert Strategy',
+  totalMemoryOptions: 'Total Memory',
+  jmMemoryOptions: 'JM Memory',
+  tmMemoryOptions: 'TM Memory',
+  podTemplate: 'Kubernetes Pod Template',
+  flinkCluster: 'Flink Cluster',
+  yarnQueue: 'Yarn Queue',
+  mavenPom: 'Maven POM',
+  uploadJar: 'Upload Dependency Jar File',
+  kubernetesNamespace: 'Kubernetes Namespace',
+  kubernetesClusterId: 'Kubernetes Cluster ID',
+  flinkBaseDockerImage: 'Flink Base Docker Image',
+  restServiceExposedType: 'Rest-Service Exposed Type',
+  resourceFrom: 'Resource Source',
+  uploadJobJar: 'Upload Jar File',
+  selectJobJar: 'Select Jar File',
+  mainClass: 'Main Entry Class',
+  project: 'Project',
+  module: 'Module',
+  appType: 'Job Type',
+  programArgs: 'Program Arguments',
+  programJar: 'Program Jar File',
+  dynamicProperties: 'Dynamic Parameters',
+  hadoopConfigTitle: 'System Hadoop',
+  dragUploadTitle: 'Click or drag a jar or py file to this area to upload',
+  dragUploadTip: 'Supports single upload. You can upload a local jar here to support the current job',
+  dependencyError: 'Please check the Spark version first.',
+  status: 'Run Status',
+  startTime: 'Start Time',
+  endTime: 'End Time',
+  hadoopUser: 'Hadoop User',
+  restoreModeTip:
+    'Flink 1.15 and later supports restore mode; generally, this parameter does not need to be set',
+  success: 'Submission Successful',
+  appidCheck: 'appId cannot be empty!',
+  release: {
+    releaseTitle: 'This job is currently starting.',
+    releaseDesc: 'Are you sure you want to force another build?',
+    releaseFail: 'Job release failed',
+    releasing: 'Current job is being released',
+  },
+
+  clusterState: {
+    created: 'New',
+    started: 'Running',
+    canceled: 'Stopped',
+    lost: 'Lost',
+  },
+  detail: {
+    detailTitle: 'Job Details',
+    flinkWebUi: 'Spark Web UI',
+    compareConfig: 'Compare Configuration',
+    compareSparkSql: 'Compare Spark SQL',
+    candidate: 'Candidate',
+    compare: 'Compare',
+    compareSelectTips: 'Please select the target version',
+    resetApi: 'Open API',
+    copyCurl: 'Copy CURL',
+    apiTitle: 'API Details',
+    resetApiToolTip: 'OPEN API, third-party systems can easily integrate with StreamPark',
+    copyStartcURL: 'Job Start',
+    copyCancelcURL: 'Job Stop',
+    apiDocCenter: 'API Documentation',
+    nullAccessToken: 'Access token does not exist, please contact the administrator to add one',
+    invalidAccessToken: 'Access token is invalid, please contact the administrator',
+    invalidTokenUser: 'Current user has been locked, please contact the administrator',
+    detailTab: {
+      detailTabName: {
+        option: 'Options',
+        configuration: 'Configuration',
+        sparkSql: 'Spark SQL',
+        backup: 'Backup',
+        operationLog: 'Operation Log',
+      },
+      configDetail: 'View Configuration Details',
+      sqlDetail: 'View SQL Details',
+      confDeleteTitle: 'Are you sure you want to delete this record?',
+      sqlDeleteTitle: 'Are you sure you want to delete this SQL?',
+      confBackupTitle: 'Are you sure you want to delete this backup?',
+      operationLogDeleteTitle: 'Are you sure you want to delete this operation record?',
+      copyPath: 'Copy Path',
+      pointDeleteTitle: 'Are you sure you want to delete?',
+      copySuccess: 'Successfully copied to clipboard',
+      copyFail: 'Copy failed',
+      check: 'CheckPoint',
+      save: 'SavePoint',
+      exception: 'View Exception',
+    },
+    different: {
+      original: 'Original Version',
+      target: 'Target Version',
+    },
+    exceptionModal: {
+      title: 'Exception Information',
+    },
+  },
+  view: {
+    buildTitle: 'Job Start Progress',
+    stepTitle: 'Step Details',
+    errorLog: 'Error Log',
+    errorSummary: 'Error Summary',
+    errorStack: 'Error Stack',
+    logTitle: 'Start Log: Job Name [ {0} ]',
+    refreshTime: 'Last Refresh Time',
+    refresh: 'Refresh',
+    start: 'Start Job',
+    stop: 'Stop Job',
+    recheck: 'The associated project has been updated; this job needs to be re-released',
+    changed: 'Job has been updated',
+    ignoreRestored: 'Ignore Failure',
+    ignoreRestoredTip:
+      'Skip errors when the state restore fails, allowing the job to continue running; same as the parameter -allowNonRestoredState (-n)',
+  },
+  pod: {
+    choice: 'Select',
+    init: 'Initialization Content',
+    host: 'Host Alias',
+  },
+  sparkSql: {
+    preview: 'Content Preview',
+    verify: 'Verify',
+    format: 'Format',
+    fullScreen: 'Full Screen',
+    exit: 'Exit',
+    successful: 'Verification Successful',
+    compare: 'Compare',
+    version: 'Version',
+    compareSparkSQL: 'Select Compare Version',
+    compareVersionPlaceholder: 'Please select the SQL version to compare',
+    effectiveVersion: 'Current Effective Version',
+    candidateVersion: 'Candidate Compare Version',
+  },
+  operation: {
+    edit: 'Edit Job',
+    release: 'Release Job',
+    releaseDetail: 'Release Details',
+    start: 'Start Job',
+    cancel: 'Cancel Job',
+    detail: 'Job Details',
+    startLog: 'View Spark Start Log',
+    abort: 'Terminate Job',
+    copy: 'Copy Job',
+    remapping: 'Remap Job',
+    deleteTip: 'Are you sure you want to delete this job?',
+    canceling: 'Current job is stopping',
+    starting: 'Current job is starting',
+  },

+  runStatusOptions: {
+    added: 'New',
+    starting: 'Starting',
+    running: 'Running',
+    failed: 'Failed',
+    canceled: 'Canceled',
+    finished: 'Completed',
+    suspended: 'Suspended',
+    lost: 'Lost',
+    silent: 'Silent',
+    terminated: 'Terminated',
+  },

+  addAppTips: {
+    developmentModePlaceholder: 'Please select development mode',
+    developmentModeIsRequiredMessage: 'Development mode is required',
+    executionModePlaceholder: 'Please select execution mode',
+    executionModeIsRequiredMessage: 'Execution mode is required',
+    hadoopEnvInitMessage: 'Hadoop environment check failed, please check the configuration',
+    resourceFromMessage: 'Resource source is required',
+    mainClassPlaceholder: 'Please enter the main entry class',
+    mainClassIsRequiredMessage: 'Main entry class is required',
+    projectPlaceholder: 'Please select a project',
+    projectIsRequiredMessage: 'Project is required',
+    projectModulePlaceholder: 'Please select the project module',
+    appTypePlaceholder: 'Please select job type',
+    appTypeIsRequiredMessage: 'Job type is required',
+    programJarIsRequiredMessage: 'Program jar file is required',
+    useSysHadoopConf: 'Use system Hadoop configuration',
+    flinkVersionIsRequiredMessage: 'Spark version is required',
+    appNamePlaceholder: 'Please enter job name',
+    appNameIsRequiredMessage: 'Job name is required',
+    appNameNotUniqueMessage: 'Job name must be unique; the entered job name already exists',
+    appNameExistsInYarnMessage:
+      'Job name already exists in the YARN cluster and cannot be duplicated. Please check',
+    appNameExistsInK8sMessage:
+      'The job name already exists in the Kubernetes cluster and cannot be duplicated. Please check',
+    appNameValid: 'Job name is invalid',
+    appNameRole: 'Job name must follow these rules:',
+    K8sSessionClusterIdRole: 'Kubernetes Cluster ID must follow these rules:',
+    appNameK8sClusterIdRole:
+      'When the deployment mode is Kubernetes Application mode, the job name will be used as the Kubernetes cluster ID, so the job name must follow these rules:',
+    appNameK8sClusterIdRoleLength: 'Should not exceed 45 characters',
+    appNameK8sClusterIdRoleRegexp:
+      'Can only consist of lowercase letters, numbers, and "-", and must match the regex [a-z]([-a-z0-9]*[a-z0-9])',
+    appNameRoleContent:
+      'Characters must be (Chinese or English or "-" or "_") and cannot have two consecutive spaces',
+    appNameNotValid:
+      'Job name is invalid. Characters must be (Chinese or English or "-" or "_") and cannot have two consecutive spaces. Please check',
+    flinkClusterIsRequiredMessage: 'Flink cluster is required',
+    sparkSqlIsRequiredMessage: 'Spark SQL is required',
+    tagsPlaceholder: 'Please enter tags; if more than one, separate them with commas (,)',
+    parallelismPlaceholder: 'Parallelism for running the program',
+    slotsOfPerTaskManagerPlaceholder: 'Number of slots per TaskManager',
+    restartSizePlaceholder: 'Maximum restart count',
+    alertTemplatePlaceholder: 'Alert Template',
+    totalMemoryOptionsPlaceholder: 'Please select the resource parameters to set',
+    tmPlaceholder: 'Please select the resource parameters to set',
+    yarnQueuePlaceholder: 'Please enter the YARN queue label name',
+    descriptionPlaceholder: 'Please enter a description for this job',
+    kubernetesNamespacePlaceholder: 'Please enter Kubernetes namespace, e.g., default',
+    kubernetesClusterIdPlaceholder: 'Please select Kubernetes Cluster ID',
+    kubernetesClusterIdRequire:
+      'Lowercase letters, numbers, and "-"; must start and end with an alphanumeric character and not exceed 45 characters',
+    kubernetesClusterIdIsRequiredMessage: 'Kubernetes Cluster ID is required',
+    flinkImagePlaceholder:
+      'Please enter the tag of the Flink base Docker image, e.g., flink:1.13.0-scala_2.11-java8',
+    flinkImageIsRequiredMessage: 'Flink base Docker image is required',
+    k8sRestExposedTypePlaceholder: 'Kubernetes Rest-Service Exposed Type',
+    hadoopXmlConfigFileTips:
+      'Automatically copy configuration files from system environment parameters',
+    dynamicPropertiesPlaceholder:
+      '$key=$value; if there are multiple parameters, you can input them on separate lines (-D )',
+    sqlCheck: 'SQL Check Error',
+  },

+  noteInfo: {
+    note: 'Note',
+    minute: 'Minute',
+    count: 'Count',
+    officialDoc: 'Official Documentation',
+    checkPointFailureOptions: 'Checkpoint Failure Strategy',
+    checkpointFailureRateInterval: 'Checkpoint Failure Interval',
+    maxFailuresPerInterval: 'Maximum Failure Count',
+    checkPointFailureNote:
+      'Checkpoint failure handling strategy, e.g., within 5 minutes (checkpoint failure interval), if the number of checkpoint failures exceeds 10 (maximum checkpoint failure count), it will trigger an operation (send an alert or restart the job)',
+    totalMemoryNote:
+      'It is not recommended to configure both "total process memory" and "total Flink memory" at the same time. Due to potential memory configuration conflicts, it may lead to deployment failures. Configuring other memory components also requires caution, as it may produce further configuration conflicts; the simplest approach is to set "total process memory"',
+    dynamicProperties:
+      'Dynamic Properties: Works the same as -D$property=$value in command-line mode, allowing multiple parameters to be specified. For more configurable parameters, please refer to the documentation',
+    yarnQueue:
+      'This input not only supports quickly setting "yarn.application.queue" but also supports setting "yarn.application.node-label". For example, entering "queue1" sets the value of "yarn.application.queue" to "queue1", while entering "queue1{\'@\'}label1,label2" sets "yarn.application.queue" to "queue1" and "yarn.application.node-label" to "label1,label2". Queue and label are separated by {\'@\'}.',
+  },
+};
diff --git a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/menu.ts b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/menu.ts
index fec0d8ac26..50e933e5fc 100644
--- a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/menu.ts
+++ b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/menu.ts
@@ -36,7 +36,10 @@ export default {
   },
   spark: {
     application: '作业管理',
-    flinkHome: 'Spark 版本'
+    sparkHome: 'Spark 版本',
+    createApplication: '创建应用',
+    updateApplication: '更新应用',
+    applicationDetail: '应用详情',
   },
   setting: {
     menu: '设置中心',
diff --git a/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/spark/app.ts b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/spark/app.ts
new file mode 100644
index 0000000000..6d69f81763
--- /dev/null
+++ b/streampark-console/streampark-console-webapp/src/locales/lang/zh-CN/spark/app.ts
@@ -0,0 +1,307 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * https://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+export default {
+  dashboard: {
+    runningTasks: '运行中的应用数',
+    totalTask: 'Task 总数',
+    totalStage: 'Stage 总数',
+    completedTask: '完成的 Task 总数',
+    completedStage: '完成的 Stage 总数',
+    memory: '占用的总内存',
+    VCore: '占用的总VCores',
+  },
+  runState: {
+    added: '新增应用',
+    saving: '保存中',
+    created: '创建中',
+    starting: '启动中',
+    submitted: '已提交',
+    accept: '已接受',
+    running: '运行中',
+    finished: '已完成',
+    failed: '作业失败',
+    lost: '作业失联',
+    mapping: '映射中',
+    other: '其他',
+    revoked: '回滚',
+    stopping: '停止中',
+    killed: '终止',
+  },
+  releaseState: {
+    releasing: '发布中',
+    failed: '发布失败',
+    success: '发布成功',
+    waiting: '待发布',
+    pending: '待回滚',
+  },
+  id: '应用ID',
+  appName: '应用名称',
+  searchName: '应用名称',
+  tags: '应用标签',
+  owner: '创建者',
+  sparkVersion: 'Spark版本',
+  duration: '运行时长',
+  modifiedTime: '修改时间',
+  runStatus: '运行状态',
+  releaseBuild: '发布状态',
+  jobType: '作业类型',
+  developmentMode: '开发模式',
+  executionMode: '执行模式',
+  historyVersion: '历史版本',
+  resource: '资源',
+  resourcePlaceHolder: '请选择资源',
+  selectAppPlaceHolder: '选择应用',
+  dependency: '作业依赖',
+  appConf: '应用配置',
+  resolveOrder: '类加载顺序',
+  parallelism: '并行度',
+  restartSize: '(失败后)重启次数',
+  faultAlertTemplate: '告警模板',
+  checkPointFailureOptions: 'Checkpoint告警策略',
+  totalMemoryOptions: '总内存',
+  jmMemoryOptions: 'JM内存',
+  tmMemoryOptions: 'TM内存',
+  podTemplate: 'Kubernetes Pod 模板',
+  flinkCluster: 'Flink集群',
+  yarnQueue: 'Yarn队列',
+  mavenPom: 'maven pom',
+  uploadJar: '上传依赖Jar文件',
+  kubernetesNamespace: 'Kubernetes 命名空间',
+  kubernetesClusterId: 'Kubernetes ClusterId',
+  flinkBaseDockerImage: 'Flink 基础docker镜像',
+  restServiceExposedType: 'Rest-Service Exposed Type',
+  resourceFrom: '资源来源',
+  uploadJobJar: '上传jar文件',
+  selectJobJar: '选择jar文件',
+  mainClass: '程序入口类',
+  project: '项目',
+  module: '模块',
+  appType: '作业类型',
+  programArgs: '程序参数',
+  programJar: '程序Jar文件',
+  dynamicProperties: '动态参数',
+  hadoopConfigTitle: '系统 Hadoop',
+  dragUploadTitle: '单击或拖动 jar或py 到此区域以上传',
+  dragUploadTip: '支持单次上传。您可以在此处上传本地 jar 以支持当前作业',
+  dependencyError: '请先检查 Spark 版本',
+  status: '运行状态',
+  startTime: '启动时间',
+  endTime: '结束时间',
+  hadoopUser: 'Hadoop User',
+  restoreModeTip: 'flink 1.15开始支持restore模式,一般情况下不用设置该参数',
+  success: '提交成功',
+  appidCheck: 'appId 不能为空!',
+  release: {
+    releaseTitle: '该作业正在启动中.',
+    releaseDesc: '您确定要强制进行另一次构建吗',
+    releaseFail: '发布作业失败',
+    releasing: '当前作业正在发布中',
+  },

+  clusterState: {
+    created: '新增',
+    started: '运行中',
+    canceled: '停止',
+    lost: '失联',
+  },
+  detail: {
+    detailTitle: '应用详情',
+    flinkWebUi: 'Spark Web UI',
+    compareConfig: '比较配置',
+    compareSparkSql: '比较 Spark SQL',
+    candidate: '候选',
+    compare: '比较',
+    compareSelectTips: '请选择目标版本',
+    resetApi: 'Open API',
+    copyCurl: '复制 CURL',
+    apiTitle: 'Api 详情',
+    resetApiToolTip: 'OPEN API,第三方系统可轻松对接 StreamPark',
+    copyStartcURL: '作业启动',
+    copyCancelcURL: '作业停止',
+    apiDocCenter: 'Api文档',
+    nullAccessToken: '访问令牌不存在,请联系管理员添加',
+    invalidAccessToken: '访问令牌无效,请联系管理员',
+    invalidTokenUser: '当前用户已被锁定,请联系管理员',
+    detailTab: {
+      detailTabName: {
+        option: '选项',
+        configuration: '配置',
+        sparkSql: 'Spark SQL',
+        backup: '备份',
+        operationLog: '操作日志',
+      },
+      configDetail: '查看配置详情',
+      sqlDetail: '查看 SQL 详情',
+      confDeleteTitle: '您确定要删除此记录吗?',
+      sqlDeleteTitle: '您确定要删除此 SQL 吗?',
+      confBackupTitle: '您确定要删除该备份吗?',
+      operationLogDeleteTitle: '您确定要删除该操作记录吗?',
+      copyPath: '复制路径',
+      pointDeleteTitle: '您确定要删除?',
+      copySuccess: '已成功复制到剪贴板',
+      copyFail: '复制失败',
+      check: 'CheckPoint',
+      save: 'SavePoint',
+      exception: '查看异常',
+    },
+    different: {
+      original: '原始版本',
+      target: '目标版本',
+    },
+    exceptionModal: {
+      title: '异常信息',
+    },
+  },
+  view: {
+    buildTitle: '作业启动进度',
+    stepTitle: '步骤详情',
+    errorLog: '错误日志',
+    errorSummary: '错误摘要',
+    errorStack: '错误堆栈',
+    logTitle: '启动日志 : 作业名称 [ {0} ]',
+    refreshTime: '上次刷新时间',
+    refresh: '刷新',
+    start: '启动作业',
+    stop: '停止作业',
+    recheck: '关联的项目已更新,需要重新发布此作业',
+    changed: '作业已更新',
+    ignoreRestored: '忽略失败',
+    ignoreRestoredTip: '当状态恢复失败时跳过错误,作业继续运行, 同参数:-allowNonRestoredState(-n)',
+  },
+  pod: {
+    choice: '选择',
+    init: '初始化内容',
+    host: 'Host别名',
+  },
+  sparkSql: {
+    preview: '内容预览',
+    verify: '验证',
+    format: '格式化',
+    fullScreen: '全屏',
+    exit: '退出',
+    successful: '验证成功',
+    compare: '比较',
+    version: '版本',
+    compareSparkSQL: '选择比对版本',
+    compareVersionPlaceholder: '请选择要比较的sql版本',
+    effectiveVersion: '当前生效版本',
+    candidateVersion: '候选比对版本',
+  },
+  operation: {
+    edit: '编辑作业',
+    release: '发布作业',
+    releaseDetail: '发布详情',
+    start: '启动作业',
+    cancel: '取消作业',
+    detail: '作业详情',
+    startLog: '查看 Spark 启动日志',
+    abort: '终止作业',
+    copy: '复制作业',
+    remapping: '重新映射作业',
+    deleteTip: '你确定要删除这个作业?',
+    canceling: '当前作业正在停止中',
+    starting: '当前作业正在启动中',
+  },

+  runStatusOptions: {
+    added: '新增',
+    starting: '启动中',
+    running: '运行中',
+    failed: '失败',
+    canceled: '已取消',
+    finished: '已完成',
+    suspended: '暂停',
+    lost: '丢失',
+    silent: '静默',
+    terminated: '终止',
+  },

+  addAppTips: {
+    developmentModePlaceholder: '请选择开发模式',
+    developmentModeIsRequiredMessage: '开发模式必填',
+    executionModePlaceholder: '请选择执行模式',
+    executionModeIsRequiredMessage: '执行模式必填',
+    hadoopEnvInitMessage: 'hadoop环境检查失败, 请检查配置',
+    resourceFromMessage: '资源来源必填',
+    mainClassPlaceholder: '请输入程序入口类',
+    mainClassIsRequiredMessage: '程序入口类必填',
+    projectPlaceholder: '请选择项目',
+    projectIsRequiredMessage: '项目必填',
+    projectModulePlaceholder: '请选择项目的模块',
+    appTypePlaceholder: '请选择作业类型',
+    appTypeIsRequiredMessage: '作业类型必填',
+    programJarIsRequiredMessage: '程序jar文件必填',
+    useSysHadoopConf: '使用系统hadoop配置',
+    flinkVersionIsRequiredMessage: 'Spark 版本必填',
+    appNamePlaceholder: '请输入作业名称',
+    appNameIsRequiredMessage: '作业名称必填',
+    appNameNotUniqueMessage: '作业名称必须唯一, 输入的作业名称已经存在',
+    appNameExistsInYarnMessage: '作业名称已经在YARN集群中存在,不能重复。请检查',
+    appNameExistsInK8sMessage: '该作业名称已经在 Kubernetes 集群中存在,不能重复。请检查',
+    appNameValid: '作业名称不合法',
+    appNameRole: '作业名称必须遵循以下规则:',
+    K8sSessionClusterIdRole: 'Kubernetes 集群ID必须遵循以下规则:',
+    appNameK8sClusterIdRole:
+      '当前部署模式是 Kubernetes Application 模式,会将作业名称作为 Kubernetes 的 clusterId,因此作业名称要遵循以下规则:',
+    appNameK8sClusterIdRoleLength: '不应超过 45 个字符',
+    appNameK8sClusterIdRoleRegexp:
+      '只能由小写字母、数字和"-" 组成,必须满足正则格式 [a-z]([-a-z0-9]*[a-z0-9])',
+    appNameRoleContent: '字符必须是(中文 或 英文 或 "-" 或 "_"),不能出现两个连续的空格',
+    appNameNotValid:
+      '作业名称无效。字符必须是(中文 或 英文 或 "-" 或 "_"),不能出现两个连续的空格,请检查',
+    flinkClusterIsRequiredMessage: 'Flink集群必填',
+    sparkSqlIsRequiredMessage: 'Spark SQL必填',
+    tagsPlaceholder: '请输入标签,如果超过一个,用逗号(,)分隔',
+    parallelismPlaceholder: '运行程序的并行度',
+    slotsOfPerTaskManagerPlaceholder: '每个TaskManager的插槽数',
+    restartSizePlaceholder: '最大重启次数',
+    alertTemplatePlaceholder: '告警模板',
+    totalMemoryOptionsPlaceholder: '请选择要设置的资源参数',
+    tmPlaceholder: '请选择要设置的资源参数',
+    yarnQueuePlaceholder: '请输入yarn队列标签名称',
+    descriptionPlaceholder: '请输入此作业的描述',
+    kubernetesNamespacePlaceholder: '请输入 Kubernetes 命名空间, 如: default',
+    kubernetesClusterIdPlaceholder: '请选择 Kubernetes ClusterId',
+    kubernetesClusterIdRequire:
+      '小写字母、数字、"-",并且必须以字母数字字符开头和结尾,并且不超过45个字符',
+    kubernetesClusterIdIsRequiredMessage: 'Kubernetes ClusterId 必填',
+    flinkImagePlaceholder: '请输入Flink基础docker镜像的标签,如:flink:1.13.0-scala_2.11-java8',
+    flinkImageIsRequiredMessage: 'Flink基础docker镜像是必填的',
+    k8sRestExposedTypePlaceholder: 'Kubernetes Rest-Service Exposed Type',
+    hadoopXmlConfigFileTips: '从系统环境参数自动复制配置文件',
+    dynamicPropertiesPlaceholder: '$key=$value,如果有多个参数,可以换行输入(-D )',
+    sqlCheck: 'SQL 检查错误',
+  },

+  noteInfo: {
+    note: '提示',
+    minute: '分钟',
+    count: '次数',
+    officialDoc: '官网文档',
+    checkPointFailureOptions: 'CheckPoint失败策略',
+    checkpointFailureRateInterval: 'checkpoint失败间隔',
+    maxFailuresPerInterval: '最大失败次数',
+    checkPointFailureNote:
+      'checkpoint 失败处理策略, 例如: 在 5 分钟内 (checkpoint的失败间隔), 如果 checkpoint 失败次数超过 10 次 (checkpoint最大失败次数),会触发操作(发送告警或者重启作业)',
+    totalMemoryNote:
+      '不建议同时配置 "total process memory" 和 "total Flink memory"。 由于潜在的内存配置冲突,它可能会导致部署失败。 配置其他内存组件也需要谨慎,因为它会产生进一步的配置冲突,最简单的方法是设置"total process memory"',
+    dynamicProperties:
+      '动态Properties: 与命令行模式下的 -D$property=$value 的作用相同,允许指定多个参数。 更多可以设置的参数请查阅',
+    yarnQueue:
+      '此输入不仅支持快速设置 "yarn.application.queue" 还支持设置 "yarn.application.node-label"。例如,输入 "queue1" 表示 "yarn.application.queue" 的值为 "queue1",而输入 "queue1{\'@\'}label1,label2" 则表示 "yarn.application.queue" 设置为 "queue1" 且 "yarn.application.node-label" 设置为 "label1,label2"。Queue 和 label 之间使用 {\'@\'} 分隔。',
+  },
+};
diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppDashboard.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppDashboard.vue
index bd97f7cb3b..6c9be6b40d 100644
--- a/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppDashboard.vue
+++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppDashboard.vue
@@ -32,40 +32,38 @@
     const res = await fetchSparkDashboard();
     if (res) {
       Object.assign(dashBigScreenMap, {
-        runningJob: {
-          staticstics: { title: t('flink.app.dashboard.runningJobs'), value: res.runningJob },
+        runningTask: {
+          statistics: {
+            title: t('spark.app.dashboard.runningTasks'),
+            value: res.runningApplication,
+          },
           footer: [
-            { title: t('flink.app.dashboard.totalTask'), value: res?.task?.total || 0 },
-            { title: t('flink.app.dashboard.runningTask'), value: res?.task?.running || 0 },
+            { title: t('spark.app.dashboard.totalTask'), value: res?.numTasks || 0 },
+            { title: t('spark.app.dashboard.totalStage'), value: res?.numStages || 0 },
           ],
         },
-        availiableTask: {
-          staticstics: {
-            title: t('flink.app.dashboard.availableTaskSlots'),
-            value: res.availableSlot,
+        completedTask: {
+          statistics: {
+            title: t('spark.app.dashboard.completedTask'),
+            value: res.numCompletedTasks,
           },
           footer: [
-            { title: t('flink.app.dashboard.taskSlots'), value: res.totalSlot },
-            { title: t('flink.app.dashboard.taskManagers'), value: res.totalTM },
+            { title: t('spark.app.dashboard.completedStage'), value: res.numCompletedStages },
           ],
         },
-        jobManager: {
-          staticstics: { title: t('flink.app.dashboard.jobManagerMemory'), value: res.jmMemory },
-          footer: [
-            {
-              title: t('flink.app.dashboard.totalJobManagerMemory'),
-              value: `${res.jmMemory} MB`,
-            },
-          ],
+        memory: {
+          statistics: {
+            title: t('spark.app.dashboard.memory'),
+            value: `${res.usedMemory} MB`,
+          },
+          footer: [],
         },
         taskManager: {
-          staticstics: { title: t('flink.app.dashboard.taskManagerMemory'), value: res.tmMemory },
-          footer: [
-            {
-              title: t('flink.app.dashboard.totalTaskManagerMemory'),
-              value: `${res.tmMemory} MB`,
-            },
-          ],
+          statistics: {
+            title: t('spark.app.dashboard.VCore'),
+            value: res.usedVCores,
+          },
+          footer: [],
         },
       });
     }
@@ -92,7 +90,7 @@
           :key="key"
        >
diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppForm.vue
b/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppForm.vue new file mode 100644 index 0000000000..a8af51c75c --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppForm.vue @@ -0,0 +1,121 @@ + + + + diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppLog.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppLog.vue new file mode 100644 index 0000000000..df698fb5c6 --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppLog.vue @@ -0,0 +1,124 @@ + + + diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppStartModal.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppStartModal.vue deleted file mode 100644 index 3fd5976ef5..0000000000 --- a/streampark-console/streampark-console-webapp/src/views/spark/app/components/AppStartModal.vue +++ /dev/null @@ -1,244 +0,0 @@ - - - diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/BuildDrawer.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/components/BuildDrawer.vue new file mode 100644 index 0000000000..3023a24c6e --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/BuildDrawer.vue @@ -0,0 +1,239 @@ + + + diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/CompareModal.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/components/CompareModal.vue new file mode 100644 index 0000000000..8fa9415767 --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/CompareModal.vue @@ -0,0 +1,153 @@ + + + diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/DetailTab.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/components/DetailTab.vue new file mode 100644 index 0000000000..1fbee7f634 --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/DetailTab.vue @@ -0,0 +1,485 @@ + + + diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSql.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSql.vue index 3a6bc71b4d..f3a5c97e93 100644 --- a/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSql.vue +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSql.vue @@ -25,7 +25,7 @@ import { Button } from 'ant-design-vue'; import { isEmpty } from '/@/utils/is'; import { useMessage } from '/@/hooks/web/useMessage'; - import { format } from '../sqlFormatter'; + import { format } from '/@/views/flink/app/FlinkSqlFormatter'; import { useI18n } from '/@/hooks/web/useI18n'; import { useFullContent } from '/@/hooks/event/useFullscreen'; import { fetchSparkSqlVerify } from '/@/api/spark/sql'; @@ -35,7 +35,7 @@ const ButtonGroup = Button.Group; const { t } = useI18n(); - const flinkSql = ref(); + const sparkSql = ref(); const verifyRes = reactive({ errorMsg: '', verified: false, @@ -73,7 +73,7 @@ } if (!props.versionId) { - createMessage.error(t('flink.app.dependencyError')); + createMessage.error(t('spark.app.dependencyError')); return false; } else { try { @@ -150,7 +150,7 @@ // }, 500); // } const { onChange, setContent, getInstance, getMonacoInstance, setMonacoSuggest } = useMonaco( - flinkSql, + 
sparkSql, { language: 'sql', code: props.value || defaultValue, @@ -200,14 +200,14 @@
- {{ t('flink.app.flinkSql.verify') }} + {{ t('spark.app.sparkSql.verify') }} - {{ t('flink.app.flinkSql.preview') }} + {{ t('spark.app.sparkSql.preview') }} - {{ t('flink.app.flinkSql.format') }} + {{ t('spark.app.sparkSql.format') }} - {{ t('flink.app.flinkSql.fullScreen') }} + {{ t('spark.app.sparkSql.fullScreen') }}
@@ -234,26 +234,26 @@ {{ verifyRes.errorMsg }}
- {{ t('flink.app.flinkSql.successful') }} + {{ t('spark.app.sparkSql.successful') }}
- {{ t('flink.app.flinkSql.verify') }} + {{ t('spark.app.sparkSql.verify') }}
- {{ t('flink.app.flinkSql.preview') }} + {{ t('spark.app.sparkSql.preview') }}
- {{ t('flink.app.flinkSql.format') }} + {{ t('spark.app.sparkSql.format') }}
@@ -268,7 +268,7 @@

{{ verifyRes.errorMsg }} - {{ t('flink.app.flinkSql.successful') }} + {{ t('spark.app.sparkSql.successful') }}

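Note on the verify wiring above: `fetchSparkSqlVerify` is declared with `isTransformResponse: false`, so the component receives the raw response envelope rather than the unwrapped payload. A minimal usage sketch follows; the envelope fields (`success`/`message`) and the `showError` callback are assumptions for illustration, not part of this diff:

```ts
import { fetchSparkSqlVerify } from '/@/api/spark/sql';

// Returns true when the SQL passes verification against the given Spark env;
// otherwise surfaces the backend message. Envelope shape is assumed here.
async function verifySparkSql(sql: string, versionId: string, showError: (msg: string) => void) {
  const res: any = await fetchSparkSqlVerify({ sql, versionId });
  if (res?.data?.success === false) {
    showError(res?.data?.message ?? 'Spark SQL verification failed');
    return false;
  }
  return true;
}
```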
diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSqlCompareModal.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSqlCompareModal.vue new file mode 100644 index 0000000000..c03498c3f7 --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSqlCompareModal.vue @@ -0,0 +1,165 @@ + + + diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSqlDifferent.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSqlDifferent.vue new file mode 100644 index 0000000000..90f92d1c31 --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSqlDifferent.vue @@ -0,0 +1,136 @@ + + + + diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSqlReview.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSqlReview.vue new file mode 100644 index 0000000000..f7b3edb49c --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/SparkSqlReview.vue @@ -0,0 +1,55 @@ + + + + + diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/State.tsx b/streampark-console/streampark-console-webapp/src/views/spark/app/components/State.tsx new file mode 100644 index 0000000000..5a56faafcf --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/State.tsx @@ -0,0 +1,240 @@ +/* + Licensed to the Apache Software Foundation (ASF) under one or more + contributor license agreements. See the NOTICE file distributed with + this work for additional information regarding copyright ownership. + The ASF licenses this file to You under the Apache License, Version 2.0 + (the "License"); you may not use this file except in compliance with + the License. You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/
+
+import { computed, defineComponent, toRefs, unref, type PropType } from 'vue';
+import { Tag, Tooltip } from 'ant-design-vue';
+import '../../../flink/app/components/State.less';
+import { AppStateEnum, OptionStateEnum } from '/@/enums/sparkEnum';
+import { ReleaseStateEnum } from '/@/enums/flinkEnum';
+import { useI18n } from '/@/hooks/web/useI18n';
+const { t } = useI18n();
+
+/* state map */
+export const stateMap = {
+  [AppStateEnum.ADDED]: { color: '#2f54eb', title: t('spark.app.runState.added') },
+  [AppStateEnum.NEW_SAVING]: {
+    color: '#738df8',
+    title: t('spark.app.runState.saving'),
+    class: 'status-processing-initializing',
+  },
+  [AppStateEnum.NEW]: { color: '#2f54eb', title: t('spark.app.runState.created') },
+  [AppStateEnum.STARTING]: {
+    color: '#1AB58E',
+    title: t('spark.app.runState.starting'),
+    class: 'status-processing-starting',
+  },
+  [AppStateEnum.SUBMITTED]: {
+    color: '#13c2c2',
+    title: t('spark.app.runState.submitted'),
+    class: 'status-processing-restarting',
+  },
+  [AppStateEnum.ACCEPTED]: {
+    color: '#52c41a',
+    title: t('spark.app.runState.accept'),
+    class: 'status-processing-running',
+  },
+  [AppStateEnum.RUNNING]: {
+    color: '#fa541c',
+    title: t('spark.app.runState.running'),
+    class: 'status-processing-failing',
+  },
+  [AppStateEnum.FINISHED]: { color: '#1890ff', title: t('spark.app.runState.finished') },
+  [AppStateEnum.FAILED]: { color: '#f5222d', title: t('spark.app.runState.failed') },
+  [AppStateEnum.LOST]: { color: '#333333', title: t('spark.app.runState.lost') },
+  [AppStateEnum.MAPPING]: {
+    color: '#13c2c2',
+    title: t('spark.app.runState.mapping'),
+    class: 'status-processing-restarting',
+  },
+  [AppStateEnum.OTHER]: { color: '#722ed1', title: t('spark.app.runState.other') },
+  [AppStateEnum.REVOKED]: {
+    color: '#eb2f96',
+    title: t('spark.app.runState.revoked'),
+    class: 'status-processing-reconciling',
+  },
+
+  [AppStateEnum.STOPPING]: {
+    color: '#738df8',
+    title: t('spark.app.runState.stopping'),
+    class: 'status-processing-initializing',
+  },
+  [AppStateEnum.KILLED]: { color: '#8E50FF', title: t('spark.app.runState.killed') },
+};
+/* option state map */
+export const optionStateMap = {
+  [OptionStateEnum.RELEASING]: {
+    color: '#1ABBDC',
+    title: t('spark.app.releaseState.releasing'),
+    class: 'status-processing-deploying',
+  },
+  [OptionStateEnum.STOPPING]: {
+    color: '#faad14',
+    title: t('spark.app.runState.stopping'),
+    class: 'status-processing-cancelling',
+  },
+  [OptionStateEnum.STARTING]: {
+    color: '#1AB58E',
+    title: t('spark.app.runState.starting'),
+    class: 'status-processing-starting',
+  },
+};
+
+/* release state map */
+export const releaseStateMap = {
+  [ReleaseStateEnum.FAILED]: { color: '#f5222d', title: t('spark.app.releaseState.failed') },
+  [ReleaseStateEnum.DONE]: { color: '#52c41a', title: t('spark.app.releaseState.success') },
+  [ReleaseStateEnum.NEED_RELEASE]: { color: '#fa8c16', title: t('spark.app.releaseState.waiting') },
+  [ReleaseStateEnum.RELEASING]: {
+    color: '#52c41a',
+    title: t('spark.app.releaseState.releasing'),
+    class: 'status-processing-deploying',
+  },
+  [ReleaseStateEnum.NEED_RESTART]: { color: '#fa8c16', title: t('spark.app.releaseState.pending') },
+  [ReleaseStateEnum.NEED_ROLLBACK]: {
+    color: '#fa8c16',
+    title: t('spark.app.releaseState.waiting'),
+  },
+};
+
+/* build state map */
+export const buildStatusMap = {
+  0: { color: '#99A3A4', title: 'UNKNOWN' },
+  1: { color: '#F5B041', title: 'PENDING' },
+  2: {
+    color: '#3498DB',
+    title: 'BUILDING',
+    class: 'status-processing-deploying',
+  },
+  3: { color: '#2ECC71', title: 'SUCCESS' },
+  4: { color: '#E74C3C', title: 'FAILURE' },
+};
+
+const overviewMap = {
+  running: { color: '#52c41a', title: 'RUNNING' },
+  canceled: { color: '#fa8c16', title: 'CANCELED' },
+  canceling: { color: '#faad14', title: 'CANCELING' },
+  created: { color: '#2f54eb', title: 'CREATED' },
+  deploying: { color: '#eb2f96', title: 'RECONCILING' },
+  reconciling: { color: '#13c2c2', title: 'RELEASING' },
+  scheduled: { color: '#722ed1', title: 'SCHEDULED' },
+};
+
+export default defineComponent({
+  name: 'State',
+  props: {
+    option: {
+      type: String,
+      default: 'state',
+    },
+    data: {
+      type: Object as PropType<Recordable>,
+      default: () => ({}),
+    },
+    maxTitle: String,
+  },
+  setup(props) {
+    const { data, option } = toRefs(props);
+
+    const tagWidth = computed(() => {
+      if (props.maxTitle === undefined) return 0;
+      // create a dom to calculate the width of the tag
+      const dom = document.createElement('span');
+      dom.style.display = 'inline-block';
+      dom.style.fontSize = '10px';
+      dom.style.padding = '0 2px';
+      dom.style.borderRadius = '2px';
+      dom.textContent = props.maxTitle;
+      document.body.appendChild(dom);
+      const width = dom.clientWidth + 2;
+      document.body.removeChild(dom);
+      return width;
+    });
+
+    const renderTag = (map: Recordable, key: number) => {
+      if (!Reflect.has(map, key)) {
+        return;
+      }
+      return (
+        <Tag class={map[key].class} color={map[key].color} style={unref(getStyle)}>
+          {map[key].title}
+        </Tag>
+      );
+    };
+
+    const getStyle = computed(() => {
+      if (tagWidth.value > 0) {
+        return { width: `${tagWidth.value}px`, textAlign: 'center' };
+      }
+      return {};
+    });
+    const renderState = () => {
+      if (unref(data).optionState === OptionStateEnum.NONE) {
+        return <div class="bold-tag">{renderTag(stateMap, unref(data).state)}</div>;
+      } else {
+        return <div class="bold-tag">{renderTag(optionStateMap, unref(data).optionState)}</div>;
+      }
+    };
+    function renderOverview() {
+      if (!unref(data).overview) {
+        return;
+      }
+      return Object.keys(overviewMap).map((k) => {
+        if (unref(data)?.overview[k]) {
+          const item = overviewMap[k];
+          return (
+            <Tooltip title={item.title}>
+              <Tag color={item.color}>{unref(data)?.overview[k]}</Tag>
+            </Tooltip>
+          );
+        } else {
+          return;
+        }
+      });
+    }
+    const renderOtherOption = () => {
+      if ([AppStateEnum.RUNNING, AppStateEnum.FAILED].includes(unref(data)?.state)) {
+        return (
+          <div class="bold-tag">
+            {unref(data).totalTask && <Tag>{unref(data)?.totalTask}</Tag>}
+            {renderOverview()}
+          </div>
+        );
+      }
+      return <div class="bold-tag">-</div>;
+    };
+
+    return () => {
+      if (unref(option) === 'state') {
+        return <span>{renderState()}</span>;
+      }
+
+      if (unref(option) === 'release') {
+        return <span>{renderTag(releaseStateMap, unref(data).release)}</span>;
+      }
+
+      if (unref(option) === 'build') {
+        return <span>{renderTag(buildStatusMap, unref(data).buildStatus)}</span>;
+      }
+
+      return <span>{renderOtherOption()}</span>;
+    };
+  },
+});
diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/components/StatisticCard.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/components/StatisticCard.vue
index 2c2247401f..60fd0a2447 100644
--- a/streampark-console/streampark-console-webapp/src/views/spark/app/components/StatisticCard.vue
+++ b/streampark-console/streampark-console-webapp/src/views/spark/app/components/StatisticCard.vue
@@ -48,6 +48,9 @@
         {{ item.value }}
+
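Aside on the `State` component above: it measures `maxTitle` in an off-screen span so every status tag in a column gets the same fixed width and the column does not jitter as states change. A hedged sketch of wiring it into a table column; the `customRender` contract follows the project's `BasicColumn` convention and is assumed here, not part of this diff:

```tsx
import State, { stateMap } from '/@/views/spark/app/components/State';

// Pre-compute the longest status label so every rendered tag shares one width.
const maxTitle = Object.values(stateMap).reduce(
  (longest, item) => (item.title.length > longest.length ? item.title : longest),
  '',
);

const runStatusColumn = {
  title: 'Run Status',
  dataIndex: 'state',
  customRender: ({ record }) => <State option="state" data={record} maxTitle={maxTitle} />,
};
```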
diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/create.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/create.vue index 8c714de262..06f3d2b619 100644 --- a/streampark-console/streampark-console-webapp/src/views/spark/app/create.vue +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/create.vue @@ -16,27 +16,20 @@ --> diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/data/detail.data.ts b/streampark-console/streampark-console-webapp/src/views/spark/app/data/detail.data.ts new file mode 100644 index 0000000000..d6874e5404 --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/data/detail.data.ts @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { BasicColumn } from '/@/components/Table/src/types/table'; +import { Tag, TypographyParagraph } from 'ant-design-vue'; +import { DescItem } from '/@/components/Description'; +import { h } from 'vue'; +import State from '../components/State'; +import Icon from '/@/components/Icon'; +import { dateToDuration } from '/@/utils/dateUtil'; +import { JobTypeEnum } from '/@/enums/sparkEnum'; +import { useI18n } from '/@/hooks/web/useI18n'; + +const { t } = useI18n(); +export const getDescSchema = (): DescItem[] => { + return [ + { + field: 'id', + label: t('spark.app.id'), + render: (curVal) => + h(TypographyParagraph, { copyable: true, style: { color: '#477de9' } }, () => curVal), + }, + { field: 'appName', label: t('spark.app.appName') }, + { + field: 'jobType', + label: t('spark.app.developmentMode'), + render: (curVal) => + h( + 'div', + { class: 'bold-tag' }, + h(Tag, { color: curVal === 1 ? '#545454' : '#0C7EF2', class: 'mr-8px' }, () => + curVal === 1 ? 'Custom Code' : 'Spark SQL', + ), + ), + }, + { + field: 'module', + label: t('spark.app.module'), + show: (data) => data.jobType != JobTypeEnum.SQL, + }, + { + field: 'state', + label: t('spark.app.status'), + render: (_curVal, data) => h(State, { option: 'state', data }), + }, + { + field: 'startTime', + label: t('spark.app.startTime'), + render: (curVal) => + h( + 'div', + null, + curVal + ? [h(Icon, { icon: 'ant-design:clock-circle-outlined' }), h('span', null, curVal)] + : '-', + ), + }, + { + field: 'endTime', + label: t('spark.app.endTime'), + render: (curVal) => + h( + 'div', + null, + curVal + ? 
[h(Icon, { icon: 'ant-design:clock-circle-outlined' }), h('span', null, curVal)] + : '-', + ), + }, + { + field: 'duration', + label: t('spark.app.duration'), + render: (curVal) => dateToDuration(curVal), + show: (data) => data.duration, + }, + { field: 'description', label: t('common.description'), span: 2 }, + ]; +}; +/* setting */ +export const getConfColumns = (): BasicColumn[] => [ + { title: 'Version', dataIndex: 'version' }, + { title: 'Conf Format', dataIndex: 'format' }, + { title: 'Effective', dataIndex: 'effective' }, + { title: 'Modify Time', dataIndex: 'createTime' }, +]; + +export const getSparkSqlColumns = (): BasicColumn[] => [ + { title: 'Version', dataIndex: 'version' }, + { title: 'Effective', dataIndex: 'effective' }, + { title: 'Candidate', dataIndex: 'candidate' }, + { title: 'Modify Time', dataIndex: 'createTime' }, +]; + +export const getSavePointColumns = (): BasicColumn[] => [ + { title: 'Path', dataIndex: 'path' }, + { title: 'Trigger Time', dataIndex: 'triggerTime', width: 250 }, + { title: 'Type', dataIndex: 'type', width: 170 }, + { title: 'Latest', dataIndex: 'latest', width: 200 }, +]; +export const getBackupColumns = (): BasicColumn[] => [ + { title: 'Save Path', dataIndex: 'path', align: 'left' }, + { title: 'Description', dataIndex: 'description' }, + { title: 'Version', dataIndex: 'version', width: 100, align: 'center' }, + { title: 'Backup Time', dataIndex: 'createTime', width: 200 }, +]; + +export const getOptionLogColumns = (): BasicColumn[] => [ + { title: 'Operation Name', dataIndex: 'optionName', width: 150 }, + { title: 'Cluster Id', dataIndex: 'yarnAppId' }, + { title: 'JobManager URL', dataIndex: 'jobManagerUrl' }, + { title: 'Start Status', dataIndex: 'success', width: 120 }, + { title: 'Option Time', dataIndex: 'optionTime', width: 200 }, +]; diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/detail.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/detail.vue new file mode 100644 index 0000000000..004bf65c50 --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/detail.vue @@ -0,0 +1,226 @@ + + + + diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/edit.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/edit.vue new file mode 100644 index 0000000000..334af61518 --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/edit.vue @@ -0,0 +1,157 @@ + + + + diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useAppFormSchema.tsx b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useAppFormSchema.tsx index b4ca02eb6c..3141a66d0a 100644 --- a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useAppFormSchema.tsx +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useAppFormSchema.tsx @@ -14,7 +14,7 @@ * See the License for the specific language governing permissions and * limitations under the License. 
*/ -import { computed, h, onMounted, ref, unref } from 'vue'; +import { computed, h, onMounted, ref, unref, type Ref } from 'vue'; import type { FormSchema } from '/@/components/Form'; import { useI18n } from '/@/hooks/web/useI18n'; import { AppExistsStateEnum, JobTypeEnum } from '/@/enums/sparkEnum'; @@ -23,16 +23,15 @@ import { SvgIcon } from '/@/components/Icon'; import type { SparkEnv } from '/@/api/spark/home.type'; import type { RuleObject } from 'ant-design-vue/lib/form'; import type { StoreValue } from 'ant-design-vue/lib/form/interface'; -import { renderIsSetConfig, renderStreamParkResource, renderYarnQueue } from './useFlinkRender'; +import { renderIsSetConfig, renderStreamParkResource, renderYarnQueue } from './useSparkRender'; import { executionModes } from '../data'; import { useDrawer } from '/@/components/Drawer'; -import { fetchSparkEnvList } from '/@/api/spark/home'; import { fetchVariableAll } from '/@/api/resource/variable'; import { fetchTeamResource } from '/@/api/resource/upload'; import { fetchCheckSparkName } from '/@/api/spark/app'; -export function useSparkSchema() { + +export function useSparkSchema(sparkEnvs: Ref<SparkEnv[]>) { const { t } = useI18n(); - const sparkEnvs = ref([]); const teamResource = ref<Array<Recordable>>([]); const suggestions = ref<Array<Recordable>>([]); @@ -40,9 +39,9 @@ /* Detect job name field */ async function getJobNameCheck(_rule: RuleObject, value: StoreValue, _model: Recordable) { if (value === null || value === undefined || value === '') { - return Promise.reject(t('flink.app.addAppTips.appNameIsRequiredMessage')); + return Promise.reject(t('spark.app.addAppTips.appNameIsRequiredMessage')); } - const params = { jobName: value }; + const params = { appName: value }; // if (edit?.appId) { // Object.assign(params, { id: edit.appId }); // } @@ -51,13 +50,13 @@ case AppExistsStateEnum.NO: return Promise.resolve(); case AppExistsStateEnum.IN_DB: - return Promise.reject(t('flink.app.addAppTips.appNameNotUniqueMessage')); + return Promise.reject(t('spark.app.addAppTips.appNameNotUniqueMessage')); case AppExistsStateEnum.IN_YARN: - return Promise.reject(t('flink.app.addAppTips.appNameExistsInYarnMessage')); + return Promise.reject(t('spark.app.addAppTips.appNameExistsInYarnMessage')); case AppExistsStateEnum.IN_KUBERNETES: - return Promise.reject(t('flink.app.addAppTips.appNameExistsInK8sMessage')); + return Promise.reject(t('spark.app.addAppTips.appNameExistsInK8sMessage')); default: - return Promise.reject(t('flink.app.addAppTips.appNameValid')); + return Promise.reject(t('spark.app.addAppTips.appNameValid')); } } const getJobTypeOptions = () => { @@ -90,11 +89,11 @@ return [ { field: 'jobType', - label: t('flink.app.developmentMode'), + label: t('spark.app.developmentMode'), component: 'Select', componentProps: ({ formModel }) => { return { - placeholder: t('flink.app.addAppTips.developmentModePlaceholder'), + placeholder: t('spark.app.addAppTips.developmentModePlaceholder'), options: getJobTypeOptions(), onChange: (value) => { if (value != JobTypeEnum.SQL) { @@ -105,18 +104,18 @@ }, defaultValue: String(JobTypeEnum.SQL), rules: [ - { required: true, message: t('flink.app.addAppTips.developmentModeIsRequiredMessage') }, + { required: true, message: t('spark.app.addAppTips.developmentModeIsRequiredMessage') }, ], }, { field: 'executionMode', - label: t('flink.app.executionMode'), + label: t('spark.app.executionMode'), component: 'Select',
itemProps: { autoLink: false, // avoid triggering the validators repeatedly with a null value }, componentProps: { - placeholder: t('flink.app.addAppTips.executionModePlaceholder'), + placeholder: t('spark.app.addAppTips.executionModePlaceholder'), options: executionModes, }, rules: [ @@ -124,7 +123,7 @@ required: true, validator: async (_rule, value) => { if (value === null || value === undefined || value === '') { - return Promise.reject(t('flink.app.addAppTips.executionModeIsRequiredMessage')); + return Promise.reject(t('spark.app.addAppTips.executionModeIsRequiredMessage')); } else { return Promise.resolve(); } @@ -142,7 +141,7 @@ fieldNames: { label: 'sparkName', value: 'id', options: 'options' }, }, rules: [ - { required: true, message: t('flink.app.addAppTips.flinkVersionIsRequiredMessage') }, ], }, { @@ -151,28 +150,28 @@ component: 'Input', slot: 'sparkSql', ifShow: ({ values }) => values?.jobType == JobTypeEnum.SQL, - rules: [{ required: true, message: t('flink.app.addAppTips.flinkSqlIsRequiredMessage') }], + rules: [{ required: true, message: t('spark.app.addAppTips.flinkSqlIsRequiredMessage') }], }, { field: 'teamResource', - label: t('flink.app.resource'), + label: t('spark.app.resource'), component: 'Select', render: ({ model }) => renderStreamParkResource({ model, resources: unref(teamResource) }), ifShow: ({ values }) => values.jobType == JobTypeEnum.JAR, }, { field: 'mainClass', - label: t('flink.app.mainClass'), + label: t('spark.app.mainClass'), component: 'Input', - componentProps: { placeholder: t('flink.app.addAppTips.mainClassPlaceholder') }, + componentProps: { placeholder: t('spark.app.addAppTips.mainClassPlaceholder') }, ifShow: ({ values }) => values?.jobType == JobTypeEnum.JAR, - rules: [{ required: true, message: t('flink.app.addAppTips.mainClassIsRequiredMessage') }], + rules: [{ required: true, message: t('spark.app.addAppTips.mainClassIsRequiredMessage') }], }, { field: 'jobName', - label: t('flink.app.appName'), + label: t('spark.app.appName'), component: 'Input', - componentProps: { placeholder: t('flink.app.addAppTips.appNamePlaceholder') }, + componentProps: { placeholder: t('spark.app.addAppTips.appNamePlaceholder') }, dynamicRules: ({ model }) => { return [ { @@ -186,21 +185,21 @@ }, { field: 'tags', - label: t('flink.app.tags'), + label: t('spark.app.tags'), component: 'Input', componentProps: { - placeholder: t('flink.app.addAppTips.tagsPlaceholder'), + placeholder: t('spark.app.addAppTips.tagsPlaceholder'), }, }, { field: 'yarnQueue', - label: t('flink.app.yarnQueue'), + label: t('spark.app.yarnQueue'), component: 'Input', render: (renderCallbackParams) => renderYarnQueue(renderCallbackParams), }, { field: 'isSetConfig', - label: t('flink.app.appConf'), + label: t('spark.app.appConf'), component: 'Switch', render({ model, field }) { return renderIsSetConfig(model, field, registerConfDrawer, openConfDrawer); @@ -218,7 +217,7 @@ }, { field: 'args', - label: t('flink.app.programArgs'), + label: t('spark.app.programArgs'), component: 'InputTextArea', defaultValue: '', slot: 'args', @@ -226,22 +225,18 @@ }, { field: 'hadoopUser', - label: t('flink.app.hadoopUser'), + label: t('spark.app.hadoopUser'), component: 'Input', }, { field: 'description', label:
t('common.description'), component: 'InputTextArea', - componentProps: { rows: 4, placeholder: t('flink.app.addAppTips.descriptionPlaceholder') }, + componentProps: { rows: 4, placeholder: t('spark.app.addAppTips.descriptionPlaceholder') }, }, ]; }); onMounted(async () => { - //get flinkEnv - fetchSparkEnvList().then((res) => { - sparkEnvs.value = res; - }); /* Get team dependencies */ fetchTeamResource({}).then((res) => { teamResource.value = res; }); @@ -259,6 +254,5 @@ return { formSchema, suggestions, - sparkEnvs, }; } diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useDetailContext.ts b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useDetailContext.ts new file mode 100644 index 0000000000..28f158b659 --- /dev/null +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useDetailContext.ts @@ -0,0 +1,33 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * https://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +import { InjectionKey } from 'vue'; +import { createContext, useContext } from '/@/hooks/core/useContext'; +import type { SparkApplication } from '/@/api/spark/app.type'; + +export interface DetailProviderContextProps { + app: Partial<SparkApplication>; +} + +const key: InjectionKey<DetailProviderContextProps> = Symbol(); + +export function createDetailProviderContext(context: DetailProviderContextProps) { + return createContext(context, key); +} + +export function useDetailProviderContext() { + return useContext(key); +} diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx index 6d7beb588f..a2c22fc874 100644 --- a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkAction.tsx @@ -14,22 +14,233 @@ * See the License for the specific language governing permissions and * limitations under the License.
*/ -import { Alert, Form, Input } from 'ant-design-vue'; -import { h, onMounted, reactive, ref, unref } from 'vue'; +import { Alert, Form, Input, Tag } from 'ant-design-vue'; +import { h, onMounted, reactive, ref, unref, VNode } from 'vue'; +import { handleAppBuildStatueText } from '../utils'; +import { + fetchCheckSparkAppStart, + fetchCheckSparkName, + fetchCopySparkApp, + fetchSparkAppForcedStop, + fetchSparkAppStart, + fetchSparkMapping, +} from '/@/api/spark/app'; import { fetchAppOwners } from '/@/api/system/user'; import { SvgIcon } from '/@/components/Icon'; -import { AppExistsStateEnum } from '/@/enums/sparkEnum'; +import { + AppExistsStateEnum, + AppStateEnum, + ExecModeEnum, + OptionStateEnum, +} from '/@/enums/sparkEnum'; import { useI18n } from '/@/hooks/web/useI18n'; import { useMessage } from '/@/hooks/web/useMessage'; -import { fetchCheckSparkName, fetchCopySparkApp, fetchSparkMapping } from '/@/api/spark/app'; +import { fetchBuildSparkApp, fetchBuildProgressDetail } from '/@/api/spark/build'; +import type { SparkApplication } from '/@/api/spark/app.type'; +import { exceptionPropWidth } from '/@/utils'; +import { useRouter } from 'vue-router'; -export const useSparkAction = () => { +export const useSparkAction = (optionApps: Recordable) => { const { t } = useI18n(); - const { Swal, createConfirm, createMessage } = useMessage(); + const router = useRouter(); + const { Swal, createConfirm, createMessage, createWarningModal } = useMessage(); const users = ref([]); + const appBuildDetail = reactive({}); + + /* check */ + function handleCheckReleaseApp(app: Recordable) { + if (app['appControl']['allowBuild'] === true) { + handleReleaseApp(app, false); + } else { + createWarningModal({ + title: 'WARNING', + content: ` +

+            <p>${t('flink.app.release.releaseTitle')}</p>
+            <p>${t('flink.app.release.releaseDesc')}</p>

+ `, + okType: 'danger', + onOk: () => handleReleaseApp(app, true), + }); + } + } + + /* Release App */ + async function handleReleaseApp(app: Recordable, force: boolean) { + const res = await fetchBuildSparkApp({ + appId: app.id, + forceBuild: force, + }); + if (!res.data) { + let message = res.message || ''; + if (!message) { + message = t('flink.app.release.releaseFail') + message.replaceAll(/\[StreamPark]/g, ''); + } + Swal.fire('Failed', message, 'error'); + } else { + Swal.fire({ + icon: 'success', + title: t('flink.app.release.releasing'), + showConfirmButton: false, + timer: 2000, + }); + } + } + + /* start application */ + function handleAppCheckStart(app: Recordable) { + // when the app is building, show the forced-start modal + if (app['appControl']['allowStart'] === false) { + handleFetchBuildDetail(app); + createWarningModal({ + title: 'WARNING', + content: () => { + const content: Array<string | VNode> = []; + if (appBuildDetail.pipeline == null) { + content.push( + h('p', { class: 'pt-10px' }, 'No build record exists for the current application.'), + ); + } else { + content.push( + h('p', { class: 'pt-10px' }, [ + 'The current build state of the application is', + h( + Tag, + { color: 'orange' }, + handleAppBuildStatueText(appBuildDetail.pipeline.pipeStatus), + ), + ]), + ); + } + content.push(h('p', null, 'Are you sure to force the application to run?')); + return content; + }, + okType: 'danger', + onOk: () => { + handleStart(app); + return Promise.resolve(true); + }, + }); + } else { + handleStart(app); + } + } + + async function handleStart(app: SparkApplication) { + if (app.sparkVersion == null) { + Swal.fire('Failed', 'please set spark version first.', 'error'); + } else { + if (!optionApps.starting.get(app.id) || app['optionState'] === OptionStateEnum.NONE) { + // when the app is building, force-stop any stale YARN instance first + const resp = await fetchCheckSparkAppStart({ + id: app.id, + }); + if (+resp === AppExistsStateEnum.IN_YARN) { + await fetchSparkAppForcedStop({ + id: app.id, + }); + } + await handleDoSubmit(app); + } + } + } + /* submit */ + async function handleDoSubmit(data: SparkApplication) { + try { + const res = await fetchSparkAppStart({ + id: data.id, + }); + if (res.data) { + Swal.fire({ + icon: 'success', + title: t('spark.app.operation.starting'), + showConfirmButton: false, + timer: 2000, + }); + optionApps.starting.set(data.id, new Date().getTime()); + } else { + Swal.fire({ + title: 'Failed', + icon: 'error', + width: exceptionPropWidth(), + html: + '
 startup failed, ' +
+            res.message.replaceAll(/\[StreamPark]/g, '') +
+            '
', + showCancelButton: true, + confirmButtonColor: '#55BDDDFF', + confirmButtonText: 'Detail', + cancelButtonText: 'Close', + }).then((isConfirm: Recordable) => { + if (isConfirm.value) { + router.push({ + path: '/spark/app/detail', + query: { appId: data.id }, + }); + } + }); + } + } catch (error) { + console.error(error); + } + } + async function handleFetchBuildDetail(app: Recordable) { + const res = await fetchBuildProgressDetail(app.id); + appBuildDetail.pipeline = res.pipeline; + appBuildDetail.docker = res.docker; + } + + function handleCanStop(app: Recordable) { + const optionTime = new Date(app['optionTime']).getTime(); + const nowTime = new Date().getTime(); + if (nowTime - optionTime >= 60 * 1000) { + const state = app['optionState']; + if (state === OptionStateEnum.NONE) { + return [AppStateEnum.STARTING, AppStateEnum.MAPPING].includes(app.state) || false; + } + return true; + } + return false; + } + function handleAbort(app: Recordable) { + let option = 'starting'; + const optionState = app['optionState']; + const stateMap = { + [AppStateEnum.STARTING]: 'starting', + }; + const optionStateMap = { + [OptionStateEnum.RELEASING]: 'releasing', + [OptionStateEnum.STARTING]: 'starting', + }; + if (optionState === OptionStateEnum.NONE) { + option = stateMap[app.state]; + } else { + option = optionStateMap[optionState]; + } + Swal.fire({ + title: 'Are you sure?', + text: `current job is ${option}, are you sure abort the job?`, + icon: 'warning', + showCancelButton: true, + confirmButtonText: 'Yes, abort job!', + denyButtonText: `No, cancel`, + confirmButtonColor: '#d33', + cancelButtonColor: '#3085d6', + }).then(async (result) => { + if (result.isConfirmed) { + Swal.fire('abort job', '', 'success'); + const res = await fetchSparkAppForcedStop({ + id: app.id, + }); + if (res) { + createMessage.success('abort job starting'); + } + return Promise.resolve(); + } + }); + } /* copy application */ - function handleCopy(item: Recordable) { + function handleCopy(item) { const validateStatus = ref<'' | 'error' | 'validating' | 'success' | 'warning'>(''); let help = ''; let copyAppName: string | undefined = ''; @@ -74,14 +285,14 @@ export const useSparkAction = () => { return Promise.reject('copy application error'); } //2) check name - const params = { jobName: copyAppName }; + const params = { appName: copyAppName }; const resp = await fetchCheckSparkName(params); const code = parseInt(resp); - if (code === AppExistsStateEnum.NO) { + if (code === 0) { try { const { data } = await fetchCopySparkApp({ id: item.id, - jobName: copyAppName, + appName: copyAppName, }); const status = data.status || 'error'; if (status === 'success') { @@ -103,11 +314,11 @@ export const useSparkAction = () => { } } else { validateStatus.value = 'error'; - if (code === AppExistsStateEnum.IN_DB) { + if (code === 1) { help = t('flink.app.addAppTips.appNameNotUniqueMessage'); - } else if (code === AppExistsStateEnum.IN_YARN) { + } else if (code === 2) { help = t('flink.app.addAppTips.appNameExistsInYarnMessage'); - } else if (code === AppExistsStateEnum.IN_KUBERNETES) { + } else if (code === 3) { help = t('flink.app.addAppTips.appNameExistsInK8sMessage'); } else { help = t('flink.app.addAppTips.appNameNotValid'); @@ -144,6 +355,15 @@ export const useSparkAction = () => { + {[ExecModeEnum.YARN_CLIENT, ExecModeEnum.YARN_CLUSTER].includes(app.executionMode) && ( + + + + )} { await fetchSparkMapping({ id: app.id, appId: formValue.appId, - jobId: formValue.jobId, }); Swal.fire({ icon: 'success', @@ -185,6 +404,10 @@ export 
const useSparkAction = () => { }); return { + handleCheckReleaseApp, + handleAppCheckStart, + handleCanStop, + handleAbort, handleCopy, handleMapping, users, diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkColumns.ts b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkColumns.ts index da31b32806..31f9709e76 100644 --- a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkColumns.ts +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkColumns.ts @@ -25,8 +25,8 @@ const { t } = useI18n(); export const useSparkColumns = () => { // app table column width const tableColumnWidth = ref({ - jobName: 250, - flinkVersion: 110, + appName: 250, + sparkVersion: 110, tags: 150, state: 120, release: 190, @@ -46,56 +46,65 @@ export const useSparkColumns = () => { const getAppColumns = computed((): BasicColumn[] => [ { - title: t('flink.app.appName'), - dataIndex: 'jobName', + title: t('spark.app.appName'), + dataIndex: 'appName', align: 'left', fixed: 'left', resizable: true, - width: unref(tableColumnWidth).jobName, + width: unref(tableColumnWidth).appName, }, - { title: t('flink.app.flinkVersion'), dataIndex: 'flinkVersion' }, - { title: t('flink.app.tags'), ellipsis: true, dataIndex: 'tags', width: 150 }, { - title: t('flink.app.runStatus'), + title: t('spark.app.sparkVersion'), + dataIndex: 'sparkVersion', + width: unref(tableColumnWidth).sparkVersion, + }, + { + title: t('spark.app.tags'), + ellipsis: true, + dataIndex: 'tags', + width: unref(tableColumnWidth).tags, + }, + { + title: t('spark.app.runStatus'), dataIndex: 'state', fixed: 'right', width: unref(tableColumnWidth).state, filters: [ - { text: t('flink.app.runStatusOptions.added'), value: String(AppStateEnum.ADDED) }, - { text: t('flink.app.runStatusOptions.starting'), value: String(AppStateEnum.STARTING) }, - { text: t('flink.app.runStatusOptions.running'), value: String(AppStateEnum.RUNNING) }, - { text: t('flink.app.runStatusOptions.failed'), value: String(AppStateEnum.FAILED) }, - { text: t('flink.app.runStatusOptions.canceled'), value: String(AppStateEnum.CANCELED) }, - { text: t('flink.app.runStatusOptions.finished'), value: String(AppStateEnum.FINISHED) }, - { text: t('flink.app.runStatusOptions.suspended'), value: String(AppStateEnum.SUSPENDED) }, - { text: t('flink.app.runStatusOptions.lost'), value: String(AppStateEnum.LOST) }, - { text: t('flink.app.runStatusOptions.silent'), value: String(AppStateEnum.SILENT) }, + { text: t('spark.app.runStatusOptions.added'), value: String(AppStateEnum.ADDED) }, + { text: t('spark.app.runStatusOptions.starting'), value: String(AppStateEnum.STARTING) }, + { text: t('spark.app.runStatusOptions.running'), value: String(AppStateEnum.RUNNING) }, + { text: t('spark.app.runStatusOptions.failed'), value: String(AppStateEnum.FAILED) }, + { text: t('spark.app.runStatusOptions.canceled'), value: String(AppStateEnum.CANCELED) }, + { text: t('spark.app.runStatusOptions.finished'), value: String(AppStateEnum.FINISHED) }, + { text: t('spark.app.runStatusOptions.suspended'), value: String(AppStateEnum.SUSPENDED) }, + { text: t('spark.app.runStatusOptions.lost'), value: String(AppStateEnum.LOST) }, + { text: t('spark.app.runStatusOptions.silent'), value: String(AppStateEnum.SILENT) }, { - text: t('flink.app.runStatusOptions.terminated'), + text: t('spark.app.runStatusOptions.terminated'), value: String(AppStateEnum.TERMINATED), }, ], }, { - title: t('flink.app.releaseBuild'), + 
title: t('spark.app.releaseBuild'), dataIndex: 'release', width: unref(tableColumnWidth).release, fixed: 'right', }, { - title: t('flink.app.duration'), + title: t('spark.app.duration'), dataIndex: 'duration', sorter: true, width: unref(tableColumnWidth).duration, customRender: ({ value }) => dateToDuration(value), }, { - title: t('flink.app.modifiedTime'), + title: t('spark.app.modifiedTime'), dataIndex: 'modifyTime', sorter: true, width: unref(tableColumnWidth).modifyTime, }, - { title: t('flink.app.owner'), dataIndex: 'nickName', width: unref(tableColumnWidth).nickName }, + { title: t('spark.app.owner'), dataIndex: 'nickName', width: unref(tableColumnWidth).nickName }, ]); return { getAppColumns, onTableColumnResize, tableColumnWidth }; }; diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useFlinkRender.tsx b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkRender.tsx similarity index 98% rename from streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useFlinkRender.tsx rename to streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkRender.tsx index b25dfede2b..b2ab593e07 100644 --- a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useFlinkRender.tsx +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkRender.tsx @@ -27,7 +27,7 @@ import { useI18n } from '/@/hooks/web/useI18n'; import { fetchYarnQueueList } from '/@/api/setting/yarnQueue'; import { ApiSelect } from '/@/components/Form'; import { ResourceTypeEnum } from '/@/views/resource/upload/upload.data'; -import { handleSparkConfTemplate } from '/@/api/spark/conf'; +import { fetchSparkConfTemplate } from '/@/api/spark/conf'; const { t } = useI18n(); /* render input dropdown component */ @@ -161,7 +161,7 @@ export const renderIsSetConfig = ( configOverride: unref(model.configOverride), }); } else { - const res = await handleSparkConfTemplate(); + const res = await fetchSparkConfTemplate(); openConfDrawer(true, { configOverride: decodeByBase64(res), }); diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkTableAction.ts b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkTableAction.ts index f5488ffffe..19a4875317 100644 --- a/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkTableAction.ts +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/hooks/useSparkTableAction.ts @@ -16,50 +16,132 @@ */ import { computed, onMounted, ref } from 'vue'; import { useRouter } from 'vue-router'; - +import { handleIsStart } from '../utils'; import { ActionItem, FormProps } from '/@/components/Table'; import { useMessage } from '/@/hooks/web/useMessage'; -import { AppStateEnum } from '/@/enums/flinkEnum'; -import { JobTypeEnum } from '/@/enums/sparkEnum'; +import { AppStateEnum, JobTypeEnum, OptionStateEnum } from '/@/enums/sparkEnum'; import { usePermission } from '/@/hooks/web/usePermission'; import { useI18n } from '/@/hooks/web/useI18n'; -import { isFunction, isObject } from '/@/utils/is'; -import { fetchSparkAppRecord, fetchSparkAppRemove } from '/@/api/spark/app'; +import { isFunction, isNullAndUnDef, isObject } from '/@/utils/is'; +import { + fetchSparkAppForcedStop, + fetchSparkAppRecord, + fetchSparkAppRemove, +} from '/@/api/spark/app'; import type { SparkApplication } from '/@/api/spark/app.type'; import { useSparkAction } from './useSparkAction'; +import { useDrawer 
} from '/@/components/Drawer'; +import { ReleaseStateEnum } from '/@/enums/flinkEnum'; // Create form configurations and operation functions in the application table -export const useSparkTableAction = (handlePageDataReload: Fn) => { +export const useSparkTableAction = (handlePageDataReload: Fn, optionApps: Recordable) => { const { t } = useI18n(); const tagsOptions = ref([]); const router = useRouter(); - const { createMessage } = useMessage(); + const { createMessage, Swal } = useMessage(); const { hasPermission } = usePermission(); - const { handleCopy, handleMapping, users } = useSparkAction(); + // const [registerStartModal, { openModal: openStartModal }] = useModal(); + // const [registerStopModal, { openModal: openStopModal }] = useModal(); + // const [registerLogModal, { openModal: openLogModal }] = useModal(); + const [registerBuildDrawer, { openDrawer: openBuildDrawer }] = useDrawer(); + const { + handleCheckReleaseApp, + handleAppCheckStart, + handleCanStop, + handleAbort, + handleCopy, + handleMapping, + users, + } = useSparkAction(optionApps); /* Operation button list */ - function getActionList(record: SparkApplication, _currentPageNo: number): ActionItem[] { + function getActionList(record: SparkApplication, currentPageNo: number): ActionItem[] { return [ { - label: t('flink.app.operation.copy'), + tooltip: { title: t('spark.app.operation.edit') }, + auth: 'app:update', + icon: 'clarity:note-edit-line', + onClick: handleEdit.bind(null, record, currentPageNo), + }, + { + tooltip: { title: t('spark.app.operation.release') }, + ifShow: + !isNullAndUnDef(record.release) && + [ + ReleaseStateEnum.FAILED, + ReleaseStateEnum.NEED_RELEASE, + ReleaseStateEnum.NEED_ROLLBACK, + ].includes(record.release) && + record['optionState'] == OptionStateEnum.NONE, + auth: 'app:release', + icon: 'ant-design:cloud-upload-outlined', + onClick: handleCheckReleaseApp.bind(null, record), + }, + { + tooltip: { title: t('spark.app.operation.releaseDetail') }, + ifShow: + (!isNullAndUnDef(record.release) && + [ReleaseStateEnum.FAILED, ReleaseStateEnum.RELEASING].includes(record.release)) || + record['optionState'] == OptionStateEnum.RELEASING, + auth: 'app:release', + icon: 'ant-design:container-outlined', + onClick: () => openBuildDrawer(true, { appId: record.id }), + }, + { + tooltip: { title: t('spark.app.operation.start') }, + ifShow: handleIsStart(record, optionApps), + auth: 'app:start', + icon: 'ant-design:play-circle-outlined', + onClick: handleAppCheckStart.bind(null, record), + }, + { + tooltip: { title: t('spark.app.operation.cancel') }, + ifShow: + record.state == AppStateEnum.RUNNING && record['optionState'] == OptionStateEnum.NONE, + auth: 'app:cancel', + icon: 'ant-design:pause-circle-outlined', + popConfirm: { + title: t('spark.app.operation.cancel'), + placement: 'left', + confirm: handleCancel.bind(null, record), + }, + }, + { + tooltip: { title: t('spark.app.operation.detail') }, + auth: 'app:detail', + icon: 'carbon:data-view-alt', + onClick: handleDetail.bind(null, record), + }, + // { + // tooltip: { title: t('spark.app.operation.startLog') }, + // auth: 'app:detail', + // icon: 'ant-design:code-outlined', + // onClick: () => openLogModal(true, { app: record }), + // }, + { + tooltip: { title: t('spark.app.operation.abort') }, + ifShow: handleCanStop(record), + auth: 'app:cancel', + icon: 'ant-design:pause-circle-outlined', + onClick: handleAbort.bind(null, record), + }, + { + label: t('spark.app.operation.copy'), auth: 'app:copy', icon: 'ant-design:copy-outlined', onClick:
handleCopy.bind(null, record), }, { - label: t('flink.app.operation.remapping'), + label: t('spark.app.operation.remapping'), ifShow: [ AppStateEnum.ADDED, - AppStateEnum.FAILED, - AppStateEnum.CANCELED, + AppStateEnum.FAILED, + AppStateEnum.STOPPING, AppStateEnum.KILLED, AppStateEnum.SUCCEEDED, - AppStateEnum.TERMINATED, - AppStateEnum.POS_TERMINATED, - AppStateEnum.FINISHED, - AppStateEnum.SUSPENDED, - AppStateEnum.LOST, + AppStateEnum.FINISHED, + AppStateEnum.LOST, ].includes(record.state as AppStateEnum), auth: 'app:mapping', icon: 'ant-design:deployment-unit-outlined', @@ -67,21 +149,20 @@ }, { popConfirm: { - title: t('flink.app.operation.deleteTip'), + title: t('spark.app.operation.deleteTip'), confirm: handleDelete.bind(null, record), }, label: t('common.delText'), - ifShow: [ - AppStateEnum.ADDED, - AppStateEnum.FAILED, - AppStateEnum.CANCELED, - AppStateEnum.FINISHED, - AppStateEnum.LOST, - AppStateEnum.TERMINATED, - AppStateEnum.POS_TERMINATED, - AppStateEnum.SUCCEEDED, - AppStateEnum.KILLED, - ].includes(record.state as AppStateEnum), + ifShow: + !isNullAndUnDef(record.state) && + [ + AppStateEnum.ADDED, + AppStateEnum.FAILED, + AppStateEnum.FINISHED, + AppStateEnum.LOST, + AppStateEnum.SUCCEEDED, + AppStateEnum.KILLED, + ].includes(record.state), auth: 'app:delete', icon: 'ant-design:delete-outlined', color: 'error', @@ -122,6 +203,35 @@ }; } + /* Click to edit */ + function handleEdit(app: SparkApplication, currentPageNo: number) { + // Record the current page number + sessionStorage.setItem('sparkAppPageNo', String(currentPageNo || 1)); + router.push({ path: '/spark/app/edit', query: { appId: app.id } }); + } + + /* Click for details */ + function handleDetail(app: SparkApplication) { + router.push({ path: '/spark/app/detail', query: { appId: app.id } }); + } + + // click stop application + async function handleCancel(app: SparkApplication) { + if (!optionApps.stopping.get(app.id) || app['optionState'] == OptionStateEnum.NONE) { + await fetchSparkAppForcedStop({ + id: app.id, + }); + Swal.fire({ + icon: 'success', + title: t('flink.app.operation.canceling'), + showConfirmButton: false, + timer: 2000, + }); + optionApps.stopping.set(app.id, new Date().getTime()); + // openStopModal(true, { application: app }); + } + } + /* Click to delete */ async function handleDelete(app: SparkApplication) { const hide = createMessage.loading('deleting', 0); @@ -147,22 +257,22 @@ }, schemas: [ { - label: t('flink.app.tags'), + label: t('spark.app.tags'), field: 'tags', component: 'Select', componentProps: { - placeholder: t('flink.app.tags'), + placeholder: t('spark.app.tags'), showSearch: true, options: tagsOptions.value.map((t: Recordable) => ({ label: t, value: t })), onChange: handlePageDataReload.bind(null, false), }, }, { - label: t('flink.app.owner'), + label: t('spark.app.owner'), field: 'userId', component: 'Select', componentProps: { - placeholder: t('flink.app.owner'), + placeholder: t('spark.app.owner'), showSearch: true, options: users.value.map((u: Recordable) => { return { label: u.nickName || u.username, value: u.userId }; @@ -171,11 +281,11 @@ }, }, { - label: t('flink.app.jobType'), + label: t('spark.app.jobType'), field: 'jobType', component: 'Select', componentProps: { - placeholder: t('flink.app.jobType'), +
placeholder: t('spark.app.jobType'), showSearch: true, options: [ { label: 'JAR', value: JobTypeEnum.JAR }, @@ -186,11 +296,11 @@ export const useSparkTableAction = (handlePageDataReload: Fn) => { }, }, { - label: t('flink.app.searchName'), + label: t('spark.app.searchName'), field: 'jobName', component: 'Input', componentProps: { - placeholder: t('flink.app.searchName'), + placeholder: t('spark.app.searchName'), onChange: handlePageDataReload.bind(null, false), onSearch: handlePageDataReload.bind(null, false), }, @@ -230,5 +340,14 @@ export const useSparkTableAction = (handlePageDataReload: Fn) => { onMounted(() => { handleInitTagsOptions(); }); - return { getTableActions, formConfig }; + return { + // registerStartModal, + // registerStopModal, + // registerLogModal, + registerBuildDrawer, + getTableActions, + formConfig, + tagsOptions, + users, + }; }; diff --git a/streampark-console/streampark-console-webapp/src/views/spark/app/index.vue b/streampark-console/streampark-console-webapp/src/views/spark/app/index.vue index c660c040ab..1987fac0d9 100644 --- a/streampark-console/streampark-console-webapp/src/views/spark/app/index.vue +++ b/streampark-console/streampark-console-webapp/src/views/spark/app/index.vue @@ -15,17 +15,30 @@ limitations under the License. -->
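The index.vue hunk above arrives with its template changes elided, but the hook signatures make the intended wiring clear: the page component owns the shared `optionApps` operation maps and hands them to useSparkTableAction, which forwards them to useSparkAction. Below is a minimal sketch of that wiring under those assumptions; the `release` map is borrowed from the Flink page by analogy, and the reload callback is a placeholder, not the actual file.

```ts
// Sketch of the wiring inside index.vue's <script setup>; not the actual file.
// Each map records appId -> timestamp of the last requested operation so the
// hooks can debounce repeated clicks while the list is being polled. Only
// `starting` and `stopping` are consulted in the hunks above; `release` is an
// assumed companion map.
import { useSparkTableAction } from './hooks/useSparkTableAction';

const optionApps = {
  starting: new Map<string, number>(),
  stopping: new Map<string, number>(),
  release: new Map<string, number>(),
};

// Placeholder reload callback: the real page re-queries fetchSparkAppRecord
// with the current pagination and search-form state.
function handlePageDataReload(_polling: boolean) {
  /* re-fetch the application list here */
}

const { getTableActions, formConfig, registerBuildDrawer } = useSparkTableAction(
  handlePageDataReload,
  optionApps,
);

// getTableActions feeds the table's action column, formConfig drives the
// search form, and registerBuildDrawer binds the release-progress drawer.
```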