Commit 62ccd7a
KYLIN-1741 Diagnosis logs to kylin.log
lidongsjtu committed Jul 12, 2016
1 parent 47a7cbe commit 62ccd7a
Showing 4 changed files with 214 additions and 7 deletions.
build/bin/diag.sh: 18 additions & 5 deletions
@@ -24,6 +24,9 @@ dir=$(dirname ${0})
 export KYLIN_HOME=${dir}/../
 source ${dir}/check-env.sh
 
+tomcat_root=${dir}/../tomcat
+export tomcat_root
+
 if [ $# -eq 1 ] || [ $# -eq 2 ]
 then
     patient="$1"
@@ -52,23 +55,33 @@ then
     diagJar=`ls ${KYLIN_HOME}/tool/kylin-tool-*.jar`
     if [ -f "${diagJar}" ]; then
         if [ -f "${KYLIN_HOME}/commit_SHA1" ]; then
-            export HBASE_CLASSPATH=${HBASE_CLASSPATH}:${diagJar}:${KYLIN_HOME}/lib/*
+            export HBASE_CLASSPATH=${HBASE_CLASSPATH}:${diagJar}:${KYLIN_HOME}/lib/*:${KYLIN_HOME}/conf
         else
-            export HBASE_CLASSPATH=${HBASE_CLASSPATH}:${KYLIN_HOME}/lib/*:${diagJar}
+            export HBASE_CLASSPATH=${HBASE_CLASSPATH}:${KYLIN_HOME}/lib/*:${diagJar}:${KYLIN_HOME}/conf
         fi
     else
         echo "missing diagnosis jar file."
         exit 1
     fi
 
     if [ ${#patient} -eq 36 ]; then
-        exec hbase ${KYLIN_EXTRA_START_OPTS} -Dlog4j.configuration=kylin-log4j.properties org.apache.kylin.tool.JobDiagnosisInfoCLI -jobId $patient -destDir $destDir
+        hbase ${KYLIN_EXTRA_START_OPTS} \
+            -Dlog4j.configuration=kylin-server-log4j.properties \
+            -Dcatalina.home=${tomcat_root} \
+            org.apache.kylin.tool.JobDiagnosisInfoCLI \
+            -jobId $patient \
+            -destDir $destDir
     else
-        exec hbase ${KYLIN_EXTRA_START_OPTS} -Dlog4j.configuration=kylin-log4j.properties org.apache.kylin.tool.DiagnosisInfoCLI -project -all -destDir $destDir
+        hbase ${KYLIN_EXTRA_START_OPTS} \
+            -Dlog4j.configuration=kylin-server-log4j.properties \
+            -Dcatalina.home=${tomcat_root} \
+            org.apache.kylin.tool.DiagnosisInfoCLI \
+            -project -all \
+            -destDir $destDir
     fi
 
     exit 0
 else
     echo "usage: diag.sh Project|JobId [target_path]"
     exit 1
 fi
 fi
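
For reference, a hedged usage sketch of the patched script; the project name, job UUID, and target path below are illustrative, not from the commit. An argument of exactly 36 characters is treated as a job UUID, anything else as a project name:

    # project diagnosis (second argument is the optional target_path)
    $KYLIN_HOME/bin/diag.sh learn_kylin /tmp/diag_dump

    # job diagnosis: a 36-character argument is parsed as a job UUID
    $KYLIN_HOME/bin/diag.sh 4b2d3b4f-8c1e-4a6b-9d0f-2f3a4b5c6d7e /tmp/diag_dump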
tool/src/main/java/org/apache/kylin/tool/AbstractInfoExtractor.java: 2 additions & 0 deletions
@@ -90,7 +90,9 @@ protected void execute(OptionsHelper optionsHelper) throws Exception {
         if (!isSubmodule && new File(exportDest).exists()) {
             exportDest = exportDest + packageName + "/";
         }
 
         exportDir = new File(exportDest);
+        FileUtils.forceMkdir(exportDir);
+
         if (!isSubmodule) {
             dumpBasicDiagInfo();
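The added FileUtils.forceMkdir call ensures the export directory exists before submodules write into it (the new JobInstanceExtractor writes JSON files directly under such a directory). A minimal sketch of the commons-io behavior relied on here, with a hypothetical path:

    import java.io.File;
    import java.io.IOException;
    import org.apache.commons.io.FileUtils;

    public class ForceMkdirDemo {
        public static void main(String[] args) throws IOException {
            File exportDir = new File("/tmp/diag_dump/demo"); // hypothetical path
            FileUtils.forceMkdir(exportDir); // creates missing parents; no-op if it already exists
            System.out.println(exportDir.isDirectory()); // prints "true"
        }
    }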
tool/src/main/java/org/apache/kylin/tool/DiagnosisInfoCLI.java: 12 additions & 2 deletions
@@ -53,7 +53,7 @@ public class DiagnosisInfoCLI extends AbstractInfoExtractor {
     private static final Option OPTION_INCLUDE_CLIENT = OptionBuilder.withArgName("includeClient").hasArg().isRequired(false).withDescription("Specify whether to include client info to extract. Default true.").create("includeClient");
 
     @SuppressWarnings("static-access")
-    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false).withDescription("Specify whether to include job output to extract. Default true.").create("includeJobs");
+    private static final Option OPTION_INCLUDE_JOB = OptionBuilder.withArgName("includeJobs").hasArg().isRequired(false).withDescription("Specify whether to include job info to extract. Default true.").create("includeJobs");
 
     public DiagnosisInfoCLI() {
         super();

@@ -82,6 +82,9 @@ private List<String> getProjects(String projectSeed) {
         } else {
             result.add(projectSeed);
         }
+        if (result.isEmpty()) {
+            throw new RuntimeException("No project to extract.");
+        }
         return result;
     }
@@ -95,11 +98,18 @@ protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
         String projectNames = StringUtils.join(getProjects(projectInput), ",");
 
         // export cube metadata
-        String[] cubeMetaArgs = { "-destDir", new File(exportDir, "metadata").getAbsolutePath(), "-project", projectNames, "-compress", "false", "-includeJobs", Boolean.toString(includeJob), "-submodule", "true" };
+        String[] cubeMetaArgs = { "-destDir", new File(exportDir, "metadata").getAbsolutePath(), "-project", projectNames, "-compress", "false", "-includeJobs", "false", "-submodule", "true" };
         CubeMetaExtractor cubeMetaExtractor = new CubeMetaExtractor();
         logger.info("CubeMetaExtractor args: " + Arrays.toString(cubeMetaArgs));
         cubeMetaExtractor.execute(cubeMetaArgs);
 
+        // extract all job instances
+        if (includeJob) {
+            String[] jobArgs = { "-destDir", new File(exportDir, "jobs").getAbsolutePath(), "-compress", "false", "-submodule", "true" };
+            JobInstanceExtractor jobInstanceExtractor = new JobInstanceExtractor();
+            jobInstanceExtractor.execute(jobArgs);
+        }
+
         // export HBase
         if (includeHBase) {
             String[] hbaseArgs = { "-destDir", new File(exportDir, "hbase").getAbsolutePath(), "-project", projectNames, "-compress", "false", "-submodule", "true" };
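
Job output is now gathered by the dedicated JobInstanceExtractor instead of through CubeMetaExtractor, which is why cubeMetaArgs hardcodes "-includeJobs" to "false". A minimal standalone sketch of the same call, with a hypothetical destination directory:

    String[] jobArgs = { "-destDir", "/tmp/diag_dump/jobs", "-compress", "false", "-submodule", "true" };
    new JobInstanceExtractor().execute(jobArgs); // dumps one <uuid>.json per recent job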
tool/src/main/java/org/apache/kylin/tool/JobInstanceExtractor.java: 182 additions & 0 deletions (new file)
@@ -0,0 +1,182 @@
package org.apache.kylin.tool;

import java.io.File;
import java.util.List;
import java.util.Map;

import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.common.util.OptionsHelper;
import org.apache.kylin.cube.model.CubeBuildTypeEnum;
import org.apache.kylin.engine.mr.CubingJob;
import org.apache.kylin.engine.mr.common.HadoopShellExecutable;
import org.apache.kylin.engine.mr.common.MapReduceExecutable;
import org.apache.kylin.engine.mr.steps.CubingExecutableUtil;
import org.apache.kylin.job.JobInstance;
import org.apache.kylin.job.common.ShellExecutable;
import org.apache.kylin.job.constant.JobStatusEnum;
import org.apache.kylin.job.constant.JobStepStatusEnum;
import org.apache.kylin.job.execution.AbstractExecutable;
import org.apache.kylin.job.execution.ExecutableState;
import org.apache.kylin.job.execution.Output;
import org.apache.kylin.job.manager.ExecutableManager;
import org.apache.kylin.metadata.project.ProjectInstance;
import org.apache.kylin.metadata.project.ProjectManager;
import org.apache.kylin.metadata.realization.RealizationType;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;

public class JobInstanceExtractor extends AbstractInfoExtractor {
    private static final Logger logger = LoggerFactory.getLogger(JobInstanceExtractor.class);

    @SuppressWarnings("static-access")
    private static final Option OPTION_PROJECT = OptionBuilder.withArgName("project").hasArg().isRequired(false).withDescription("Specify jobs in which project to extract").create("project");
    @SuppressWarnings("static-access")
    private static final Option OPTION_CUBE = OptionBuilder.withArgName("cube").hasArg().isRequired(false).withDescription("Specify jobs related to which cube to extract").create("cube");

    KylinConfig config;
    ProjectManager projectManager;
    ExecutableManager executableManager;

    public JobInstanceExtractor() {
        config = KylinConfig.getInstanceFromEnv();
        executableManager = ExecutableManager.getInstance(config);
        projectManager = ProjectManager.getInstance(config);

        packageType = "jobInstances";

        options.addOption(OPTION_PROJECT);
        options.addOption(OPTION_CUBE);
    }

    @Override
    protected void executeExtract(OptionsHelper optionsHelper, File exportDir) throws Exception {
        String cube = optionsHelper.hasOption(OPTION_CUBE) ? optionsHelper.getOptionValue(OPTION_CUBE) : null;
        String project = optionsHelper.hasOption(OPTION_PROJECT) ? optionsHelper.getOptionValue(OPTION_PROJECT) : null;

        // only extract jobs from the last 3 days
        long endTime = System.currentTimeMillis();
        long startTime = endTime - 3 * 24 * 3600 * 1000;
        List<JobInstance> jobInstances = listJobInstances(project, cube, startTime, endTime); // argument order matches the (project, cube) signature below
        logger.info("There are {} jobInstances to extract.", jobInstances.size());

        // dump each job instance as a standalone <uuid>.json file
        ObjectMapper mapper = new ObjectMapper();
        for (JobInstance jobInstance : jobInstances) {
            mapper.writeValue(new File(exportDir, jobInstance.getUuid() + ".json"), jobInstance);
        }
    }

    private List<JobInstance> listJobInstances(String project, String cube, long startTime, long endTime) {
        final List<JobInstance> result = Lists.newArrayList();
        final List<AbstractExecutable> executables = executableManager.getAllExecutables(startTime, endTime);
        final Map<String, Output> allOutputs = executableManager.getAllOutputs();
        // keep only cubing jobs, filtered by the optional cube and project arguments
        for (AbstractExecutable executable : executables) {
            if (executable instanceof CubingJob) {
                String cubeName = CubingExecutableUtil.getCubeName(executable.getParams());
                boolean shouldExtract = false;
                if (cube == null || cube.equalsIgnoreCase(cubeName)) {
                    if (project == null) {
                        shouldExtract = true;
                    } else {
                        ProjectInstance projectInstance = projectManager.getProject(project);
                        if (projectInstance != null && projectInstance.containsRealization(RealizationType.CUBE, cubeName)) {
                            shouldExtract = true;
                        }
                    }
                }

                if (shouldExtract) {
                    result.add(parseToJobInstance((CubingJob) executable, allOutputs));
                }
            }
        }
        return result;
    }

    private JobInstance parseToJobInstance(CubingJob cubeJob, Map<String, Output> outputs) {
        Output output = outputs.get(cubeJob.getId());
        final JobInstance result = new JobInstance();
        result.setName(cubeJob.getName());
        result.setRelatedCube(CubingExecutableUtil.getCubeName(cubeJob.getParams()));
        result.setRelatedSegment(CubingExecutableUtil.getSegmentId(cubeJob.getParams()));
        result.setLastModified(output.getLastModified());
        result.setSubmitter(cubeJob.getSubmitter());
        result.setUuid(cubeJob.getId());
        result.setType(CubeBuildTypeEnum.BUILD); // all extracted cubing jobs are reported as BUILD
        result.setStatus(parseToJobStatus(output.getState()));
        result.setMrWaiting(AbstractExecutable.getExtraInfoAsLong(output, CubingJob.MAP_REDUCE_WAIT_TIME, 0L) / 1000);
        result.setDuration(AbstractExecutable.getDuration(AbstractExecutable.getStartTime(output), AbstractExecutable.getEndTime(output)) / 1000);
        for (int i = 0; i < cubeJob.getTasks().size(); ++i) {
            AbstractExecutable task = cubeJob.getTasks().get(i);
            result.addStep(parseToJobStep(task, i, outputs.get(task.getId())));
        }
        return result;
    }

    private JobStatusEnum parseToJobStatus(ExecutableState state) {
        switch (state) {
        case READY:
            return JobStatusEnum.PENDING;
        case RUNNING:
            return JobStatusEnum.RUNNING;
        case ERROR:
            return JobStatusEnum.ERROR;
        case DISCARDED:
            return JobStatusEnum.DISCARDED;
        case SUCCEED:
            return JobStatusEnum.FINISHED;
        case STOPPED:
        default:
            throw new RuntimeException("invalid state:" + state);
        }
    }

    private JobInstance.JobStep parseToJobStep(AbstractExecutable task, int i, Output stepOutput) {
        Preconditions.checkNotNull(stepOutput);
        JobInstance.JobStep result = new JobInstance.JobStep();
        result.setId(task.getId());
        result.setName(task.getName());
        result.setSequenceID(i);
        result.setStatus(parseToJobStepStatus(stepOutput.getState()));
        for (Map.Entry<String, String> entry : stepOutput.getExtra().entrySet()) {
            if (entry.getKey() != null && entry.getValue() != null) {
                result.putInfo(entry.getKey(), entry.getValue());
            }
        }
        result.setExecStartTime(AbstractExecutable.getStartTime(stepOutput));
        result.setExecEndTime(AbstractExecutable.getEndTime(stepOutput));
        // the step command depends on the concrete executable type
        if (task instanceof ShellExecutable) {
            result.setExecCmd(((ShellExecutable) task).getCmd());
        }
        if (task instanceof MapReduceExecutable) {
            result.setExecCmd(((MapReduceExecutable) task).getMapReduceParams());
            result.setExecWaitTime(AbstractExecutable.getExtraInfoAsLong(stepOutput, MapReduceExecutable.MAP_REDUCE_WAIT_TIME, 0L) / 1000);
        }
        if (task instanceof HadoopShellExecutable) {
            result.setExecCmd(((HadoopShellExecutable) task).getJobParams());
        }
        return result;
    }

    private JobStepStatusEnum parseToJobStepStatus(ExecutableState state) {
        switch (state) {
        case READY:
            return JobStepStatusEnum.PENDING;
        case RUNNING:
            return JobStepStatusEnum.RUNNING;
        case ERROR:
            return JobStepStatusEnum.ERROR;
        case DISCARDED:
            return JobStepStatusEnum.DISCARDED;
        case SUCCEED:
            return JobStepStatusEnum.FINISHED;
        case STOPPED:
        default:
            throw new RuntimeException("invalid state:" + state);
        }
    }
}
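
Each extracted job ends up as a standalone <uuid>.json file under the "jobs" subdirectory of the diagnosis package. A hedged read-back sketch, assuming JobInstance round-trips through Jackson; the path and UUID are hypothetical, and the getters mirror the setters used above:

    ObjectMapper mapper = new ObjectMapper();
    File dumped = new File("/tmp/diag_dump/jobs/4b2d3b4f-8c1e-4a6b-9d0f-2f3a4b5c6d7e.json");
    JobInstance job = mapper.readValue(dumped, JobInstance.class);
    System.out.println(job.getName() + " -> " + job.getStatus());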
