Commit bfbbcb3

Include platform and system statistics in profiler viewer payload

lucko committed Dec 28, 2021
1 parent 9732064 commit bfbbcb3
Showing 44 changed files with 729 additions and 295 deletions.
2 changes: 1 addition & 1 deletion build.gradle
@@ -13,7 +13,7 @@ subprojects {
     apply plugin: 'java-library'
 
     ext {
-        pluginVersion = '1.6.5'
+        pluginVersion = '1.7.0'
         pluginDescription = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.'
     }
 
BukkitPlatformInfo.java
@@ -20,14 +20,14 @@
 
 package me.lucko.spark.bukkit;
 
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
 
 import org.bukkit.Server;
 
 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 
-public class BukkitPlatformInfo extends AbstractPlatformInfo {
+public class BukkitPlatformInfo implements PlatformInfo {
     private final Server server;
 
     public BukkitPlatformInfo(Server server) {
BungeeCordPlatformInfo.java
@@ -20,11 +20,11 @@
 
 package me.lucko.spark.bungeecord;
 
-import me.lucko.spark.common.platform.AbstractPlatformInfo;
+import me.lucko.spark.common.platform.PlatformInfo;
 
 import net.md_5.bungee.api.ProxyServer;
 
-public class BungeeCordPlatformInfo extends AbstractPlatformInfo {
+public class BungeeCordPlatformInfo implements PlatformInfo {
     private final ProxyServer proxy;
 
     public BungeeCordPlatformInfo(ProxyServer proxy) {
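
Note: with AbstractPlatformInfo gone, both platform classes implement PlatformInfo directly. A minimal sketch of how the interface could absorb the old base class's helper as a default method — the getter names and the Data fields are assumptions inferred from the toData().toProto() call sites later in this commit, not code shown in the diff:

public interface PlatformInfo {
    String getName();
    String getVersion();
    String getMinecraftVersion();

    // assumed: the helper previously on AbstractPlatformInfo becomes a default method,
    // so implementors no longer need a shared base class
    default Data toData() {
        return new Data(getName(), getVersion(), getMinecraftVersion());
    }

    final class Data {
        private final String name;
        private final String version;
        private final String minecraftVersion;

        public Data(String name, String version, String minecraftVersion) {
            this.name = name;
            this.version = version;
            this.minecraftVersion = minecraftVersion;
        }

        // toProto() omitted; this commit only shows it being called, not its body
    }
}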
4 changes: 2 additions & 2 deletions spark-common/build.gradle
@@ -32,8 +32,8 @@ dependencies {
 
 processResources {
     from(sourceSets.main.resources.srcDirs) {
-        include 'linux/libasyncProfiler.so'
-        include 'macosx/libasyncProfiler.so'
+        include 'spark/linux/libasyncProfiler.so'
+        include 'spark/macosx/libasyncProfiler.so'
     }
 }
 
SparkPlatform.java
@@ -41,6 +41,7 @@
 import me.lucko.spark.common.monitor.cpu.CpuMonitor;
 import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
 import me.lucko.spark.common.monitor.tick.TickStatistics;
+import me.lucko.spark.common.platform.PlatformStatisticsProvider;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.tick.TickReporter;
 import me.lucko.spark.common.util.BytebinClient;
@@ -96,6 +97,7 @@ public class SparkPlatform {
     private final TickHook tickHook;
     private final TickReporter tickReporter;
     private final TickStatistics tickStatistics;
+    private final PlatformStatisticsProvider statisticsProvider;
     private Map<String, GarbageCollectorStatistics> startupGcStatistics = ImmutableMap.of();
     private long serverNormalOperationStartTime;
     private final AtomicBoolean enabled = new AtomicBoolean(false);
@@ -132,6 +134,7 @@ public SparkPlatform(SparkPlugin plugin) {
         this.tickHook = plugin.createTickHook();
         this.tickReporter = plugin.createTickReporter();
         this.tickStatistics = this.tickHook != null ? new TickStatistics() : null;
+        this.statisticsProvider = new PlatformStatisticsProvider(this);
     }
 
     public void enable() {
@@ -214,6 +217,10 @@ public TickReporter getTickReporter() {
         return this.tickReporter;
     }
 
+    public PlatformStatisticsProvider getStatisticsProvider() {
+        return this.statisticsProvider;
+    }
+
     public ClassSourceLookup createClassSourceLookup() {
         return this.plugin.createClassSourceLookup();
     }
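
Note: PlatformStatisticsProvider is new in this commit but its body is not shown above. A hypothetical skeleton inferred from the call sites (the constructor argument here, and getSystemStatistics() / getPlatformStatistics(null) in HeapDumpSummary below); the proto message locations and the method bodies are assumptions:

import java.util.Map;

import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.monitor.memory.GarbageCollectorStatistics;
import me.lucko.spark.proto.SparkProtos.PlatformStatistics;
import me.lucko.spark.proto.SparkProtos.SystemStatistics;

public class PlatformStatisticsProvider {
    private final SparkPlatform platform;

    public PlatformStatisticsProvider(SparkPlatform platform) {
        this.platform = platform;
    }

    public SystemStatistics getSystemStatistics() {
        // assumed: gathers CPU / memory / disk / OS readings for the viewer payload
        return SystemStatistics.newBuilder().build();
    }

    public PlatformStatistics getPlatformStatistics(Map<String, GarbageCollectorStatistics> startingGcStatistics) {
        // assumed: gathers TPS / MSPT / GC readings; callers pass null when no
        // baseline GC statistics exist (as the heap-summary path does)
        return PlatformStatistics.newBuilder().build();
    }
}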
GarbageCollectorInfo.java
@@ -36,10 +36,8 @@ public GarbageCollectorInfo(String name, GarbageCollectorStatistics stats, long
         this.name = name;
         this.totalCollections = stats.getCollectionCount();
         this.totalTime = stats.getCollectionTime();
-
-        double totalTimeDouble = this.totalTime;
-        this.averageTime = this.totalCollections == 0 ? 0 : totalTimeDouble / this.totalCollections;
-        this.averageFrequency = this.totalCollections == 0 ? 0 : (long) ((serverUptime - totalTimeDouble) / this.totalCollections);
+        this.averageTime = stats.getAverageCollectionTime();
+        this.averageFrequency = stats.getAverageCollectionFrequency(serverUptime);
     }
 
     @Override
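
Note: the averaging math moves from this constructor into GarbageCollectorStatistics itself. A sketch of the two new helpers, assuming they carry the deleted arithmetic over unchanged (collectionCount/collectionTime back the existing getCollectionCount()/getCollectionTime() getters):

// inside GarbageCollectorStatistics
public double getAverageCollectionTime() {
    return this.collectionCount == 0 ? 0 : (double) this.collectionTime / this.collectionCount;
}

public long getAverageCollectionFrequency(long serverUptime) {
    return this.collectionCount == 0 ? 0 : (long) ((serverUptime - (double) this.collectionTime) / this.collectionCount);
}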
HealthModule.java
@@ -30,6 +30,7 @@
 import me.lucko.spark.common.command.sender.CommandSender;
 import me.lucko.spark.common.command.tabcomplete.TabCompleter;
 import me.lucko.spark.common.monitor.cpu.CpuMonitor;
+import me.lucko.spark.common.monitor.disk.DiskUsage;
 import me.lucko.spark.common.monitor.tick.TickStatistics;
 import me.lucko.spark.common.util.FormatUtil;
 import me.lucko.spark.common.util.RollingAverage;
@@ -38,15 +39,11 @@
 import net.kyori.adventure.text.TextComponent;
 import net.kyori.adventure.text.format.TextColor;
 
-import java.io.IOException;
 import java.lang.management.ManagementFactory;
 import java.lang.management.MemoryMXBean;
 import java.lang.management.MemoryPoolMXBean;
 import java.lang.management.MemoryType;
 import java.lang.management.MemoryUsage;
-import java.nio.file.FileStore;
-import java.nio.file.Files;
-import java.nio.file.Paths;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.function.Consumer;
@@ -65,8 +62,6 @@
 
 public class HealthModule implements CommandModule {
 
-    private static final double MSPT_95_PERCENTILE = 0.95d;
-
     @Override
     public void registerCommands(Consumer<Command> consumer) {
         consumer.accept(Command.builder()
@@ -150,11 +145,7 @@ private static void healthReport(SparkPlatform platform, CommandSender sender, C
             addDetailedMemoryStats(report, memoryMXBean);
         }
 
-        try {
-            addDiskStats(report);
-        } catch (IOException e) {
-            e.printStackTrace();
-        }
+        addDiskStats(report);
 
         resp.reply(report);
     }
@@ -309,10 +300,14 @@ private static void addDetailedMemoryStats(List<Component> report, MemoryMXBean
         }
     }
 
-    private static void addDiskStats(List<Component> report) throws IOException {
-        FileStore fileStore = Files.getFileStore(Paths.get("."));
-        long totalSpace = fileStore.getTotalSpace();
-        long usedSpace = totalSpace - fileStore.getUsableSpace();
+    private static void addDiskStats(List<Component> report) {
+        long total = DiskUsage.getTotal();
+        long used = DiskUsage.getUsed();
+
+        if (total == 0 || used == 0) {
+            return;
+        }
+
         report.add(text()
                 .append(text(">", DARK_GRAY, BOLD))
                 .append(space())
@@ -321,18 +316,18 @@ private static void addDiskStats(List<Component> report) throws IOException {
         );
         report.add(text()
                 .content(" ")
-                .append(text(FormatUtil.formatBytes(usedSpace), WHITE))
+                .append(text(FormatUtil.formatBytes(used), WHITE))
                 .append(space())
                 .append(text("/", GRAY))
                 .append(space())
-                .append(text(FormatUtil.formatBytes(totalSpace), WHITE))
+                .append(text(FormatUtil.formatBytes(total), WHITE))
                 .append(text(" "))
                 .append(text("(", GRAY))
-                .append(text(FormatUtil.percent(usedSpace, totalSpace), GREEN))
+                .append(text(FormatUtil.percent(used, total), GREEN))
                 .append(text(")", GRAY))
                 .build()
         );
-        report.add(text().content(" ").append(generateDiskUsageDiagram(usedSpace, totalSpace, 40)).build());
+        report.add(text().content(" ").append(generateDiskUsageDiagram(used, total, 40)).build());
        report.add(empty());
    }
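
Note: addDiskStats now reads from the new DiskUsage monitor and silently skips the section when a reading is unavailable. A hypothetical sketch of that helper, assuming it wraps the FileStore calls the old inline code made and reports 0 on failure (consistent with the total == 0 || used == 0 guard above); the real implementation is not shown in this diff:

package me.lucko.spark.common.monitor.disk;

import java.io.IOException;
import java.nio.file.FileStore;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public final class DiskUsage {
    private static final Path ROOT = Paths.get(".");

    private DiskUsage() {}

    public static long getTotal() {
        try {
            return Files.getFileStore(ROOT).getTotalSpace();
        } catch (IOException e) {
            return 0; // assumed failure convention, matching the caller's guard
        }
    }

    public static long getUsed() {
        try {
            FileStore fileStore = Files.getFileStore(ROOT);
            return fileStore.getTotalSpace() - fileStore.getUsableSpace();
        } catch (IOException e) {
            return 0;
        }
    }
}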

@@ -355,7 +350,7 @@ public static TextComponent formatTickDurations(RollingAverage average) {
                 .append(text('/', GRAY))
                 .append(formatTickDuration(average.median()))
                 .append(text('/', GRAY))
-                .append(formatTickDuration(average.percentile(MSPT_95_PERCENTILE)))
+                .append(formatTickDuration(average.percentile95th()))
                 .append(text('/', GRAY))
                 .append(formatTickDuration(average.max()))
                 .build();
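
Note: the inlined MSPT_95_PERCENTILE constant becomes a named method on RollingAverage. Assumed to be a one-line convenience wrapper over the existing percentile(double); the return type is inferred from the call site:

// inside RollingAverage -- assumed convenience method
public double percentile95th() {
    return percentile(0.95d);
}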
HeapAnalysisModule.java
@@ -30,28 +30,22 @@
 import me.lucko.spark.common.command.tabcomplete.TabCompleter;
 import me.lucko.spark.common.heapdump.HeapDump;
 import me.lucko.spark.common.heapdump.HeapDumpSummary;
+import me.lucko.spark.common.util.Compression;
 import me.lucko.spark.common.util.FormatUtil;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkHeapProtos;
 
 import net.kyori.adventure.text.event.ClickEvent;
 
-import org.tukaani.xz.LZMA2Options;
-import org.tukaani.xz.LZMAOutputStream;
-import org.tukaani.xz.XZOutputStream;
-
 import okhttp3.MediaType;
 
 import java.io.IOException;
-import java.io.InputStream;
-import java.io.OutputStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.util.Iterator;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicLong;
 import java.util.function.Consumer;
-import java.util.function.LongConsumer;
-import java.util.zip.GZIPOutputStream;
 
 import static net.kyori.adventure.text.Component.text;
 import static net.kyori.adventure.text.format.NamedTextColor.GOLD;
@@ -98,7 +92,7 @@ private static void heapSummary(SparkPlatform platform, CommandSender sender, Co
             return;
         }
 
-        SparkProtos.HeapData output = heapDump.toProto(platform.getPlugin().getPlatformInfo(), sender);
+        SparkHeapProtos.HeapData output = heapDump.toProto(platform, sender);
 
         boolean saveToFile = false;
         if (arguments.boolFlag("save-to-file")) {
@@ -175,11 +169,11 @@ private static void heapDump(SparkPlatform platform, CommandSender sender, Comma
         platform.getActivityLog().addToLog(Activity.fileActivity(sender, System.currentTimeMillis(), "Heap dump", file.toString()));
 
 
-        CompressionMethod compressionMethod = null;
+        Compression compressionMethod = null;
         Iterator<String> compressArgs = arguments.stringFlag("compress").iterator();
         if (compressArgs.hasNext()) {
             try {
-                compressionMethod = CompressionMethod.valueOf(compressArgs.next().toUpperCase());
+                compressionMethod = Compression.valueOf(compressArgs.next().toUpperCase());
             } catch (IllegalArgumentException e) {
                 // ignore
             }
@@ -194,7 +188,7 @@
         }
     }
 
-    private static void heapDumpCompress(SparkPlatform platform, CommandResponseHandler resp, Path file, CompressionMethod method) throws IOException {
+    private static void heapDumpCompress(SparkPlatform platform, CommandResponseHandler resp, Path file, Compression method) throws IOException {
         resp.broadcastPrefixed(text("Compressing heap dump, please wait..."));
 
         long size = Files.size(file);
@@ -244,71 +238,4 @@ private static void heapDumpCompress(SparkPlatform platform, CommandResponseHand
         );
     }
 
-    public enum CompressionMethod {
-        GZIP {
-            @Override
-            public Path compress(Path file, LongConsumer progressHandler) throws IOException {
-                Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".gz");
-                try (InputStream in = Files.newInputStream(file)) {
-                    try (OutputStream out = Files.newOutputStream(compressedFile)) {
-                        try (GZIPOutputStream compressionOut = new GZIPOutputStream(out, 1024 * 64)) {
-                            copy(in, compressionOut, progressHandler);
-                        }
-                    }
-                }
-                return compressedFile;
-            }
-        },
-        XZ {
-            @Override
-            public Path compress(Path file, LongConsumer progressHandler) throws IOException {
-                Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".xz");
-                try (InputStream in = Files.newInputStream(file)) {
-                    try (OutputStream out = Files.newOutputStream(compressedFile)) {
-                        try (XZOutputStream compressionOut = new XZOutputStream(out, new LZMA2Options())) {
-                            copy(in, compressionOut, progressHandler);
-                        }
-                    }
-                }
-                return compressedFile;
-            }
-        },
-        LZMA {
-            @Override
-            public Path compress(Path file, LongConsumer progressHandler) throws IOException {
-                Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".lzma");
-                try (InputStream in = Files.newInputStream(file)) {
-                    try (OutputStream out = Files.newOutputStream(compressedFile)) {
-                        try (LZMAOutputStream compressionOut = new LZMAOutputStream(out, new LZMA2Options(), true)) {
-                            copy(in, compressionOut, progressHandler);
-                        }
-                    }
-                }
-                return compressedFile;
-            }
-        };
-
-        public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;
-
-        private static long copy(InputStream from, OutputStream to, LongConsumer progress) throws IOException {
-            byte[] buf = new byte[1024 * 64];
-            long total = 0;
-            long iterations = 0;
-            while (true) {
-                int r = from.read(buf);
-                if (r == -1) {
-                    break;
-                }
-                to.write(buf, 0, r);
-                total += r;
-
-                // report progress every 5MB
-                if (iterations++ % ((1024 / 64) * 5) == 0) {
-                    progress.accept(total);
-                }
-            }
-            return total;
-        }
-    }
-
 }
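
Note: the enum is not deleted outright — the new me.lucko.spark.common.util.Compression import and the call sites above show it relocating to a shared utility class, presumably so it can be reused outside this command module. Its shape, mirroring the removed code (only the GZIP constant is reproduced here; XZ and LZMA follow the same pattern with XZOutputStream and LZMAOutputStream):

package me.lucko.spark.common.util;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.function.LongConsumer;
import java.util.zip.GZIPOutputStream;

public enum Compression {
    GZIP {
        @Override
        public Path compress(Path file, LongConsumer progressHandler) throws IOException {
            Path compressedFile = file.getParent().resolve(file.getFileName().toString() + ".gz");
            try (InputStream in = Files.newInputStream(file);
                 OutputStream out = Files.newOutputStream(compressedFile);
                 GZIPOutputStream compressionOut = new GZIPOutputStream(out, 1024 * 64)) {
                copy(in, compressionOut, progressHandler);
            }
            return compressedFile;
        }
    };

    public abstract Path compress(Path file, LongConsumer progressHandler) throws IOException;

    // unchanged from the removed CompressionMethod: streams the file through the
    // compressor, reporting progress every 5MB
    private static long copy(InputStream from, OutputStream to, LongConsumer progress) throws IOException {
        byte[] buf = new byte[1024 * 64];
        long total = 0;
        long iterations = 0;
        while (true) {
            int r = from.read(buf);
            if (r == -1) {
                break;
            }
            to.write(buf, 0, r);
            total += r;
            if (iterations++ % ((1024 / 64) * 5) == 0) {
                progress.accept(total);
            }
        }
        return total;
    }
}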
SamplerModule.java
@@ -40,7 +40,7 @@
 import me.lucko.spark.common.sampler.node.MergeMode;
 import me.lucko.spark.common.tick.TickHook;
 import me.lucko.spark.common.util.MethodDisambiguator;
-import me.lucko.spark.proto.SparkProtos;
+import me.lucko.spark.proto.SparkSamplerProtos;
 
 import net.kyori.adventure.text.event.ClickEvent;
 
@@ -305,7 +305,7 @@ private void profilerStop(SparkPlatform platform, CommandSender sender, CommandR
     }
 
     private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode, boolean saveToFileFlag) {
-        SparkProtos.SamplerData output = sampler.toProto(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup());
+        SparkSamplerProtos.SamplerData output = sampler.toProto(platform, resp.sender(), threadOrder, comment, mergeMode, platform.createClassSourceLookup());
 
         boolean saveToFile = false;
         if (saveToFileFlag) {
HeapDumpSummary.java
@@ -20,11 +20,11 @@
 
 package me.lucko.spark.common.heapdump;
 
+import me.lucko.spark.common.SparkPlatform;
 import me.lucko.spark.common.command.sender.CommandSender;
-import me.lucko.spark.common.platform.PlatformInfo;
-import me.lucko.spark.proto.SparkProtos;
-import me.lucko.spark.proto.SparkProtos.HeapData;
-import me.lucko.spark.proto.SparkProtos.HeapEntry;
+import me.lucko.spark.proto.SparkHeapProtos.HeapData;
+import me.lucko.spark.proto.SparkHeapProtos.HeapEntry;
+import me.lucko.spark.proto.SparkHeapProtos.HeapMetadata;
 
 import org.objectweb.asm.Type;
 
@@ -125,10 +125,12 @@ private HeapDumpSummary(List<Entry> entries) {
         this.entries = entries;
     }
 
-    public HeapData toProto(PlatformInfo platformInfo, CommandSender creator) {
+    public HeapData toProto(SparkPlatform platform, CommandSender creator) {
         HeapData.Builder proto = HeapData.newBuilder();
-        proto.setMetadata(SparkProtos.HeapMetadata.newBuilder()
-                .setPlatformMetadata(platformInfo.toData().toProto())
+        proto.setMetadata(HeapMetadata.newBuilder()
+                .setPlatformMetadata(platform.getPlugin().getPlatformInfo().toData().toProto())
+                .setPlatformStatistics(platform.getStatisticsProvider().getPlatformStatistics(null))
+                .setSystemStatistics(platform.getStatisticsProvider().getSystemStatistics())
                 .setCreator(creator.toData().toProto())
                 .build()
         );
(Diff truncated; the remaining changed files are not shown.)