Skip to content

Commit

Permalink
Extract class sources (plugin/mod names) and include in data payload
Browse files Browse the repository at this point in the history
  • Loading branch information
lucko committed May 30, 2021
1 parent 767995e commit f5bb628
Show file tree
Hide file tree
Showing 32 changed files with 684 additions and 36 deletions.
4 changes: 2 additions & 2 deletions build.gradle
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
allprojects {
group = 'me.lucko'
version = '1.5-SNAPSHOT'
version = '1.6-SNAPSHOT'

configurations {
compileClasspath // Fabric-loom needs this for remap jar for some reason
Expand All @@ -13,7 +13,7 @@ subprojects {
apply plugin: 'idea'

ext {
pluginVersion = '1.5.2'
pluginVersion = '1.6.0'
pluginDescription = 'spark is a performance profiling plugin/mod for Minecraft clients, servers and proxies.'
}

Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
/*
* This file is part of spark.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package me.lucko.spark.bukkit;

import me.lucko.spark.common.util.ClassSourceLookup;

import org.bukkit.plugin.java.JavaPlugin;

import java.lang.reflect.Method;

/**
 * A {@link ClassSourceLookup} for Bukkit servers which identifies the plugin
 * that provides a class by inspecting the class's {@code PluginClassLoader}.
 */
public class BukkitClassSourceLookup extends ClassSourceLookup.ByClassLoader {
    // org.bukkit.plugin.java.PluginClassLoader, resolved reflectively because
    // it is not part of the public Bukkit API
    private static final Class<?> PLUGIN_CLASS_LOADER_TYPE;
    // accessor for PluginClassLoader#getPlugin()
    private static final Method GET_PLUGIN_METHOD;

    static {
        try {
            PLUGIN_CLASS_LOADER_TYPE = Class.forName("org.bukkit.plugin.java.PluginClassLoader");
            GET_PLUGIN_METHOD = PLUGIN_CLASS_LOADER_TYPE.getDeclaredMethod("getPlugin");
            GET_PLUGIN_METHOD.setAccessible(true);
        } catch (ReflectiveOperationException e) {
            // abort class initialization if the server implementation lacks these members
            throw new ExceptionInInitializerError(e);
        }
    }

    @Override
    public String identify(ClassLoader loader) throws ReflectiveOperationException {
        // only plugin class loaders can be mapped back to a plugin name
        if (!PLUGIN_CLASS_LOADER_TYPE.isInstance(loader)) {
            return null;
        }
        JavaPlugin plugin = (JavaPlugin) GET_PLUGIN_METHOD.invoke(loader);
        return plugin.getName();
    }
}

Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.common.util.ClassSourceLookup;

import net.kyori.adventure.platform.bukkit.BukkitAudiences;

Expand Down Expand Up @@ -159,6 +160,11 @@ public TickReporter createTickReporter() {
return null;
}

// Uses the Bukkit PluginClassLoader to map profiled classes back to the
// plugin that provides them, so class sources can be included in the payload.
@Override
public ClassSourceLookup createClassSourceLookup() {
return new BukkitClassSourceLookup();
}

@Override
public PlatformInfo getPlatformInfo() {
return new BukkitPlatformInfo(getServer());
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
/*
* This file is part of spark.
*
* Copyright (c) lucko (Luck) <luck@lucko.me>
* Copyright (c) contributors
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/

package me.lucko.spark.bungeecord;

import me.lucko.spark.common.util.ClassSourceLookup;

import net.md_5.bungee.api.plugin.PluginDescription;

import java.lang.reflect.Field;

/**
 * A {@link ClassSourceLookup} for BungeeCord proxies which identifies the
 * plugin that provides a class by reading the {@code desc} field of the
 * class's {@code PluginClassloader}.
 */
public class BungeeCordClassSourceLookup extends ClassSourceLookup.ByClassLoader {
    // net.md_5.bungee.api.plugin.PluginClassloader, resolved reflectively
    // because it is package-private / not part of the public API
    private static final Class<?> PLUGIN_CLASS_LOADER_TYPE;
    // accessor for the PluginClassloader#desc field
    private static final Field DESC_FIELD;

    static {
        try {
            PLUGIN_CLASS_LOADER_TYPE = Class.forName("net.md_5.bungee.api.plugin.PluginClassloader");
            DESC_FIELD = PLUGIN_CLASS_LOADER_TYPE.getDeclaredField("desc");
            DESC_FIELD.setAccessible(true);
        } catch (ReflectiveOperationException e) {
            // abort class initialization if the proxy implementation lacks these members
            throw new ExceptionInInitializerError(e);
        }
    }

    @Override
    public String identify(ClassLoader loader) throws ReflectiveOperationException {
        // only plugin class loaders can be mapped back to a plugin name
        if (!PLUGIN_CLASS_LOADER_TYPE.isInstance(loader)) {
            return null;
        }
        PluginDescription desc = (PluginDescription) DESC_FIELD.get(loader);
        return desc.getName();
    }
}

Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@
import me.lucko.spark.common.SparkPlatform;
import me.lucko.spark.common.SparkPlugin;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.util.ClassSourceLookup;

import net.kyori.adventure.platform.bungeecord.BungeeAudiences;
import net.md_5.bungee.api.CommandSender;
Expand Down Expand Up @@ -78,6 +79,11 @@ public void executeAsync(Runnable task) {
getProxy().getScheduler().runAsync(BungeeCordSparkPlugin.this, task);
}

// Uses the BungeeCord PluginClassloader to map profiled classes back to the
// plugin that provides them, so class sources can be included in the payload.
@Override
public ClassSourceLookup createClassSourceLookup() {
return new BungeeCordClassSourceLookup();
}

@Override
public PlatformInfo getPlatformInfo() {
return new BungeeCordPlatformInfo(getProxy());
Expand Down
1 change: 1 addition & 0 deletions spark-common/build.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ dependencies {
compile 'com.google.protobuf:protobuf-javalite:3.15.6'
compile 'com.squareup.okhttp3:okhttp:3.14.1'
compile 'com.squareup.okio:okio:1.17.3'
compile 'net.bytebuddy:byte-buddy-agent:1.11.0'
compile 'org.tukaani:xz:1.8'
compile('net.kyori:adventure-api:4.7.0') {
exclude(module: 'checker-qual')
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.common.util.BytebinClient;
import me.lucko.spark.common.util.ClassSourceLookup;

import net.kyori.adventure.text.event.ClickEvent;

Expand Down Expand Up @@ -87,6 +88,7 @@ public class SparkPlatform {
private final ActivityLog activityLog;
private final TickHook tickHook;
private final TickReporter tickReporter;
private final ClassSourceLookup classSourceLookup;
private final TickStatistics tickStatistics;
private Map<String, GarbageCollectorStatistics> startupGcStatistics = ImmutableMap.of();
private long serverNormalOperationStartTime;
Expand Down Expand Up @@ -115,6 +117,7 @@ public SparkPlatform(SparkPlugin plugin) {

this.tickHook = plugin.createTickHook();
this.tickReporter = plugin.createTickReporter();
this.classSourceLookup = plugin.createClassSourceLookup();
this.tickStatistics = this.tickHook != null ? new TickStatistics() : null;
}

Expand Down Expand Up @@ -175,6 +178,10 @@ public TickReporter getTickReporter() {
return this.tickReporter;
}

// Accessor for the platform's class source lookup, created once from the
// plugin in the constructor and used when exporting sampler data.
public ClassSourceLookup getClassSourceLookup() {
return this.classSourceLookup;
}

public TickStatistics getTickStatistics() {
return this.tickStatistics;
}
Expand Down
10 changes: 10 additions & 0 deletions spark-common/src/main/java/me/lucko/spark/common/SparkPlugin.java
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.tick.TickHook;
import me.lucko.spark.common.tick.TickReporter;
import me.lucko.spark.common.util.ClassSourceLookup;

import java.nio.file.Path;
import java.util.stream.Stream;
Expand Down Expand Up @@ -101,6 +102,15 @@ default TickReporter createTickReporter() {
return null;
}

/**
* Creates a class source lookup function, used to identify the source
* (e.g. plugin or mod name) that provides a given class.
*
* <p>The default implementation returns {@link ClassSourceLookup#NO_OP},
* which identifies nothing; platforms override this to supply a real
* implementation.</p>
*
* @return the class source lookup function
*/
default ClassSourceLookup createClassSourceLookup() {
return ClassSourceLookup.NO_OP;
}

/**
* Gets information for the platform.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -299,7 +299,7 @@ private void profilerStop(SparkPlatform platform, CommandSender sender, CommandR
}

private void handleUpload(SparkPlatform platform, CommandResponseHandler resp, Sampler sampler, ThreadNodeOrder threadOrder, String comment, MergeMode mergeMode) {
byte[] output = sampler.formCompressedDataPayload(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode);
byte[] output = sampler.formCompressedDataPayload(new Sampler.ExportProps(platform.getPlugin().getPlatformInfo(), resp.sender(), threadOrder, comment, mergeMode, platform.getClassSourceLookup()));
try {
String key = SparkPlatform.BYTEBIN_CLIENT.postContent(output, SPARK_SAMPLER_MEDIA_TYPE).key();
String url = SparkPlatform.VIEWER_URL + key;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkProtos.SamplerData;

import java.io.ByteArrayOutputStream;
Expand Down Expand Up @@ -71,16 +72,10 @@ public interface Sampler {
CompletableFuture<? extends Sampler> getFuture();

// Methods used to export the sampler data to the web viewer.
SamplerData toProto(
PlatformInfo platformInfo,
CommandSender creator,
Comparator<? super Map.Entry<String, ThreadNode>> outputOrder,
String comment,
MergeMode mergeMode
);
SamplerData toProto(ExportProps props);

default byte[] formCompressedDataPayload(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode) {
SamplerData proto = toProto(platformInfo, creator, outputOrder, comment, mergeMode);
default byte[] formCompressedDataPayload(ExportProps props) {
SamplerData proto = toProto(props);

ByteArrayOutputStream byteOut = new ByteArrayOutputStream();
try (OutputStream out = new GZIPOutputStream(byteOut)) {
Expand All @@ -91,4 +86,22 @@ default byte[] formCompressedDataPayload(PlatformInfo platformInfo, CommandSende
return byteOut.toByteArray();
}

/**
* Immutable holder for the properties used when exporting sampler data
* to the web viewer. Fields are public final by design; instances are
* constructed once per export and read directly by sampler implementations.
*/
class ExportProps {
// platform metadata embedded in the exported payload
public final PlatformInfo platformInfo;
// the command sender who created/exported the profile
public final CommandSender creator;
// ordering applied to the thread entries in the output
public final Comparator<? super Map.Entry<String, ThreadNode>> outputOrder;
// optional user-supplied comment; may be null (consumers null-check it)
public final String comment;
// controls how stack trace nodes are merged in the output
public final MergeMode mergeMode;
// resolves classes to their source (plugin/mod name)
public final ClassSourceLookup classSourceLookup;

public ExportProps(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode, ClassSourceLookup classSourceLookup) {
this.platformInfo = platformInfo;
this.creator = creator;
this.outputOrder = outputOrder;
this.comment = comment;
this.mergeMode = mergeMode;
this.classSourceLookup = classSourceLookup;
}
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -20,14 +20,12 @@

package me.lucko.spark.common.sampler.async;

import me.lucko.spark.common.command.sender.CommandSender;
import me.lucko.spark.common.platform.PlatformInfo;
import me.lucko.spark.common.sampler.Sampler;
import me.lucko.spark.common.sampler.ThreadDumper;
import me.lucko.spark.common.sampler.ThreadGrouper;
import me.lucko.spark.common.sampler.async.jfr.JfrReader;
import me.lucko.spark.common.sampler.node.MergeMode;
import me.lucko.spark.common.sampler.node.ThreadNode;
import me.lucko.spark.common.util.ClassSourceLookup;
import me.lucko.spark.proto.SparkProtos;

import one.profiler.AsyncProfiler;
Expand All @@ -37,7 +35,6 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CompletableFuture;
Expand Down Expand Up @@ -144,17 +141,17 @@ public void stop() {
}

@Override
public SparkProtos.SamplerData toProto(PlatformInfo platformInfo, CommandSender creator, Comparator<? super Map.Entry<String, ThreadNode>> outputOrder, String comment, MergeMode mergeMode) {
public SparkProtos.SamplerData toProto(ExportProps props) {
final SparkProtos.SamplerMetadata.Builder metadata = SparkProtos.SamplerMetadata.newBuilder()
.setPlatformMetadata(platformInfo.toData().toProto())
.setCreator(creator.toData().toProto())
.setPlatformMetadata(props.platformInfo.toData().toProto())
.setCreator(props.creator.toData().toProto())
.setStartTime(this.startTime)
.setInterval(this.interval)
.setThreadDumper(this.threadDumper.getMetadata())
.setDataAggregator(this.dataAggregator.getMetadata());

if (comment != null) {
metadata.setComment(comment);
if (props.comment != null) {
metadata.setComment(props.comment);
}

SparkProtos.SamplerData.Builder proto = SparkProtos.SamplerData.newBuilder();
Expand All @@ -163,10 +160,17 @@ public SparkProtos.SamplerData toProto(PlatformInfo platformInfo, CommandSender
aggregateOutput();

List<Map.Entry<String, ThreadNode>> data = new ArrayList<>(this.dataAggregator.getData().entrySet());
data.sort(outputOrder);
data.sort(props.outputOrder);

ClassSourceLookup.Visitor classSourceVisitor = ClassSourceLookup.createVisitor(props.classSourceLookup);

for (Map.Entry<String, ThreadNode> entry : data) {
proto.addThreads(entry.getValue().toProto(mergeMode));
proto.addThreads(entry.getValue().toProto(props.mergeMode));
classSourceVisitor.visit(entry.getValue());
}

if (classSourceVisitor.hasMappings()) {
proto.putAllClassSources(classSourceVisitor.getMapping());
}

return proto.build();
Expand Down
Loading

0 comments on commit f5bb628

Please sign in to comment.