From c9016fd4da0dd30375dee4152587fd3ae65880c0 Mon Sep 17 00:00:00 2001
From: granny
Date: Sat, 3 Feb 2024 03:43:19 -0800
Subject: [PATCH] properly implement ProviderSource for our spark jar provider

---
 patches/server/0256-Spark-Profiler.patch | 37 ++++++++++++++++--------
 1 file changed, 25 insertions(+), 12 deletions(-)

diff --git a/patches/server/0256-Spark-Profiler.patch b/patches/server/0256-Spark-Profiler.patch
index a0e179155..a6de0555e 100644
--- a/patches/server/0256-Spark-Profiler.patch
+++ b/patches/server/0256-Spark-Profiler.patch
@@ -19,10 +19,10 @@ index 708e5bb9bbf0476fcc2c4b92c6830b094703b43e..6141f716b15ad47ac2ac4c9ce92a3897
  // This will be the end of me...
 diff --git a/src/main/java/io/papermc/paper/plugin/provider/source/SparkProviderSource.java b/src/main/java/io/papermc/paper/plugin/provider/source/SparkProviderSource.java
 new file mode 100644
-index 0000000000000000000000000000000000000000..a7d1ae53eac94bc2dcf8bc78ef1da0d3b8554736
+index 0000000000000000000000000000000000000000..cb78dac8e072b5cb3c6e52e17c9ecdf708aeedc1
 --- /dev/null
 +++ b/src/main/java/io/papermc/paper/plugin/provider/source/SparkProviderSource.java
-@@ -0,0 +1,102 @@
+@@ -0,0 +1,115 @@
 +package io.papermc.paper.plugin.provider.source;
 +
 +import com.mojang.logging.LogUtils;
@@ -44,26 +44,23 @@ index 0000000000000000000000000000000000000000..a7d1ae53eac94bc2dcf8bc78ef1da0d3
 +import org.bukkit.plugin.java.JavaPlugin;
 +import org.slf4j.Logger;
 +
-+public class SparkProviderSource extends FileProviderSource {
-+    public static final SparkProviderSource INSTANCE = new SparkProviderSource();
++public class SparkProviderSource implements ProviderSource<Path, Path> {
 +
++    public static final SparkProviderSource INSTANCE = new SparkProviderSource();
++    private static final FileProviderSource FILE_PROVIDER_SOURCE = new FileProviderSource("File '%s' specified by Purpur"::formatted);
 +    private static final Logger LOGGER = LogUtils.getClassLogger();
 +
-+    public SparkProviderSource() {
-+        super("File '%s' specified by Purpur"::formatted);
-+    }
-+
 +    @Override
-+    public void registerProviders(EntrypointHandler entrypointHandler, Path context) throws Exception {
++    public Path prepareContext(Path context) {
 +        // first, check if user doesn't want spark at all
 +        if (Boolean.getBoolean("Purpur.IReallyDontWantSpark")) {
-+            return; // boo!
++            return null; // boo!
 +        }
 +
 +        // second, check if user has their own spark
 +        if (hasSpark()) {
 +            LOGGER.info("Purpur: Using user-provided spark plugin instead of our own.");
-+            return; // let's hope it's at least the modern version :3
++            return null; // let's hope it's at least the modern version :3
 +        }
 +
 +        // you can't have errors in your code if you wrap the entire codebase in a try/catch block
@@ -109,11 +106,27 @@ index 0000000000000000000000000000000000000000..a7d1ae53eac94bc2dcf8bc78ef1da0d3
 +            }
 +
 +            // register the spark, newly downloaded or existing
-+            super.registerProviders(entrypointHandler, context);
++            return FILE_PROVIDER_SOURCE.prepareContext(context);
 +
 +        } catch (Throwable e) {
 +            LOGGER.error("Purpur: Failed to download and install spark plugin", e);
 +        }
++        return null;
++    }
++
++    @Override
++    public void registerProviders(final EntrypointHandler entrypointHandler, final Path context) {
++        if (context == null) {
++            return;
++        }
++
++        try {
++            FILE_PROVIDER_SOURCE.registerProviders(entrypointHandler, context);
++        } catch (IllegalArgumentException ignored) {
++            // Ignore illegal argument exceptions from jar checking
++        } catch (Exception e) {
++            LOGGER.error("Error loading our spark plugin: " + e.getMessage(), e);
++        }
 +    }
 +
 +    private static boolean hasSpark() {