Fixes to recipe resolver

DBotThePony 2022-11-09 18:59:34 +07:00
parent 92854f66a7
commit 33aee85cda
Signed by: DBot
GPG Key ID: DCC23B5715498507


@@ -85,7 +85,13 @@ object RecipeResolverManager : SimpleJsonResourceReloadListener(GsonBuilder().se
fun interface Finder {
/**
* Returned stream MUST be sequential
* Returned stream can be either parallel or sequential
*
* Parallel streams will _greatly_ improve performance on mid-range computers,
* but you must make sure your stream has no side effects (such as binding late-binding tags).
*
* Internally, OTM separates parallel streams from sequential ones, so sequential streams run strictly on the server thread
* (return a sequential stream if you can't avoid side effects or don't want to bother with synchronization).
*/
fun find(server: MinecraftServer, json: JsonObject): Stream<ResolvedRecipe>
}
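For a concrete picture, here is a minimal sketch of a Finder that can legally return a parallel stream: it only reads immutable recipe data and maps it, so there is nothing to synchronize. RecipeType.STONECUTTING is just an arbitrary vanilla recipe type and toResolvedRecipe() is a hypothetical helper standing in for whatever conversion a real resolver does; neither is part of this commit.

// Sketch only: assumes it sits next to the declarations above.
// The pipeline has no side effects, so returning it parallel is safe.
val sketchFinder = Finder { server, json ->
    server.recipeManager.byType(RecipeType.STONECUTTING).values
        .parallelStream()
        .filter { !it.isIncomplete }
        .map { toResolvedRecipe(it) } // hypothetical conversion helper
}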
@@ -113,7 +119,7 @@ object RecipeResolverManager : SimpleJsonResourceReloadListener(GsonBuilder().se
val isCritical = data["is_critical"]?.asBoolean ?: true
val ignoreDamageables = data["ignore_damageables"]?.asBoolean ?: false
server.recipeManager.byType(findRecipeType).values.stream()
server.recipeManager.byType(findRecipeType).values.parallelStream()
.filter { !it.isIncomplete && !it.ingredients.stream().anyMatch { it.isActuallyEmpty } } // get rid of invalid recipes, second "isActuallyEmpty" is required because we do care about ingredients being "missing"
.filter {
!ignoreDamageables || it.ingredients.stream().flatMap { Arrays.stream(it.items) }.noneMatch { it.isDamageableItem }
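Both flags are optional booleans on the resolver's JSON entry. A hedged sketch of such an entry built with Gson, using only the two keys visible in the diff (any other keys a real entry needs are omitted):

// Sketch of the data object read above; omitted keys fall back to the defaults noted.
val data = JsonObject().apply {
    addProperty("is_critical", true)         // absent -> defaults to true
    addProperty("ignore_damageables", false) // absent -> defaults to false
}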
@@ -204,9 +210,31 @@ object RecipeResolverManager : SimpleJsonResourceReloadListener(GsonBuilder().se
val input2Recipes: Reference2ObjectOpenHashMap<Item, ReferenceOpenHashSet<ResolvedRecipe>> = Reference2ObjectOpenHashMap(),
val output2Recipes: Reference2ObjectOpenHashMap<Item, ReferenceOpenHashSet<ResolvedRecipe>> = Reference2ObjectOpenHashMap(),
) {
val heuristicsSize: Long
get() {
return input2Recipes.size.toLong() + output2Recipes.size.toLong()
}
fun combine(other: Accumulator) {
input2Recipes.putAll(other.input2Recipes)
output2Recipes.putAll(other.output2Recipes)
for ((k, v) in other.input2Recipes) {
val existing = input2Recipes[k]
if (existing == null) {
input2Recipes[k] = v
} else {
existing.addAll(v)
}
}
for ((k, v) in other.output2Recipes) {
val existing = output2Recipes[k]
if (existing == null) {
output2Recipes[k] = v
} else {
existing.addAll(v)
}
}
}
fun add(recipe: ResolvedRecipe) {
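The combine() fix matters because putAll() replaces an existing ReferenceOpenHashSet wholesale, silently dropping recipes already accumulated for that item; the loops above union the sets instead. The same merge can be written more compactly with Kotlin's getOrPut, a behavior-equivalent sketch (its only difference is that it copies into a fresh set for new keys instead of aliasing the other accumulator's set):

// Union-merge sketch equivalent to the loops above.
fun unionMerge(
    into: Reference2ObjectOpenHashMap<Item, ReferenceOpenHashSet<ResolvedRecipe>>,
    from: Reference2ObjectOpenHashMap<Item, ReferenceOpenHashSet<ResolvedRecipe>>
) {
    for ((item, recipes) in from) {
        // keep what is already there and add the other accumulator's recipes
        into.getOrPut(item) { ReferenceOpenHashSet() }.addAll(recipes)
    }
}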
@@ -268,6 +296,8 @@ object RecipeResolverManager : SimpleJsonResourceReloadListener(GsonBuilder().se
return Result.MISSING
}
var hadSkips = false
recipesLoop@ for (recipe in recipes) {
if (recipe.inputs.isEmpty()) {
// TODO: should we ignore empty recipes?
@@ -296,7 +326,8 @@ object RecipeResolverManager : SimpleJsonResourceReloadListener(GsonBuilder().se
}
} else if (ivalue.isSkipped) {
commentary[item] = TextComponent("Input '${input.item.registryName}' at input slot $i in ${recipe.formattedName} is recursive")
return Result.SKIPPED
hadSkips = true
continue@recipesLoop
}
if (minimal == null || minimal > ivalue.value!!) {
@@ -335,6 +366,9 @@ object RecipeResolverManager : SimpleJsonResourceReloadListener(GsonBuilder().se
}
if (minimalMatter == null || minimalComplexity == null) {
if (hadSkips)
return Result.SKIPPED
if (item !in commentary)
commentary[item] = TextComponent("'${item.registryName}' ended up with no valid recipes (???)")
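Taken together with the hunk above, the change means a recursive ingredient no longer fails the whole item immediately: the offending recipe is skipped, the remaining recipes are still evaluated, and SKIPPED is only reported when recursion left nothing to resolve. A self-contained sketch of that control flow with simplified stand-in types (none of these names are OTM's):

// All types here are stand-ins; Outcome.RESOLVED represents the success path.
enum class Outcome { RESOLVED, SKIPPED, MISSING }
data class SketchRecipe(val inputs: List<String>, val price: Double)

fun resolveCheapest(recipes: List<SketchRecipe>, isRecursive: (String) -> Boolean): Pair<Outcome, Double?> {
    var hadSkips = false
    var best: Double? = null

    recipesLoop@ for (recipe in recipes) {
        for (input in recipe.inputs) {
            if (isRecursive(input)) {
                hadSkips = true
                continue@recipesLoop // skip only this recipe, keep evaluating the rest
            }
        }

        if (best == null || recipe.price < best) best = recipe.price
    }

    return when {
        best != null -> Outcome.RESOLVED to best
        hadSkips -> Outcome.SKIPPED to null // previously returned on the first recursive input
        else -> Outcome.MISSING to null
    }
}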
@@ -399,19 +433,37 @@ object RecipeResolverManager : SimpleJsonResourceReloadListener(GsonBuilder().se
seenItems.clear()
commentary.clear()
val stream = Streams.concat(*foundResolvers.map {
val foundStreams = foundResolvers.map {
try {
it.value.first.find(server, it.value.second).sequential()
it.value.first.find(server, it.value.second)
} catch(err: Throwable) {
LOGGER.fatal("Recipe resolver ${it.key} experienced internal error", err)
throw RuntimeException("Recipe resolver ${it.key} experienced internal error", err)
}
}.toTypedArray())
}
val (input2Recipes, output2Recipes) = stream.collect(::Accumulator, Accumulator::add, Accumulator::combine)
val sequentialStreams = foundStreams.filter { !it.isParallel }
val parallelStreams = foundStreams.filter { it.isParallel }
val streamA = Streams.concat(*sequentialStreams.toTypedArray())
val streamB = Streams.concat(*parallelStreams.toTypedArray())
val resultA = streamA.collect(::Accumulator, Accumulator::add, Accumulator::combine)
val resultB = streamB.collect(::Accumulator, Accumulator::add, Accumulator::combine)
val result: Accumulator
if (resultA.heuristicsSize > resultB.heuristicsSize) {
result = resultA
resultA.combine(resultB)
} else {
result = resultB
resultB.combine(resultA)
}
val (input2Recipes, output2Recipes) = result
LOGGER.info("Finding recipes took ${time.millis}ms, involving ${input2Recipes.keys.size} unique inputs and ${output2Recipes.keys.size} unique outputs")
time = SystemTime()
LOGGER.info("Resolving recipes...")
this.input2Recipes = input2Recipes
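The reason for collecting the two concatenations separately: with the three-argument collect(), a sequential stream is drained entirely on the calling thread (here the server thread, which is what side-effecting finders rely on), while a parallel stream fills one container per chunk of work and merges them through the combiner, the role Accumulator::combine plays above. The smaller of the two results is then folded into the larger one, which is what the heuristicsSize comparison is for, so fewer map entries have to be re-inserted. A standalone illustration of the collect() contract using plain lists:

import java.util.stream.IntStream

fun main() {
    // Sequential: a single container on the calling thread; the combiner goes unused.
    val sequential = IntStream.rangeClosed(1, 1000).boxed()
        .collect({ ArrayList<Int>() }, { acc, e -> acc.add(e) }, { a, b -> a.addAll(b) })

    // Parallel: workers fill separate containers, then the combiner merges them.
    val parallel = IntStream.rangeClosed(1, 1000).boxed().parallel()
        .collect({ ArrayList<Int>() }, { acc, e -> acc.add(e) }, { a, b -> a.addAll(b) })

    println(sequential.size == parallel.size) // true: both end up with all 1000 values
}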
@@ -434,7 +486,7 @@ object RecipeResolverManager : SimpleJsonResourceReloadListener(GsonBuilder().se
for (value in toDetermine) {
val name = value.registryName!!.toString()
if (length != 0 && length + name.length < 100) {
if (length != 0 && length + name.length < 400) {
names.add(name)
length += name.length
} else if (length == 0) {
@@ -452,6 +504,8 @@ object RecipeResolverManager : SimpleJsonResourceReloadListener(GsonBuilder().se
}
}
time = SystemTime()
while (changes) {
ops += cachedIterationResults.size
cachedIterationResults = Reference2ObjectOpenHashMap()