package ru.dbotthepony.kstarbound

import com.google.common.collect.ImmutableMap
import com.google.gson.Gson
import com.google.gson.GsonBuilder
import com.google.gson.TypeAdapter
import com.google.gson.TypeAdapterFactory
import com.google.gson.reflect.TypeToken
import com.google.gson.stream.JsonReader
import ru.dbotthepony.kstarbound.api.IStarboundFile
import ru.dbotthepony.kstarbound.util.KOptional
import java.util.Arrays
import java.util.concurrent.Callable
import java.util.concurrent.ForkJoinPool
import java.util.concurrent.ForkJoinTask
import java.util.stream.Stream
import kotlin.reflect.KProperty

inline fun <reified T : Any> GsonBuilder.registerTypeAdapter(adapter: TypeAdapter<T>): GsonBuilder {
	return registerTypeAdapter(T::class.java, adapter)
}

inline fun <reified T : Any> GsonBuilder.registerTypeAdapter(noinline factory: (Gson) -> TypeAdapter<T>): GsonBuilder {
	val token = TypeToken.get(T::class.java)

	return registerTypeAdapterFactory(object : TypeAdapterFactory {
		override fun <V> create(gson: Gson, type: TypeToken<V>): TypeAdapter<V>? {
			if (type == token) {
				@Suppress("UNCHECKED_CAST")
				return factory(gson) as TypeAdapter<V>
			}

			return null
		}
	})
}

fun <T> Array<T>.stream(): Stream<T> = Arrays.stream(this)

operator fun <T> ThreadLocal<T>.getValue(thisRef: Any, property: KProperty<*>): T {
	return get()
}

operator fun <T> ThreadLocal<T>.setValue(thisRef: Any, property: KProperty<*>, value: T) {
	set(value)
}

operator fun <K, V> ImmutableMap.Builder<K, V>.set(key: K, value: V): ImmutableMap.Builder<K, V> = put(key, value)

fun String.sintern(): String = Starbound.STRINGS.intern(this)

inline fun <reified T : Any> Gson.fromJson(reader: JsonReader): T? = fromJson(reader, T::class.java)

/**
 * Guarantees even distribution of tasks across [executor] while preserving the encounter order of elements.
 */
fun <T : Any> Collection<IStarboundFile>.batch(executor: ForkJoinPool, batchSize: Int = 16, mapper: (IStarboundFile) -> KOptional<T>): Stream<T> {
	require(batchSize >= 1) { "Invalid batch size: $batchSize" }

	if (batchSize == 1 || size <= batchSize) {
		// small collection (or batching disabled): submit one task per element
		val tasks = ArrayList<ForkJoinTask<KOptional<T>>>()

		for (listedFile in this) {
			tasks.add(executor.submit(Callable { mapper.invoke(listedFile) }))
		}

		return tasks.stream().map { it.join() }.filter { it.isPresent }.map { it.value }
	}

	// otherwise group elements into batches of [batchSize] and submit each batch as a single task
	val batches = ArrayList<ForkJoinTask<List<KOptional<T>>>>()
	var batch = ArrayList<IStarboundFile>(batchSize)

	for (listedFile in this) {
		batch.add(listedFile)

		if (batch.size >= batchSize) {
			val mbatch = batch
			batches.add(executor.submit(Callable { mbatch.map { mapper.invoke(it) } }))
			batch = ArrayList(batchSize)
		}
	}

	// submit the trailing partial batch, if any
	if (batch.isNotEmpty())
		batches.add(executor.submit(Callable { batch.map { mapper.invoke(it) } }))

	return batches.stream().flatMap { it.join().stream() }.filter { it.isPresent }.map { it.value }
}
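
// Usage sketch (illustrative, not part of the original file): shows how `batch` above is
// meant to be driven. `exampleBatchUsage` and its `mapper` parameter are hypothetical names;
// `mapper` stands in for any expensive per-file transformation that returns an empty
// KOptional for files that should be skipped (batch drops those via `filter { it.isPresent }`).
private fun <T : Any> exampleBatchUsage(
	files: Collection<IStarboundFile>,
	pool: ForkJoinPool,
	mapper: (IStarboundFile) -> KOptional<T>
): List<T> {
	// submit the work in chunks of 32 files per ForkJoinTask, then materialize the
	// surviving (present) results in the collection's original encounter order
	return files
		.batch(pool, batchSize = 32, mapper = mapper)
		.toList() // Stream.toList() requires Java 16+; use collect(Collectors.toList()) on older JVMs
}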