Misc fixes

DBotThePony 2023-10-11 21:56:22 +07:00
parent 7fcad1352e
commit 0c6736ef90
Signed by: DBot
GPG Key ID: DCC23B5715498507
3 changed files with 31 additions and 9 deletions

View File

@@ -1,13 +1,15 @@
package ru.dbotthepony.kstarbound.api
import com.google.common.collect.ImmutableMap
import com.google.gson.JsonElement
import com.google.gson.JsonParser
import com.google.gson.stream.JsonReader
import ru.dbotthepony.kstarbound.Starbound
import ru.dbotthepony.kstarbound.io.StarboundPak
import ru.dbotthepony.kstarbound.stream
import java.io.*
import java.io.BufferedInputStream
import java.io.File
import java.io.FileNotFoundException
import java.io.InputStream
import java.io.InputStreamReader
import java.io.Reader
import java.nio.ByteBuffer
import java.util.stream.Stream

View File

@@ -74,6 +74,8 @@ import java.nio.ByteOrder
import java.time.Duration
import java.util.*
import java.util.concurrent.ForkJoinPool
import java.util.concurrent.ForkJoinWorkerThread
import java.util.concurrent.atomic.AtomicInteger
import java.util.concurrent.locks.LockSupport
import java.util.concurrent.locks.ReentrantLock
import java.util.function.IntConsumer
@@ -86,10 +88,26 @@ class StarboundClient : Closeable {
val camera = Camera(this)
val input = UserInput()
val thread: Thread = Thread.currentThread()
private val threadCounter = AtomicInteger()
// Client-specific executor which accepts tasks that are likely to call back
// into the foreground executor to initialize thread-unsafe data.
// In that case, too many background threads would cause heavy contention for resources,
// stalling the entire workload and wasting CPU time.
val backgroundExecutor = ForkJoinPool(Runtime.getRuntime().availableProcessors().coerceAtMost(4))
val backgroundExecutor = ForkJoinPool(Runtime.getRuntime().availableProcessors().coerceAtMost(4), {
object : ForkJoinWorkerThread(it) {
init {
name = "Background Executor for '${thread.name}'-${threadCounter.incrementAndGet()}"
}
override fun onTermination(exception: Throwable?) {
super.onTermination(exception)
if (exception != null) {
LOGGER.error("$this encountered an exception while executing a task", exception)
}
}
}
}, null, false)
val foregroundExecutor = ManualExecutorService(thread)
val capabilities: GLCapabilities
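
Since both executors live on StarboundClient, the intended split is: heavy, thread-safe work on the capped backgroundExecutor, with only the thread-unsafe finalization hopping back to the client thread. Below is a minimal sketch of that hand-off, not taken from the repository: decodePixels and uploadToGL are hypothetical stand-ins, and it assumes both executors can be passed around as plain java.util.concurrent.Executor (ForkJoinPool is one; for ManualExecutorService that is an assumption).

import java.util.concurrent.CompletableFuture
import java.util.concurrent.Executor

// Hypothetical helpers: heavy, thread-safe decoding versus a GL upload that
// must happen on the thread owning the GL context.
fun decodePixels(path: String): ByteArray = TODO("decode image data")
fun uploadToGL(pixels: ByteArray): Int = TODO("create the GL texture on the client thread")

// Run the heavy part on the capped background pool and hop back to the
// client thread only for the thread-unsafe finalization step.
fun loadTextureAsync(background: Executor, foreground: Executor, path: String): CompletableFuture<Int> =
	CompletableFuture
		.supplyAsync({ decodePixels(path) }, background)
		.thenApplyAsync({ pixels -> uploadToGL(pixels) }, foreground)

Keeping the pool at or below four threads keeps these callbacks from piling up behind dozens of background threads, which is the congestion the comment above warns about.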

View File

@@ -7,6 +7,8 @@ import ru.dbotthepony.kstarbound.stream
import java.lang.ref.ReferenceQueue
import java.lang.ref.WeakReference
import java.util.concurrent.locks.LockSupport
import java.util.concurrent.locks.ReentrantLock
import kotlin.concurrent.withLock
// Hand-rolled interner with performance similar to ConcurrentHashMap
// (assuming there is no heavy contention; under contention it performs somewhat better than the Caffeine interner),
@@ -124,7 +126,7 @@ class HashTableInterner<T : Any>(private val segmentBits: Int) : Interner<T> {
actualSegmentBits = result
}
private val locks: Array<Any> = Array(1.shl(segmentBits)) { Any() }
private val locks: Array<ReentrantLock> = Array(1.shl(segmentBits)) { ReentrantLock() }
private val segments: Array<Segment> = Array(1.shl(segmentBits)) { Segment(32, locks[it]) }
init {
@@ -137,7 +139,7 @@ class HashTableInterner<T : Any>(private val segmentBits: Int) : Interner<T> {
// while this increases memory usage (linked list), it greatly
// simplifies the logic and makes scanning a bit faster, because we don't jump to neighbouring nodes
// (assuming the key is not present past our neighbour)
private inner class Segment(val size: Int, private val lock: Any) {
private inner class Segment(val size: Int, private val lock: ReentrantLock) {
private val queue = ReferenceQueue<T>()
val mask = size - 1
@@ -183,7 +185,7 @@ class HashTableInterner<T : Any>(private val segmentBits: Int) : Interner<T> {
var p: Ref<T>? = queue.poll() as Ref<T>? ?: return 0
var any = 0
synchronized(lock) {
lock.withLock {
while (p != null) {
check(remove(p!!)) { "Unable to remove null entry $p at hash ${hash(p!!)}" }
p = queue.poll() as Ref<T>?
@@ -234,7 +236,7 @@ class HashTableInterner<T : Any>(private val segmentBits: Int) : Interner<T> {
val find = segment.search(sample)
if (find != null) return find
synchronized(locks[segmentIndex]) {
locks[segmentIndex].withLock {
segment = segments[segmentIndex]
val find = segment.search(sample)
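
The interner hunks swap the plain monitor objects for ReentrantLock and withLock while keeping the two-phase lookup visible in the last hunk: an optimistic search outside the lock, then a locked re-check before inserting. A stripped-down sketch of that pattern follows; it is not the real implementation and assumes a much simpler segment, with a ConcurrentHashMap standing in for the hand-rolled weak-reference table so that the lock-free first lookup stays safe in the sketch.

import java.util.concurrent.ConcurrentHashMap
import java.util.concurrent.locks.ReentrantLock
import kotlin.concurrent.withLock

class TinyInterner<T : Any>(segmentBits: Int = 4) {
	private val mask = 1.shl(segmentBits) - 1
	private val locks = Array(1.shl(segmentBits)) { ReentrantLock() }
	private val segments = Array(1.shl(segmentBits)) { ConcurrentHashMap<T, T>() }

	fun intern(sample: T): T {
		val index = sample.hashCode() and mask

		// Fast path: optimistic lookup without taking the segment lock.
		segments[index][sample]?.let { return it }

		// Slow path: take the segment lock and search again before inserting,
		// since another thread may have interned an equal value in the meantime.
		return locks[index].withLock {
			segments[index].getOrPut(sample) { sample }
		}
	}
}

A likely benefit of switching from synchronized(lock) on a plain Any to ReentrantLock is flexibility: withLock compiles down to lock()/unlock() in a try/finally, and an explicit lock later permits tryLock, timed acquisition, or fairness tweaks without changing callers.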