Bump kommons
parent df972dd891
commit a06ed42268
@@ -2,7 +2,7 @@ kotlin.code.style=official
 org.gradle.jvmargs=-Xmx2g -XX:MaxMetaspaceSize=512m
 
 kotlinVersion=1.9.0
-kommonsVersion=2.3.3
+kommonsVersion=2.5.0
 
 ffiVersion=2.2.13
 lwjglVersion=3.3.0
@@ -27,15 +27,24 @@ import java.util.zip.InflaterInputStream
 private val LOGGER = LogManager.getLogger()
 
 fun main() {
+	/*val db0 = BTreeDB5(File("F:\\SteamLibrary\\steamapps\\common\\Starbound - Unstable\\storage\\universe\\389760395_938904237_-238610574_5.world"))
+	val db2 = BTreeDB6.create(File("testdb.bdb"), sync = false)
+
+	for (key in db0.findAllKeys()) {
+		db2.write(key, db0.read(key).get())
+	}
+
+	db2.close()*/
+
 	LOGGER.info("Running LWJGL ${Version.getVersion()}")
 
 	//Thread.sleep(6_000L)
 
-	val db = BTreeDB6(File("testdb.bdb"), Starbound.BTREEDB_IO_POOL)
+	val db = BTreeDB6(File("testdb.bdb"))
 	//val db = BTreeDB5(File("F:\\SteamLibrary\\steamapps\\common\\Starbound - Unstable\\storage\\universe\\389760395_938904237_-238610574_5.world"))
 	//val db = BTreeDB(File("world.world"))
 
-	val meta = DataInputStream(BufferedInputStream(InflaterInputStream(ByteArrayInputStream(db.read(ByteKey(0, 0, 0, 0, 0)).get().get()), Inflater())))
+	val meta = DataInputStream(BufferedInputStream(InflaterInputStream(ByteArrayInputStream(db.read(ByteKey(0, 0, 0, 0, 0)).get()), Inflater())))
 
 	println(meta.readInt())
 	println(meta.readInt())
@@ -96,8 +96,8 @@ object Starbound : ISBFileLocator {
 
 	private val ioPoolCounter = AtomicInteger()
 
-	val BTREEDB_IO_POOL = ThreadPoolExecutor(0, Int.MAX_VALUE, 30L, TimeUnit.SECONDS, SynchronousQueue(), ThreadFactory {
-		val thread = Thread(it, "BTreeDB IO ${ioPoolCounter.getAndIncrement()}")
+	val STORAGE_IO_POOL = ThreadPoolExecutor(0, Int.MAX_VALUE, 30L, TimeUnit.SECONDS, SynchronousQueue(), ThreadFactory {
+		val thread = Thread(it, "Starbound Storage IO ${ioPoolCounter.getAndIncrement()}")
 		thread.isDaemon = true
 		thread.priority = Thread.MIN_PRIORITY
 		return@ThreadFactory thread
@@ -2,7 +2,6 @@ package ru.dbotthepony.kstarbound.io
 
 import it.unimi.dsi.fastutil.io.FastByteArrayInputStream
 import it.unimi.dsi.fastutil.longs.LongArrayList
-import ru.dbotthepony.kommons.io.BTreeDB
 import ru.dbotthepony.kommons.io.ByteDataBTreeDB
 import ru.dbotthepony.kommons.io.ByteKey
 import ru.dbotthepony.kommons.io.readByteKeyRaw
@@ -54,7 +53,6 @@ private enum class TreeBlockType(val identity: String) {
 
 class BTreeDB5(override val file: File) : ByteDataBTreeDB<ByteKey>() {
 	private val reader = RandomAccessFile(file, "r")
-	private val carrier = CarriedExecutor(Starbound.BTREEDB_IO_POOL)
 
 	init {
 		readHeader(reader, 'B')
@@ -145,24 +143,20 @@ class BTreeDB5(override val file: File) : ByteDataBTreeDB<ByteKey>() {
 	}
 
 	override fun close() {
-		carrier.execute { reader.close() }
-		carrier.shutdown()
-		carrier.awaitTermination(Long.MAX_VALUE, TimeUnit.NANOSECONDS)
+		reader.close()
 	}
 
-	override fun write(key: ByteKey, value: ByteArray, offset: Int, length: Int): CompletableFuture<*> {
+	override fun write(key: ByteKey, value: ByteArray, offset: Int, length: Int) {
 		throw UnsupportedOperationException()
 	}
 
-	override fun findAllKeys(): CompletableFuture<List<ByteKey>> {
-		return CompletableFuture.supplyAsync(Supplier {
-			val list = ArrayList<ByteKey>()
-			doFindAllKeys(rootNodeIndex, list)
-			list
-		}, carrier)
+	override fun findAllKeys(): List<ByteKey> {
+		val list = ArrayList<ByteKey>()
+		doFindAllKeys(rootNodeIndex, list)
+		return list
 	}
 
-	private fun doHasKey(key: ByteKey): Boolean {
+	override fun contains(key: ByteKey): Boolean {
 		seekBlock(rootNodeIndex)
 		var blockStream = BlockInputStream()
 
@@ -224,13 +218,9 @@ class BTreeDB5(override val file: File) : ByteDataBTreeDB<ByteKey>() {
 		return false
 	}
 
-	override fun hasKey(key: ByteKey): CompletableFuture<Boolean> {
-		return CompletableFuture.supplyAsync(Supplier {
-			doHasKey(key)
-		}, carrier)
-	}
+	override fun read(key: ByteKey): KOptional<ByteArray> {
+		require(key.size == keySize) { "Key provided is ${key.size} in size, while $keySize is required" }
 
-	private fun doRead(key: ByteKey): KOptional<ByteArray> {
 		seekBlock(rootNodeIndex)
 		var blockStream = BlockInputStream()
 
@@ -300,14 +290,6 @@ class BTreeDB5(override val file: File) : ByteDataBTreeDB<ByteKey>() {
 		return KOptional.empty()
 	}
 
-	override fun read(key: ByteKey): CompletableFuture<KOptional<ByteArray>> {
-		require(key.size == keySize) { "Key provided is ${key.size} in size, while $keySize is required" }
-
-		return CompletableFuture.supplyAsync(Supplier {
-			doRead(key)
-		}, carrier)
-	}
-
 	private fun seekBlock(id: Long) {
 		reader.seek(blockOffset(id))
 	}
@@ -6,6 +6,7 @@ import ru.dbotthepony.kommons.io.BTreeDB
 import ru.dbotthepony.kommons.io.ByteKey
 import ru.dbotthepony.kommons.util.KOptional
 import ru.dbotthepony.kommons.io.readVarInt
+import ru.dbotthepony.kommons.util.CarriedExecutor
 import ru.dbotthepony.kommons.vector.Vector2i
 import ru.dbotthepony.kstarbound.Starbound
 import ru.dbotthepony.kstarbound.json.VersionedJson
@@ -20,16 +21,19 @@ import java.io.BufferedInputStream
 import java.io.ByteArrayInputStream
 import java.io.DataInputStream
 import java.util.concurrent.CompletableFuture
+import java.util.function.Supplier
 import java.util.zip.Inflater
 import java.util.zip.InflaterInputStream
 
 class LegacyChunkSource(val db: BTreeDB<ByteKey, ByteArray>) : IChunkSource {
+	private val carrier = CarriedExecutor(Starbound.STORAGE_IO_POOL)
+
 	override fun getTiles(pos: ChunkPos): CompletableFuture<KOptional<Object2DArray<out AbstractCell>>> {
 		val chunkX = pos.x
 		val chunkY = pos.y
 		val key = ByteKey(1, (chunkX shr 8).toByte(), chunkX.toByte(), (chunkY shr 8).toByte(), chunkY.toByte())
 
-		return db.read(key).thenApplyAsync {
+		return CompletableFuture.supplyAsync(Supplier { db.read(key) }, carrier).thenApplyAsync {
 			it.map {
 				val reader = DataInputStream(BufferedInputStream(InflaterInputStream(ByteArrayInputStream(it), Inflater())))
 				reader.skipBytes(3)
@@ -52,7 +56,7 @@ class LegacyChunkSource(val db: BTreeDB<ByteKey, ByteArray>) : IChunkSource {
 		val chunkY = pos.y
 		val key = ByteKey(2, (chunkX shr 8).toByte(), chunkX.toByte(), (chunkY shr 8).toByte(), chunkY.toByte())
 
-		return db.read(key).thenApplyAsync {
+		return CompletableFuture.supplyAsync(Supplier { db.read(key) }, carrier).thenApplyAsync {
 			it.map {
 				val reader = DataInputStream(BufferedInputStream(InflaterInputStream(ByteArrayInputStream(it), Inflater())))
 				val i = reader.readVarInt()