Native world storage, featuring a fast unique entity index, atomic updates, and paletted tile storage (does not require integer IDs to be assigned to materials/modifiers/liquids)

This commit is contained in:
DBotThePony 2024-05-04 02:06:52 +07:00
parent 72d6db0a17
commit b4940ce8ca
Signed by: DBot
GPG Key ID: DCC23B5715498507
15 changed files with 658 additions and 96 deletions

View File

@ -20,7 +20,7 @@ object VersionRegistry {
return VersionedJson(name, currentVersion(name), contents)
}
private fun migrate(read: VersionedJson, identifier: String = read.id): JsonElement {
fun migrate(read: VersionedJson, identifier: String = read.id): JsonElement {
if (read.version != currentVersion(identifier)) {
throw IllegalStateException("NYI: Migrating $identifier from ${read.version} to ${currentVersion(identifier)}")
}

View File

@ -0,0 +1,24 @@
package ru.dbotthepony.kstarbound.io
import java.sql.Connection
import java.sql.PreparedStatement
/**
 * The three prepared statements that drive one named SQL savepoint:
 * [begin] enters the savepoint, [commit] releases it, [rollback] rolls back to it.
 */
data class SavepointStatements(
	val begin: PreparedStatement,
	val commit: PreparedStatement,
	val rollback: PreparedStatement,
)
/**
 * Prepares the `SAVEPOINT` / `RELEASE SAVEPOINT` / `ROLLBACK TO SAVEPOINT` statement
 * triple for savepoint [name] on this connection.
 *
 * @param name savepoint identifier; interpolated into a double-quoted SQL identifier,
 *   so it must not itself contain a `"` character
 * @throws IllegalStateException if [name] contains `"`
 */
fun Connection.createSavepoint(name: String): SavepointStatements {
	// reject names that would escape the double-quoted identifier below
	check('"' !in name) { "Invalid identifier: $name" }

	return SavepointStatements(
		begin = prepareStatement("SAVEPOINT \"$name\""),
		commit = prepareStatement("RELEASE SAVEPOINT \"$name\""),
		rollback = prepareStatement("ROLLBACK TO SAVEPOINT \"$name\""),
	)
}

View File

@ -28,6 +28,7 @@ import ru.dbotthepony.kstarbound.json.writeJsonElement
import ru.dbotthepony.kstarbound.json.writeJsonElementDeflated
import ru.dbotthepony.kstarbound.json.writeJsonObject
import ru.dbotthepony.kstarbound.server.world.LegacyWorldStorage
import ru.dbotthepony.kstarbound.server.world.NativeLocalWorldStorage
import ru.dbotthepony.kstarbound.server.world.ServerUniverse
import ru.dbotthepony.kstarbound.server.world.ServerWorld
import ru.dbotthepony.kstarbound.server.world.ServerSystemWorld
@ -203,7 +204,7 @@ sealed class StarboundServer(val root: File) : BlockableEventLoop("Server thread
val fileName = location.pos.toString().replace(':', '_') + ".db"
val file = File(universeFolder, fileName)
val firstTime = !file.exists()
val storage = LegacyWorldStorage.SQL(file)
val storage = NativeLocalWorldStorage(file)
val world = try {
if (firstTime) {
@ -225,7 +226,7 @@ sealed class StarboundServer(val root: File) : BlockableEventLoop("Server thread
var i = 0
while (!file.renameTo(File(universeFolder, "$fileName-fail$i")) && ++i < 1000) {}
ServerWorld.create(this, WorldTemplate.create(location.pos, universe), LegacyWorldStorage.SQL(file), location)
ServerWorld.create(this, WorldTemplate.create(location.pos, universe), NativeLocalWorldStorage(file), location)
}
}

View File

@ -22,6 +22,7 @@ import ru.dbotthepony.kommons.io.writeVarInt
import ru.dbotthepony.kommons.util.xxhash32
import ru.dbotthepony.kstarbound.Starbound
import ru.dbotthepony.kstarbound.VersionRegistry
import ru.dbotthepony.kstarbound.defs.EntityType
import ru.dbotthepony.kstarbound.io.BTreeDB5
import ru.dbotthepony.kstarbound.io.readInternedString
import ru.dbotthepony.kstarbound.io.readVector2f
@ -183,7 +184,29 @@ sealed class LegacyWorldStorage() : WorldStorage() {
val key = ByteKey(2, (chunkX shr 8).toByte(), chunkX.toByte(), (chunkY shr 8).toByte(), chunkY.toByte())
return load(key).thenApplyAsync(Function {
readEntities(pos, it ?: return@Function listOf())
it ?: return@Function listOf()
val reader = DataInputStream(BufferedInputStream(InflaterInputStream(ByteArrayInputStream(it))))
val i = reader.readVarInt()
val objects = ArrayList<AbstractEntity>()
for (i2 in 0 until i) {
val obj = VersionedJson(reader)
val type = EntityType.entries.firstOrNull { it.storeName == obj.id }
if (type != null) {
try {
objects.add(type.fromStorage(obj.content as JsonObject))
} catch (err: Throwable) {
LOGGER.error("Unable to deserialize entity in chunk $pos", err)
}
} else {
LOGGER.error("Unknown entity type in chunk $pos: ${obj.id}")
}
}
reader.close()
return@Function objects
}, Starbound.EXECUTOR)
}
@ -218,7 +241,7 @@ sealed class LegacyWorldStorage() : WorldStorage() {
streamEntities.writeVarInt(entities.size)
for (entity in entities) {
Starbound.storeJson {
Starbound.legacyStoreJson {
val data = JsonObject()
entity.serialize(data)
VersionRegistry.make(entity.type.storeName, data).write(streamEntities)
@ -335,17 +358,24 @@ sealed class LegacyWorldStorage() : WorldStorage() {
}
class Memory(private val get: (ByteKey) -> ByteArray?, private val set: (ByteKey, ByteArray) -> Unit) : LegacyWorldStorage() {
private val pending = HashMap<ByteKey, ByteArray>()
override val executor: Executor = Executor { it.run() }
override fun load(at: ByteKey): CompletableFuture<ByteArray?> {
return CompletableFuture.completedFuture(get(at))
return CompletableFuture.completedFuture(pending[at] ?: get(at))
}
override fun write(at: ByteKey, value: ByteArray) {
set(at, value)
// set(at, value)
pending[at] = value
}
override fun close() {}
override fun commit() {
pending.entries.forEach { (k, v) -> set(k, v) }
pending.clear()
}
}
class DB5(private val database: BTreeDB5) : LegacyWorldStorage() {
@ -363,6 +393,10 @@ sealed class LegacyWorldStorage() : WorldStorage() {
executor.execute { database.close() }
executor.wait(300L, TimeUnit.SECONDS)
}
override fun commit() {
// do nothing
}
}
class SQL(path: File) : LegacyWorldStorage() {
@ -374,7 +408,6 @@ sealed class LegacyWorldStorage() : WorldStorage() {
val connection = connection
cleaner = Starbound.CLEANER.register(this) {
/*connection.commit();*/
connection.close()
}
@ -387,6 +420,8 @@ sealed class LegacyWorldStorage() : WorldStorage() {
|"value" BLOB NOT NULL
|)""".trimMargin())
}
connection.autoCommit = false
}
// concurrent safety - load(), write() and close() are always called on one thread
@ -417,10 +452,13 @@ sealed class LegacyWorldStorage() : WorldStorage() {
}
override fun close() {
// carrier.execute { connection.commit() }
executor.execute { cleaner.clean() }
executor.wait(300L, TimeUnit.SECONDS)
}
override fun commit() {
executor.execute { connection.commit() }
}
}
companion object {

View File

@ -0,0 +1,458 @@
package ru.dbotthepony.kstarbound.server.world
import com.google.gson.JsonObject
import it.unimi.dsi.fastutil.ints.Int2ObjectAVLTreeMap
import it.unimi.dsi.fastutil.io.FastByteArrayInputStream
import it.unimi.dsi.fastutil.io.FastByteArrayOutputStream
import it.unimi.dsi.fastutil.objects.Object2IntFunction
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap
import org.apache.logging.log4j.LogManager
import ru.dbotthepony.kommons.arrays.Object2DArray
import ru.dbotthepony.kommons.io.readBinaryString
import ru.dbotthepony.kommons.io.readCollection
import ru.dbotthepony.kommons.io.writeBinaryString
import ru.dbotthepony.kommons.io.writeCollection
import ru.dbotthepony.kstarbound.Starbound
import ru.dbotthepony.kstarbound.VersionRegistry
import ru.dbotthepony.kstarbound.defs.EntityType
import ru.dbotthepony.kstarbound.io.createSavepoint
import ru.dbotthepony.kstarbound.json.VersionedJson
import ru.dbotthepony.kstarbound.json.readJsonElement
import ru.dbotthepony.kstarbound.json.readJsonObjectInflated
import ru.dbotthepony.kstarbound.json.writeJsonElement
import ru.dbotthepony.kstarbound.json.writeJsonObjectDeflated
import ru.dbotthepony.kstarbound.math.vector.Vector2d
import ru.dbotthepony.kstarbound.math.vector.Vector2i
import ru.dbotthepony.kstarbound.util.CarriedExecutor
import ru.dbotthepony.kstarbound.util.supplyAsync
import ru.dbotthepony.kstarbound.world.ChunkPos
import ru.dbotthepony.kstarbound.world.ChunkState
import ru.dbotthepony.kstarbound.world.WorldGeometry
import ru.dbotthepony.kstarbound.world.api.AbstractCell
import ru.dbotthepony.kstarbound.world.api.MutableCell
import ru.dbotthepony.kstarbound.world.entities.AbstractEntity
import java.io.BufferedInputStream
import java.io.BufferedOutputStream
import java.io.DataInputStream
import java.io.DataOutputStream
import java.io.File
import java.lang.ref.Cleaner
import java.sql.Connection
import java.sql.DriverManager
import java.sql.PreparedStatement
import java.util.concurrent.CompletableFuture
import java.util.concurrent.TimeUnit
import java.util.zip.Deflater
import java.util.zip.DeflaterOutputStream
import java.util.zip.Inflater
import java.util.zip.InflaterInputStream
/**
 * "Native" world storage backed by a single SQLite database.
 *
 * Compared to the legacy BTreeDB-derived format it provides: a database-enforced
 * unique entity index, atomic updates (manual transactions released via [commit]),
 * and paletted tile storage, so materials/modifiers/liquids are stored by string
 * key and do not need stable integer IDs assigned.
 *
 * All JDBC access is funneled through a single [CarriedExecutor] because
 * [Connection]/[PreparedStatement] instances are not safe for concurrent use.
 *
 * @param file database file to open, or `null` for a transient in-memory database
 */
class NativeLocalWorldStorage(file: File?) : WorldStorage() {
	private val connection: Connection
	// single-threaded executor serializing every database operation
	private val executor = CarriedExecutor(Starbound.IO_EXECUTOR)
	private val cleaner: Cleaner.Cleanable

	init {
		if (file == null) {
			// no backing file -> in-memory SQLite database
			connection = DriverManager.getConnection("jdbc:sqlite:")
		} else {
			// forward slashes keep the JDBC URL well-formed for Windows paths too
			connection = DriverManager.getConnection("jdbc:sqlite:${file.absolutePath.replace('\\', '/')}")
		}

		// capture into a local so the cleaner lambda does not strongly reference `this`,
		// which would keep this storage reachable forever and defeat the Cleaner
		val connection = connection

		cleaner = Starbound.CLEANER.register(this) {
			connection.close()
		}
	}

	override fun commit() {
		// durability point: asynchronously flush the current transaction
		executor.execute { connection.commit() }
	}

	override fun close() {
		// commit outstanding work, then close the connection through the cleaner;
		// block until the queued database work drains (bounded wait)
		executor.execute { connection.commit(); cleaner.clean() }
		executor.wait(300L, TimeUnit.SECONDS)
	}

	init {
		connection.createStatement().use {
			// WAL journal + NORMAL sync: standard throughput/durability tradeoff for frequent saves
			it.execute("PRAGMA journal_mode=WAL")
			it.execute("PRAGMA synchronous=NORMAL")

			// world metadata: one versioned blob per string key
			it.execute("""
				CREATE TABLE IF NOT EXISTS "metadata" (
					"key" VARCHAR NOT NULL PRIMARY KEY,
					"version" INTEGER NOT NULL,
					"data" BLOB NOT NULL
				)
			""".trimIndent())

			// chunk cell blobs, addressed by chunk coordinates
			it.execute("""
				CREATE TABLE IF NOT EXISTS "cells" (
					"x" INTEGER NOT NULL,
					"y" INTEGER NOT NULL,
					"generation_stage" INTEGER NOT NULL,
					"version" INTEGER NOT NULL,
					"data" BLOB NOT NULL,
					PRIMARY KEY ("x", "y")
				)
			""".trimIndent())

			it.execute("""
				CREATE TABLE IF NOT EXISTS "entities" (
					-- store chunks because rules for entities belonging to specific chunk might get different over time
					"chunkX" INTEGER NOT NULL,
					"chunkY" INTEGER NOT NULL,
					"x" REAL NOT NULL,
					"y" REAL NOT NULL,
					"unique_id" VARCHAR,
					"type" VARCHAR NOT NULL,
					"version" INTEGER NOT NULL,
					"data" BLOB NOT NULL
				)
			""".trimIndent())

			// Enforce unique-ness of unique entity IDs.
			// If another entity pops up with the same unique id, it will overwrite the entity in the other chunk
			// (unless colliding entities are both loaded into world's memory, which will cause a runtime exception
			// to be thrown; and no overwrite will happen)
			it.execute("""
				CREATE UNIQUE INDEX IF NOT EXISTS "entities_unique_id" ON "entities" ("unique_id")
			""".trimIndent())

			it.execute("""
				CREATE INDEX IF NOT EXISTS "entities_chunk_pos" ON "entities" ("chunkX", "chunkY")
			""".trimIndent())
		}

		// from here on durability is controlled explicitly through commit()
		connection.autoCommit = false
	}

	private val readCells = connection.prepareStatement("""
		SELECT "generation_stage", "version", "data" FROM "cells" WHERE "x" = ? AND "y" = ?
	""".trimIndent())

	override fun loadCells(pos: ChunkPos): CompletableFuture<ChunkCells?> {
		return executor.supplyAsync {
			readCells.setInt(1, pos.x)
			readCells.setInt(2, pos.y)

			readCells.executeQuery().use {
				if (!it.next())
					return@supplyAsync null

				val stage = ChunkState.entries[it.getInt(1)]
				val version = it.getInt(2)
				val data = it.getBytes(3)

				val inflater = Inflater()
				val stream = DataInputStream(BufferedInputStream(InflaterInputStream(FastByteArrayInputStream(data), inflater, 0x10000), 0x40000))

				try {
					// blob layout: palettes, then width/height, then the cell grid (x outer, y inner)
					val palette = PaletteSet()
					palette.read(stream)

					val width = stream.readInt()
					val height = stream.readInt()
					val array = Object2DArray.nulls<AbstractCell>(width, height)

					for (x in 0 until width) {
						for (y in 0 until height) {
							array[x, y] = MutableCell().readNative(stream, palette, version).immutable()
						}
					}

					return@supplyAsync ChunkCells(array as Object2DArray<out AbstractCell>, stage)
				} finally {
					// Inflater holds native memory; release it deterministically
					inflater.end()
				}
			}
		}
	}

	private val readEntities = connection.prepareStatement("""
		SELECT "type", "version", "data" FROM "entities" WHERE "chunkX" = ? AND "chunkY" = ?
	""".trimIndent())

	override fun loadEntities(pos: ChunkPos): CompletableFuture<Collection<AbstractEntity>> {
		return executor.supplyAsync {
			readEntities.setInt(1, pos.x)
			readEntities.setInt(2, pos.y)

			val entities = ArrayList<AbstractEntity>()

			readEntities.executeQuery().use {
				while (it.next()) {
					val rtype = it.getString(1)
					val version = it.getInt(2)
					val type = EntityType.entries.firstOrNull { it.storeName == rtype }

					if (type != null) {
						try {
							val data = it.getBytes(3).readJsonObjectInflated()
							// migrate stored JSON to the current format version before deserializing
							entities.add(type.fromStorage(VersionRegistry.migrate(VersionedJson(rtype, version, data)) as JsonObject))
						} catch (err: Throwable) {
							// one broken entity must not prevent the rest of the chunk from loading
							LOGGER.error("Unable to deserialize entity in chunk $pos", err)
						}
					} else {
						LOGGER.error("Unknown entity type $rtype in chunk $pos")
					}
				}
			}

			return@supplyAsync entities
		}
	}

	private val readMetadata = connection.prepareStatement("""
		SELECT "version", "data" FROM "metadata" WHERE "key" = ?
	""".trimIndent())

	private val writeMetadata = connection.prepareStatement("""
		REPLACE INTO "metadata" ("key", "version", "data") VALUES (?, ?, ?)
	""".trimIndent())

	// returns (version, blob) for given key, or null when absent; call on [executor] only
	private fun readMetadata(key: String): Pair<Int, ByteArray>? {
		readMetadata.setString(1, key)

		return readMetadata.executeQuery().use {
			if (it.next()) it.getInt(1) to it.getBytes(2) else null
		}
	}

	// upserts blob under given key; call on [executor] only
	private fun writeMetadata(key: String, version: Int, value: ByteArray) {
		writeMetadata.setString(1, key)
		writeMetadata.setInt(2, version)
		writeMetadata.setBytes(3, value)
		writeMetadata.execute()
	}

	override fun loadMetadata(): CompletableFuture<Metadata?> {
		return executor.supplyAsync {
			val (version, metadata) = readMetadata("metadata") ?: return@supplyAsync null
			val stream = DataInputStream(BufferedInputStream(InflaterInputStream(FastByteArrayInputStream(metadata))))

			// blob layout mirrors saveMetadata(): size, loop flags, then JSON payload
			val width = stream.readInt()
			val height = stream.readInt()
			val loopX = stream.readBoolean()
			val loopY = stream.readBoolean()
			val json = VersionedJson("WorldMetadata", version, stream.readJsonElement())
			stream.close()

			Metadata(WorldGeometry(Vector2i(width, height), loopX, loopY), json)
		}
	}

	private val clearEntities = connection.prepareStatement("""
		DELETE FROM "entities" WHERE "chunkX" = ? AND "chunkY" = ?
	""".trimIndent())

	private val beginSaveEntities: PreparedStatement
	private val finishSaveEntities: PreparedStatement
	private val rollbackSaveEntities: PreparedStatement

	init {
		// savepoint makes the delete + reinsert in saveEntities() atomic within
		// the manually committed outer transaction
		val (begin, commit, rollback) = connection.createSavepoint("save_entities")
		beginSaveEntities = begin
		finishSaveEntities = commit
		rollbackSaveEntities = rollback
	}

	private val writeEntity = connection.prepareStatement("""
		REPLACE INTO "entities" ("chunkX", "chunkY", "x", "y", "unique_id", "type", "version", "data")
		VALUES (?, ?, ?, ?, ?, ?, ?, ?)
	""".trimIndent())

	override fun saveEntities(pos: ChunkPos, entities: Collection<AbstractEntity>) {
		executor.execute {
			beginSaveEntities.execute()

			try {
				// replace the chunk's entity set wholesale: clear, then re-insert
				clearEntities.setInt(1, pos.x)
				clearEntities.setInt(2, pos.y)
				clearEntities.execute()

				for (entity in entities) {
					Starbound.storeJson {
						val data = JsonObject()
						entity.serialize(data)

						writeEntity.setInt(1, pos.x)
						writeEntity.setInt(2, pos.y)
						writeEntity.setDouble(3, entity.position.x)
						writeEntity.setDouble(4, entity.position.y)
						writeEntity.setString(5, entity.uniqueID.get())
						writeEntity.setString(6, entity.type.storeName)
						writeEntity.setInt(7, VersionRegistry.currentVersion(entity.type.storeName))
						writeEntity.setBytes(8, data.writeJsonObjectDeflated())
						writeEntity.execute()
					}
				}

				finishSaveEntities.execute()
			} catch (err: Throwable) {
				// roll back to the savepoint so a failed save never leaves the chunk half-written
				rollbackSaveEntities.execute()
				throw err
			}
		}
	}

	/**
	 * Bidirectional string <-> index mapping used to store registry keys compactly.
	 * Indices are assigned in insertion order starting at 0.
	 */
	class Palette {
		private val key2index = Object2IntOpenHashMap<String>()
		// tree map iterates values() in ascending index order, which write() relies on
		private val index2key = Int2ObjectAVLTreeMap<String>()

		init {
			key2index.defaultReturnValue(-1)
		}

		// registers value if absent; already-known values keep their index
		fun add(value: String) {
			index2key[key2index.computeIfAbsent(value, Object2IntFunction { key2index.size })] = value
		}

		fun get(value: String): Int {
			val get = key2index.getInt(value)
			if (get == -1) throw NoSuchElementException("No such palette element with key '$value'")
			return get
		}

		fun get(value: Int): String {
			return index2key[value] ?: throw NoSuchElementException("No such palette element with index $value")
		}

		// one byte per key for small palettes, two bytes otherwise;
		// readKey() mirrors this, so reader and writer must agree on palette contents
		fun writeKey(stream: DataOutputStream, key: String) {
			if (key2index.size <= 256) {
				stream.writeByte(get(key))
			} else {
				stream.writeShort(get(key))
			}
		}

		fun readKey(stream: DataInputStream): String {
			if (key2index.size <= 256) {
				return get(stream.readUnsignedByte())
			} else {
				return get(stream.readUnsignedShort())
			}
		}

		fun write(stream: DataOutputStream) {
			stream.writeCollection(index2key.values) { writeBinaryString(it) }
		}

		fun read(stream: DataInputStream) {
			key2index.clear()
			index2key.clear()

			for (key in stream.readCollection { readBinaryString() }) {
				if (key in key2index)
					throw IllegalStateException("Duplicate palette key $key")

				index2key[key2index.computeIfAbsent(key, Object2IntFunction { key2index.size })] = key
			}
		}
	}

	/**
	 * The three palettes a chunk blob uses: tile materials, tile modifiers and liquids.
	 * Always serialized in that fixed order.
	 */
	class PaletteSet {
		val tiles = Palette()
		val mods = Palette()
		val liquids = Palette()

		fun write(stream: DataOutputStream) {
			tiles.write(stream)
			mods.write(stream)
			liquids.write(stream)
		}

		fun read(stream: DataInputStream) {
			tiles.read(stream)
			mods.read(stream)
			liquids.read(stream)
		}
	}

	private val writeCells = connection.prepareStatement("""
		REPLACE INTO "cells" ("x", "y", "generation_stage", "version", "data")
		VALUES (?, ?, ?, ?, ?)
	""".trimIndent())

	override fun saveCells(pos: ChunkPos, data: ChunkCells) {
		executor.execute {
			// first pass: collect every referenced material/modifier/liquid key,
			// so writeKey() below sees the final palette sizes
			val palette = PaletteSet()

			for (x in 0 until data.cells.columns) {
				for (y in 0 until data.cells.rows) {
					data.cells[x, y].prepare(palette)
				}
			}

			val deflater = Deflater()
			val buff = FastByteArrayOutputStream()
			val stream = DataOutputStream(BufferedOutputStream(DeflaterOutputStream(buff, deflater, 0x10000), 0x40000))

			try {
				// second pass: palettes, dimensions, then the cells themselves
				palette.write(stream)
				stream.writeInt(data.cells.columns)
				stream.writeInt(data.cells.rows)

				for (x in 0 until data.cells.columns) {
					for (y in 0 until data.cells.rows) {
						data.cells[x, y].writeNative(stream, palette)
					}
				}

				stream.close()

				writeCells.setInt(1, pos.x)
				writeCells.setInt(2, pos.y)
				writeCells.setInt(3, data.state.ordinal)
				// "version" column; handed back to readNative() on load
				writeCells.setInt(4, 0)
				writeCells.setBytes(5, buff.array.copyOf(buff.length))
				writeCells.execute()
			} finally {
				// release the Deflater's native memory deterministically
				deflater.end()
			}
		}
	}

	override fun saveMetadata(data: Metadata) {
		executor.execute {
			val buff = FastByteArrayOutputStream()
			// keep the Deflater in a local and end() it in finally, matching saveCells();
			// previously it was created inline and its native memory was never released explicitly
			val deflater = Deflater()
			val stream = DataOutputStream(BufferedOutputStream(DeflaterOutputStream(buff, deflater, 0x10000), 0x40000))

			try {
				stream.writeInt(data.geometry.size.x)
				stream.writeInt(data.geometry.size.y)
				stream.writeBoolean(data.geometry.loopX)
				stream.writeBoolean(data.geometry.loopY)
				stream.writeJsonElement(data.data.content)
				stream.close()

				writeMetadata("metadata", data.data.version ?: 0, buff.array.copyOf(buff.length))
			} finally {
				deflater.end()
			}
		}
	}

	private val findUniqueEntity = connection.prepareStatement("""
		SELECT "chunkX", "chunkY", "x", "y" FROM "entities" WHERE "unique_id" = ?
	""".trimIndent())

	override fun findUniqueEntity(identifier: String): CompletableFuture<UniqueEntitySearchResult?> {
		return executor.supplyAsync {
			findUniqueEntity.setString(1, identifier)

			findUniqueEntity.executeQuery().use {
				if (it.next()) {
					UniqueEntitySearchResult(ChunkPos(it.getInt(1), it.getInt(2)), Vector2d(it.getDouble(3), it.getDouble(4)))
				} else {
					null
				}
			}
		}
	}

	companion object {
		private val LOGGER = LogManager.getLogger()
	}
}

View File

@ -1,45 +0,0 @@
package ru.dbotthepony.kstarbound.server.world
import ru.dbotthepony.kommons.arrays.Object2DArray
import ru.dbotthepony.kommons.io.ByteKey
import ru.dbotthepony.kommons.util.KOptional
import ru.dbotthepony.kstarbound.world.ChunkPos
import ru.dbotthepony.kstarbound.world.ChunkState
import ru.dbotthepony.kstarbound.world.api.AbstractCell
import ru.dbotthepony.kstarbound.world.entities.AbstractEntity
import java.io.Closeable
import java.util.concurrent.CompletableFuture
/**
 * Unimplemented placeholder for the native world storage format.
 *
 * Every load/save operation throws [NotImplementedError] via [TODO]; only [close]
 * is a (deliberate) no-op, so the stub can be constructed and disposed safely.
 */
class NativeWorldStorage() : WorldStorage() {
	override fun loadCells(pos: ChunkPos): CompletableFuture<ChunkCells?> {
		TODO("Not yet implemented")
	}

	override fun loadEntities(pos: ChunkPos): CompletableFuture<Collection<AbstractEntity>> {
		TODO("Not yet implemented")
	}

	override fun loadMetadata(): CompletableFuture<Metadata?> {
		TODO("Not yet implemented")
	}

	override fun saveEntities(pos: ChunkPos, entities: Collection<AbstractEntity>) {
		TODO("Not yet implemented")
	}

	override fun saveCells(pos: ChunkPos, data: ChunkCells) {
		TODO("Not yet implemented")
	}

	override fun saveMetadata(data: Metadata) {
		TODO("Not yet implemented")
	}

	override fun findUniqueEntity(identifier: String): CompletableFuture<UniqueEntitySearchResult?> {
		TODO("Not yet implemented")
	}

	// intentionally empty: nothing is held open by this stub
	override fun close() {
	}
}

View File

@ -538,7 +538,7 @@ class ServerChunk(world: ServerWorld, pos: ChunkPos) : Chunk<ServerWorld, Server
private var savedCellState = -1
private fun writeToStorage(): Collection<AbstractEntity> {
private fun writeToStorage(isUnloadingWorld: Boolean = false): Collection<AbstractEntity> {
if (!cells.isInitialized() || state <= ChunkState.EMPTY)
return emptyList()
@ -548,10 +548,15 @@ class ServerChunk(world: ServerWorld, pos: ChunkPos) : Chunk<ServerWorld, Server
filter = Predicate { !it.isRemote && aabbd.isInside(it.position) })
if (!isLoadingFromDisk) {
if (savedCellState != cellChangeset)
if (savedCellState != cellChangeset) {
world.storage.saveCells(pos, WorldStorage.ChunkCells(copyCells(), state))
savedCellState = cellChangeset
}
world.storage.saveEntities(pos, unloadable.filter { it.isPersistent })
if (!isUnloadingWorld)
world.storage.commit()
}
return unloadable
@ -565,7 +570,7 @@ class ServerChunk(world: ServerWorld, pos: ChunkPos) : Chunk<ServerWorld, Server
}
}, 5L, 30L, TimeUnit.SECONDS)
fun unload() {
fun unload(isUnloadingWorld: Boolean = false) {
if (isUnloaded)
return
@ -576,7 +581,7 @@ class ServerChunk(world: ServerWorld, pos: ChunkPos) : Chunk<ServerWorld, Server
world.chunkMap.remove(pos)
flushTask.cancel(false)
writeToStorage().forEach {
writeToStorage(isUnloadingWorld).forEach {
it.remove(AbstractEntity.RemovalReason.UNLOADED)
}
}

View File

@ -145,9 +145,11 @@ class ServerWorld private constructor(
storage.saveMetadata(WorldStorage.Metadata(geometry, VersionRegistry.make("WorldMetadata", Starbound.gson.toJsonTree(metadata))))
}
private var uncleanShutdown = false
override val eventLoop = object : BlockableEventLoop("Server World $worldID") {
init {
isDaemon = true
isDaemon = false
}
override fun performShutdown() {
@ -159,11 +161,15 @@ class ServerWorld private constructor(
// do nothing
}
chunkMap.chunks().forEach {
try {
it.unload()
} catch (err: Throwable) {
LOGGER.error("Exception while saving chunk ${it.pos}", err)
if (!uncleanShutdown) {
for (chunk in chunkMap.chunks()) {
try {
chunk.unload(isUnloadingWorld = true)
} catch (err: Throwable) {
LOGGER.error("Exception while saving chunk ${chunk.pos}", err)
uncleanShutdown = true
break
}
}
}
@ -173,7 +179,11 @@ class ServerWorld private constructor(
it.client.enqueueWarp(WarpAlias.Return)
}
saveMetadata()
if (!uncleanShutdown) {
saveMetadata()
storage.commit()
}
storage.close()
}
}
@ -423,6 +433,7 @@ class ServerWorld private constructor(
}
} catch (err: Throwable) {
LOGGER.fatal("Exception in world tick loop", err)
uncleanShutdown = true
eventLoop.shutdown()
if (worldID is WorldID.ShipWorld) {

View File

@ -37,36 +37,10 @@ abstract class WorldStorage : Closeable {
data class UniqueEntitySearchResult(val chunk: ChunkPos, val pos: Vector2d)
abstract fun findUniqueEntity(identifier: String): CompletableFuture<UniqueEntitySearchResult?>
protected fun readEntities(pos: ChunkPos, data: ByteArray): List<AbstractEntity> {
val reader = DataInputStream(BufferedInputStream(InflaterInputStream(ByteArrayInputStream(data))))
val i = reader.readVarInt()
val objects = ArrayList<AbstractEntity>()
/**
* closes storage, discarding any un[commit]ted data
*/
override fun close() {}
for (i2 in 0 until i) {
val obj = VersionedJson(reader)
val type = EntityType.entries.firstOrNull { it.storeName == obj.id }
if (type != null) {
try {
objects.add(type.fromStorage(obj.content as JsonObject))
} catch (err: Throwable) {
LOGGER.error("Unable to deserialize entity in chunk $pos", err)
}
} else {
LOGGER.error("Unknown entity type in chunk $pos: ${obj.id}")
}
}
reader.close()
return objects
}
override fun close() {
}
companion object {
private val LOGGER = LogManager.getLogger()
}
abstract fun commit()
}

View File

@ -6,6 +6,7 @@ import ru.dbotthepony.kstarbound.Starbound
import ru.dbotthepony.kstarbound.defs.tile.NO_DUNGEON_ID
import ru.dbotthepony.kstarbound.math.vector.Vector2i
import ru.dbotthepony.kstarbound.network.LegacyNetworkCellState
import ru.dbotthepony.kstarbound.server.world.NativeLocalWorldStorage
import ru.dbotthepony.kstarbound.world.physics.CollisionType
import java.io.DataInputStream
import java.io.DataOutputStream
@ -74,6 +75,30 @@ sealed class AbstractCell {
}
}
fun prepare(palette: NativeLocalWorldStorage.PaletteSet) {
foreground.prepare(palette)
background.prepare(palette)
liquid.prepare(palette)
}
fun writeNative(stream: DataOutputStream, palette: NativeLocalWorldStorage.PaletteSet) {
foreground.writeNative(stream, palette)
background.writeNative(stream, palette)
liquid.writeNative(stream, palette)
stream.writeShort(dungeonId)
stream.writeShort(blockBiome)
stream.writeShort(envBiome)
stream.writeBoolean(biomeTransition)
if (rootSource == null) {
stream.writeBoolean(false)
} else {
stream.writeBoolean(true)
stream.writeStruct2i(rootSource!!)
}
}
companion object {
fun skip(stream: DataInputStream) {
AbstractTileState.skip(stream)

View File

@ -5,6 +5,7 @@ import ru.dbotthepony.kstarbound.Registry
import ru.dbotthepony.kstarbound.Starbound
import ru.dbotthepony.kstarbound.defs.tile.LiquidDefinition
import ru.dbotthepony.kstarbound.network.LegacyNetworkLiquidState
import ru.dbotthepony.kstarbound.server.world.NativeLocalWorldStorage
import java.io.DataInputStream
import java.io.DataOutputStream
@ -32,6 +33,17 @@ sealed class AbstractLiquidState {
stream.writeBoolean(isInfinite)
}
fun writeNative(stream: DataOutputStream, palette: NativeLocalWorldStorage.PaletteSet) {
palette.liquids.writeKey(stream, state.key)
stream.writeFloat(level)
stream.writeFloat(pressure)
stream.writeBoolean(isInfinite)
}
fun prepare(palette: NativeLocalWorldStorage.PaletteSet) {
palette.liquids.add(state.key)
}
companion object {
fun skip(stream: DataInputStream) {
stream.skipNBytes(1 + 4 + 4 + 1)

View File

@ -7,6 +7,7 @@ import ru.dbotthepony.kstarbound.defs.tile.BuiltinMetaMaterials
import ru.dbotthepony.kstarbound.defs.tile.TileModifierDefinition
import ru.dbotthepony.kstarbound.defs.tile.TileDefinition
import ru.dbotthepony.kstarbound.network.LegacyNetworkTileState
import ru.dbotthepony.kstarbound.server.world.NativeLocalWorldStorage
import java.io.DataInputStream
import java.io.DataOutputStream
@ -46,6 +47,20 @@ sealed class AbstractTileState {
stream.writeByte(byteModifierHueShift())
}
fun writeNative(stream: DataOutputStream, palette: NativeLocalWorldStorage.PaletteSet) {
palette.tiles.writeKey(stream, material.key)
stream.writeFloat(hueShift)
stream.writeByte(color.ordinal)
palette.mods.writeKey(stream, modifier.key)
stream.writeFloat(modifierHueShift)
}
fun prepare(palette: NativeLocalWorldStorage.PaletteSet) {
palette.tiles.add(material.key)
palette.mods.add(modifier.key)
}
companion object {
fun skip(stream: DataInputStream) {
stream.skipNBytes(2 + 1 + 1 + 2 + 1)

View File

@ -3,6 +3,7 @@ package ru.dbotthepony.kstarbound.world.api
import ru.dbotthepony.kstarbound.io.readVector2i
import ru.dbotthepony.kstarbound.defs.tile.NO_DUNGEON_ID
import ru.dbotthepony.kstarbound.math.vector.Vector2i
import ru.dbotthepony.kstarbound.server.world.NativeLocalWorldStorage
import java.io.DataInputStream
data class MutableCell(
@ -47,6 +48,26 @@ data class MutableCell(
return this
}
fun readNative(stream: DataInputStream, palette: NativeLocalWorldStorage.PaletteSet, version: Int): MutableCell {
foreground.readNative(stream, palette, version)
background.readNative(stream, palette, version)
liquid.readNative(stream, palette, version)
dungeonId = stream.readUnsignedShort()
blockBiome = stream.readUnsignedShort()
envBiome = stream.readUnsignedShort()
biomeTransition = stream.readBoolean()
if (stream.readBoolean()) {
rootSource = stream.readVector2i()
} else {
rootSource = null
}
return this
}
override fun tile(background: Boolean): MutableTileState {
if (background)
return this.background

View File

@ -4,6 +4,7 @@ import ru.dbotthepony.kstarbound.Registries
import ru.dbotthepony.kstarbound.Registry
import ru.dbotthepony.kstarbound.defs.tile.BuiltinMetaMaterials
import ru.dbotthepony.kstarbound.defs.tile.LiquidDefinition
import ru.dbotthepony.kstarbound.server.world.NativeLocalWorldStorage
import java.io.DataInputStream
data class MutableLiquidState(
@ -27,6 +28,14 @@ data class MutableLiquidState(
return this
}
fun readNative(stream: DataInputStream, palette: NativeLocalWorldStorage.PaletteSet, version: Int): MutableLiquidState {
state = Registries.liquid[palette.liquids.readKey(stream)] ?: BuiltinMetaMaterials.NO_LIQUID
level = stream.readFloat()
pressure = stream.readFloat()
isInfinite = stream.readBoolean()
return this
}
fun reset() {
state = BuiltinMetaMaterials.NO_LIQUID
level = 0f

View File

@ -5,6 +5,7 @@ import ru.dbotthepony.kstarbound.Registry
import ru.dbotthepony.kstarbound.defs.tile.BuiltinMetaMaterials
import ru.dbotthepony.kstarbound.defs.tile.TileModifierDefinition
import ru.dbotthepony.kstarbound.defs.tile.TileDefinition
import ru.dbotthepony.kstarbound.server.world.NativeLocalWorldStorage
import java.io.DataInputStream
data class MutableTileState(
@ -66,8 +67,21 @@ data class MutableTileState(
material = Registries.tiles[stream.readUnsignedShort()] ?: BuiltinMetaMaterials.EMPTY
setHueShift(stream.readUnsignedByte())
color = TileColor.of(stream.readUnsignedByte())
modifier = Registries.tileModifiers[stream.readUnsignedShort()] ?: BuiltinMetaMaterials.EMPTY_MOD
setModHueShift(stream.readUnsignedByte())
return this
}
fun readNative(stream: DataInputStream, palette: NativeLocalWorldStorage.PaletteSet, version: Int): MutableTileState {
material = Registries.tiles[palette.tiles.readKey(stream)] ?: BuiltinMetaMaterials.EMPTY
hueShift = stream.readFloat()
color = TileColor.of(stream.readUnsignedByte())
modifier = Registries.tileModifiers[palette.mods.readKey(stream)] ?: BuiltinMetaMaterials.EMPTY_MOD
modifierHueShift = stream.readFloat()
return this
}
}