Get rid of "intermediate" btreedb class

DBotThePony 2024-02-18 21:19:59 +07:00
parent 396dcf4a7e
commit 7011ab8ace
Signed by: DBot
GPG Key ID: DCC23B5715498507
3 changed files with 11 additions and 40 deletions

View File

@@ -4,7 +4,7 @@ kotlin.code.style=official
 specifyKotlinAsDependency=false
 projectGroup=ru.dbotthepony.kommons
-projectVersion=2.5.0
+projectVersion=2.6.0
 guavaDepVersion=33.0.0
 gsonDepVersion=2.8.9
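Only the version line changes here; the group stays ru.dbotthepony.kommons. A downstream build would pick up the release by bumping its dependency coordinate, as in this Gradle Kotlin DSL sketch (the artifact id is an assumption; only the group and version come from this file):

// Hypothetical consumer-side dependency bump. "kommons" as the artifact id is assumed;
// only the group (ru.dbotthepony.kommons) and version (2.6.0) appear in this diff.
dependencies {
	implementation("ru.dbotthepony.kommons:kommons:2.6.0")
}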

View File

@@ -1,31 +0,0 @@
-package ru.dbotthepony.kommons.io
-
-import ru.dbotthepony.kommons.util.KOptional
-import java.io.Closeable
-import java.io.File
-
-/**
- * For actual implementation, see [BTreeDB6].
- */
-abstract class BTreeDB<K, V> : Closeable {
-	abstract val file: File
-	abstract val blockSize: Int
-
-	abstract operator fun contains(key: K): Boolean
-
-	/**
-	 * Reads data at specified [key]
-	 */
-	abstract fun read(key: K): KOptional<V>
-
-	abstract fun findAllKeys(): List<K>
-
-	abstract fun write(key: K, value: V)
-}
-
-abstract class ByteDataBTreeDB<K> : BTreeDB<K, ByteArray>() {
-	abstract fun write(key: K, value: ByteArray, offset: Int, length: Int)
-	final override fun write(key: K, value: ByteArray) {
-		return write(key, value, 0, value.size)
-	}
-}
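The only behaviour the deleted ByteDataBTreeDB layer carried was forwarding write(key, value) to the offset/length variant. A caller that relied on that convenience can recover it with a one-line extension function, sketched here under the assumption that it lives in the same ru.dbotthepony.kommons.io package and that BTreeDB6 does not already declare such an overload:

// Sketch: reinstates the two-argument convenience write that ByteDataBTreeDB used to
// provide, built only on the write(key, value, offset, length) member kept by BTreeDB6.
fun BTreeDB6.write(key: ByteKey, value: ByteArray) {
	write(key, value, 0, value.size)
}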

View File

@@ -8,6 +8,7 @@ import it.unimi.dsi.fastutil.objects.ObjectArrayList
 import it.unimi.dsi.fastutil.objects.ObjectOpenHashSet
 import ru.dbotthepony.kommons.util.CarriedExecutor
 import ru.dbotthepony.kommons.util.KOptional
+import java.io.Closeable
 import java.io.DataInputStream
 import java.io.DataOutputStream
 import java.io.File
@@ -73,9 +74,10 @@ private fun readHeader(reader: RandomAccessFile, required: Char) {
 // TODO: Defragmenting
 // TODO: Add keys to DATA blocks, so entire tree can be reconstructed from scratch in event of both trees become unreadable
 // TODO: Changeset counter (to determine write-in-progress blocks/trees, to ignore them when reconstructing tree)
-// TODO: Tree rotations (rebalancing)
+// TODO: Tree rotations (rebalancing), tree height tracking, weighted subtree splitting
 // TODO: Removal of keys
-class BTreeDB6 private constructor(override val file: File, private var reader: RandomAccessFile, private val sync: Boolean) : ByteDataBTreeDB<ByteKey>() {
+// TODO: Faster free bitmap scan
+class BTreeDB6 private constructor(val file: File, private var reader: RandomAccessFile, private val sync: Boolean) : Closeable {
 	constructor(file: File, sync: Boolean = true) : this(file, RandomAccessFile(file, "rw"), sync)

 	init {
@@ -97,8 +99,8 @@ class BTreeDB6 private constructor(override val file: File, private var reader:
 		reader.close()
 	}

-	override val blockSize = reader.readInt()
-	private val effectiveBlockSize = blockSize - 9
+	val blockSize = reader.readInt()
+	val effectiveBlockSize = blockSize - 9

 	init {
 		require(blockSize >= 64) { "Degenerate block size: $blockSize" }
@@ -235,11 +237,11 @@ class BTreeDB6 private constructor(override val file: File, private var reader:
 		}
 	}

-	override fun contains(key: ByteKey): Boolean {
+	operator fun contains(key: ByteKey): Boolean {
 		return searchForBlock(key) != null
 	}

-	override fun read(key: ByteKey): KOptional<ByteArray> {
+	fun read(key: ByteKey): KOptional<ByteArray> {
 		val data = searchForBlock(key)

 		if (data != null) {
@@ -274,7 +276,7 @@ class BTreeDB6 private constructor(override val file: File, private var reader:
 		}
 	}

-	override fun findAllKeys(): List<ByteKey> {
+	fun findAllKeys(): List<ByteKey> {
 		if (rootBlockIndex == INVALID_BLOCK_INDEX) {
 			return emptyList()
 		}
@@ -284,7 +286,7 @@ class BTreeDB6 private constructor(override val file: File, private var reader:
 		return result
 	}

-	override fun write(key: ByteKey, value: ByteArray, offset: Int, length: Int) {
+	fun write(key: ByteKey, value: ByteArray, offset: Int, length: Int) {
 		if (rootBlockIndex == INVALID_BLOCK_INDEX) {
 			// create LEAF node for root
 			val block = allocBlock(BlockType.LEAF)
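
After this change BTreeDB6 is used directly as a Closeable rather than through the abstract base class. A minimal usage sketch against only the members visible in this diff; import paths are assumed from the deleted file's package, and ByteKey construction is not shown here, so the key arrives as a parameter:

import ru.dbotthepony.kommons.io.BTreeDB6   // package assumed from the deleted file above
import ru.dbotthepony.kommons.io.ByteKey    // package assumed; ByteKey construction is not shown in this commit
import ru.dbotthepony.kommons.util.KOptional
import java.io.File

// Usage sketch for the now-standalone BTreeDB6, using only members visible in this diff.
fun storeAndInspect(file: File, key: ByteKey, payload: ByteArray) {
	// BTreeDB6 now implements Closeable directly, so Kotlin's use {} applies
	BTreeDB6(file, sync = true).use { db ->
		db.write(key, payload, 0, payload.size)   // offset/length write kept by the class

		if (key in db) {                          // operator fun contains enables "in"
			val stored: KOptional<ByteArray> = db.read(key)
			println("read back $stored, keys on disk: ${db.findAllKeys().size}")
		}
	}
}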