apparently the problem was in the other places

This commit is contained in:
minjaesong
2024-11-15 23:07:07 +09:00
parent 0eccc4dbea
commit cc3c1c1b14
2 changed files with 48 additions and 10 deletions

View File

@@ -1,6 +1,7 @@
package net.torvald.terrarum.gameworld package net.torvald.terrarum.gameworld
import net.torvald.terrarum.App import net.torvald.terrarum.App
import net.torvald.terrarum.App.printdbg
import net.torvald.terrarum.INGAME import net.torvald.terrarum.INGAME
import net.torvald.terrarum.Point2i import net.torvald.terrarum.Point2i
import net.torvald.terrarum.modulecomputers.virtualcomputer.tvd.archivers.ClusteredFormatDOM import net.torvald.terrarum.modulecomputers.virtualcomputer.tvd.archivers.ClusteredFormatDOM
@@ -8,6 +9,7 @@ import net.torvald.terrarum.modulecomputers.virtualcomputer.tvd.archivers.Clustf
import net.torvald.terrarum.realestate.LandUtil import net.torvald.terrarum.realestate.LandUtil
import net.torvald.terrarum.realestate.LandUtil.CHUNK_H import net.torvald.terrarum.realestate.LandUtil.CHUNK_H
import net.torvald.terrarum.realestate.LandUtil.CHUNK_W import net.torvald.terrarum.realestate.LandUtil.CHUNK_W
import net.torvald.terrarum.savegame.ByteArray64
import net.torvald.terrarum.savegame.DiskEntry import net.torvald.terrarum.savegame.DiskEntry
import net.torvald.terrarum.savegame.DiskSkimmer import net.torvald.terrarum.savegame.DiskSkimmer
import net.torvald.terrarum.savegame.EntryFile import net.torvald.terrarum.savegame.EntryFile
@@ -35,7 +37,10 @@ enum class ChunkAllocClass {
} }
/** /**
* FIXME: loading a chunk from disk will attempt to create a chunk because the chunk-to-be-loaded is not on the pointers map, and this operation will want to create a new chunk file but the file already exists * WHAT IF instead of reading chunks directly from the savegame, I treat ChunkPool as a mere disk-cache?
*
* FIXME: loading a chunk from disk will attempt to create a chunk because the chunk-to-be-loaded
* is not on the pointers map, and this operation will want to create a new chunk file but the file already exists
* *
* Single layer gets single Chunk Pool. * Single layer gets single Chunk Pool.
* *
@@ -43,6 +48,10 @@ enum class ChunkAllocClass {
*/ */
open class ChunkPool { open class ChunkPool {
private enum class ChunkLoadingStatus {
LOADING_REQUESTED, RAW, LOADED_FROM_DISK, NEWLY_GENERATED
}
// `DiskSkimmer` or `ClusteredFormatDOM` // `DiskSkimmer` or `ClusteredFormatDOM`
private val disk: Any private val disk: Any
private val layerIndex: Int private val layerIndex: Int
@@ -51,6 +60,7 @@ open class ChunkPool {
private val initialValue: Int // bytes to fill the new chunk private val initialValue: Int // bytes to fill the new chunk
private val renumberFun: (Int) -> Int private val renumberFun: (Int) -> Int
private val chunkStatus = HashMap<Long, ChunkLoadingStatus>()
private val pointers = TreeMap<Long, Long>() private val pointers = TreeMap<Long, Long>()
private var allocCap = 32 private var allocCap = 32
private var allocMap = Array<ChunkAllocation?>(allocCap) { null } private var allocMap = Array<ChunkAllocation?>(allocCap) { null }
@@ -216,10 +226,10 @@ open class ChunkPool {
* @return `unit` if IO operation was successful, `null` if failed (e.g. file not exists) * @return `unit` if IO operation was successful, `null` if failed (e.g. file not exists)
*/ */
private fun fetchFromDisk(chunkNumber: Long): Unit? { private fun fetchFromDisk(chunkNumber: Long): Unit? {
val fileName = chunkNumToFileNum(layerIndex, chunkNumber)
// read data from the disk // read data from the disk
return if (disk is ClusteredFormatDOM) { return if (disk is ClusteredFormatDOM) {
val fileName = chunkNumToFileNumType17(layerIndex, chunkNumber)
Clustfile(disk, fileName).let { Clustfile(disk, fileName).let {
if (!it.exists()) return@let null if (!it.exists()) return@let null
@@ -230,13 +240,16 @@ open class ChunkPool {
} }
} }
else if (disk is DiskSkimmer) { else if (disk is DiskSkimmer) {
val fileID = fileName.toLong() val fileID = chunkNumToFileEntryID(layerIndex, chunkNumber)
disk.getFile(fileID).let { disk.getFile(fileID).let {
printdbg(this, "Reading chunk data: Layer $layerIndex Chunk $chunkNumber (fileID: $fileID), file: $it")
if (it == null) return@let null if (it == null) return@let null
val bytes = Common.unzip(it.bytes) val bytes = Common.unzip(it.bytes)
val ptr = allocate(chunkNumber) val ptr = allocate(chunkNumber)
UnsafeHelper.memcpyFromArrToPtr(bytes, 0, ptr.ptr, bytes.size) memcpyFromByteArray64ToPtr(bytes, 0L, ptr, 0L, bytes.size)
renumber(ptr) renumber(ptr)
} }
} }
@@ -245,6 +258,12 @@ open class ChunkPool {
} }
} }
private fun memcpyFromByteArray64ToPtr(ba: ByteArray64, srcIndex: Long, destPtr: UnsafePtr, destOffset: Long, copyLen: Long) {
// TODO temporary
val obj = ba.toByteArray()
UnsafeHelper.memcpyFromArrToPtr(obj, srcIndex.toInt(), destPtr.ptr + destOffset, copyLen)
}
/** /**
* @return `unit` if IO operation was successful, `null` if failed (e.g. file not exists) * @return `unit` if IO operation was successful, `null` if failed (e.g. file not exists)
*/ */
@@ -253,10 +272,10 @@ open class ChunkPool {
} }
private fun storeToDisk(chunkNumber: Long) { private fun storeToDisk(chunkNumber: Long) {
val fileName = chunkNumToFileNum(layerIndex, chunkNumber)
// write to the disk (the disk must be an autosaving copy of the original) // write to the disk (the disk must be an autosaving copy of the original)
if (disk is ClusteredFormatDOM) { if (disk is ClusteredFormatDOM) {
val fileName = chunkNumToFileNumType17(layerIndex, chunkNumber)
Clustfile(disk, fileName).let { Clustfile(disk, fileName).let {
val bytes = Common.zip(serialise(chunkNumber).iterator()) val bytes = Common.zip(serialise(chunkNumber).iterator())
it.overwrite(bytes.toByteArray()) it.overwrite(bytes.toByteArray())
@@ -264,7 +283,7 @@ open class ChunkPool {
} }
// append the new entry // append the new entry
else if (disk is DiskSkimmer) { else if (disk is DiskSkimmer) {
val fileID = fileName.toLong() val fileID = chunkNumToFileEntryID(layerIndex, chunkNumber)
val bytes = Common.zip(serialise(chunkNumber).iterator()) val bytes = Common.zip(serialise(chunkNumber).iterator())
val oldEntry = disk.getEntry(fileID) val oldEntry = disk.getEntry(fileID)
@@ -376,10 +395,13 @@ open class ChunkPool {
} }
companion object { companion object {
fun chunkNumToFileNum(layerNum: Int, chunkNum: Long): String { fun chunkNumToFileNumType17(layerNum: Int, chunkNum: Long): String {
val entryID = Common.layerAndChunkNumToEntryID(layerNum, chunkNum) val entryID = Common.layerAndChunkNumToEntryID(layerNum, chunkNum)
return Common.type254EntryIDtoType17Filename(entryID) return Common.type254EntryIDtoType17Filename(entryID)
} }
fun chunkNumToFileEntryID(layerNum: Int, chunkNum: Long): Long {
return Common.layerAndChunkNumToEntryID(layerNum, chunkNum)
}
private fun Int.get1SS() = this and 65535 private fun Int.get1SS() = this and 65535
private fun Int.get2SS() = (this ushr 16) and 65535 private fun Int.get2SS() = (this ushr 16) and 65535
@@ -426,7 +448,8 @@ open class ChunkPool {
} }
} }
private val chunkOffsetsNearPlayer = listOf( // this list does NOT contain `Point2i(0,0)`
val chunkOffsetsNearPlayer = listOf(
Point2i(-1,-2), Point2i(0,-2),Point2i(1,-2), Point2i(-1,-2), Point2i(0,-2),Point2i(1,-2),
Point2i(-2,-1),Point2i(-1,-1),Point2i(0,-1),Point2i(1,-1),Point2i(2,-1), Point2i(-2,-1),Point2i(-1,-1),Point2i(0,-1),Point2i(1,-1),Point2i(2,-1),
Point2i(-2,0),Point2i(-1,0),Point2i(1,0),Point2i(2,0), Point2i(-2,0),Point2i(-1,0),Point2i(1,0),Point2i(2,0),

View File

@@ -1,8 +1,10 @@
package net.torvald.terrarum.modulebasegame.serialise package net.torvald.terrarum.modulebasegame.serialise
import net.torvald.terrarum.* import net.torvald.terrarum.*
import net.torvald.terrarum.TerrarumAppConfiguration.TILE_SIZED
import net.torvald.terrarum.console.Echo import net.torvald.terrarum.console.Echo
import net.torvald.terrarum.gameworld.* import net.torvald.terrarum.gameworld.*
import net.torvald.terrarum.gameworld.ChunkPool.Companion.chunkOffsetsNearPlayer
import net.torvald.terrarum.gameworld.GameWorld.Companion.FLUID import net.torvald.terrarum.gameworld.GameWorld.Companion.FLUID
import net.torvald.terrarum.gameworld.GameWorld.Companion.ORES import net.torvald.terrarum.gameworld.GameWorld.Companion.ORES
import net.torvald.terrarum.gameworld.GameWorld.Companion.TERRAIN import net.torvald.terrarum.gameworld.GameWorld.Companion.TERRAIN
@@ -14,6 +16,7 @@ import net.torvald.terrarum.modulebasegame.gameactors.IngamePlayer
import net.torvald.terrarum.realestate.LandUtil import net.torvald.terrarum.realestate.LandUtil
import net.torvald.terrarum.savegame.* import net.torvald.terrarum.savegame.*
import net.torvald.terrarum.serialise.Common import net.torvald.terrarum.serialise.Common
import org.dyn4j.geometry.Vector2
import java.io.Reader import java.io.Reader
import java.util.logging.Level import java.util.logging.Level
import kotlin.experimental.or import kotlin.experimental.or
@@ -125,6 +128,18 @@ object LoadSavegame {
loadscreen.progress.getAndAdd(1) loadscreen.progress.getAndAdd(1)
} }
val playerChunk = player.hitbox.canonVec.let {
(it.x / (cw * TILE_SIZED)).toInt() to (it.y / (ch * TILE_SIZED)).toInt()
}.let { Point2i(it.first, it.second) }
val chunksToLoad = chunkOffsetsNearPlayer.map {
playerChunk + it
} + playerChunk
/*for (layer in worldLayer) {
(layer as? BlockLayerWithChunkPool)?.chunkPool?.
}*/
loadscreen.addMessage(Lang["MENU_IO_LOAD_UPDATING_BLOCK_MAPPINGS"]) loadscreen.addMessage(Lang["MENU_IO_LOAD_UPDATING_BLOCK_MAPPINGS"])
world.renumberTilesAfterLoad() world.renumberTilesAfterLoad()