basically chunk loading

This commit is contained in:
2025-11-11 19:24:56 -06:00
parent 9953d7d388
commit 5c8b314689
11 changed files with 244 additions and 81 deletions

View File

@@ -3,4 +3,4 @@
# This software is released under the MIT License. # This software is released under the MIT License.
# https://opensource.org/licenses/MIT # https://opensource.org/licenses/MIT
add_asset(MAP map.json) add_asset(MAP map)

View File

@@ -10,6 +10,7 @@
#include "type/assetalphaimage.h" #include "type/assetalphaimage.h"
#include "type/assetlanguage.h" #include "type/assetlanguage.h"
#include "type/assetmap.h" #include "type/assetmap.h"
#include "type/assetchunk.h"
#include <zip.h> #include <zip.h>
typedef enum { typedef enum {
@@ -19,6 +20,7 @@ typedef enum {
ASSET_TYPE_ALPHA_IMAGE, ASSET_TYPE_ALPHA_IMAGE,
ASSET_TYPE_LANGUAGE, ASSET_TYPE_LANGUAGE,
ASSET_TYPE_MAP, ASSET_TYPE_MAP,
ASSET_TYPE_CHUNK,
ASSET_TYPE_COUNT, ASSET_TYPE_COUNT,
} assettype_t; } assettype_t;
@@ -73,5 +75,11 @@ static const assettypedef_t ASSET_TYPE_DEFINITIONS[ASSET_TYPE_COUNT] = {
.loadStrategy = ASSET_LOAD_STRAT_ENTIRE, .loadStrategy = ASSET_LOAD_STRAT_ENTIRE,
.dataSize = sizeof(assetmap_t), .dataSize = sizeof(assetmap_t),
.entire = assetMapLoad .entire = assetMapLoad
},
[ASSET_TYPE_CHUNK] = {
.header = "DCF",
.loadStrategy = ASSET_LOAD_STRAT_CUSTOM,
.custom = assetChunkLoad
} }
}; };

View File

@@ -10,4 +10,5 @@ target_sources(${DUSK_TARGET_NAME}
assetpaletteimage.c assetpaletteimage.c
assetlanguage.c assetlanguage.c
assetmap.c assetmap.c
assetchunk.c
) )

122
src/asset/type/assetchunk.c Normal file
View File

@@ -0,0 +1,122 @@
/**
* Copyright (c) 2025 Dominic Masters
*
* This software is released under the MIT License.
* https://opensource.org/licenses/MIT
*/
#include "asset/asset.h"
#include "assert/assert.h"
#pragma pack(push, 1)
typedef struct {
uint32_t tileCount;
uint8_t modelCount;
} assetchunkheader_t;
#pragma pack(pop)
#pragma pack(push, 1)
typedef struct {
tile_t tile;
} assetchunktiledata_t;
#pragma pack(pop)
#pragma pack(push, 1)
typedef struct {
uint32_t vertexCount;
} assetchunkmodelheader_t;
#pragma pack(pop)
errorret_t assetChunkLoad(assetcustom_t custom) {
assertNotNull(custom.output, "Output pointer cannot be NULL");
assertNotNull(custom.zipFile, "Zip file pointer cannot be NULL");
chunk_t *chunk = (chunk_t *)custom.output;
// Read header
assetchunkheader_t header;
size_t bytesRead = zip_fread(
custom.zipFile, &header, sizeof(assetchunkheader_t)
);
if(bytesRead != sizeof(assetchunkheader_t)) {
zip_fclose(custom.zipFile);
errorThrow("Failed to read chunk asset header.");
}
if(header.tileCount != CHUNK_TILE_COUNT) {
zip_fclose(custom.zipFile);
errorThrow(
"Chunk asset has invalid tile count: %d (expected %d).",
header.tileCount,
CHUNK_TILE_COUNT
);
}
if(header.modelCount > CHUNK_MESH_COUNT_MAX) {
zip_fclose(custom.zipFile);
errorThrow(
"Chunk asset has too many models: %d (max %d).",
header.modelCount,
CHUNK_MESH_COUNT_MAX
);
}
chunk->meshCount = header.modelCount;
// Read tile data
bytesRead = zip_fread(
custom.zipFile,
chunk->tiles,
sizeof(assetchunktiledata_t) * header.tileCount
);
if(bytesRead != sizeof(assetchunktiledata_t) * header.tileCount) {
zip_fclose(custom.zipFile);
errorThrow("Failed to read chunk tile data.");
}
// For each model...
uint32_t vertexIndex = 0;
for(uint8_t i = 0; i < header.modelCount; i++) {
assetchunkmodelheader_t modelHeader;
bytesRead = zip_fread(
custom.zipFile, &modelHeader, sizeof(assetchunkmodelheader_t)
);
if(bytesRead != sizeof(assetchunkmodelheader_t)) {
zip_fclose(custom.zipFile);
errorThrow("Failed to read chunk model header.");
}
if(
vertexIndex + modelHeader.vertexCount >
CHUNK_VERTEX_COUNT_MAX
) {
zip_fclose(custom.zipFile);
errorThrow("Chunk model vertex count exceeds maximum.");
}
// Read vertex data.
bytesRead = zip_fread(
custom.zipFile,
&chunk->vertices[vertexIndex],
sizeof(meshvertex_t) * modelHeader.vertexCount
);
if(bytesRead != sizeof(meshvertex_t) * modelHeader.vertexCount) {
zip_fclose(custom.zipFile);
errorThrow("Failed to read chunk model vertex data.");
}
// Init the mesh
mesh_t *mesh = &chunk->meshes[i];
meshInit(
mesh,
MESH_PRIMITIVE_TRIANGLES,
modelHeader.vertexCount,
&chunk->vertices[vertexIndex]
);
vertexIndex += modelHeader.vertexCount;
}
errorOk();
}

View File

@@ -0,0 +1,20 @@
/**
* Copyright (c) 2025 Dominic Masters
*
* This software is released under the MIT License.
* https://opensource.org/licenses/MIT
*/
#pragma once
#include "error/error.h"
#include "rpg/world/chunk.h"
typedef struct assetcustom_s assetcustom_t;
/**
* Handles loading of chunk data from a chunk asset file.
*
* @param custom The custom asset loading parameters.
* @return An error code.
*/
errorret_t assetChunkLoad(assetcustom_t custom);

View File

@@ -8,10 +8,18 @@
#pragma once #pragma once
#include "rpg/world/tile.h" #include "rpg/world/tile.h"
#include "worldpos.h" #include "worldpos.h"
#include "display/mesh/mesh.h"
#define CHUNK_VERTEX_COUNT_MAX (6 * CHUNK_TILE_COUNT * 3)
#define CHUNK_MESH_COUNT_MAX 16
typedef struct chunk_s { typedef struct chunk_s {
chunkpos_t position; chunkpos_t position;
tile_t tiles[CHUNK_TILE_COUNT]; tile_t tiles[CHUNK_TILE_COUNT];
uint8_t meshCount;
meshvertex_t vertices[CHUNK_VERTEX_COUNT_MAX];
mesh_t meshes[CHUNK_MESH_COUNT_MAX];
} chunk_t; } chunk_t;
/** /**

View File

@@ -8,7 +8,7 @@
#include "map.h" #include "map.h"
#include "util/memory.h" #include "util/memory.h"
#include "assert/assert.h" #include "assert/assert.h"
#include "scene/scene/scenemap.h" #include "asset/asset.h"
map_t MAP; map_t MAP;
@@ -122,20 +122,7 @@ void mapChunkUnload(chunk_t* chunk) {
} }
void mapChunkLoad(chunk_t* chunk) { void mapChunkLoad(chunk_t* chunk) {
// printf("Loading chunk at (%d, %d, %d)\n", errorCatch(errorPrint(assetLoad("map/map/0_0.dcf", chunk)));
// chunk->position.x,
// chunk->position.y,
// chunk->position.z
// );
memoryZero(chunk->tiles, sizeof(tile_t) * CHUNK_TILE_COUNT);
if(chunk->position.x == 0 && chunk->position.y == 0 && chunk->position.z == 0) {
if(TEST_MAP_READY) {
}
printf("LOAD CHUNK\n");
}
} }
chunkindex_t mapGetChunkIndexAt(const chunkpos_t position) { chunkindex_t mapGetChunkIndexAt(const chunkpos_t position) {

View File

@@ -19,8 +19,6 @@
#define TILE_WIDTH 16.0f #define TILE_WIDTH 16.0f
#define TILE_HEIGHT 16.0f #define TILE_HEIGHT 16.0f
#define TILE_DEPTH 11.36f #define TILE_DEPTH 11.36f
assetmap_t TEST_MAP;
bool_t TEST_MAP_READY = false;
errorret_t sceneMapInit(scenedata_t *data) { errorret_t sceneMapInit(scenedata_t *data) {
// Init the camera. // Init the camera.
@@ -39,9 +37,6 @@ errorret_t sceneMapInit(scenedata_t *data) {
); );
data->sceneMap.camera.lookatPixelPerfect.pixelsPerUnit = 1.0f; data->sceneMap.camera.lookatPixelPerfect.pixelsPerUnit = 1.0f;
errorChain(assetLoad("map/map.dmf", &TEST_MAP));
TEST_MAP_READY = true;
errorOk(); errorOk();
} }
@@ -118,10 +113,7 @@ void sceneMapRender(scenedata_t *data) {
cameraPushMatrix(&data->sceneMap.camera); cameraPushMatrix(&data->sceneMap.camera);
// Render map probably. // Render map probably.
// sceneMapRenderMap(); sceneMapRenderMap();
textureBind(NULL);
meshDraw(&TEST_MAP.models[0].mesh, -1, -1);
// Render ents // Render ents
entity_t *ent = ENTITIES; entity_t *ent = ENTITIES;
@@ -175,33 +167,38 @@ void sceneMapRenderMap() {
for(uint32_t i = 0; i < MAP_CHUNK_COUNT; i++) { for(uint32_t i = 0; i < MAP_CHUNK_COUNT; i++) {
chunk_t *chunk = MAP.chunkOrder[i]; chunk_t *chunk = MAP.chunkOrder[i];
vec3 min, max; for(uint8_t j = 0; j < chunk->meshCount; j++) {
min[0] = chunk->position.x * CHUNK_WIDTH * TILE_WIDTH; mesh_t *mesh = &chunk->meshes[j];
min[1] = chunk->position.y * CHUNK_HEIGHT * TILE_HEIGHT; textureBind(NULL);
min[2] = chunk->position.z * CHUNK_DEPTH * TILE_DEPTH; meshDraw(mesh, -1, -1);
max[0] = min[0] + (CHUNK_WIDTH * TILE_WIDTH);
max[1] = min[1] + (CHUNK_HEIGHT * TILE_HEIGHT);
max[2] = min[2];
color_t color = COLOR_WHITE;
if(chunk->position.x % 2 == 0) {
color = (chunk->position.y % 2 == 0) ? COLOR_BLACK : COLOR_WHITE;
} else {
color = (chunk->position.y % 2 == 0) ? COLOR_WHITE : COLOR_BLACK;
} }
spriteBatchPush3D( // vec3 min, max;
NULL, // min[0] = chunk->position.x * CHUNK_WIDTH * TILE_WIDTH;
min, // min[1] = chunk->position.y * CHUNK_HEIGHT * TILE_HEIGHT;
max, // min[2] = chunk->position.z * CHUNK_DEPTH * TILE_DEPTH;
color,
(vec2){ 0.0f, 0.0f }, // max[0] = min[0] + (CHUNK_WIDTH * TILE_WIDTH);
(vec2){ 1.0f, 1.0f } // max[1] = min[1] + (CHUNK_HEIGHT * TILE_HEIGHT);
); // max[2] = min[2];
// color_t color = COLOR_WHITE;
// if(chunk->position.x % 2 == 0) {
// color = (chunk->position.y % 2 == 0) ? COLOR_BLACK : COLOR_WHITE;
// } else {
// color = (chunk->position.y % 2 == 0) ? COLOR_WHITE : COLOR_BLACK;
// }
// spriteBatchPush3D(
// NULL,
// min,
// max,
// color,
// (vec2){ 0.0f, 0.0f },
// (vec2){ 1.0f, 1.0f }
// );
} }
} }
void sceneMapDispose(scenedata_t *data) { void sceneMapDispose(scenedata_t *data) {
meshDispose(&TEST_MAP.models[0].mesh);
} }

View File

@@ -15,9 +15,6 @@ typedef struct {
camera_t camera; camera_t camera;
} scenemap_t; } scenemap_t;
extern assetmap_t TEST_MAP;
extern bool_t TEST_MAP_READY;
errorret_t sceneMapInit(scenedata_t *data); errorret_t sceneMapInit(scenedata_t *data);
void sceneMapUpdate(scenedata_t *data); void sceneMapUpdate(scenedata_t *data);
void sceneMapRender(scenedata_t *data); void sceneMapRender(scenedata_t *data);

View File

@@ -14,7 +14,7 @@ TILE_WIDTH = 16.0
TILE_HEIGHT = 16.0 TILE_HEIGHT = 16.0
TILE_DEPTH = 11.36 TILE_DEPTH = 11.36
def createQuadForTile(model, tileIndex, x=0, y=0, z=0): def createQuadForTile(tileIndex, x=0, y=0, z=0):
vertices = [] vertices = []
indices = [] indices = []
@@ -48,35 +48,33 @@ def createQuadForTile(model, tileIndex, x=0, y=0, z=0):
'indices': indices 'indices': indices
} }
def processMap(asset): def processChunk(path):
cache = assetGetCache(asset['path']) cache = assetGetCache(path)
if cache is not None: if cache:
return cache return cache
# Read input file as JSON # Read input file as JSON
with open(asset['path'], 'r') as f: with open(path, 'r') as f:
inData = json.load(f) inData = json.load(f)
# Create output object 'map' with default tile indexes and models array chunk = {
map = {
'tiles': [0] * CHUNK_TILE_COUNT, 'tiles': [0] * CHUNK_TILE_COUNT,
'models': [] 'models': []
} }
# Create a simple 3D model object baseModel = {
model = {
'vertices': [], 'vertices': [],
'indices': [], 'indices': [],
'vertexCount': 0, 'vertexCount': 0,
'indexCount': 0 'indexCount': 0
} }
# Append the model to map.models # Append the model to chunk.models
map['models'].append(model) chunk['models'].append(baseModel)
for i, tile in enumerate(inData['tiles']): for i, tile in enumerate(inData['tiles']):
# Set to map # Set to chunk
map['tiles'][i] = tile chunk['tiles'][i] = tile
# Calculate x, y, z from i # Calculate x, y, z from i
x = i % CHUNK_WIDTH x = i % CHUNK_WIDTH
@@ -84,27 +82,27 @@ def processMap(asset):
z = i // (CHUNK_WIDTH * CHUNK_HEIGHT) z = i // (CHUNK_WIDTH * CHUNK_HEIGHT)
# Add tile 3D model # Add tile 3D model
result = createQuadForTile(model, tile, x, y, z) result = createQuadForTile(tile, x, y, z)
if len(result['vertices']) > 0: if len(result['vertices']) > 0:
base = len(model['vertices']) base = len(baseModel['vertices'])
quad_indices = [base + idx for idx in result['indices']] quad_indices = [base + idx for idx in result['indices']]
model['vertices'].extend(result['vertices']) baseModel['vertices'].extend(result['vertices'])
model['indices'].extend(quad_indices) baseModel['indices'].extend(quad_indices)
model['vertexCount'] = len(model['vertices']) baseModel['vertexCount'] = len(baseModel['vertices'])
model['indexCount'] = len(model['indices']) baseModel['indexCount'] = len(baseModel['indices'])
# Generate binary buffer for efficient output # Generate binary buffer for efficient output
buffer = bytearray() buffer = bytearray()
buffer.extend(b'DMF')# Header buffer.extend(b'DCF')# Header
buffer.extend(len(map['tiles']).to_bytes(4, 'little')) # Number of tiles buffer.extend(len(chunk['tiles']).to_bytes(4, 'little')) # Number of tiles
buffer.extend(len(map['models']).to_bytes(1, 'little')) # Number of models buffer.extend(len(chunk['models']).to_bytes(1, 'little')) # Number of models
# Buffer tile data as array of uint8_t # Buffer tile data as array of uint8_t
for tileIndex in map['tiles']: for tileIndex in chunk['tiles']:
buffer.append(tileIndex.to_bytes(1, 'little')[0]) buffer.append(tileIndex.to_bytes(1, 'little')[0])
# For each model # For each model
for model in map['models']: for model in chunk['models']:
# Write vertex count and index count # Write vertex count and index count
buffer.extend(model['vertexCount'].to_bytes(4, 'little')) buffer.extend(model['vertexCount'].to_bytes(4, 'little'))
# buffer.extend(model['indexCount'].to_bytes(4, 'little')) # buffer.extend(model['indexCount'].to_bytes(4, 'little'))
@@ -129,17 +127,42 @@ def processMap(asset):
buffer.extend(bytearray(struct.pack('<f', z))) buffer.extend(bytearray(struct.pack('<f', z)))
# Write out map file # Write out map file
relative = getAssetRelativePath(asset['path']) relative = getAssetRelativePath(path)
fileNameWithoutExt = os.path.splitext(os.path.basename(asset['path']))[0] fileNameWithoutExt = os.path.splitext(os.path.basename(path))[0]
outputFileRelative = os.path.join(os.path.dirname(relative), f"{fileNameWithoutExt}.dmf") outputFileRelative = os.path.join(os.path.dirname(relative), f"{fileNameWithoutExt}.dcf")
outputFilePath = os.path.join(args.output_assets, outputFileRelative) outputFilePath = os.path.join(args.output_assets, outputFileRelative)
os.makedirs(os.path.dirname(outputFilePath), exist_ok=True) os.makedirs(os.path.dirname(outputFilePath), exist_ok=True)
with open(outputFilePath, "wb") as f: with open(outputFilePath, "wb") as f:
f.write(buffer) f.write(buffer)
outMap = { outChunk = {
'files': [ outputFilePath ], 'files': [ outputFilePath ],
'map': map 'chunk': chunk
}
return assetCache(path, outChunk)
def processMap(asset):
cache = assetGetCache(asset['path'])
if cache is not None:
return cache
# Path provided should be a directory.
if not os.path.isdir(asset['path']):
print(f"Error: Asset path {asset['path']} is not a directory.")
sys.exit(1)
# List files
chunkFiles = []
for fileName in os.listdir(asset['path']):
if not fileName.endswith('.json'):
continue
result = processChunk(os.path.join(asset['path'], fileName))
chunkFiles.extend(result['files'])
outMap = {
'files': chunkFiles
} }
return assetCache(asset['path'], outMap) return assetCache(asset['path'], outMap)