Basic NPC loading (half done)

This commit is contained in:
2025-06-13 17:07:28 -05:00
parent 49989e0272
commit 9288c01887
25 changed files with 644 additions and 26 deletions

View File

@@ -6,19 +6,13 @@
find_package(Python3 COMPONENTS Interpreter REQUIRED)
# Custom command to generate all header files
add_custom_command(
OUTPUT ${DUSK_GENERATED_HEADERS_DIR}/world/world.h
add_custom_target(DUSK_CHUNKS
# OUTPUT ${DUSK_GENERATED_HEADERS_DIR}/world/world.h
COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/chunks.py --output ${DUSK_GENERATED_HEADERS_DIR}
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/chunks.py
COMMENT "Generating chunk header files"
VERBATIM
)
# Custom target to generate headers before build
add_custom_target(DUSK_CHUNKS
DEPENDS
${DUSK_GENERATED_HEADERS_DIR}/world/world.h
)
# Ensure headers are generated before compiling main
add_dependencies(${DUSK_TARGET_NAME} DUSK_CHUNKS)

View File

@@ -7,6 +7,11 @@ import json
# Constants that are defined in the C code
CHUNK_WIDTH = 8
CHUNK_HEIGHT = 8
CHUNK_ENTITY_COUNT_MAX = 8
ENTITY_TYPE = {
"npc": "ENTITY_TYPE_NPC",
}
# Check if the script is run with the correct arguments
parser = argparse.ArgumentParser(description="Generate chunk header files")
@@ -38,8 +43,7 @@ now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
worldWidth = 0
worldHeight = 0
chunksDone = []
entityIdNext = 1
# For each chunk file
for chunkFile in os.listdir(chunks_dir):
@@ -109,6 +113,8 @@ for chunkFile in os.listdir(chunks_dir):
print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' row length.")
exit(1)
# Read in entities
entities = data['chunk'].get('entities', [])
# Now we generate a chunk header file
chunk_header_path = os.path.join(chunksDir, f"chunk_{x}_{y}.h")
@@ -116,22 +122,51 @@ for chunkFile in os.listdir(chunks_dir):
f.write(f"// Generated chunk header for chunk at position ({x}, {y})\n")
f.write(f"// Generated at {now}\n")
f.write("#pragma once\n")
f.write("#include \"dusk.h\"\n\n")
f.write("#include \"world/chunkdata.h\"\n\n")
f.write(f"static const uint8_t CHUNK_{x}_{y}_LAYER_BASE[] = {{\n")
f.write(f"static const chunkdata_t CHUNK_{x}_{y} = {{\n")
f.write(f" .layerBase = {{\n")
for row in baseLayer:
f.write(" ")
f.write(" ")
for column in row:
f.write(f"0x{column:02x}, ")
f.write("\n")
f.write("};\n\n")
f.write(" },\n\n")
f.write(f"static const uint8_t CHUNK_{x}_{y}_LAYER_OVERLAY[] = {{\n")
f.write(f" .layerOverlay = {{\n")
for row in overlayLayer:
f.write(" ")
f.write(" ")
for column in row:
f.write(f"0x{column:02x}, ")
f.write("\n")
f.write(" },\n\n")
f.write(f" .entities = {{\n")
for entity in entities:
if 'id' in entity:
entityId = entity['id']
else:
entityId = entityIdNext
entityIdNext += 1
if 'type' not in entity:
print(f"Error: Entity in chunk ({x}, {y}) does not have 'type' key.")
exit(1)
if 'x' not in entity or 'y' not in entity:
print(f"Error: Entity in chunk ({x}, {y}) does not have 'x' or 'y' key.")
exit(1)
f.write(" {\n")
f.write(f" .id = {entityId},\n")
f.write(f" .type = {ENTITY_TYPE.get(entity['type'], 'ENTITY_TYPE_UNKNOWN')},\n"),
f.write(f" .x = {entity['x']},\n")
f.write(f" .y = {entity['y']},\n")
f.write(f" }},\n")
pass
f.write(" },\n\n")
f.write("};\n\n")
pass
@@ -152,12 +187,12 @@ with open(header_path, 'w') as f:
f.write("\n")
f.write(f"#define WORLD_WIDTH {worldWidth}\n")
f.write(f"#define WORLD_HEIGHT {worldHeight}\n\n")
f.write(f"static const uint8_t* WORLD_CHUNKS_BASE[] = {{\n")
f.write(f"static const chunkdata_t* WORLD_CHUNKS[] = {{\n")
for i in range(worldHeight):
f.write(" ")
for j in range(worldWidth):
if (j, i) in chunksDone:
f.write(f"CHUNK_{j}_{i}_LAYER_BASE, ")
f.write(f"&CHUNK_{j}_{i}, ")
else:
f.write("NULL, ")
f.write("\n")

View File

@@ -1,6 +1,6 @@
{
"chunk": {
"position": [ 1, 1 ],
"position": [ 0, 0 ],
"baseLayer": [
[ 1, 1, 1, 1, 1, 1, 1, 1 ],
[ 1, 1, 1, 1, 1, 1, 1, 1 ],
@@ -22,6 +22,11 @@
[ 1, 1, 1, 1, 1, 1, 1, 1 ]
],
"entities": [
{
"type": "npc",
"x": "3",
"y": "3"
}
],
"triggers": [
]