First pass chunk loading.
24  data/CMakeLists.txt  Normal file
@@ -0,0 +1,24 @@
# Copyright (c) 2025 Dominic Masters
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT

find_package(Python3 COMPONENTS Interpreter REQUIRED)

# Custom command to generate all header files
add_custom_command(
  OUTPUT ${DUSK_GENERATED_HEADERS_DIR}/world/world.h
  COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/chunks.py --output ${DUSK_GENERATED_HEADERS_DIR}
  DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/chunks.py
  COMMENT "Generating chunk header files"
  VERBATIM
)

# Custom target to generate headers before build
add_custom_target(DUSK_CHUNKS
  DEPENDS
    ${DUSK_GENERATED_HEADERS_DIR}/world/world.h
)

# Ensure headers are generated before compiling main
add_dependencies(${DUSK_TARGET_NAME} DUSK_CHUNKS)
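For context, a minimal consumer-side sketch, assuming the parent build puts ${DUSK_GENERATED_HEADERS_DIR} on the DUSK target's include path (that wiring is outside this diff). The helper below is purely illustrative and not part of this commit; it only shows that any source file in the target can include the generated world header once DUSK_CHUNKS has run.

/* Illustrative only (hypothetical helper name, not from this commit). */
#include "world/world.h"  /* generated by data/chunks.py via the DUSK_CHUNKS target */

/* Total number of grid cells in the generated world table, using the
 * WORLD_WIDTH / WORLD_HEIGHT macros emitted into world.h. */
int worldChunkCellCount(void) {
    return WORLD_WIDTH * WORLD_HEIGHT;
}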
166  data/chunks.py  Normal file
@@ -0,0 +1,166 @@
#!/usr/bin/python3
import os
import argparse
from datetime import datetime
import json

# Constants that are defined in the C code
CHUNK_WIDTH = 8
CHUNK_HEIGHT = 8

# Check if the script is run with the correct arguments
parser = argparse.ArgumentParser(description="Generate chunk header files")
parser.add_argument('--output', required=True, help='Dir to output headers')
args = parser.parse_args()

# Ensure the output dir exists
outputDir = args.output
os.makedirs(outputDir, exist_ok=True)

# Create world directory if it does not exist
worldDir = os.path.join(outputDir, "world")
os.makedirs(worldDir, exist_ok=True)

# Create chunks directory if it does not exist
chunksDir = os.path.join(worldDir, "chunk")
os.makedirs(chunksDir, exist_ok=True)

# Scan ./chunks folder
chunks_dir = os.path.join(os.path.dirname(__file__), "chunks")
if not os.path.exists(chunks_dir):
    print(f"Error: Chunks directory '{chunks_dir}' does not exist.")
    exit(1)

# Some vars used during printing
now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

# Data sent to the world header file
worldWidth = 0
worldHeight = 0
chunksDone = []

# For each chunk file
for chunkFile in os.listdir(chunks_dir):
    with open(os.path.join(chunks_dir, chunkFile)) as jsonFile:
        data = json.load(jsonFile)
    print(f"Processing chunk: {chunkFile}")

    if 'chunk' not in data:
        print(f"Error: Chunk file '{chunkFile}' does not contain 'chunk' key.")
        exit(1)

    if 'position' not in data['chunk']:
        print(f"Error: Chunk file '{chunkFile}' does not contain 'position' key.")
        exit(1)

    # Position must be an array of two integers
    position = data['chunk']['position']
    if not isinstance(position, list) or len(position) != 2:
        print(f"Error: Chunk file '{chunkFile}' has invalid 'position' format.")
        exit(1)
    if not all(isinstance(x, int) for x in position):
        print(f"Error: Chunk file '{chunkFile}' has invalid 'position' values.")
        exit(1)

    x, y = position

    # Make sure that the filename "chunk_{x}_{y}.json" matches the position
    expectedFilename = f"chunk_{x}_{y}.json"
    if chunkFile != expectedFilename:
        print(f"Error: Chunk file '{chunkFile}' should be named '{expectedFilename}'.")
        exit(1)

    # Chunk must not have been processed already
    if (x, y) in chunksDone:
        print(f"Error: Chunk at position ({x}, {y}) was already processed.")
        exit(1)
    chunksDone.append((x, y))

    worldWidth = max(worldWidth, x + 1)
    worldHeight = max(worldHeight, y + 1)

    # Read in base layer data
    if 'baseLayer' not in data['chunk']:
        print(f"Error: Chunk file '{chunkFile}' does not contain 'baseLayer' key.")
        exit(1)
    baseLayer = data['chunk']['baseLayer']

    # Base layer must be exactly CHUNK_HEIGHT rows of CHUNK_WIDTH elements
    if len(baseLayer) != CHUNK_HEIGHT:
        print(f"Error: Chunk file '{chunkFile}' has invalid 'baseLayer' length.")
        exit(1)
    for row in baseLayer:
        if len(row) != CHUNK_WIDTH:
            print(f"Error: Chunk file '{chunkFile}' has invalid 'baseLayer' row length.")
            exit(1)

    # Read in overlay layer data
    if 'overlayLayer' not in data['chunk']:
        print(f"Error: Chunk file '{chunkFile}' does not contain 'overlayLayer' key.")
        exit(1)
    overlayLayer = data['chunk']['overlayLayer']

    # Overlay layer must be exactly CHUNK_HEIGHT rows of CHUNK_WIDTH elements
    if len(overlayLayer) != CHUNK_HEIGHT:
        print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' length.")
        exit(1)
    for row in overlayLayer:
        if len(row) != CHUNK_WIDTH:
            print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' row length.")
            exit(1)

    # Now we generate a chunk header file
    chunk_header_path = os.path.join(chunksDir, f"chunk_{x}_{y}.h")
    with open(chunk_header_path, 'w') as f:
        f.write(f"// Generated chunk header for chunk at position ({x}, {y})\n")
        f.write(f"// Generated at {now}\n")
        f.write("#pragma once\n")
        f.write("#include \"dusk.h\"\n\n")

        f.write(f"static const uint8_t CHUNK_{x}_{y}_LAYER_BASE[] = {{\n")
        for row in baseLayer:
            f.write(" ")
            for column in row:
                f.write(f"0x{column:02x}, ")
            f.write("\n")
        f.write("};\n\n")

        f.write(f"static const uint8_t CHUNK_{x}_{y}_LAYER_OVERLAY[] = {{\n")
        for row in overlayLayer:
            f.write(" ")
            for column in row:
                f.write(f"0x{column:02x}, ")
            f.write("\n")
        f.write("};\n\n")

# Output the world header file.
header_path = os.path.join(worldDir, "world.h")
with open(header_path, 'w') as f:
    f.write(f"// Generated chunks file. Generated at {now}\n\n")
    f.write("#pragma once\n")
    f.write("#include \"dusk.h\"\n")

    # Now, for each chunk, include its header file
    for (x, y) in chunksDone:
        chunk_header = f"world/chunk/chunk_{x}_{y}.h"
        f.write(f"#include \"{chunk_header}\"\n")

    f.write("\n")
    f.write(f"#define WORLD_WIDTH {worldWidth}\n")
    f.write(f"#define WORLD_HEIGHT {worldHeight}\n\n")

    # Row-major table of base-layer pointers; NULL where no chunk exists
    f.write("static const uint8_t* WORLD_CHUNKS_BASE[] = {\n")
    for i in range(worldHeight):
        f.write(" ")
        for j in range(worldWidth):
            if (j, i) in chunksDone:
                f.write(f"CHUNK_{j}_{i}_LAYER_BASE, ")
            else:
                f.write("NULL, ")
        f.write("\n")
    f.write("};\n\n")

print(f"world.h generated at: {header_path}")
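For reviewers, this is roughly the shape of what the script emits for the two 8x8 chunks added below, plus a hedged sketch of how the flat, row-major WORLD_CHUNKS_BASE table could be indexed. The lookup helper and its name are illustrative assumptions, not part of this commit.

/* Abridged shape of the generated output for the chunks in this commit
 * (chunk_1_1 and chunk_2_1, giving WORLD_WIDTH 3 and WORLD_HEIGHT 2):
 *
 *   // world/chunk/chunk_1_1.h
 *   static const uint8_t CHUNK_1_1_LAYER_BASE[]    = { 0x01, 0x01, ... };  // 64 bytes
 *   static const uint8_t CHUNK_1_1_LAYER_OVERLAY[] = { 0x01, 0x01, ... };  // 64 bytes
 *
 *   // world/world.h
 *   #define WORLD_WIDTH 3
 *   #define WORLD_HEIGHT 2
 *   static const uint8_t* WORLD_CHUNKS_BASE[] = {
 *     NULL, NULL, NULL,
 *     NULL, CHUNK_1_1_LAYER_BASE, CHUNK_2_1_LAYER_BASE,
 *   };
 *
 * Hypothetical lookup helper (not part of this diff): the table is written
 * row by row, so chunk (x, y) sits at index y * WORLD_WIDTH + x, with NULL
 * for grid cells that have no chunk file. Note that only the base layer gets
 * a world-level table in this first pass; the overlay arrays are emitted but
 * not yet indexed. */
#include <stddef.h>
#include <stdint.h>
#include "world/world.h"

static const uint8_t* worldChunkBaseAt(int x, int y) {
    if (x < 0 || y < 0 || x >= WORLD_WIDTH || y >= WORLD_HEIGHT) return NULL;
    return WORLD_CHUNKS_BASE[y * WORLD_WIDTH + x];
}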
@@ -1,45 +0,0 @@
{
  "chunk": {
    "position": [ 0, 0 ],
    "baseLayer": [
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 ]
    ],
    "overlayLayer": [
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ],
      [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ]
    ],
    "entities": [
    ],
    "triggers": [
    ]
  }
}
29  data/chunks/chunk_1_1.json  Normal file
@@ -0,0 +1,29 @@
{
  "chunk": {
    "position": [ 1, 1 ],
    "baseLayer": [
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ]
    ],
    "overlayLayer": [
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ]
    ],
    "entities": [
    ],
    "triggers": [
    ]
  }
}
29  data/chunks/chunk_2_1.json  Normal file
@@ -0,0 +1,29 @@
{
  "chunk": {
    "position": [ 2, 1 ],
    "baseLayer": [
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ]
    ],
    "overlayLayer": [
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ],
      [ 1, 1, 1, 1, 1, 1, 1, 1 ]
    ],
    "entities": [
    ],
    "triggers": [
    ]
  }
}