Dusk progress

This commit is contained in:
2025-06-13 19:53:55 -05:00
parent 8289cac039
commit f6f6194b4d
10 changed files with 43 additions and 130 deletions

View File

@ -4,5 +4,4 @@
# https://opensource.org/licenses/MIT
# Tools
# Each subdirectory defines one standalone build-time tool target.
add_subdirectory(assetstool)
add_subdirectory(copytool)
add_subdirectory(mapcompile)

View File

@ -1,19 +0,0 @@
# Copyright (c) 2023 Dominic Masters
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
# Python is required to run the asset-bundling script below.
find_package(Python3 REQUIRED COMPONENTS Interpreter)
# duskassets: bundles everything under DUSK_ASSETS_BUILD_DIR into a single
# tar archive (dusk.tar) consumed by the engine at runtime.
add_custom_target(duskassets
COMMAND
${Python3_EXECUTABLE}
${DUSK_TOOLS_DIR}/assetstool/assetstool.py
--input=${DUSK_ASSETS_BUILD_DIR}
--output=${DUSK_BUILD_DIR}/dusk.tar
COMMENT "Bundling assets..."
USES_TERMINAL
DEPENDS ${DUSK_ASSETS}
)
# Make the main target depend on the bundled assets so they are always built.
add_dependencies(${DUSK_TARGET_NAME} duskassets)

View File

@ -1,67 +0,0 @@
# Copyright (c) 2023 Dominic Masters
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
"""Bundle every file under --input into a tar archive at --output.

Invoked from CMake by the duskassets custom target. The archive is plain
(uncompressed) tar; bz2 compression was tried and deliberately disabled.
"""
import os
import tarfile
import argparse

# Args
parser = argparse.ArgumentParser(
    description='Bundles all assets into the internal archive format.'
)
parser.add_argument('-i', '--input')
parser.add_argument('-o', '--output')
args = parser.parse_args()

# Ensure the directory for the output path exists.
if not os.path.exists(os.path.dirname(args.output)):
    os.makedirs(os.path.dirname(args.output))

# Abandoned compression experiment, kept for reference:
# archive = tarfile.open(args.output, 'w:bz2')  # BZ2 Compression

# Incremental mode (re-opening an existing archive for append) is
# deliberately disabled by the `and False` below — tar cannot overwrite
# members in place, so appending would duplicate entries.
filesInArchive = []
if os.path.exists(args.output) and False:
    # Yes, open it and collect the member names already present.
    archive = tarfile.open(args.output, 'a:')
    for member in archive.getmembers():
        filesInArchive.append(member.name)
    archive.close()
    # Re-open the archive for appending.
    archive = tarfile.open(args.output, 'a:')
else:
    # No, create a fresh archive.
    archive = tarfile.open(args.output, 'w:')

# Add all files in the input directory, keyed by their path relative to
# the input root.
for foldername, subfolders, filenames in os.walk(args.input):
    for filename in filenames:
        absolute_path = os.path.join(foldername, filename)
        relative_path = os.path.relpath(absolute_path, args.input)
        # Is the file already in the archive? (Only reachable in the
        # disabled incremental mode above.)
        if relative_path in filesInArchive:
            if relative_path.endswith('.texture'):
                print(f"Skipping {relative_path}...")
                continue
            else:
                # Overwriting a member inside a tar is not supported.
                print(f"Overwriting {relative_path}...")
                raise SystemExit(1)
        else:
            print(f"Archiving asset {relative_path}...")
        archive.add(absolute_path, arcname=relative_path)

# Close the archive
archive.close()

View File

@ -1,16 +0,0 @@
# Copyright (c) 2023 Dominic Masters
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
# copytool(<file>): defines a custom target that copies <file> from the
# source assets dir into the build assets dir, and hooks it into the
# duskassets bundling target.
function(copytool file)
set(TARGET_NAME "copytool_${file}")
# Replace slashes with underscores so the path makes a legal target name
string(REPLACE "/" "_" TARGET_NAME ${TARGET_NAME})
add_custom_target(${TARGET_NAME}
COMMAND ${CMAKE_COMMAND} -E copy ${DUSK_ASSETS_DIR}/${file} ${DUSK_ASSETS_BUILD_DIR}/${file}
)
# Ensure the copy runs before the assets are bundled.
add_dependencies(duskassets ${TARGET_NAME})
endfunction()

View File

@ -0,0 +1,18 @@
# Copyright (c) 2025 Dominic Masters
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
# Python is required to run mapcompile.py.
find_package(Python3 COMPONENTS Interpreter REQUIRED)
# DUSK_CHUNKS: runs mapcompile.py to generate the chunk/world header files
# into DUSK_GENERATED_HEADERS_DIR. (Custom target, so it runs every build.)
add_custom_target(DUSK_CHUNKS
# OUTPUT ${DUSK_GENERATED_HEADERS_DIR}/world/world.h
COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/mapcompile.py --output ${DUSK_GENERATED_HEADERS_DIR}
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/mapcompile.py
COMMENT "Generating chunk header files"
VERBATIM
)
# Ensure headers are generated before compiling main
add_dependencies(${DUSK_TARGET_NAME} DUSK_CHUNKS)

View File

@ -0,0 +1,196 @@
"""Generate C chunk/world header files from JSON chunk data.

Reads every chunk_{x}_{y}.json in CHUNK_DATA_DIR, validates its layout,
and writes one header per chunk plus a world/world.h that indexes the
chunks into a WORLD_CHUNKS lookup table. Invoked from CMake by the
DUSK_CHUNKS custom target; --output is the generated-headers directory.
"""
import sys, os
import argparse
from datetime import datetime
import json

# Dynamically add ../shared to sys.path so worlddefs can be imported.
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'shared'))
sys.path.append(shared_path)
from worlddefs import CHUNK_WIDTH, CHUNK_HEIGHT, ENTITY_TYPE_MAP, CHUNK_DATA_DIR


def _fail(message):
    """Print an error message and abort the build with exit status 1."""
    print(message)
    raise SystemExit(1)


def _validateLayer(layer, layerName, chunkFile):
    """Ensure a tile layer is exactly CHUNK_HEIGHT rows of CHUNK_WIDTH cells."""
    if len(layer) != CHUNK_HEIGHT:
        _fail(f"Error: Chunk file '{chunkFile}' has invalid '{layerName}' length.")
    for row in layer:
        if len(row) != CHUNK_WIDTH:
            _fail(f"Error: Chunk file '{chunkFile}' has invalid '{layerName}' row length.")


def _writeLayer(f, memberName, layer):
    """Emit one 2D tile-layer initializer into the chunk header."""
    f.write(f" .{memberName} = {{\n")
    for row in layer:
        f.write(" ")
        for column in row:
            f.write(f"0x{column:02x}, ")
        f.write("\n")
    f.write(" },\n\n")


# Check if the script is run with the correct arguments
parser = argparse.ArgumentParser(description="Generate chunk header files")
parser.add_argument('--output', required=True, help='Dir to output headers')
args = parser.parse_args()

# Ensure outdir exists
outputDir = args.output
os.makedirs(outputDir, exist_ok=True)
# Create world directory if it does not exist
worldDir = os.path.join(outputDir, "world")
os.makedirs(worldDir, exist_ok=True)
# Create chunks directory if it does not exist
chunksDir = os.path.join(worldDir, "chunk")
os.makedirs(chunksDir, exist_ok=True)

# Timestamp embedded into every generated header.
now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

# Data accumulated for the world header file.
worldWidth = 0
worldHeight = 0
chunksDone = []
# NOTE(review): explicit entity ids are not reserved here, so an
# auto-assigned id may collide with a later explicit one — confirm intent.
entityIdNext = 1

# For each chunk file
for chunkFile in os.listdir(CHUNK_DATA_DIR):
    with open(os.path.join(CHUNK_DATA_DIR, chunkFile)) as jsonFile:
        data = json.load(jsonFile)
    print(f"Processing chunk: {chunkFile}")
    if 'chunk' not in data:
        _fail(f"Error: Chunk file '{chunkFile}' does not contain 'chunk' key.")
    if 'position' not in data['chunk']:
        _fail(f"Error: Chunk file '{chunkFile}' does not contain 'position' key.")

    # Position must be array of two integers
    position = data['chunk']['position']
    if not isinstance(position, list) or len(position) != 2:
        _fail(f"Error: Chunk file '{chunkFile}' has invalid 'position' format.")
    if not all(isinstance(x, int) for x in position):
        _fail(f"Error: Chunk file '{chunkFile}' invalid 'position' values.")
    x, y = position

    # Make sure that the filename "chunk_{x}_{y}.json" matches the position
    expectedFilename = f"chunk_{x}_{y}.json"
    if chunkFile != expectedFilename:
        _fail(f"Error: Chunk file '{chunkFile}' should be '{expectedFilename}'.")

    # Chunk should not be already processed
    if (x, y) in chunksDone:
        _fail(f"Error: Chunk at position ({x}, {y}) is already processed. Skipping.")
    chunksDone.append((x, y))
    # World dimensions grow to cover the furthest chunk seen.
    worldWidth = max(worldWidth, x + 1)
    worldHeight = max(worldHeight, y + 1)

    # Read in base layer data
    if 'baseLayer' not in data['chunk']:
        _fail(f"Error: Chunk file '{chunkFile}' does not contain 'baseLayer' key.")
    baseLayer = data['chunk']['baseLayer']
    _validateLayer(baseLayer, 'baseLayer', chunkFile)

    # Read in overlay layer data
    if 'overlayLayer' not in data['chunk']:
        _fail(f"Error: Chunk file '{chunkFile}' does not contain 'overlayLayer' key.")
    overlayLayer = data['chunk']['overlayLayer']
    _validateLayer(overlayLayer, 'overlayLayer', chunkFile)

    # Entities are optional.
    entities = data['chunk'].get('entities', [])

    # Now we generate a chunk header file
    chunk_header_path = os.path.join(chunksDir, f"chunk_{x}_{y}.h")
    with open(chunk_header_path, 'w') as f:
        f.write(f"// Generated chunk header for chunk at position ({x}, {y})\n")
        f.write(f"// Generated at {now}\n")
        f.write("#pragma once\n")
        f.write("#include \"world/chunkdata.h\"\n\n")
        f.write(f"static const chunkdata_t CHUNK_{x}_{y} = {{\n")
        _writeLayer(f, 'layerBase', baseLayer)
        _writeLayer(f, 'layerOverlay', overlayLayer)
        f.write(f" .entities = {{\n")
        for entity in entities:
            if 'id' in entity:
                entityId = entity['id']
            else:
                entityId = entityIdNext
                entityIdNext += 1
            if 'type' not in entity:
                _fail(f"Error: Entity in chunk ({x}, {y}) does not have 'type' key.")
            if 'x' not in entity or 'y' not in entity:
                _fail(f"Error: Entity in chunk ({x}, {y}) does not have 'x' or 'y' key.")
            f.write(" {\n")
            f.write(f" .id = {entityId},\n")
            f.write(f" .type = {ENTITY_TYPE_MAP.get(entity['type'], 'ENTITY_TYPE_UNKNOWN')},\n")
            f.write(f" .x = {entity['x']},\n")
            f.write(f" .y = {entity['y']},\n")
            f.write(f" }},\n")
        f.write(" },\n\n")
        f.write("};\n\n")

# Output header file.
header_path = os.path.join(worldDir, "world.h")
with open(header_path, 'w') as f:
    f.write(f"// Generated chunks file. Generated at {now}\n\n")
    f.write("#pragma once\n")
    f.write("#include \"dusk.h\"\n")
    # Now, for each chunk, include its header file
    for (x, y) in chunksDone:
        chunk_header = f"world/chunk/chunk_{x}_{y}.h"
        f.write(f"#include \"{chunk_header}\"\n")
    f.write("\n")
    f.write(f"#define WORLD_WIDTH {worldWidth}\n")
    f.write(f"#define WORLD_HEIGHT {worldHeight}\n\n")
    f.write(f"static const chunkdata_t* WORLD_CHUNKS[] = {{\n")
    # Row-major lookup table; positions with no chunk become NULL entries.
    for i in range(worldHeight):
        f.write(" ")
        for j in range(worldWidth):
            if (j, i) in chunksDone:
                f.write(f"&CHUNK_{j}_{i}, ")
            else:
                f.write("NULL, ")
        f.write("\n")
    f.write("};\n\n")

print(f"chunks.h generated at: {header_path}")

10
tools/mapeditor/mapeditor.py Executable file
View File

@ -0,0 +1,10 @@
#!/usr/bin/python3
# Map editor entry point (work in progress): currently only wires up the
# shared world definitions; no editor logic exists yet.
import sys, os
# Dynamically add ../shared to sys.path
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'shared'))
sys.path.append(shared_path)
# Import shared modules
from worlddefs import CHUNK_WIDTH, CHUNK_HEIGHT, ENTITY_TYPE_MAP, CHUNK_DATA_DIR

11
tools/shared/worlddefs.py Normal file
View File

@ -0,0 +1,11 @@
import os
# Tile dimensions of one chunk. Presumably these must match the C-side
# chunkdata_t layout — TODO confirm against world/chunkdata.h.
CHUNK_WIDTH = 8
CHUNK_HEIGHT = 8
# Maximum number of entities a single chunk may hold.
CHUNK_ENTITY_COUNT_MAX = 8
# Maps a JSON entity "type" string to the C entity-type identifier emitted
# into generated headers.
ENTITY_TYPE_MAP = {
"npc": "ENTITY_TYPE_NPC",
}
# Absolute path to the directory holding the chunk_{x}_{y}.json source data.
CHUNK_DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'data', 'chunks'))