Going to redo assets.

This commit is contained in:
2025-08-24 13:57:12 -05:00
parent 329925ea54
commit 479aad2f06
36 changed files with 285 additions and 128 deletions

View File

@@ -1,11 +1,11 @@
# Copyright (c) 2023 Dominic Masters
# Copyright (c) 2025 Dominic Masters
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
# Tools
add_subdirectory(eventcompile)
add_subdirectory(fontcompile)
add_subdirectory(languagecompile)
add_subdirectory(mapcompile)
add_subdirectory(tilecompile)

# Function that adds an asset to be compiled.
# Resolves ASSET_PATH relative to the calling list file and appends the
# absolute path to the DUSK_ASSETS internal cache variable so later build
# steps can consume the full asset list.
function(add_asset ASSET_PATH)
  set(FULL_ASSET_PATH "${CMAKE_CURRENT_LIST_DIR}/${ASSET_PATH}")
  list(APPEND DUSK_ASSETS ${FULL_ASSET_PATH})
  # Persist across directory scopes. NOTE(review): DUSK_CACHE_TARGET is used
  # as the cache entry's docstring here -- confirm that is intentional.
  set(DUSK_ASSETS ${DUSK_ASSETS} CACHE INTERNAL ${DUSK_CACHE_TARGET})
endfunction()

View File

@@ -0,0 +1,17 @@
import os
import sys
from tilesetparser import parseTileset
from imageparser import parseImage
def parseAsset(assetPath):
    """Compile a single asset file into generated headers.

    Dispatches on the file extension (.tsx tilesets, .png images) and
    returns the list of header paths produced. Unsupported types produce
    a warning and an empty list; a missing file exits with status 1.
    """
    if not os.path.isfile(assetPath):
        print(f"Error: Input asset '{assetPath}' does not exist.")
        sys.exit(1)

    # Guard-clause dispatch on the asset's extension.
    if assetPath.endswith(".tsx"):
        return parseTileset(assetPath)
    if assetPath.endswith(".png"):
        return parseImage(assetPath)

    print(f"Warning: Unsupported asset type for '{assetPath}'. Skipping.")
    return []

View File

@@ -0,0 +1,31 @@
import sys, os
import argparse
from assetparser import parseAsset
from header import setOutputDir
# Entry-point script: parses CLI arguments, then compiles every input asset
# into generated header files via parseAsset().

# Check if the script is run with the correct arguments
parser = argparse.ArgumentParser(description="Generate chunk header files")
parser.add_argument('--output', required=True, help='Dir to output headers')
parser.add_argument('--input', required=True, help='Input assets to process', nargs='+')
args = parser.parse_args()

# Setup headers directory.
setOutputDir(args.output)

# Headers produced by each asset. NOTE(review): appended to below but never
# read again in this script -- confirm downstream use.
outputHeaders = []

# Create output directory if it doesn't exist
if not os.path.exists(args.output):
    os.makedirs(args.output)

# Split input assets by comma (callers may pass comma-joined lists).
inputAssets = []
for inputArg in args.input:
    inputAssets.extend(inputArg.split(','))

# Begin processing assets
if not inputAssets:
    print("Error: No input assets provided.")
    sys.exit(1)
for asset in inputAssets:
    outputHeaders.extend(parseAsset(asset))

View File

@@ -0,0 +1,15 @@
import os
def setOutputDir(outputDir):
    """Record the directory generated headers are written to.

    Stored in the module-level OUTPUT_DIR global read by getOutputDir().
    """
    global OUTPUT_DIR
    OUTPUT_DIR = outputDir
def getOutputDir():
    """Return the header output directory.

    Raises NameError if setOutputDir() has not been called first, since
    OUTPUT_DIR is only ever defined there.
    """
    return OUTPUT_DIR
def getHeaderInclude(headerPath):
    """Build a C #include directive for headerPath, relative to the output dir."""
    # Forward slashes keep the include portable across platforms.
    path = os.path.relpath(headerPath, getOutputDir()).replace('\\', '/')
    print(f"  Including header: {path}")
    return f'#include "{path}"'

View File

@@ -0,0 +1,34 @@
import os
from os import abort
from header import getOutputDir
from PIL import Image
def parseImage(imagePath):
    """Generate a C header describing an image asset.

    Writes image_<basename>.h into the output directory, containing an
    assetimage_t with the image's width and height, and returns the list of
    generated header paths. Exits the process with status 1 when the image
    is missing or unreadable.
    """
    import sys  # local import: this module's header does not import sys

    print(f"Parsing image: {imagePath}")
    if not os.path.isfile(imagePath):
        # BUG FIX: was abort(msg); os.abort() takes no arguments and dumps
        # core -- report the error and exit cleanly instead.
        print(f"Error: Image file {imagePath} does not exist")
        sys.exit(1)

    outputFile = os.path.join(getOutputDir(), f"image_{os.path.basename(imagePath)}.h")

    dataOut = ""
    dataOut += f"// Auto-generated image header for {os.path.basename(imagePath)}\n"
    dataOut += f"#pragma once\n"
    dataOut += f"#include \"asset/assetimage.h\"\n\n"

    # C identifier derived from the file name (IMAGE_<NAME>).
    name = os.path.splitext(os.path.basename(imagePath))[0]
    name = name.upper().replace(' ', '_')
    dataOut += f"static const assetimage_t IMAGE_{name} = {{\n"
    try:
        with Image.open(imagePath) as img:
            width, height = img.size
            dataOut += f" .width = {width},\n"
            dataOut += f" .height = {height},\n"
    except Exception as e:
        # BUG FIX: was abort(msg) -- see above.
        print(f"Error: Unable to open image {imagePath}: {e}")
        sys.exit(1)
    dataOut += f"}};\n"

    with open(outputFile, 'w') as f:
        f.write(dataOut)
    return [ outputFile ]

View File

@@ -0,0 +1,74 @@
from os import abort
import os
import xml.etree.ElementTree as ET
from imageparser import parseImage
from header import getOutputDir, getHeaderInclude
def parseTileset(assetPath):
    """Generate C headers for a Tiled .tsx tileset.

    Parses the tileset XML, compiles each referenced image via parseImage(),
    then writes tileset_<basename>.h referencing the first image header.
    Returns the list of generated headers (tileset header first, then image
    headers), or [] when required attributes are missing. Exits with status
    1 on malformed image elements.
    """
    import sys  # local import: this module's header does not import sys

    tree = ET.parse(assetPath)
    root = tree.getroot()

    # Should have tilewidth, tileheight, tilecount and columns attributes
    if not all(attr in root.attrib for attr in ['tilewidth', 'tileheight', 'tilecount', 'columns']):
        print(f"Error: Missing required attributes in tileset {assetPath}")
        return []
    tileWidth = int(root.attrib['tilewidth'])
    tileHeight = int(root.attrib['tileheight'])
    tileCount = int(root.attrib['tilecount'])
    columns = int(root.attrib['columns'])

    # Find image elements
    images = root.findall('image')
    if not images:
        # BUG FIX: was abort(msg); os.abort() takes no arguments and dumps
        # core -- report the error and exit cleanly instead.
        print(f"Error: No image elements found in tileset {assetPath}")
        sys.exit(1)
    imageSources = []
    for image in images:
        imageSource = image.attrib.get('source')
        if not imageSource:
            # BUG FIX: was abort(msg) -- see above.
            print(f"Error: Image element missing 'source' attribute in tileset {assetPath}")
            sys.exit(1)
        # Image paths in .tsx files are relative to the tileset file itself.
        assetDir = os.path.dirname(assetPath)
        imageSource = os.path.normpath(os.path.join(assetDir, imageSource))
        imageSources.extend(parseImage(imageSource))

    # Now do our own header.
    headers = []
    print(f"Generating tileset header for {assetPath}")
    name = os.path.splitext(os.path.basename(assetPath))[0]
    name = name.upper().replace(' ', '_')
    # Image headers are named image_<name>.<ext>.h; stripping two extensions
    # and uppercasing recovers the IMAGE_<NAME> identifier parseImage() emits.
    imageNameWithoutExtension = os.path.splitext(os.path.splitext(os.path.basename(imageSources[0]))[0])[0]
    imageNameWithoutExtension = imageNameWithoutExtension.upper().replace(' ', '_')

    dataOut = ""
    dataOut += f"// Auto-generated tileset header for {os.path.basename(assetPath)}\n"
    dataOut += f"#pragma once\n"
    dataOut += f"#include \"asset/assettileset.h\"\n"
    for imgHeader in imageSources:
        dataOut += getHeaderInclude(imgHeader) + "\n"
    dataOut += f"\n"
    dataOut += f"static const assettileset_t TILESET_{name} = {{\n"
    dataOut += f" .tileCount = {tileCount},\n"
    dataOut += f" .columns = {columns},\n"
    dataOut += f" .tileHeight = {tileHeight},\n"
    dataOut += f" .tileWidth = {tileWidth},\n"
    dataOut += f" .image = &{imageNameWithoutExtension},\n"
    dataOut += f"}};\n"

    # Write out to output dir
    outputDir = getOutputDir()
    if not os.path.isdir(outputDir):
        os.makedirs(outputDir)
    outputFile = os.path.join(outputDir, f"tileset_{os.path.basename(assetPath)}.h")
    with open(outputFile, 'w') as f:
        f.write(dataOut)
    headers.append(outputFile)
    headers.extend(imageSources)
    return headers

View File

@@ -1,20 +0,0 @@
# Copyright (c) 2025 Dominic Masters
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
find_package(Python3 COMPONENTS Interpreter REQUIRED)

# Custom target that regenerates the event headers on every build by running
# eventcompile.py over the event JSON files.
add_custom_target(DUSK_EVENTS
  COMMAND
    ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/eventcompile.py
    --output ${DUSK_GENERATED_HEADERS_DIR}/event/
    --input ${DUSK_DATA_DIR}/events/
  DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/eventcompile.py
  COMMENT "Generating event header files"
  VERBATIM
)

# Ensure headers are generated before compiling main
add_dependencies(${DUSK_TARGET_NAME} DUSK_EVENTS)

View File

@@ -1,103 +0,0 @@
import sys, os
import argparse
from datetime import datetime
import json
# Entry-point script: converts each event JSON file in --input into a C
# header in --output, plus an aggregate eventlist.h.

# Check if the script is run with the correct arguments
parser = argparse.ArgumentParser(description="Generate event header files")
parser.add_argument('--output', required=True, help='Dir to write headers')
parser.add_argument('--input', required=True, help='Input directory containing event files')
args = parser.parse_args()

# Ensure outdir exists
outputFile = args.output
outputDir = args.output
os.makedirs(outputDir, exist_ok=True)
inputDir = args.input

# Scan for .json files in the input directory
if not os.path.exists(inputDir):
    print(f"Error: Input directory '{inputDir}' does not exist.")
    sys.exit(1)
jsonFiles = [f for f in os.listdir(inputDir) if f.endswith('.json')]
if not jsonFiles or len(jsonFiles) == 0:
    print(f"Error: No JSON files found in '{inputDir}'.")
    sys.exit(1)

# For each event file...
now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
# Generated event keys, used afterwards to build the aggregate list header.
eventFiles = []
for jsonFile in jsonFiles:
    inputFile = os.path.join(inputDir, jsonFile)
    # BUG FIX: was json.load(open(...)), which leaked the file handle; use a
    # context manager so the file is always closed.
    with open(inputFile, 'r', encoding='utf-8') as jf:
        data = json.load(jf)
    if 'key' not in data:
        print(f"Error: JSON file '{inputFile}' does not contain 'key' field.")
        sys.exit(1)
    if 'items' not in data or not isinstance(data['items'], list) or len(data['items']) == 0:
        print(f"Error: JSON file '{inputFile}' does not contain 'items' field.")
        sys.exit(1)
    key = data['key']
    keyUpper = key.upper()
    # The file name must match the declared key exactly.
    if jsonFile != f'{key}.json':
        print(f"Error: JSON file '{jsonFile}' does not match expected filename '{key}.json'.")
        sys.exit(1)
    outputFile = os.path.join(outputDir, f"{key}.h")
    with open(outputFile, 'w', encoding='utf-8') as f:
        f.write(f"// Generated event header for {jsonFile}\n")
        f.write(f"// Generated at {now}\n")
        f.write("#pragma once\n\n")
        f.write("#include \"event/eventdata.h\"\n\n")
        # Use the precomputed keyUpper (was recomputing key.upper() here,
        # leaving keyUpper unused).
        f.write(f"static const eventdata_t EVENT_{keyUpper} = {{\n")
        f.write(f" .itemCount = {len(data['items'])},\n")
        f.write(f" .items = {{\n")
        for i, item in enumerate(data['items']):
            if 'type' not in item:
                print(f"Error: Item {i} in '{jsonFile}' does not contain 'type' field.")
                sys.exit(1)
            itemType = item['type']
            f.write(f" {{\n")
            # Text(s) Type
            if itemType == 'text':
                if 'text' not in item:
                    print(f"Error: Item {i} in '{jsonFile}' of type 'text' does not contain 'text' field.")
                    sys.exit(1)
                f.write(f" .type = EVENT_TYPE_TEXT,\n")
                # NOTE(review): the text is embedded verbatim; quotes or
                # backslashes in the JSON would break the C literal -- confirm
                # inputs are constrained.
                f.write(f" .text = \"{item['text']}\",\n")
            else:
                print(f"Error: Unknown item type '{itemType}' in item {i} of '{jsonFile}'.")
                sys.exit(1)
            f.write(f" }},\n")
        f.write(f" }},\n")
        f.write(f"}};\n\n")
    eventFiles.append(key)
# Write the event list header
eventListFile = os.path.join(outputDir, "eventlist.h")
with open(eventListFile, 'w', encoding='utf-8') as f:
    f.write(f"// Generated event list header\n")
    f.write(f"// Generated at {now}\n")
    f.write("#pragma once\n\n")
    f.write("#include \"event/event.h\"\n")
    # One include per generated event header.
    for event in eventFiles:
        f.write(f"#include \"event/{event}.h\"\n")
    f.write("\n")
    f.write(f"#define EVENT_LIST_COUNT {len(eventFiles)}\n\n")
    # Pointer table over every generated eventdata_t, in file-scan order.
    f.write("static const eventdata_t* EVENT_LIST[EVENT_LIST_COUNT] = {\n")
    for event in eventFiles:
        f.write(f" &EVENT_{event.upper()},\n")
    f.write("};\n\n")

View File

@@ -1,20 +0,0 @@
# Copyright (c) 2025 Dominic Masters
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
find_package(Python3 COMPONENTS Interpreter REQUIRED)

# Custom target that regenerates the font header on every build by running
# fontcompile.py over the Tiled font tileset.
add_custom_target(DUSK_FONT
  COMMAND
    ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/fontcompile.py
    --output ${DUSK_GENERATED_HEADERS_DIR}/ui/fontdata.h
    --input ${DUSK_DATA_DIR}/minogram.tsx
  DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/fontcompile.py
  COMMENT "Generating font header file"
  VERBATIM
)

# Ensure headers are generated before compiling main
add_dependencies(${DUSK_TARGET_NAME} DUSK_FONT)

View File

@@ -1,153 +0,0 @@
import sys, os
import argparse
from datetime import datetime
import xml.etree.ElementTree as ET
from PIL import Image
# Input font information.
# Glyph order of the source tilemap: the index in this list is the tile index
# in the tileset image. Note the tilemap is NOT laid out in ASCII order.
CHARACTER_MAP = [
    'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M',
    'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
    'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
    'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z',
    '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', '+', '-', '=',
    '(', ')', '[', ']', '{', '}', '<', '>', '/', '*', ':', '#', '%',
    '!', '?', '.', ',', "'", '"', '@', '&', '$', ' '
]
CHAR_START = 0x20  # ASCII space character
CHAR_END = 0x7E  # ASCII tilde character (exclusive)
CHARS_TOTAL = CHAR_END - CHAR_START
# Check if the script is run with the correct arguments
parser = argparse.ArgumentParser(description="Generate chunk header files")
parser.add_argument('--output', required=True, help='File to write header')
parser.add_argument('--input', required=True, help='Input XML from tiled')
args = parser.parse_args()

# Ensure outdir exists
outputFile = args.output
outputDir = os.path.dirname(outputFile)
os.makedirs(outputDir, exist_ok=True)

# Read the XML file
inputFile = args.input
if not os.path.exists(inputFile):
    print(f"Error: Input file '{inputFile}' does not exist.")
    sys.exit(1)

# Find root element
tree = ET.parse(inputFile)
root = tree.getroot()

# Check if the root element is 'tileset'
if root.tag != 'tileset':
    print(f"Error: Expected root element 'tileset', found '{root.tag}'")
    sys.exit(1)

# Should have tilewidth and tileheight attributes
if 'tilewidth' not in root.attrib or 'tileheight' not in root.attrib:
    print("Error: 'tileset' element must have 'tilewidth' and 'tileheight' attributes")
    sys.exit(1)
if 'tilecount' not in root.attrib:
    print("Error: 'tileset' element must have 'tilecount' attribute")
    sys.exit(1)

# Find image element
image = root.find('image')
if image is None:
    print("Error: 'tileset' element must contain an 'image' element")
    sys.exit(1)

# Ensure image has 'source' attribute
if 'source' not in image.attrib:
    print("Error: 'image' element must have a 'source' attribute")
    sys.exit(1)

# Ensure image source exists (the path is relative to the tileset file)
inputDir = os.path.dirname(inputFile)
imageSource = os.path.join(inputDir, image.attrib['source'])
if not os.path.exists(imageSource):
    print(f"Error: Image source '{imageSource}' does not exist.")
    sys.exit(1)

# Ensure image has 'width' and 'height' attributes
if 'width' not in image.attrib or 'height' not in image.attrib:
    print("Error: 'image' element must have 'width' and 'height' attributes")
    sys.exit(1)

# Ensure image is readable
try:
    img = Image.open(imageSource)
except Exception as e:
    print(f"Error: Unable to open image '{imageSource}': {e}")
    sys.exit(1)

# Ensure image dimensions match the attributes
if img.width != int(image.attrib['width']) or img.height != int(image.attrib['height']):
    print(f"Error: Image dimensions ({img.width}x{img.height}) do not match attributes ({image.attrib['width']}x{image.attrib['height']})")
    sys.exit(1)

# Prepare header content
now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
# NOTE(review): tileCount is never read below (FONT_TILE_COUNT uses
# len(outputTileIndexes)) -- confirm this is intentional.
tileCount = int(root.attrib['tilecount'])
# Map each ASCII code in [CHAR_START, CHAR_END) to its tile index in the
# input tilemap. The tilemap is not in ASCII order, so invert CHARACTER_MAP
# once up front instead of scanning it per character (was O(n^2)).
# setdefault keeps the FIRST occurrence, matching the old break-on-match scan.
charToTileIndex = {}
for j, char in enumerate(CHARACTER_MAP):
    charToTileIndex.setdefault(ord(char), j)

outputTileIndexes = []
for i in range(CHARS_TOTAL):
    inputIndex = charToTileIndex.get(CHAR_START + i, -1)
    if inputIndex == -1:
        print(f"Warning: Character '{chr(CHAR_START + i)}' not found in CHARACTER_MAP")
        outputTileIndexes.append(0)  # Use 0 for missing characters (space)
        continue
    outputTileIndexes.append(inputIndex)
# Emit the header: tile metrics, the ASCII->tile index table, then a 0/1
# alpha-mask pixel array for every glyph.
with open(outputFile, 'w') as f:
    f.write(f"// Generated at {now}\n")
    f.write("#pragma once\n")
    f.write("#include \"dusk.h\"\n\n")
    f.write(f"#define FONT_TILE_WIDTH {root.attrib['tilewidth']}\n")
    f.write(f"#define FONT_TILE_HEIGHT {root.attrib['tileheight']}\n")
    f.write(f"#define FONT_TILE_COUNT {len(outputTileIndexes)}\n")
    f.write(f"#define FONT_CHAR_START {CHAR_START}\n")
    f.write(f"#define FONT_CHAR_END {CHAR_END}\n")
    f.write(f"#define FONT_CHAR_COUNT {CHARS_TOTAL}\n")
    f.write(f"#define FONT_COLUMN_COUNT {img.width // int(root.attrib['tilewidth'])}\n")
    f.write(f"#define FONT_ROW_COUNT {img.height // int(root.attrib['tileheight'])}\n\n")
    f.write("static const uint8_t TILE_INDEXES[FONT_CHAR_COUNT] = {\n")
    f.write(" ")
    for i in range(len(outputTileIndexes)):
        tileIndex = outputTileIndexes[i]
        f.write(f"{tileIndex}, ")
    f.write("\n};\n\n")
    f.write("static const uint8_t TILE_PIXEL_DATA[FONT_TILE_COUNT][FONT_TILE_WIDTH * FONT_TILE_HEIGHT] = {\n")
    for i in range(len(outputTileIndexes)):
        tileIndex = outputTileIndexes[i]
        f.write(f" // Character {i} ('{chr(CHAR_START + i)}'). Read from {tileIndex} tile.\n")
        f.write(f" {{")
        # Read the tile from the image (tiles are laid out row-major).
        tileX = (tileIndex % (img.width // int(root.attrib['tilewidth']))) * int(root.attrib['tilewidth'])
        tileY = (tileIndex // (img.width // int(root.attrib['tilewidth']))) * int(root.attrib['tileheight'])
        tile = img.crop((tileX, tileY, tileX + int(root.attrib['tilewidth']), tileY + int(root.attrib['tileheight'])))
        # Pixel is either 0 (transparent) or 1 (opaque)
        # NOTE(review): pixel[3] assumes RGBA pixels (index 3 = alpha) --
        # confirm the source image mode.
        for y in range(int(root.attrib['tileheight'])):
            for x in range(int(root.attrib['tilewidth'])):
                pixel = tile.getpixel((x, y))
                f.write(f"0x{1 if pixel[3] > 0 else 0:02X}, ")
        f.write("},\n\n")
    f.write("};\n\n")

View File

@@ -1,20 +0,0 @@
# Copyright (c) 2025 Dominic Masters
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
find_package(Python3 COMPONENTS Interpreter REQUIRED)

# Custom target that regenerates the language headers on every build by
# running languagecompile.py over the language JSON files.
add_custom_target(DUSK_LANGUAGES
  COMMAND
    ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/languagecompile.py
    --output ${DUSK_GENERATED_HEADERS_DIR}/locale/language/
    --input ${DUSK_DATA_DIR}/languages/
  DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/languagecompile.py
  COMMENT "Generating language header files"
  VERBATIM
)

# Ensure headers are generated before compiling main
add_dependencies(${DUSK_TARGET_NAME} DUSK_LANGUAGES)

View File

@@ -1,147 +0,0 @@
import sys, os
import argparse
from datetime import datetime
# Entry-point script: compiles each language JSON file in --input into a C
# header in --output, plus an aggregate languages.h.

# Check if the script is run with the correct arguments
parser = argparse.ArgumentParser(description="Generate chunk header files")
parser.add_argument('--output', required=True, help='Dir to write headers')
parser.add_argument('--input', required=True, help='Input directory containing language files')
args = parser.parse_args()

# Ensure outdir exists
outputFile = args.output
outputDir = args.output
os.makedirs(outputDir, exist_ok=True)
inputDir = args.input

# Scan for .json files in the input directory
if not os.path.exists(inputDir):
    print(f"Error: Input directory '{inputDir}' does not exist.")
    sys.exit(1)
jsonFiles = [f for f in os.listdir(inputDir) if f.endswith('.json')]
if not jsonFiles or len(jsonFiles) == 0:
    print(f"Error: No JSON files found in '{inputDir}'.")
    sys.exit(1)
def flattenJson(y):
    """Flatten nested dicts into one dict with dot-joined keys.

    Example: { "a": { "b": { "c": "d" } } } becomes { "a.b.c": "d" }.
    Non-dict values are kept as-is under their (joined) key.
    """
    flat = {}
    for key, value in y.items():
        if not isinstance(value, dict):
            # Leaf value: keep under the current key.
            flat[key] = value
            continue
        # Nested dict: recurse, then prefix every sub-key with this key.
        for subKey, subValue in flattenJson(value).items():
            flat[f"{key}.{subKey}"] = subValue
    return flat
def escapeString(s):
    """Escape backslashes and double quotes so s is safe inside a C string literal."""
    # Backslashes first, so the quote-escaping backslashes are not doubled.
    escaped = s.replace('\\', '\\\\')
    return escaped.replace('"', '\\"')
# For each language file...
now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
isFirstLanguage = True
# Because I code in english, I am going to reorder the langs so it is first.
jsonFiles.sort(key=lambda x: x.lower() if x.lower() == 'en.json' else "zz_" + x.lower())
# Keys seen in the first (English) file; other languages are checked against
# this list.
keysExpected = []
languages = []
import json  # stdlib JSON parser; replaces the unsafe eval() below

# Generate one header per language file, verifying every language carries the
# same translation keys as the first (English) file.
for jsonFile in jsonFiles:
    inputFile = os.path.join(inputDir, jsonFile)
    languageName = os.path.splitext(jsonFile)[0]
    langUpper = languageName.upper()
    outputFile = os.path.join(outputDir, f"{languageName}.h")
    # Read the JSON file
    # BUG FIX: was flattenJson(eval(content)). eval() executes arbitrary code
    # from the input file and cannot parse JSON literals such as
    # true/false/null; parse with the json module instead.
    with open(inputFile, 'r', encoding='utf-8') as f:
        languageData = json.load(f)
    # Write the header file
    with open(outputFile, 'w', encoding='utf-8') as f:
        f.write(f"// Generated from {jsonFile} on {now}\n")
        f.write("#pragma once\n")
        f.write("#include \"dusk.h\"\n\n")
        f.write(f"// Language: {languageName} from {jsonFile}\n")
        keyValues = flattenJson(languageData)
        if 'meta.language.name' not in keyValues:
            print(f"Error: 'meta.language.name' not found in {jsonFile}.")
            sys.exit(1)
        f.write(f"#define LANGUAGE_{langUpper}_CODE \"{languageName}\"\n")
        f.write(f"#define LANGUAGE_{langUpper}_NAME \"{keyValues['meta.language.name']}\"\n")
        f.write(f"#define LANGUAGE_{langUpper}_COUNT {len(keyValues)}\n\n")
        # Write keys
        f.write(f"static const char_t *LANGUAGE_{langUpper}_KEYS[] = {{\n")
        for key in keyValues.keys():
            f.write(f' "{escapeString(key)}",\n')
        f.write("};\n\n")
        # Write values
        f.write(f"static const char_t *LANGUAGE_{langUpper}_VALUES[] = {{\n")
        for value in keyValues.values():
            f.write(f' "{escapeString(value)}",\n')
        f.write("};\n\n")
        languages.append(langUpper)
        if isFirstLanguage:
            # For the first language, we also write the keysExpected
            keysExpected = list(keyValues.keys())
            # BUG FIX: the flag was never cleared, so keysExpected was
            # overwritten per language and the completeness check below
            # never ran.
            isFirstLanguage = False
        else:
            for key in keysExpected:
                if key in keyValues:
                    continue
                print(f"Error, expected language translation key: '{key}' was not found in {jsonFile}.")
                sys.exit(1)
# Now write the main header file: includes every per-language header and
# exposes parallel lookup tables indexed by LANGUAGE_<LANG>.
mainOutputFile = os.path.join(outputDir, "languages.h")
with open(mainOutputFile, 'w', encoding='utf-8') as f:
    f.write("// Generated from languagecompile.py\n")
    f.write("#pragma once\n")
    f.write("#include \"dusk.h\"\n")
    for lang in languages:
        f.write(f'#include "locale/language/{lang.lower()}.h"\n')
    f.write("\n")
    f.write(f"#define LANGUAGE_COUNT {len(languages)}\n\n")
    # Numeric index per language, in (en-first) processing order.
    index = 0
    for lang in languages:
        f.write(f"#define LANGUAGE_{lang} {index}\n")
        index += 1
    f.write("\n")
    f.write("static const char_t* LANGUAGE_CODES[] = {\n")
    for lang in languages:
        f.write(f' LANGUAGE_{lang}_CODE,\n')
    f.write("};\n\n")
    f.write("static const char_t* LANGUAGE_NAMES[] = {\n")
    for lang in languages:
        f.write(f' LANGUAGE_{lang}_NAME,\n')
    f.write("};\n\n")
    f.write("static const char_t** LANGUAGE_KEYS[] = {\n")
    for lang in languages:
        f.write(f' LANGUAGE_{lang}_KEYS,\n')
    f.write("};\n\n")
    f.write("static const int LANGUAGE_COUNTS[] = {\n")
    for lang in languages:
        f.write(f' LANGUAGE_{lang}_COUNT,\n')
    f.write("};\n\n")
    f.write("static const char_t** LANGUAGE_VALUES[] = {\n")
    for lang in languages:
        f.write(f' LANGUAGE_{lang}_VALUES,\n')
    f.write("};\n\n")

View File

@@ -1,21 +0,0 @@
# Copyright (c) 2025 Dominic Masters
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT
find_package(Python3 COMPONENTS Interpreter REQUIRED)

# Custom target that regenerates the world/chunk headers on every build by
# running mapcompile.py over the Tiled map export.
add_custom_target(DUSK_CHUNKS
  # OUTPUT ${DUSK_GENERATED_HEADERS_DIR}/world/world.h
  COMMAND
    ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/mapcompile.py
    --output ${DUSK_GENERATED_HEADERS_DIR}
    --input ${DUSK_DATA_DIR}/map.tmj
  DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/mapcompile.py
  COMMENT "Generating chunk header files"
  VERBATIM
)

# Ensure headers are generated before compiling main
add_dependencies(${DUSK_TARGET_NAME} DUSK_CHUNKS)

View File

@@ -1,127 +0,0 @@
import sys
from constants import CHUNK_WIDTH, CHUNK_HEIGHT, CHUNK_TILE_COUNT, TILE_WIDTH_HEIGHT
from entityParser import parseEntity
import math
def parseChunkLayerData(layer, mapData, chunkData):
    """Extract this output chunk's tiles from a Tiled input layer chunk.

    Walks the CHUNK_WIDTH x CHUNK_HEIGHT output grid row-major, translating
    each local coordinate into an index into the input chunk's data. Exits
    with status 1 if the result is not exactly CHUNK_TILE_COUNT tiles.
    """
    layerData = []
    for y in range(CHUNK_HEIGHT):
        for x in range(CHUNK_WIDTH):
            # (Removed an unused chunkGetOutputTileIndex(x, y) computation;
            # the append order already matches the output row-major index.)
            inputTileIndex = chunkGetTileIndex(x, y, mapData, chunkData)
            layerData.append(layer['data'][inputTileIndex])
    if len(layerData) != CHUNK_TILE_COUNT:
        print(f"Error: Layer data length {len(layerData)} does not match expected chunk tile count {CHUNK_TILE_COUNT}.")
        sys.exit(1)
    return layerData
def parseChunk(chunkX, chunkY, mapData):
    """Build the chunkData dict for output chunk (chunkX, chunkY).

    Pulls tile data for each layer out of the (larger) Tiled input chunks,
    resolves base/overlay layers, and collects entities whose pixel
    coordinates fall inside this chunk. Returns None when the chunk has no
    tile data at all.
    """
    chunkData = { }
    # Output-chunk origin, in tiles.
    chunkData['topLeftTileX'] = chunkX * CHUNK_WIDTH
    chunkData['topLeftTileY'] = chunkY * CHUNK_HEIGHT
    # Origin of the Tiled input chunk containing that tile; input chunks may
    # be larger than output chunks, so snap down to the input chunk grid.
    chunkData['inputTopLeftTileX'] = math.floor(
        float(chunkData['topLeftTileX']) / float(mapData['inputLayerWidthInTiles'])
    ) * mapData['inputLayerWidthInTiles']
    chunkData['inputTopLeftTileY'] = math.floor(
        float(chunkData['topLeftTileY']) / float(mapData['inputLayerHeightInTiles'])
    ) * mapData['inputLayerHeightInTiles']
    # Get the data for this chunk out of the map data.
    chunkData['layers'] = []
    for layer in mapData['tileLayers']:
        foundChunk = None
        if 'chunks' not in layer or not isinstance(layer['chunks'], list):
            print(f"Error: Layer '{layer['name']}' does not contain 'chunks' key or it is not a list.")
            sys.exit(1)
        for chunk in layer['chunks']:
            if 'x' not in chunk or 'y' not in chunk:
                print(f"Error: Chunk in layer '{layer['name']}' does not contain 'x' or 'y' key.")
                sys.exit(1)
            # Check if this chunk is within the bounds of the top left tile.
            if chunk['x'] != chunkData['inputTopLeftTileX'] or chunk['y'] != chunkData['inputTopLeftTileY']:
                continue
            foundChunk = chunk
            break
        if foundChunk is None:
            chunkData['layers'].append(None)
            continue
        # Is layer empty? (all-zero tile data counts as empty)
        layerEmpty = True
        for tile in foundChunk['data']:
            if tile == 0:
                continue
            layerEmpty = False
            break
        if layerEmpty:
            chunkData['layers'].append(None)
        else:
            chunkData['layers'].append(foundChunk)
    # Any layers for this chunk?
    if all(chunk is None for chunk in chunkData['layers']):
        return None
    if len(chunkData['layers']) == 0:
        return None
    # Parse Layer Data. Layer 0 is the base terrain; layer 1 (optional) the
    # overlay drawn above it.
    # NOTE(review): layers[0] can be None while layers[1] is not (only "all
    # None" short-circuits above); parseChunkLayerData(None, ...) would then
    # fail -- confirm base layers are always populated where overlays are.
    chunkData['layerBase'] = chunkData['layers'][0]
    chunkData['layerBaseOverlay'] = None
    if len(chunkData['layers']) > 1:
        chunkData['layerBaseOverlay'] = chunkData['layers'][1]
    chunkData['layerBaseData'] = parseChunkLayerData(chunkData['layerBase'], mapData, chunkData)
    if chunkData['layerBaseOverlay'] is not None:
        chunkData['layerBaseOverlayData'] = parseChunkLayerData(chunkData['layerBaseOverlay'], mapData, chunkData)
    else:
        chunkData['layerBaseOverlayData'] = []
    # Parse chunk entities. Object coordinates are in pixels while chunk
    # bounds are in tiles, hence the TILE_WIDTH_HEIGHT scaling.
    chunkData['entities'] = []
    for ob in mapData['objectLayer']['objects']:
        if 'x' not in ob or 'y' not in ob:
            print(f"Error: Object in object layer does not contain 'x' or 'y' key.")
            sys.exit(1)
        # Is this object within the chunk?
        if ob['x'] < chunkData['topLeftTileX'] * TILE_WIDTH_HEIGHT:
            continue
        if ob['x'] >= (chunkData['topLeftTileX'] + CHUNK_WIDTH) * TILE_WIDTH_HEIGHT:
            continue
        if ob['y'] < chunkData['topLeftTileY'] * TILE_WIDTH_HEIGHT:
            continue
        if ob['y'] >= (chunkData['topLeftTileY'] + CHUNK_HEIGHT) * TILE_WIDTH_HEIGHT:
            continue
        ent = parseEntity(ob, chunkData)
        if ent is None:
            continue
        chunkData['entities'].append(ent)
    return chunkData
def chunkGetLocalTileX(absoluteTileX, mapData):
    """Wrap an absolute tile X into a column within its input layer chunk."""
    return absoluteTileX % mapData['inputLayerWidthInTiles']
def chunkGetLocalTileY(absoluteTileY, mapData):
    """Wrap an absolute tile Y into a row within its input layer chunk."""
    return absoluteTileY % mapData['inputLayerHeightInTiles']
def chunkGetTileIndex(localX, localY, mapData, chunkData):
    """Translate chunk-local coordinates into an index into the input chunk data."""
    absX = chunkData['topLeftTileX'] + localX
    absY = chunkData['topLeftTileY'] + localY
    # Row-major index inside the input chunk's data array.
    col = chunkGetLocalTileX(absX, mapData)
    row = chunkGetLocalTileY(absY, mapData)
    return row * mapData['inputLayerWidthInTiles'] + col
def chunkGetOutputTileIndex(localX, localY):
    """Row-major index of a chunk-local coordinate in output chunk data."""
    return localY * CHUNK_WIDTH + localX

View File

@@ -1,6 +0,0 @@
# Values defined within C
# These must stay in sync with the equivalent definitions on the C side.
CHUNK_WIDTH = 8  # output chunk width, in tiles
CHUNK_HEIGHT = 8  # output chunk height, in tiles
CHUNK_TILE_COUNT = CHUNK_WIDTH * CHUNK_HEIGHT
CHUNK_ENTITY_COUNT_MAX = 8  # maximum entities per chunk
TILE_WIDTH_HEIGHT = 16  # square tile size; also scales pixel<->tile coords

View File

@@ -1,64 +0,0 @@
import sys
from constants import TILE_WIDTH_HEIGHT
# Maps Tiled object 'type'/'template' values to engine entity type enum names.
ENTITY_TYPE_MAP = {
    'templates/NPC.tx': 'ENTITY_TYPE_NPC',
}
def parseEntity(obj, chunkData):
    """Convert a Tiled object into an entity dict for its chunk.

    Mutates and returns obj: adds chunk-local pixel coordinates, a default
    facing direction, the engine entity type, and per-type 'data'. Returns
    None for unknown types/templates so callers can skip the object; exits
    with status 1 on missing required NPC properties.
    """
    # Resolve the engine entity type from either 'type' or 'template'.
    if 'type' in obj:
        if obj['type'] not in ENTITY_TYPE_MAP:
            print(f"Unknown entity type: {obj['type']}")
            return None
        entType = ENTITY_TYPE_MAP[obj['type']]
    elif 'template' in obj:
        if obj['template'] not in ENTITY_TYPE_MAP:
            print(f"Unknown entity template: {obj['template']}")
            return None
        entType = ENTITY_TYPE_MAP[obj['template']]
    else:
        return None
    if 'properties' not in obj:
        obj['properties'] = {}
    # Position relative to the chunk origin, in pixels.
    obj['localX'] = obj['x'] - (chunkData['topLeftTileX'] * TILE_WIDTH_HEIGHT)
    obj['localY'] = obj['y'] - (chunkData['topLeftTileY'] * TILE_WIDTH_HEIGHT)
    obj['dir'] = 'DIRECTION_SOUTH'  # default facing
    obj['type'] = entType  # note: overwrites the original Tiled 'type' value
    def getProperty(propName):
        # NOTE(review): assumes obj['properties'] is a list of {name, value}
        # dicts as Tiled exports; the {} default above simply yields nothing.
        for prop in obj['properties']:
            if prop['name'] == propName:
                return prop['value']
        return None
    # Handle per-type properties
    if entType == 'ENTITY_TYPE_NPC':
        interactType = getProperty('interactType')
        if interactType is None:
            print(f"NPC entity missing 'interactType' property: {obj['id']}")
            sys.exit(1)
        obj['data'] = {}
        obj['data']['npc'] = {}
        obj['data']['npc']['interactType'] = interactType
        if interactType == 'NPC_INTERACT_TYPE_TEXT':
            text = getProperty('interactText')
            if text is None:
                print(f"NPC entity missing 'interactText' property: {obj['id']}")
                sys.exit(1)
            obj['data']['npc']['text'] = text
        elif interactType == 'NPC_INTERACT_TYPE_EVENT':
            event = getProperty('interactEvent')
            if event is None:
                print(f"NPC entity missing 'interactEvent' property: {obj['id']}")
                sys.exit(1)
            # C expression referencing the generated event data symbol.
            obj['data']['npc']['eventData'] = f'&EVENT_{event.upper()}'
    return obj

View File

@@ -1,32 +0,0 @@
import os
import sys
import json
def parseInputFile(inputFile):
    """Load and validate a Tiled .tmj map export.

    Exits with status 1 when the file is missing or any required key
    ('height'/'width', 'tilewidth'/'tileheight', boolean 'infinite', or a
    non-empty 'layers' list) is absent; otherwise returns the parsed dict.
    """
    def _fail(message):
        # Report and terminate; all validation errors are fatal.
        print(message)
        sys.exit(1)

    if not os.path.isfile(inputFile):
        _fail(f"Error: Input file '{inputFile}' does not exist.")
    with open(inputFile, 'r') as f:
        data = json.load(f)
    if 'height' not in data or 'width' not in data:
        _fail(f"Error: Input file '{inputFile}' does not contain 'height' or 'width' key.")
    if 'tilewidth' not in data or 'tileheight' not in data:
        _fail(f"Error: Input file '{inputFile}' does not contain 'tilewidth' or 'tileheight' key.")
    if 'infinite' not in data or not isinstance(data['infinite'], bool):
        _fail(f"Error: Input file '{inputFile}' does not contain 'infinite' key.")
    if 'layers' not in data or not isinstance(data['layers'], list) or len(data['layers']) == 0:
        _fail(f"Error: Input file '{inputFile}' does not contain 'layers' key.")
    return data

View File

@@ -1,159 +0,0 @@
import sys
from constants import TILE_WIDTH_HEIGHT, CHUNK_WIDTH, CHUNK_HEIGHT
import math
def parseMap(data):
    """Normalize parsed Tiled map JSON into the mapData dict used downstream.

    Locates the object layer and tile layers, computes the input map bounds
    from the non-empty chunks of the first tile layer, shifts all chunk and
    object coordinates so the map starts at (0, 0), derives the output map
    size in chunks, and extracts the player spawn point. Exits with status 1
    on any structural problem.
    """
    mapData = {
        'layers': data['layers'],
        'playerSpawnX': 0,
        'playerSpawnY': 0,
        # BUG FIX: must be pre-initialized; previously a map without an
        # object layer raised KeyError below instead of printing the error.
        'objectLayer': None,
    }
    # Object layer
    for layer in mapData['layers']:
        if layer.get('type') == 'objectgroup':
            mapData['objectLayer'] = layer
            break
    if mapData['objectLayer'] is None:
        print(f"Error: Data does not contain an object layer.")
        sys.exit(1)
    if 'objects' not in mapData['objectLayer'] or not isinstance(mapData['objectLayer']['objects'], list):
        print(f"Error: Object layer does not contain 'objects' key or it is not a list.")
        sys.exit(1)
    # Tile Layers
    mapData['tileLayers'] = []
    for layer in mapData['layers']:
        if layer.get('type') == 'tilelayer':
            mapData['tileLayers'].append(layer)
    if len(mapData['tileLayers']) == 0:
        print(f"Error: Data does not contain any tile layers.")
        sys.exit(1)
    # First layer; its first chunk defines the input chunk dimensions.
    mapData['firstLayer'] = mapData['tileLayers'][0]
    if 'width' not in mapData['firstLayer'] or 'height' not in mapData['firstLayer']:
        print(f"Error: First layer does not contain 'width' or 'height' key.")
        sys.exit(1)
    if 'chunks' not in mapData['firstLayer'] or not isinstance(mapData['firstLayer']['chunks'], list):
        print(f"Error: First layer does not contain 'chunks' key.")
        sys.exit(1)
    if len(mapData['firstLayer']['chunks']) == 0:
        print(f"Error: First layer does not contain any chunks.")
        sys.exit(1)
    mapData['firstLayerFirstChunk'] = mapData['firstLayer']['chunks'][0]
    # Now determine the input map bounds from the non-empty chunks.
    isMinXFound = False
    isMaxXFound = False
    isMinYFound = False
    isMaxYFound = False
    mapData['inputMapLowestX'] = 0
    mapData['inputMapHighestX'] = 0
    mapData['inputMapLowestY'] = 0
    mapData['inputMapHighestY'] = 0
    mapData['inputLayerWidthInTiles'] = mapData['firstLayerFirstChunk']['width']
    mapData['inputLayerHeightInTiles'] = mapData['firstLayerFirstChunk']['height']
    for chunk in mapData['firstLayer']['chunks']:
        if 'x' not in chunk or 'y' not in chunk:
            print(f"Error: Chunk in first layer does not contain 'x' or 'y' key.")
            sys.exit(1)
        # Check chunk is not empty
        if 'data' not in chunk or not isinstance(chunk['data'], list):
            print(f"Error: Chunk in first layer does not contain 'data' key or it is not a list.")
            sys.exit(1)
        if len(chunk['data']) != mapData['inputLayerWidthInTiles'] * mapData['inputLayerHeightInTiles']:
            print(f"Error: Chunk in first layer does not contain the expected number of tiles.")
            sys.exit(1)
        chunkEmpty = True
        for tile in chunk['data']:
            if tile == 0:
                continue
            chunkEmpty = False
            break
        if chunkEmpty:
            print(f"Warning: Chunk at ({chunk['x']}, {chunk['y']}) is empty, skipping.")
            continue
        chunkX = chunk['x']
        chunkY = chunk['y']
        # Expand the bounds; the *Found flags let the first non-empty chunk
        # initialize the min/max values regardless of sign.
        if mapData['inputMapLowestX'] > chunkX or not isMinXFound:
            mapData['inputMapLowestX'] = chunkX
            isMinXFound = True
        if mapData['inputMapHighestX'] < chunkX or not isMaxXFound:
            mapData['inputMapHighestX'] = chunkX
            isMaxXFound = True
        if mapData['inputMapLowestY'] > chunkY or not isMinYFound:
            mapData['inputMapLowestY'] = chunkY
            isMinYFound = True
        if mapData['inputMapHighestY'] < chunkY or not isMaxYFound:
            mapData['inputMapHighestY'] = chunkY
            isMaxYFound = True
    # Highest values become exclusive bounds: add one input-chunk span.
    mapData['inputMapHighestX'] += mapData['inputLayerWidthInTiles']
    mapData['inputMapHighestY'] += mapData['inputLayerHeightInTiles']
    # We now offset all chunks by the lowest X/Y values to make them start at
    # (0, 0). Mutation is in place; the chunk dicts are shared references.
    # BUG FIX: removed a mis-indexed write-back (mapData['layers'][layerIndex]
    # = layer) that used the tile-layer index into the full layers list,
    # clobbering unrelated entries when an object layer preceded a tile layer;
    # both write-backs were redundant since the dicts are mutated in place.
    for layer in mapData['tileLayers']:
        for chunk in layer['chunks']:
            chunk['x'] -= mapData['inputMapLowestX']
            chunk['y'] -= mapData['inputMapLowestY']
    # Same for object layers (object coordinates are in pixels).
    for obIndex, ob in enumerate(mapData['objectLayer']['objects']):
        if 'x' not in ob or 'y' not in ob:
            print(f"Error: Object in object layer does not contain 'x' or 'y' key.")
            sys.exit(1)
        if 'id' not in ob:
            print(f"Error: Object in object layer does not contain 'id' key.")
            sys.exit(1)
        ob['x'] -= mapData['inputMapLowestX'] * TILE_WIDTH_HEIGHT
        ob['y'] -= mapData['inputMapLowestY'] * TILE_WIDTH_HEIGHT
        # Objects are bottom aligned in tiled, so we need to adjust the Y coordinate.
        ob['y'] -= TILE_WIDTH_HEIGHT
        # Round off the coordinates
        ob['x'] = round(ob['x'])
        ob['y'] = round(ob['y'])
        mapData['objectLayer']['objects'][obIndex] = ob
    # Output map size, in tiles and in output-sized chunks.
    mapData['mapWidthInTiles'] = mapData['inputMapHighestX'] - mapData['inputMapLowestX']
    mapData['mapHeightInTiles'] = mapData['inputMapHighestY'] - mapData['inputMapLowestY']
    mapData['mapWidthInRealChunks'] = math.ceil(float(mapData['mapWidthInTiles']) / float(CHUNK_WIDTH))
    mapData['mapHeightInRealChunks'] = math.ceil(float(mapData['mapHeightInTiles']) / float(CHUNK_HEIGHT))
    if mapData['inputLayerWidthInTiles'] < CHUNK_WIDTH or mapData['inputLayerHeightInTiles'] < CHUNK_HEIGHT:
        print(f"Error: Input layer size is smaller than chunk size.")
        sys.exit(1)
    # Extract player spawn point (last matching object wins).
    for ob in mapData['objectLayer']['objects']:
        if 'type' not in ob or ob['type'] != 'player_spawn':
            continue
        if 'x' not in ob or 'y' not in ob:
            print(f"Error: Player spawn object does not contain 'x' or 'y' key.")
            sys.exit(1)
        mapData['playerSpawnX'] = ob['x']
        mapData['playerSpawnY'] = ob['y']
    return mapData

View File

@@ -1,146 +0,0 @@
import sys, os
import argparse
from datetime import datetime
import math
from inputParser import parseInputFile
from mapParser import parseMap
from chunkParser import parseChunk
from constants import CHUNK_WIDTH, CHUNK_HEIGHT, TILE_WIDTH_HEIGHT

# mapcompile entry point: reads a Tiled JSON map, slices it into fixed-size
# chunks, and emits one C header per non-empty chunk plus a world.h index.

# Check if the script is run with the correct arguments
parser = argparse.ArgumentParser(description="Generate chunk header files")
parser.add_argument('--output', required=True, help='Dir to output headers')
parser.add_argument('--input', required=True, help='Input JSON file from tiled')
args = parser.parse_args()

# Ensure outdir exists
outputDir = args.output
os.makedirs(outputDir, exist_ok=True)

# Create world directory if it does not exist
worldDir = os.path.join(outputDir, "world")
os.makedirs(worldDir, exist_ok=True)

# Create chunks directory if it does not exist
chunksDir = os.path.join(worldDir, "chunk")
os.makedirs(chunksDir, exist_ok=True)

# Some vars used during printing
# Timestamp baked into every generated header for traceability.
now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

# Read the input JSON file
data = parseInputFile(args.input)
mapData = parseMap(data)

# For each output chunk.
# worldWidth/worldHeight track the extent of chunks that actually exist;
# chunksDone records (x, y) pairs so world.h can emit NULL for the gaps.
worldWidth = 0
worldHeight = 0
chunksDone = set()
for chunkY in range(mapData['mapHeightInRealChunks']):
    for chunkX in range(mapData['mapWidthInRealChunks']):
        chunkData = parseChunk(chunkX, chunkY, mapData)
        if chunkData is None:
            # Empty chunk: no header emitted; world.h will hold NULL here.
            continue
        # This is a valid chunk.
        worldWidth = max(worldWidth, chunkX + 1)
        worldHeight = max(worldHeight, chunkY + 1)
        chunksDone.add((chunkX, chunkY))
        chunkHeaderPath = os.path.join(chunksDir, f"chunk_{chunkX}_{chunkY}.h")
        with open(chunkHeaderPath, 'w') as f:
            f.write(f"// Generated chunk header for chunk at position ({chunkX}, {chunkY})\n")
            f.write(f"// Generated at {now}\n")
            f.write("#pragma once\n")
            f.write("#include \"world/chunkdata.h\"\n\n")
            f.write(f"static const chunkdata_t CHUNK_{chunkX}_{chunkY} = {{\n")
            # Base tile layer: CHUNK_HEIGHT rows of CHUNK_WIDTH tile bytes,
            # emitted row-major as hex literals.
            f.write(f" .layerBase = {{\n")
            for y in range(CHUNK_HEIGHT):
                f.write(f" ")
                for x in range(CHUNK_WIDTH):
                    i = y * CHUNK_WIDTH + x
                    byte = chunkData['layerBaseData'][i]
                    f.write(f"0x{byte:02x}, ")
                f.write(f"\n")
            f.write(" },\n\n")
            # Optional overlay layer; left zero-initialized when absent.
            # NOTE(review): presence is tested via 'layerBaseOverlay' but the
            # bytes are read from 'layerBaseOverlayData' — confirm parseChunk
            # really provides both keys together.
            f.write(" .layerBaseOverlay = {\n")
            if chunkData['layerBaseOverlay'] is not None:
                for y in range(CHUNK_HEIGHT):
                    f.write(f" ")
                    for x in range(CHUNK_WIDTH):
                        i = y * CHUNK_WIDTH + x
                        byte = chunkData['layerBaseOverlayData'][i]
                        f.write(f"0x{byte:02x}, ")
                    f.write(f"\n")
            f.write(" },\n\n")
            # Entities: one designated-initializer struct each; x/y are
            # divided by TILE_WIDTH_HEIGHT, converting pixels to tile units.
            f.write(f" .entities = {{\n")
            for entity in chunkData['entities']:
                f.write(" {\n")
                f.write(f" .id = {entity['id']},\n")
                f.write(f" .type = {entity['type']},\n")
                f.write(f" .x = {round(entity['x'] / TILE_WIDTH_HEIGHT)},\n")
                f.write(f" .y = {round(entity['y'] / TILE_WIDTH_HEIGHT)},\n")
                if 'dir' in entity:
                    f.write(f" .dir = {entity['dir']},\n")
                # Recursively emit nested dict/list entity data as nested C
                # initializers. (Re-defined per entity; harmless — only the
                # closure over the current file handle f matters.)
                def printRecurse(obj, tabs = " "):
                    for key, value in obj:
                        if isinstance(value, dict):
                            f.write(f"{tabs}.{key} = {{\n")
                            printRecurse(value.items(), tabs + " ")
                            f.write(f"{tabs}}},\n")
                        elif isinstance(value, list):
                            f.write(f"{tabs}.{key} = {{\n")
                            for item in value:
                                f.write(f"{tabs} {item},\n")
                            f.write(f"{tabs}}},\n")
                        else:
                            f.write(f"{tabs}.{key} = {value},\n")
                if 'data' in entity:
                    printRecurse(entity['data'].items())
                f.write(" },\n")
            f.write(f" }},\n")
            f.write("};\n\n")

# Output header file.
headerPath = os.path.join(worldDir, "world.h")
with open(headerPath, 'w') as f:
    f.write(f"// Generated chunks file. Generated at {now}\n\n")
    f.write("#pragma once\n")
    f.write("#include \"dusk.h\"\n")
    # Now, for each chunk, include its header file
    for (x, y) in chunksDone:
        f.write(f"#include \"world/chunk/chunk_{x}_{y}.h\"\n")
    f.write("\n")
    f.write(f"#define WORLD_WIDTH {worldWidth}\n")
    f.write(f"#define WORLD_HEIGHT {worldHeight}\n")
    # Write out other global variables.
    # Player spawn converted from pixels to tile coordinates.
    f.write(f"#define WORLD_PLAYER_SPAWN_X ({round(mapData['playerSpawnX'] / TILE_WIDTH_HEIGHT)})\n")
    f.write(f"#define WORLD_PLAYER_SPAWN_Y ({round(mapData['playerSpawnY'] / TILE_WIDTH_HEIGHT)})\n")
    f.write("\n")
    # Row-major lookup table of chunk pointers; NULL marks empty chunks.
    f.write(f"static const chunkdata_t* WORLD_CHUNKS[] = {{\n")
    for i in range(worldHeight):
        f.write(" ")
        for j in range(worldWidth):
            if(j, i) in chunksDone:
                f.write(f"&CHUNK_{j}_{i}, ")
            else:
                f.write("NULL, ")
        f.write("\n")
    f.write("};\n\n")
print(f"chunks.h generated at: {headerPath}")

View File

@@ -1,20 +0,0 @@
# Copyright (c) 2025 Dominic Masters
#
# This software is released under the MIT License.
# https://opensource.org/licenses/MIT

# Locate the Python interpreter used to run the tile compiler script.
find_package(Python3 COMPONENTS Interpreter REQUIRED)

# Custom command to generate all header files
# NOTE: a custom target's COMMAND is always considered out of date, so
# tilecompile.py re-runs on every build regardless of the DEPENDS line;
# DEPENDS on the script is informational here, not a rebuild trigger.
add_custom_target(DUSK_TILES
  COMMAND
    ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/tilecompile.py
    --output ${DUSK_GENERATED_HEADERS_DIR}
    --input ${DUSK_DATA_DIR}/overworld.tsx
  DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/tilecompile.py
  COMMENT "Generating tile header file"
  VERBATIM
)

# Ensure headers are generated before compiling main
add_dependencies(${DUSK_TARGET_NAME} DUSK_TILES)

View File

@@ -1,109 +0,0 @@
import sys, os
import argparse
from datetime import datetime
import xml.etree.ElementTree as ET

# tilecompile entry point: reads a Tiled tileset (.tsx XML) and emits
# world/tiledata.h containing tile dimensions plus per-tile metadata.

# Check if the script is run with the correct arguments
parser = argparse.ArgumentParser(description="Generate chunk header files")
parser.add_argument('--output', required=True, help='Dir to output headers')
parser.add_argument('--input', required=True, help='Input XML file from tiled')
args = parser.parse_args()

# Ensure outdir exists
outputDir = args.output
os.makedirs(outputDir, exist_ok=True)

# Create world directory if it does not exist
worldDir = os.path.join(outputDir, "world")
os.makedirs(worldDir, exist_ok=True)

# Timestamp baked into the generated header for traceability.
now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

# Read the input tileset XML file.
inputFile = args.input
if not os.path.isfile(inputFile):
    print(f"Error: Input file '{inputFile}' does not exist.")
    sys.exit(1)

with open(inputFile, 'r') as f:
    data = f.read()

# Parse the XML data
try:
    root = ET.fromstring(data)
except ET.ParseError as e:
    print(f"Error parsing XML: {e}")
    sys.exit(1)

# Validate the minimum tileset attributes we rely on below.
if root.tag != 'tileset':
    print("Error: Input file is not a valid Tiled XML file.")
    sys.exit(1)
if 'tilewidth' not in root.attrib or 'tileheight' not in root.attrib:
    print("Error: Missing tilewidth or tileheight attributes in the tileset.")
    sys.exit(1)
if 'tilecount' not in root.attrib or 'columns' not in root.attrib:
    print("Error: Missing tilecount or columns attributes in the tileset.")
    sys.exit(1)

tileWidth = int(root.get('tilewidth', 0))
tileHeight = int(root.get('tileheight', 0))
tileCount = int(root.get('tilecount', 0)) + 1  # +1 because maps are 1 indexed
columns = int(root.get('columns', 0))

# Index <tile> elements by their shifted (1-based) ID; ID 0 is the
# implicit "no tile" slot and never appears in this dict.
tilesById = {}
for tile in root.findall('tile'):
    if 'id' not in tile.attrib:
        print("Error: Tile element missing 'id' attribute.")
        continue
    tileId = int(tile.get('id', -1)) + 1  # +1 because maps are 1 indexed
    # FIX: after the +1 shift the valid range is [1, tileCount). The old
    # check used `tileId < 0`, which could never reject a shifted ID of 0.
    if tileId < 1 or tileId >= tileCount:
        print(f"Error: Invalid tile ID {tileId} in tile element.")
        continue
    tilesById[tileId] = tile


def findProp(properties, name, expectedType=''):
    """Return the value of the named <property> under `properties`.

    When expectedType is non-empty the property's 'type' (plain types) or
    'propertytype' (class/enum types) attribute must match it. Returns
    None when no matching property exists.
    """
    for prop in properties.findall('property'):
        if prop.get('name') == name:
            if len(expectedType) > 0:
                if 'type' in prop.attrib and prop.get('type') != expectedType:
                    continue
                if 'propertytype' in prop.attrib and prop.get('propertytype') != expectedType:
                    continue
            return prop.get('value', '')
    return None


# Create the header file
headerFile = os.path.join(worldDir, "tiledata.h")
with open(headerFile, 'w') as f:
    f.write(f"// Generated on {now}\n")
    # FIX: guard against multiple inclusion, matching the #pragma once that
    # the other generated headers in this project carry.
    f.write("#pragma once\n")
    f.write(f"#include \"world/tile.h\"\n\n")
    f.write(f"#define TILE_WIDTH {tileWidth}\n")
    f.write(f"#define TILE_HEIGHT {tileHeight}\n")
    # NOTE(review): TILE_WIDTH_HEIGHT assumes square tiles (uses tileWidth
    # only) — confirm tileWidth == tileHeight for all tilesets used.
    f.write(f"#define TILE_WIDTH_HEIGHT {tileWidth}\n")
    f.write(f"#define TILE_COUNT {tileCount}\n")
    f.write("static const tilemetadata_t TILE_META_DATA[TILE_COUNT] = {\n")
    for tileId in range(tileCount):
        tile = tilesById.get(tileId, None)
        if tile is None:
            # No <tile> element for this ID: zero-initialized metadata.
            f.write(f" {{ 0 }},\n")
            continue
        properties = tile.find('properties')
        if properties is None:
            f.write(f" {{ 0 }},\n")
            continue
        f.write(f" {{\n")
        propSolid = findProp(properties, 'solidType', 'tileSolidType')
        if propSolid is not None:
            f.write(f" .solidType = {propSolid},\n")
        f.write(f" }},\n")
    f.write("};\n\n")