map crap
@@ -8,7 +8,10 @@ find_package(Python3 COMPONENTS Interpreter REQUIRED)
 # Custom command to generate all header files
 add_custom_target(DUSK_CHUNKS
   # OUTPUT ${DUSK_GENERATED_HEADERS_DIR}/world/world.h
-  COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/mapcompile.py --output ${DUSK_GENERATED_HEADERS_DIR}
+  COMMAND
+    ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/mapcompile.py
+    --output ${DUSK_GENERATED_HEADERS_DIR}
+    --input ${DUSK_DATA_DIR}/map.tmj
   DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/mapcompile.py
   COMMENT "Generating chunk header files"
   VERBATIM
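For local debugging it can help to reproduce the exact invocation the new target runs, outside of CMake. A minimal sketch, with stand-in values assumed for the configured DUSK_GENERATED_HEADERS_DIR and DUSK_DATA_DIR variables:

import subprocess
import sys

# Hypothetical stand-ins for the CMake cache variables; adjust to your tree.
generated_headers_dir = "build/generated"
map_tmj = "data/map.tmj"

subprocess.run(
    [sys.executable, "mapcompile.py",
     "--output", generated_headers_dir,
     "--input", map_tmj],
    check=True,  # raise if the generator exits non-zero, as the build would fail
)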
@@ -3,18 +3,18 @@ import argparse
 from datetime import datetime
 import json

-shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'shared'))
-sys.path.append(shared_path)
-CHUNK_WIDTH = 8
-CHUNK_HEIGHT = 8
-CHUNK_ENTITY_COUNT_MAX = 8
-
-ENTITY_TYPE_MAP = {
-    "npc": "ENTITY_TYPE_NPC",
-}
+from worlddefs import CHUNK_WIDTH, CHUNK_HEIGHT, ENTITY_TYPE_MAP, CHUNK_DATA_DIR
+
+# Dynamically add ../shared to sys.path
+shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'shared'))
+sys.path.append(shared_path)

 # Check if the script is run with the correct arguments
 parser = argparse.ArgumentParser(description="Generate chunk header files")
 parser.add_argument('--output', required=True, help='Dir to output headers')
+parser.add_argument('--input', required=True, help='Input JSON file from tiled')
 args = parser.parse_args()

 # Ensure outdir exists
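One thing worth flagging in the hunk above: worlddefs lives in ../shared, which only becomes importable after the sys.path tweak, yet the new "from worlddefs import ..." line appears to land before that tweak. A minimal sketch of the ordering that would work, using the script's own names:

import os
import sys

# Make ../shared importable *before* importing names from worlddefs.
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'shared'))
sys.path.append(shared_path)

from worlddefs import CHUNK_WIDTH, CHUNK_HEIGHT, ENTITY_TYPE_MAP, CHUNK_DATA_DIR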
@@ -32,165 +32,219 @@ os.makedirs(chunksDir, exist_ok=True)
 # Some vars used during printing
 now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

+# Read the input JSON file
+inputFile = args.input
+if not os.path.isfile(inputFile):
+    print(f"Error: Input file '{inputFile}' does not exist.")
+    sys.exit(1)
+
+with open(inputFile, 'r') as f:
+    data = json.load(f)
+
+# Data should have height and width keys
+if 'height' not in data or 'width' not in data:
+    print(f"Error: Input file '{inputFile}' does not contain 'height' or 'width' key.")
+    sys.exit(1)
+
+if 'tilewidth' not in data or 'tileheight' not in data:
+    print(f"Error: Input file '{inputFile}' does not contain 'tilewidth' or 'tileheight' key.")
+    sys.exit(1)
+
+if 'infinite' not in data or not isinstance(data['infinite'], bool):
+    print(f"Error: Input file '{inputFile}' does not contain 'infinite' key.")
+    sys.exit(1)
+
+if 'layers' not in data or not isinstance(data['layers'], list):
+    print(f"Error: Input file '{inputFile}' does not contain 'layers' key.")
+    sys.exit(1)
+
+layers = data['layers']
+if len(layers) == 0:
+    print(f"Error: Input file '{inputFile}' does not contain any layers.")
+    sys.exit(1)
+
+# For each layer
+for layer in layers:
+    if not 'chunks' in layer or not isinstance(layer['chunks'], list):
+        print(f"Error: Layer in input file '{inputFile}' does not contain 'chunks' key.")
+        sys.exit(1)
+
+    if not 'height' in layer or not isinstance(layer['height'], int):
+        print(f"Error: Layer in input file '{inputFile}' does not contain 'height' key.")
+        sys.exit(1)
+
+    if not 'width' in layer or not isinstance(layer['width'], int):
+        print(f"Error: Layer in input file '{inputFile}' does not contain 'width' key.")
+        sys.exit(1)
+
+    # Skip layers that are explicitly hidden
+    if 'visible' in layer and layer['visible'] is not True:
+        continue
+
+    chunks = layer['chunks']
+    if len(chunks) == 0:
+        print(f"Error: Layer in input file '{inputFile}' does not contain any chunks.")
+        sys.exit(1)
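The validation added above repeats the same check/print/exit shape for every key. A small helper could collapse that pattern; a sketch (require is a hypothetical name, not part of the commit):

import sys

def require(cond, msg):
    # Mirror the inline checks: report the error and abort the build step.
    if not cond:
        print(f"Error: {msg}")
        sys.exit(1)

# Example usage against the same data dict:
require('height' in data and 'width' in data,
        f"Input file '{inputFile}' does not contain 'height' or 'width' key.")
require(isinstance(data.get('infinite'), bool),
        f"Input file '{inputFile}' does not contain 'infinite' key.")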

 # Data sent to the world header file
-worldWidth = 0
-worldHeight = 0
-chunksDone = []
-entityIdNext = 1
+# worldWidth = 0
+# worldHeight = 0
+# chunksDone = []
+# entityIdNext = 1

-# For each chunk file
-for chunkFile in os.listdir(CHUNK_DATA_DIR):
-    data = json.load(open(os.path.join(CHUNK_DATA_DIR, chunkFile)))
-    print(f"Processing chunk: {chunkFile}")
+# # For each chunk file
+# for chunkFile in os.listdir(CHUNK_DATA_DIR):
+#     data = json.load(open(os.path.join(CHUNK_DATA_DIR, chunkFile)))
+#     print(f"Processing chunk: {chunkFile}")

-    if not 'chunk' in data:
-        print(f"Error: Chunk file '{chunkFile}' does not contain 'chunk' key.")
-        exit(1)
+#     if not 'chunk' in data:
+#         print(f"Error: Chunk file '{chunkFile}' does not contain 'chunk' key.")
+#         exit(1)

-    if not 'position' in data['chunk']:
-        print(f"Error: Chunk file '{chunkFile}' does not contain 'position' key.")
-        exit(1)
+#     if not 'position' in data['chunk']:
+#         print(f"Error: Chunk file '{chunkFile}' does not contain 'position' key.")
+#         exit(1)

-    # Position must be array of two integers
-    position = data['chunk']['position']
-    if not isinstance(position, list) or len(position) != 2:
-        print(f"Error: Chunk file '{chunkFile}' has invalid 'position' format.")
-        exit(1)
-    if not all(isinstance(x, int) for x in position):
-        print(f"Error: Chunk file '{chunkFile}' has invalid 'position' values.")
-        exit(1)
+#     # Position must be array of two integers
+#     position = data['chunk']['position']
+#     if not isinstance(position, list) or len(position) != 2:
+#         print(f"Error: Chunk file '{chunkFile}' has invalid 'position' format.")
+#         exit(1)
+#     if not all(isinstance(x, int) for x in position):
+#         print(f"Error: Chunk file '{chunkFile}' has invalid 'position' values.")
+#         exit(1)

-    x, y = position
+#     x, y = position

-    # Make sure that the filename "chunk_{x}_{y}.json" matches the position
-    expectedFilename = f"chunk_{x}_{y}.json"
-    if chunkFile != expectedFilename:
-        print(f"Error: Chunk file '{chunkFile}' should be '{expectedFilename}'.")
-        exit(1)
+#     # Make sure that the filename "chunk_{x}_{y}.json" matches the position
+#     expectedFilename = f"chunk_{x}_{y}.json"
+#     if chunkFile != expectedFilename:
+#         print(f"Error: Chunk file '{chunkFile}' should be '{expectedFilename}'.")
+#         exit(1)

-    # Chunk should not be already processed
-    if (x, y) in chunksDone:
-        print(f"Error: Chunk at position ({x}, {y}) is already processed. Skipping.")
-        exit(1)
-    chunksDone.append((x, y))
+#     # Chunk should not be already processed
+#     if (x, y) in chunksDone:
+#         print(f"Error: Chunk at position ({x}, {y}) is already processed. Skipping.")
+#         exit(1)
+#     chunksDone.append((x, y))
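Since the (now commented-out) loop required the filename chunk_{x}_{y}.json to agree with the embedded 'position', the coordinates could equally be parsed from the name and cross-checked. A sketch (position_from_filename is a hypothetical helper, not in the commit):

import re

def position_from_filename(chunk_file):
    # "chunk_3_7.json" -> (3, 7); None if the name doesn't fit the pattern.
    m = re.fullmatch(r"chunk_(-?\d+)_(-?\d+)\.json", chunk_file)
    return (int(m.group(1)), int(m.group(2))) if m else None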

-    worldWidth = max(worldWidth, x + 1)
-    worldHeight = max(worldHeight, y + 1)
+#     worldWidth = max(worldWidth, x + 1)
+#     worldHeight = max(worldHeight, y + 1)

-    # Read in base layer data
-    if 'baseLayer' not in data['chunk']:
-        print(f"Error: Chunk file '{chunkFile}' does not contain 'baseLayer' key.")
-        exit(1)
-    baseLayer = data['chunk']['baseLayer']
+#     # Read in base layer data
+#     if 'baseLayer' not in data['chunk']:
+#         print(f"Error: Chunk file '{chunkFile}' does not contain 'baseLayer' key.")
+#         exit(1)
+#     baseLayer = data['chunk']['baseLayer']

-    # Base layer should have exactly CHUNK_WIDTH * CHUNK_HEIGHT elements
-    if len(baseLayer) != CHUNK_HEIGHT:
-        print(f"Error: Chunk file '{chunkFile}' has invalid 'baseLayer' length.")
-        exit(1)
-    for row in baseLayer:
-        if len(row) != CHUNK_WIDTH:
-            print(f"Error: Chunk file '{chunkFile}' has invalid 'baseLayer' row length.")
-            exit(1)
+#     # Base layer should have exactly CHUNK_WIDTH * CHUNK_HEIGHT elements
+#     if len(baseLayer) != CHUNK_HEIGHT:
+#         print(f"Error: Chunk file '{chunkFile}' has invalid 'baseLayer' length.")
+#         exit(1)
+#     for row in baseLayer:
+#         if len(row) != CHUNK_WIDTH:
+#             print(f"Error: Chunk file '{chunkFile}' has invalid 'baseLayer' row length.")
+#             exit(1)

-    # Read in overlay layer data
-    if 'overlayLayer' not in data['chunk']:
-        print(f"Error: Chunk file '{chunkFile}' does not contain 'overlayLayer' key.")
-        exit(1)
+#     # Read in overlay layer data
+#     if 'overlayLayer' not in data['chunk']:
+#         print(f"Error: Chunk file '{chunkFile}' does not contain 'overlayLayer' key.")
+#         exit(1)

-    overlayLayer = data['chunk']['overlayLayer']
+#     overlayLayer = data['chunk']['overlayLayer']

-    # Overlay layer should have exactly CHUNK_WIDTH * CHUNK_HEIGHT elements
-    if len(overlayLayer) != CHUNK_HEIGHT:
-        print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' length.")
-        exit(1)
-    for row in overlayLayer:
-        if len(row) != CHUNK_WIDTH:
-            print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' row length.")
-            exit(1)
+#     # Overlay layer should have exactly CHUNK_WIDTH * CHUNK_HEIGHT elements
+#     if len(overlayLayer) != CHUNK_HEIGHT:
+#         print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' length.")
+#         exit(1)
+#     for row in overlayLayer:
+#         if len(row) != CHUNK_WIDTH:
+#             print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' row length.")
+#             exit(1)
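baseLayer and overlayLayer go through identical CHUNK_HEIGHT-by-CHUNK_WIDTH shape checks, so one helper would cover both grids. A sketch mirroring the inline logic (validate_grid is a hypothetical name):

def validate_grid(grid, name, chunk_file):
    # A grid must be CHUNK_HEIGHT rows of CHUNK_WIDTH tiles each.
    if len(grid) != CHUNK_HEIGHT:
        print(f"Error: Chunk file '{chunk_file}' has invalid '{name}' length.")
        sys.exit(1)
    for row in grid:
        if len(row) != CHUNK_WIDTH:
            print(f"Error: Chunk file '{chunk_file}' has invalid '{name}' row length.")
            sys.exit(1)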

-    # Read in entities
-    entities = data['chunk'].get('entities', [])
+#     # Read in entities
+#     entities = data['chunk'].get('entities', [])

-    # Now we generate a chunk header file
-    chunk_header_path = os.path.join(chunksDir, f"chunk_{x}_{y}.h")
-    with open(chunk_header_path, 'w') as f:
-        f.write(f"// Generated chunk header for chunk at position ({x}, {y})\n")
-        f.write(f"// Generated at {now}\n")
-        f.write("#pragma once\n")
-        f.write("#include \"world/chunkdata.h\"\n\n")
+#     # Now we generate a chunk header file
+#     chunk_header_path = os.path.join(chunksDir, f"chunk_{x}_{y}.h")
+#     with open(chunk_header_path, 'w') as f:
+#         f.write(f"// Generated chunk header for chunk at position ({x}, {y})\n")
+#         f.write(f"// Generated at {now}\n")
+#         f.write("#pragma once\n")
+#         f.write("#include \"world/chunkdata.h\"\n\n")

-        f.write(f"static const chunkdata_t CHUNK_{x}_{y} = {{\n")
+#         f.write(f"static const chunkdata_t CHUNK_{x}_{y} = {{\n")

-        f.write(f" .layerBase = {{\n")
-        for row in baseLayer:
-            f.write(" ")
-            for column in row:
-                f.write(f"0x{column:02x}, ")
-            f.write("\n")
-        f.write(" },\n\n")
+#         f.write(f" .layerBase = {{\n")
+#         for row in baseLayer:
+#             f.write(" ")
+#             for column in row:
+#                 f.write(f"0x{column:02x}, ")
+#             f.write("\n")
+#         f.write(" },\n\n")

-        f.write(f" .layerOverlay = {{\n")
-        for row in overlayLayer:
-            f.write(" ")
-            for column in row:
-                f.write(f"0x{column:02x}, ")
-            f.write("\n")
-        f.write(" },\n\n")
+#         f.write(f" .layerOverlay = {{\n")
+#         for row in overlayLayer:
+#             f.write(" ")
+#             for column in row:
+#                 f.write(f"0x{column:02x}, ")
+#             f.write("\n")
+#         f.write(" },\n\n")

-        f.write(f" .entities = {{\n")
-        for entity in entities:
-            if 'id' in entity:
-                entityId = entity['id']
-            else:
-                entityId = entityIdNext
-                entityIdNext += 1
+#         f.write(f" .entities = {{\n")
+#         for entity in entities:
+#             if 'id' in entity:
+#                 entityId = entity['id']
+#             else:
+#                 entityId = entityIdNext
+#                 entityIdNext += 1
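The id branch above reads as: use the entity's explicit id when present, otherwise hand out the next auto-assigned id and advance the counter. The same logic in a compact form (an equivalent sketch, not a change to the commit):

entityId = entity.get('id')
if entityId is None:
    entityId = entityIdNext  # fall back to the auto-increment counter
    entityIdNext += 1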

-            if 'type' not in entity:
-                print(f"Error: Entity in chunk ({x}, {y}) does not have 'type' key.")
-                exit(1)
+#             if 'type' not in entity:
+#                 print(f"Error: Entity in chunk ({x}, {y}) does not have 'type' key.")
+#                 exit(1)

-            if 'x' not in entity or 'y' not in entity:
-                print(f"Error: Entity in chunk ({x}, {y}) does not have 'x' or 'y' key.")
-                exit(1)
+#             if 'x' not in entity or 'y' not in entity:
+#                 print(f"Error: Entity in chunk ({x}, {y}) does not have 'x' or 'y' key.")
+#                 exit(1)

-            f.write(" {\n")
-            f.write(f" .id = {entityId},\n")
-            f.write(f" .type = {ENTITY_TYPE_MAP.get(entity['type'], 'ENTITY_TYPE_UNKNOWN')},\n")
-            f.write(f" .x = {entity['x']},\n")
-            f.write(f" .y = {entity['y']},\n")
-            f.write(f" }},\n")
-            pass
-        f.write(" },\n\n")
+#             f.write(" {\n")
+#             f.write(f" .id = {entityId},\n")
+#             f.write(f" .type = {ENTITY_TYPE_MAP.get(entity['type'], 'ENTITY_TYPE_UNKNOWN')},\n")
+#             f.write(f" .x = {entity['x']},\n")
+#             f.write(f" .y = {entity['y']},\n")
+#             f.write(f" }},\n")
+#             pass
+#         f.write(" },\n\n")
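The .layerBase and .layerOverlay emission loops differ only in the field name and the source grid, so a shared writer would halve that block. A sketch matching the commented-out formatting (write_grid is a hypothetical helper):

def write_grid(f, field, grid):
    # Emit one C designated initializer: a row of hex tile bytes per line.
    f.write(f" .{field} = {{\n")
    for row in grid:
        f.write(" ")
        for column in row:
            f.write(f"0x{column:02x}, ")
        f.write("\n")
    f.write(" },\n\n")

# write_grid(f, "layerBase", baseLayer)
# write_grid(f, "layerOverlay", overlayLayer)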

-        f.write("};\n\n")
-    pass
+#         f.write("};\n\n")
+#     pass



-# Output header file.
-header_path = os.path.join(worldDir, "world.h")
-with open(header_path, 'w') as f:
-    f.write(f"// Generated chunks file. Generated at {now}\n\n")
-    f.write("#pragma once\n")
-    f.write("#include \"dusk.h\"\n")
+# # Output header file.
+# header_path = os.path.join(worldDir, "world.h")
+# with open(header_path, 'w') as f:
+#     f.write(f"// Generated chunks file. Generated at {now}\n\n")
+#     f.write("#pragma once\n")
+#     f.write("#include \"dusk.h\"\n")

-    # Now, for each chunk, include its header file
-    for (x, y) in chunksDone:
-        chunk_header = f"world/chunk/chunk_{x}_{y}.h"
-        f.write(f"#include \"{chunk_header}\"\n")
+#     # Now, for each chunk, include its header file
+#     for (x, y) in chunksDone:
+#         chunk_header = f"world/chunk/chunk_{x}_{y}.h"
+#         f.write(f"#include \"{chunk_header}\"\n")

-    f.write("\n")
-    f.write(f"#define WORLD_WIDTH {worldWidth}\n")
-    f.write(f"#define WORLD_HEIGHT {worldHeight}\n\n")
-    f.write(f"static const chunkdata_t* WORLD_CHUNKS[] = {{\n")
-    for i in range(worldHeight):
-        f.write(" ")
-        for j in range(worldWidth):
-            if (j, i) in chunksDone:
-                f.write(f"&CHUNK_{j}_{i}, ")
-            else:
-                f.write("NULL, ")
-        f.write("\n")
-    f.write("};\n\n")
+#     f.write("\n")
+#     f.write(f"#define WORLD_WIDTH {worldWidth}\n")
+#     f.write(f"#define WORLD_HEIGHT {worldHeight}\n\n")
+#     f.write(f"static const chunkdata_t* WORLD_CHUNKS[] = {{\n")
+#     for i in range(worldHeight):
+#         f.write(" ")
+#         for j in range(worldWidth):
+#             if (j, i) in chunksDone:
+#                 f.write(f"&CHUNK_{j}_{i}, ")
+#             else:
+#                 f.write("NULL, ")
+#         f.write("\n")
+#     f.write("};\n\n")
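The WORLD_CHUNKS table is emitted row by row, so the chunk at (x, y) lands at flat index y * WORLD_WIDTH + x, with NULL filling coordinates no chunk file covered. The same layout computed in Python, reusing the script's variables (a sketch, not part of the commit):

# Row-major flattening identical to the nested i/j loops above.
world_chunks = [(x, y) if (x, y) in chunksDone else None
                for y in range(worldHeight)
                for x in range(worldWidth)]
# Lookup: world_chunks[y * worldWidth + x] names the chunk, or None for a gap.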

-print(f"chunks.h generated at: {header_path}")
+# print(f"chunks.h generated at: {header_path}")