"""Generate C chunk header files (chunk_{x}_{y}.h) and a world.h index
from the JSON chunk data found in CHUNK_DATA_DIR."""

import argparse
import json
import os
import sys
from datetime import datetime

# Make ../shared importable before pulling in the shared world definitions.
shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'shared'))
sys.path.append(shared_path)

from worlddefs import CHUNK_WIDTH, CHUNK_HEIGHT, ENTITY_TYPE_MAP, CHUNK_DATA_DIR


def _fail(message):
    """Print an error message and abort with a non-zero exit status."""
    print(message)
    sys.exit(1)


def _validated_layer(chunk, chunk_file, key):
    """Return chunk[key] after checking it is a CHUNK_HEIGHT x CHUNK_WIDTH grid.

    Aborts the program with a diagnostic if the layer is missing or mis-sized.
    """
    if key not in chunk:
        _fail(f"Error: Chunk file '{chunk_file}' does not contain '{key}' key.")
    layer = chunk[key]
    if len(layer) != CHUNK_HEIGHT:
        _fail(f"Error: Chunk file '{chunk_file}' has invalid '{key}' length.")
    for row in layer:
        if len(row) != CHUNK_WIDTH:
            _fail(f"Error: Chunk file '{chunk_file}' has invalid '{key}' row length.")
    return layer


def _load_chunk(chunk_file):
    """Load and validate one chunk JSON file.

    Returns (x, y, base_layer, overlay_layer, entities); aborts on any
    validation failure.
    """
    # Use a context manager so the JSON file handle is always closed
    # (the original json.load(open(...)) leaked it).
    with open(os.path.join(CHUNK_DATA_DIR, chunk_file)) as fp:
        data = json.load(fp)
    print(f"Processing chunk: {chunk_file}")

    if 'chunk' not in data:
        _fail(f"Error: Chunk file '{chunk_file}' does not contain 'chunk' key.")
    chunk = data['chunk']

    if 'position' not in chunk:
        _fail(f"Error: Chunk file '{chunk_file}' does not contain 'position' key.")
    # Position must be an array of exactly two integers.
    position = chunk['position']
    if not isinstance(position, list) or len(position) != 2:
        _fail(f"Error: Chunk file '{chunk_file}' has invalid 'position' format.")
    if not all(isinstance(v, int) for v in position):
        _fail(f"Error: Chunk file '{chunk_file}' invalid 'position' values.")
    x, y = position

    # The filename must encode the same position the payload declares.
    expected_filename = f"chunk_{x}_{y}.json"
    if chunk_file != expected_filename:
        _fail(f"Error: Chunk file '{chunk_file}' should be '{expected_filename}'.")

    base_layer = _validated_layer(chunk, chunk_file, 'baseLayer')
    overlay_layer = _validated_layer(chunk, chunk_file, 'overlayLayer')
    entities = chunk.get('entities', [])
    return x, y, base_layer, overlay_layer, entities


def _write_chunk_header(path, x, y, base_layer, overlay_layer, entities, next_entity_id, now):
    """Emit the C header for one chunk; return the next auto-assign entity id.

    Entities without an explicit 'id' receive sequential ids starting at
    next_entity_id.
    """
    with open(path, 'w') as f:
        f.write(f"// Generated chunk header for chunk at position ({x}, {y})\n")
        f.write(f"// Generated at {now}\n")
        f.write("#pragma once\n")
        f.write("#include \"world/chunkdata.h\"\n\n")
        f.write(f"static const chunkdata_t CHUNK_{x}_{y} = {{\n")

        # Both tile layers are emitted identically; only the field name differs.
        for field, layer in (("layerBase", base_layer), ("layerOverlay", overlay_layer)):
            f.write(f"  .{field} = {{\n")
            for row in layer:
                f.write("    " + "".join(f"0x{tile:02x}, " for tile in row) + "\n")
            f.write("  },\n\n")

        f.write("  .entities = {\n")
        for entity in entities:
            if 'type' not in entity:
                _fail(f"Error: Entity in chunk ({x}, {y}) does not have 'type' key.")
            if 'x' not in entity or 'y' not in entity:
                _fail(f"Error: Entity in chunk ({x}, {y}) does not have 'x' or 'y' key.")
            if 'id' in entity:
                entity_id = entity['id']
            else:
                entity_id = next_entity_id
                next_entity_id += 1
            f.write("    {\n")
            f.write(f"      .id = {entity_id},\n")
            # Unknown type strings fall back to ENTITY_TYPE_UNKNOWN.
            f.write(f"      .type = {ENTITY_TYPE_MAP.get(entity['type'], 'ENTITY_TYPE_UNKNOWN')},\n")
            f.write(f"      .x = {entity['x']},\n")
            f.write(f"      .y = {entity['y']},\n")
            f.write("    },\n")
        f.write("  },\n\n")
        f.write("};\n")
    return next_entity_id


def _write_world_header(path, chunks_done, world_width, world_height, now):
    """Emit world.h: per-chunk includes plus the row-major WORLD_CHUNKS table.

    Grid cells with no chunk are filled with NULL.
    """
    with open(path, 'w') as f:
        f.write(f"// Generated chunks file. Generated at {now}\n\n")
        f.write("#pragma once\n")
        f.write("#include \"dusk.h\"\n")
        for (x, y) in chunks_done:
            f.write(f"#include \"world/chunk/chunk_{x}_{y}.h\"\n")
        f.write("\n")
        f.write(f"#define WORLD_WIDTH {world_width}\n")
        f.write(f"#define WORLD_HEIGHT {world_height}\n\n")
        f.write("static const chunkdata_t* WORLD_CHUNKS[] = {\n")
        for row in range(world_height):
            f.write("  ")
            for col in range(world_width):
                if (col, row) in chunks_done:
                    f.write(f"&CHUNK_{col}_{row}, ")
                else:
                    f.write("NULL, ")
            f.write("\n")
        f.write("};\n")


def main():
    """Parse CLI arguments and generate all chunk headers plus world.h."""
    parser = argparse.ArgumentParser(description="Generate chunk header files")
    parser.add_argument('--output', required=True, help='Dir to output headers')
    args = parser.parse_args()

    # makedirs on the deepest path creates output/ and output/world/ too.
    world_dir = os.path.join(args.output, "world")
    chunks_dir = os.path.join(world_dir, "chunk")
    os.makedirs(chunks_dir, exist_ok=True)

    # Single timestamp so every generated file carries the same banner.
    now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    world_width = 0
    world_height = 0
    chunks_done = []
    next_entity_id = 1

    # sorted() makes the processing order — and therefore the auto-assigned
    # entity ids — deterministic across platforms (os.listdir order is not).
    for chunk_file in sorted(os.listdir(CHUNK_DATA_DIR)):
        x, y, base_layer, overlay_layer, entities = _load_chunk(chunk_file)

        # A chunk position may only appear once.
        if (x, y) in chunks_done:
            _fail(f"Error: Chunk at position ({x}, {y}) is already processed. Skipping.")
        chunks_done.append((x, y))

        # The world bounding box grows to cover every chunk seen.
        world_width = max(world_width, x + 1)
        world_height = max(world_height, y + 1)

        chunk_header_path = os.path.join(chunks_dir, f"chunk_{x}_{y}.h")
        next_entity_id = _write_chunk_header(
            chunk_header_path, x, y, base_layer, overlay_layer,
            entities, next_entity_id, now,
        )

    header_path = os.path.join(world_dir, "world.h")
    _write_world_header(header_path, chunks_done, world_width, world_height, now)
    print(f"chunks.h generated at: {header_path}")


if __name__ == "__main__":
    main()