diff --git a/data/map.tmj b/data/map.tmj
new file mode 100644
index 0000000..5a404c5
--- /dev/null
+++ b/data/map.tmj
@@ -0,0 +1,122 @@
+{ "compressionlevel":-1,
+ "height":20,
+ "infinite":true,
+ "layers":[
+ {
+ "chunks":[
+ {
+ "data":[1, 2, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 9, 10, 11, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 17, 18, 19, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 25, 26, 27, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ "height":16,
+ "width":16,
+ "x":0,
+ "y":0
+ },
+ {
+ "data":[1, 2, 3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 9, 10, 11, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 17, 18, 19, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 25, 26, 27, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ "height":16,
+ "width":16,
+ "x":16,
+ "y":0
+ },
+ {
+ "data":[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 10,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, 18,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 26,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ "height":16,
+ "width":16,
+ "x":0,
+ "y":16
+ },
+ {
+ "data":[3, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 11, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 19, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 27, 28, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+ "height":16,
+ "width":16,
+ "x":16,
+ "y":16
+ }],
+ "height":32,
+ "id":1,
+ "name":"Tile Layer 1",
+ "opacity":1,
+ "startx":0,
+ "starty":0,
+ "type":"tilelayer",
+ "visible":true,
+ "width":32,
+ "x":0,
+ "y":0
+ }],
+ "nextlayerid":2,
+ "nextobjectid":1,
+ "orientation":"orthogonal",
+ "renderorder":"right-down",
+ "tiledversion":"1.11.1",
+ "tileheight":64,
+ "tilesets":[
+ {
+ "firstgid":1,
+ "source":"overworld.tsx"
+ }],
+ "tilewidth":64,
+ "type":"map",
+ "version":"1.10",
+ "width":30
+}
\ No newline at end of file
diff --git a/data/map.tmx b/data/map.tmx
new file mode 100644
index 0000000..1fcb20b
--- /dev/null
+++ b/data/map.tmx
@@ -0,0 +1,26 @@
+
+
diff --git a/data/overworld.tsx b/data/overworld.tsx
new file mode 100644
index 0000000..e7fc3a3
--- /dev/null
+++ b/data/overworld.tsx
@@ -0,0 +1,4 @@
+
+
+
+
diff --git a/data/tilemap_test-3990799883.jpg b/data/tilemap_test-3990799883.jpg
new file mode 100644
index 0000000..3acec69
Binary files /dev/null and b/data/tilemap_test-3990799883.jpg differ
diff --git a/tools/mapcompile/CMakeLists.txt b/tools/mapcompile/CMakeLists.txt
index 8c3453b..7effe6c 100644
--- a/tools/mapcompile/CMakeLists.txt
+++ b/tools/mapcompile/CMakeLists.txt
@@ -8,7 +8,10 @@ find_package(Python3 COMPONENTS Interpreter REQUIRED)
# Custom command to generate all header files
add_custom_target(DUSK_CHUNKS
# OUTPUT ${DUSK_GENERATED_HEADERS_DIR}/world/world.h
- COMMAND ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/mapcompile.py --output ${DUSK_GENERATED_HEADERS_DIR}
+ COMMAND
+ ${Python3_EXECUTABLE} ${CMAKE_CURRENT_SOURCE_DIR}/mapcompile.py
+ --output ${DUSK_GENERATED_HEADERS_DIR}
+ --input ${DUSK_DATA_DIR}/map.tmj
DEPENDS ${CMAKE_CURRENT_SOURCE_DIR}/mapcompile.py
COMMENT "Generating chunk header files"
VERBATIM
diff --git a/tools/mapcompile/mapcompile.py b/tools/mapcompile/mapcompile.py
index e260381..b45dcc3 100644
--- a/tools/mapcompile/mapcompile.py
+++ b/tools/mapcompile/mapcompile.py
@@ -3,18 +3,18 @@ import argparse
from datetime import datetime
import json
-shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'shared'))
-sys.path.append(shared_path)
+CHUNK_WIDTH = 8
+CHUNK_HEIGHT = 8
+CHUNK_ENTITY_COUNT_MAX = 8
-from worlddefs import CHUNK_WIDTH, CHUNK_HEIGHT, ENTITY_TYPE_MAP, CHUNK_DATA_DIR
-
-# Dynamically add ../shared to sys.path
-shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'shared'))
-sys.path.append(shared_path)
+ENTITY_TYPE_MAP = {
+ "npc": "ENTITY_TYPE_NPC",
+}
# Check if the script is run with the correct arguments
parser = argparse.ArgumentParser(description="Generate chunk header files")
parser.add_argument('--output', required=True, help='Dir to output headers')
+parser.add_argument('--input', required=True, help='Input JSON file from tiled')
args = parser.parse_args()
# Ensure outdir exists
@@ -32,165 +32,219 @@ os.makedirs(chunksDir, exist_ok=True)
# Some vars used during printing
now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
+# Read the input JSON file
+inputFile = args.input
+if not os.path.isfile(inputFile):
+ print(f"Error: Input file '{inputFile}' does not exist.")
+ sys.exit(1)
+
+with open(inputFile, 'r') as f:
+ data = json.load(f)
+
+# Input must contain both 'height' and 'width' keys
+if 'height' not in data or 'width' not in data:
+ print(f"Error: Input file '{inputFile}' does not contain 'height' or 'width' key.")
+ sys.exit(1)
+
+if 'tilewidth' not in data or 'tileheight' not in data:
+ print(f"Error: Input file '{inputFile}' does not contain 'tilewidth' or 'tileheight' key.")
+ sys.exit(1)
+
+if 'infinite' not in data or not isinstance(data['infinite'], bool):
+ print(f"Error: Input file '{inputFile}' does not contain 'infinite' key.")
+ sys.exit(1)
+
+if 'layers' not in data or not isinstance(data['layers'], list):
+ print(f"Error: Input file '{inputFile}' does not contain 'layers' key.")
+ sys.exit(1)
+
+layers = data['layers']
+if len(layers) == 0:
+ print(f"Error: Input file '{inputFile}' does not contain any layers.")
+ sys.exit(1)
+
+# Validate each layer's structure; non-visible layers are skipped
+for layer in layers:
+ if not 'chunks' in layer or not isinstance(layer['chunks'], list):
+ print(f"Error: Layer in input file '{inputFile}' does not contain 'chunks' key.")
+ sys.exit(1)
+
+ if not 'height' in layer or not isinstance(layer['height'], int):
+ print(f"Error: Layer in input file '{inputFile}' does not contain 'height' key.")
+ sys.exit(1)
+
+ if not 'width' in layer or not isinstance(layer['width'], int):
+ print(f"Error: Layer in input file '{inputFile}' does not contain 'width' key.")
+ sys.exit(1)
+
+ if 'visible' in layer and layer['visible'] is not True:
+ continue
+
+ chunks = layer['chunks']
+ if len(chunks) == 0:
+ print(f"Error: Layer in input file '{inputFile}' does not contain any chunks.")
+ sys.exit(1)
+
+
# Data sent to the world header file
-worldWidth = 0
-worldHeight = 0
-chunksDone = []
-entityIdNext = 1
+# worldWidth = 0
+# worldHeight = 0
+# chunksDone = []
+# entityIdNext = 1
-# For each chunk file
-for chunkFile in os.listdir(CHUNK_DATA_DIR):
- data = json.load(open(os.path.join(CHUNK_DATA_DIR, chunkFile)))
- print(f"Processing chunk: {chunkFile}")
+# # For each chunk file
+# for chunkFile in os.listdir(CHUNK_DATA_DIR):
+# data = json.load(open(os.path.join(CHUNK_DATA_DIR, chunkFile)))
+# print(f"Processing chunk: {chunkFile}")
- if not 'chunk' in data:
- print(f"Error: Chunk file '{chunkFile}' does not contain 'chunk' key.")
- exit(1)
+# if not 'chunk' in data:
+# print(f"Error: Chunk file '{chunkFile}' does not contain 'chunk' key.")
+# exit(1)
- if not 'position' in data['chunk']:
- print(f"Error: Chunk file '{chunkFile}' does not contain 'position' key.")
- exit(1)
+# if not 'position' in data['chunk']:
+# print(f"Error: Chunk file '{chunkFile}' does not contain 'position' key.")
+# exit(1)
- # Position must be array of two integers
- position = data['chunk']['position']
- if not isinstance(position, list) or len(position) != 2:
- print(f"Error: Chunk file '{chunkFile}' has invalid 'position' format.")
- exit(1)
- if not all(isinstance(x, int) for x in position):
- print(f"Error: Chunk file '{chunkFile}' invalid 'position' values.")
- exit(1)
+# # Position must be array of two integers
+# position = data['chunk']['position']
+# if not isinstance(position, list) or len(position) != 2:
+# print(f"Error: Chunk file '{chunkFile}' has invalid 'position' format.")
+# exit(1)
+# if not all(isinstance(x, int) for x in position):
+# print(f"Error: Chunk file '{chunkFile}' invalid 'position' values.")
+# exit(1)
- x, y = position
+# x, y = position
- # Make sure that the filename "chunk_{x}_{y}.json" matches the position
- expectedFilename = f"chunk_{x}_{y}.json"
- if chunkFile != expectedFilename:
- print(f"Error: Chunk file '{chunkFile}' should be '{expectedFilename}'.")
- exit(1)
+# # Make sure that the filename "chunk_{x}_{y}.json" matches the position
+# expectedFilename = f"chunk_{x}_{y}.json"
+# if chunkFile != expectedFilename:
+# print(f"Error: Chunk file '{chunkFile}' should be '{expectedFilename}'.")
+# exit(1)
- # Chunk should not be already processed
- if (x, y) in chunksDone:
- print(f"Error: Chunk at position ({x}, {y}) is already processed. Skipping.")
- exit(1)
- chunksDone.append((x, y))
+# # Chunk should not be already processed
+# if (x, y) in chunksDone:
+# print(f"Error: Chunk at position ({x}, {y}) is already processed. Skipping.")
+# exit(1)
+# chunksDone.append((x, y))
- worldWidth = max(worldWidth, x + 1)
- worldHeight = max(worldHeight, y + 1)
+# worldWidth = max(worldWidth, x + 1)
+# worldHeight = max(worldHeight, y + 1)
- # Read in base layer data
- if 'baseLayer' not in data['chunk']:
- print(f"Error: Chunk file '{chunkFile}' does not contain 'baseLayer' key.")
- exit(1)
- baseLayer = data['chunk']['baseLayer']
+# # Read in base layer data
+# if 'baseLayer' not in data['chunk']:
+# print(f"Error: Chunk file '{chunkFile}' does not contain 'baseLayer' key.")
+# exit(1)
+# baseLayer = data['chunk']['baseLayer']
- # Base layer should exactly CHUNK_WIDTH * CHUNK_HEIGHT elements
- if len(baseLayer) != CHUNK_HEIGHT:
- print(f"Error: Chunk file '{chunkFile}' has invalid 'baseLayer' length.")
- exit(1)
- for row in baseLayer:
- if len(row) != CHUNK_WIDTH:
- print(f"Error: Chunk file '{chunkFile}' has invalid 'baseLayer' row length.")
- exit(1)
+# # Base layer should exactly CHUNK_WIDTH * CHUNK_HEIGHT elements
+# if len(baseLayer) != CHUNK_HEIGHT:
+# print(f"Error: Chunk file '{chunkFile}' has invalid 'baseLayer' length.")
+# exit(1)
+# for row in baseLayer:
+# if len(row) != CHUNK_WIDTH:
+# print(f"Error: Chunk file '{chunkFile}' has invalid 'baseLayer' row length.")
+# exit(1)
- # Read in overlay layer data
- if 'overlayLayer' not in data['chunk']:
- print(f"Error: Chunk file '{chunkFile}' does not contain 'overlayLayer' key.")
- exit(1)
+# # Read in overlay layer data
+# if 'overlayLayer' not in data['chunk']:
+# print(f"Error: Chunk file '{chunkFile}' does not contain 'overlayLayer' key.")
+# exit(1)
- overlayLayer = data['chunk']['overlayLayer']
+# overlayLayer = data['chunk']['overlayLayer']
- # Overlay layer should exactly CHUNK_WIDTH * CHUNK_HEIGHT elements
- if len(overlayLayer) != CHUNK_HEIGHT:
- print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' length.")
- exit(1)
- for row in overlayLayer:
- if len(row) != CHUNK_WIDTH:
- print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' row length.")
- exit(1)
+# # Overlay layer should exactly CHUNK_WIDTH * CHUNK_HEIGHT elements
+# if len(overlayLayer) != CHUNK_HEIGHT:
+# print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' length.")
+# exit(1)
+# for row in overlayLayer:
+# if len(row) != CHUNK_WIDTH:
+# print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' row length.")
+# exit(1)
- # Read in entities
- entities = data['chunk'].get('entities', [])
+# # Read in entities
+# entities = data['chunk'].get('entities', [])
- # Now we generate a chunk header file
- chunk_header_path = os.path.join(chunksDir, f"chunk_{x}_{y}.h")
- with open(chunk_header_path, 'w') as f:
- f.write(f"// Generated chunk header for chunk at position ({x}, {y})\n")
- f.write(f"// Generated at {now}\n")
- f.write("#pragma once\n")
- f.write("#include \"world/chunkdata.h\"\n\n")
+# # Now we generate a chunk header file
+# chunk_header_path = os.path.join(chunksDir, f"chunk_{x}_{y}.h")
+# with open(chunk_header_path, 'w') as f:
+# f.write(f"// Generated chunk header for chunk at position ({x}, {y})\n")
+# f.write(f"// Generated at {now}\n")
+# f.write("#pragma once\n")
+# f.write("#include \"world/chunkdata.h\"\n\n")
- f.write(f"static const chunkdata_t CHUNK_{x}_{y} = {{\n")
+# f.write(f"static const chunkdata_t CHUNK_{x}_{y} = {{\n")
- f.write(f" .layerBase = {{\n")
- for row in baseLayer:
- f.write(" ")
- for column in row:
- f.write(f"0x{column:02x}, ")
- f.write("\n")
- f.write(" },\n\n")
+# f.write(f" .layerBase = {{\n")
+# for row in baseLayer:
+# f.write(" ")
+# for column in row:
+# f.write(f"0x{column:02x}, ")
+# f.write("\n")
+# f.write(" },\n\n")
- f.write(f" .layerOverlay = {{\n")
- for row in overlayLayer:
- f.write(" ")
- for column in row:
- f.write(f"0x{column:02x}, ")
- f.write("\n")
- f.write(" },\n\n")
+# f.write(f" .layerOverlay = {{\n")
+# for row in overlayLayer:
+# f.write(" ")
+# for column in row:
+# f.write(f"0x{column:02x}, ")
+# f.write("\n")
+# f.write(" },\n\n")
- f.write(f" .entities = {{\n")
- for entity in entities:
- if 'id' in entity:
- entityId = entity['id']
- else:
- entityId = entityIdNext
- entityIdNext += 1
+# f.write(f" .entities = {{\n")
+# for entity in entities:
+# if 'id' in entity:
+# entityId = entity['id']
+# else:
+# entityId = entityIdNext
+# entityIdNext += 1
- if 'type' not in entity:
- print(f"Error: Entity in chunk ({x}, {y}) does not have 'type' key.")
- exit(1)
+# if 'type' not in entity:
+# print(f"Error: Entity in chunk ({x}, {y}) does not have 'type' key.")
+# exit(1)
- if 'x' not in entity or 'y' not in entity:
- print(f"Error: Entity in chunk ({x}, {y}) does not have 'x' or 'y' key.")
- exit(1)
+# if 'x' not in entity or 'y' not in entity:
+# print(f"Error: Entity in chunk ({x}, {y}) does not have 'x' or 'y' key.")
+# exit(1)
- f.write(" {\n")
- f.write(f" .id = {entityId},\n")
- f.write(f" .type = {ENTITY_TYPE_MAP.get(entity['type'], 'ENTITY_TYPE_UNKNOWN')},\n"),
- f.write(f" .x = {entity['x']},\n")
- f.write(f" .y = {entity['y']},\n")
- f.write(f" }},\n")
- pass
- f.write(" },\n\n")
+# f.write(" {\n")
+# f.write(f" .id = {entityId},\n")
+# f.write(f" .type = {ENTITY_TYPE_MAP.get(entity['type'], 'ENTITY_TYPE_UNKNOWN')},\n"),
+# f.write(f" .x = {entity['x']},\n")
+# f.write(f" .y = {entity['y']},\n")
+# f.write(f" }},\n")
+# pass
+# f.write(" },\n\n")
- f.write("};\n\n")
- pass
+# f.write("};\n\n")
+# pass
-# Output header file.
-header_path = os.path.join(worldDir, "world.h")
-with open(header_path, 'w') as f:
- f.write(f"// Generated chunks file. Generated at {now}\n\n")
- f.write("#pragma once\n")
- f.write("#include \"dusk.h\"\n")
+# # Output header file.
+# header_path = os.path.join(worldDir, "world.h")
+# with open(header_path, 'w') as f:
+# f.write(f"// Generated chunks file. Generated at {now}\n\n")
+# f.write("#pragma once\n")
+# f.write("#include \"dusk.h\"\n")
- # Now, for each chunk, include its header file
- for (x, y) in chunksDone:
- chunk_header = f"world/chunk/chunk_{x}_{y}.h"
- f.write(f"#include \"{chunk_header}\"\n")
+# # Now, for each chunk, include its header file
+# for (x, y) in chunksDone:
+# chunk_header = f"world/chunk/chunk_{x}_{y}.h"
+# f.write(f"#include \"{chunk_header}\"\n")
- f.write("\n")
- f.write(f"#define WORLD_WIDTH {worldWidth}\n")
- f.write(f"#define WORLD_HEIGHT {worldHeight}\n\n")
- f.write(f"static const chunkdata_t* WORLD_CHUNKS[] = {{\n")
- for i in range(worldHeight):
- f.write(" ")
- for j in range(worldWidth):
- if (j, i) in chunksDone:
- f.write(f"&CHUNK_{j}_{i}, ")
- else:
- f.write("NULL, ")
- f.write("\n")
- f.write("};\n\n")
+# f.write("\n")
+# f.write(f"#define WORLD_WIDTH {worldWidth}\n")
+# f.write(f"#define WORLD_HEIGHT {worldHeight}\n\n")
+# f.write(f"static const chunkdata_t* WORLD_CHUNKS[] = {{\n")
+# for i in range(worldHeight):
+# f.write(" ")
+# for j in range(worldWidth):
+# if (j, i) in chunksDone:
+# f.write(f"&CHUNK_{j}_{i}, ")
+# else:
+# f.write("NULL, ")
+# f.write("\n")
+# f.write("};\n\n")
-print(f"chunks.h generated at: {header_path}")
\ No newline at end of file
+# print(f"chunks.h generated at: {header_path}")
\ No newline at end of file
diff --git a/tools/mapeditor/mapeditor.py b/tools/mapeditor/mapeditor.py
deleted file mode 100755
index 3ae5e5a..0000000
--- a/tools/mapeditor/mapeditor.py
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/usr/bin/python3
-import sys, os
-
-# Dynamically add ../shared to sys.path
-shared_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'shared'))
-sys.path.append(shared_path)
-
-# Import shared modules
-from worlddefs import CHUNK_WIDTH, CHUNK_HEIGHT, ENTITY_TYPE_MAP, CHUNK_DATA_DIR
-
diff --git a/tools/shared/worlddefs.py b/tools/shared/worlddefs.py
deleted file mode 100644
index 744c354..0000000
--- a/tools/shared/worlddefs.py
+++ /dev/null
@@ -1,11 +0,0 @@
-import os
-
-CHUNK_WIDTH = 8
-CHUNK_HEIGHT = 8
-CHUNK_ENTITY_COUNT_MAX = 8
-
-ENTITY_TYPE_MAP = {
- "npc": "ENTITY_TYPE_NPC",
-}
-
-CHUNK_DATA_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'data', 'chunks'))
\ No newline at end of file