unfinished chunk generator

data/map.tmj | 30

@@ -48,6 +48,28 @@
"x":16,
"y":0
},
+{
+"data":[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 33, 34, 35, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 41, 42, 43, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 49, 50, 51, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
+"height":16,
+"width":16,
+"x":48,
+"y":0
+},
{
"data":[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 10,
@@ -80,9 +102,9 @@
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 34, 35, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 41, 42, 43, 0, 0, 0, 0,
+0, 0, 0, 0, 0, 0, 0, 0, 0, 49, 50, 51, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
@@ -100,7 +122,7 @@
"starty":0,
"type":"tilelayer",
"visible":true,
-"width":32,
+"width":64,
"x":0,
"y":0
}],

data/map.tmx | 26

@@ -1,26 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<map version="1.10" tiledversion="1.11.1" orientation="orthogonal" renderorder="right-down" width="30" height="20" tilewidth="64" tileheight="64" infinite="1" nextlayerid="2" nextobjectid="1">
-<tileset firstgid="1" source="Downloads/overworld.tsx"/>
-<layer id="1" name="Tile Layer 1" width="30" height="20">
-<data encoding="csv">
-<chunk x="0" y="0" width="16" height="16">
-1,2,3,4,0,0,0,0,0,0,0,0,0,0,0,0,
-9,10,11,12,0,0,0,0,0,0,0,0,0,0,0,0,
-9,18,19,12,0,0,0,0,0,0,0,0,0,0,0,0,
-17,10,11,20,0,0,0,0,0,0,0,0,0,0,0,0,
-9,18,19,12,0,0,0,0,0,0,0,0,0,0,0,0,
-17,10,11,20,0,0,0,0,0,0,0,0,0,0,0,0,
-9,10,11,12,0,0,0,0,0,0,0,0,0,0,0,0,
-9,18,19,20,0,0,0,0,0,0,0,0,0,0,0,0,
-9,10,11,12,0,0,0,0,0,0,0,0,0,0,0,0,
-17,10,11,20,0,0,0,0,0,0,0,0,0,0,0,0,
-9,10,11,12,0,0,0,0,0,0,0,0,0,0,0,0,
-17,18,19,20,0,0,0,0,0,0,0,0,0,0,0,0,
-25,26,27,28,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
-0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-</chunk>
-</data>
-</layer>
-</map>

@@ -2,9 +2,12 @@ import sys, os
import argparse
from datetime import datetime
import json
+import math

+# Values defined within C
CHUNK_WIDTH = 8
CHUNK_HEIGHT = 8
+CHUNK_TILE_COUNT = CHUNK_WIDTH * CHUNK_HEIGHT
CHUNK_ENTITY_COUNT_MAX = 8

ENTITY_TYPE_MAP = {
@@ -54,6 +57,7 @@ if 'infinite' not in data or not isinstance(data['infinite'], bool):
print(f"Error: Input file '{inputFile}' does not contain 'infinite' key.")
sys.exit(1)

+# Need layers
if 'layers' not in data or not isinstance(data['layers'], list):
print(f"Error: Input file '{inputFile}' does not contain 'layers' key.")
sys.exit(1)
@@ -63,133 +67,130 @@ if len(layers) == 0:
print(f"Error: Input file '{inputFile}' does not contain any layers.")
sys.exit(1)

-# For each layer
-for layer in layers:
-if not 'chunks' in layer or not isinstance(layer['chunks'], list):
-print(f"Error: Layer in input file '{inputFile}' does not contain 'chunks' key.")
+# First layer
+firstLayer = layers[0]
+if 'width' not in firstLayer or 'height' not in firstLayer:
+print(f"Error: First layer in '{inputFile}' does not contain 'width' or 'height' key.")
sys.exit(1)

-if not 'height' in layer or not isinstance(layer['height'], int):
-print(f"Error: Layer in input file '{inputFile}' does not contain 'height' key.")
-sys.exit(1)
-
-if not 'width' in layer or not isinstance(layer['width'], int):
-print(f"Error: Layer in input file '{inputFile}' does not contain 'width' key.")
+if 'chunks' not in firstLayer or not isinstance(firstLayer['chunks'], list):
+print(f"Error: First layer in '{inputFile}' does not contain 'chunks' key.")
sys.exit(1)

-if 'visible' in layer and layer['visible'] is not True:
-continue
-
-chunks = layer['chunks']
-if len(chunks) == 0:
-print(f"Error: Layer in input file '{inputFile}' does not contain any chunks.")
+if len(firstLayer['chunks']) == 0:
+print(f"Error: First layer in '{inputFile}' does not contain any chunks.")
+sys.exit(1)

+firstLayerFirstChunk = firstLayer['chunks'][0]
+
+inputLayerWidthInTiles = firstLayerFirstChunk['width']
+inputLayerHeightInTiles = firstLayerFirstChunk['height']
+mapWidthInTiles = firstLayer['width']
+mapHeightInTiles = firstLayer['height']
+mapWidthInRealTiles = math.ceil(float(mapWidthInTiles) / float(CHUNK_WIDTH))
+mapHeightInRealTiles = math.ceil(float(mapHeightInTiles) / float(CHUNK_HEIGHT))
+
+print(f"Input Layer Size: {inputLayerWidthInTiles}x{inputLayerHeightInTiles} tiles")
+print(f"Map Size: {mapWidthInTiles}x{mapHeightInTiles} tiles")
+print(f"Map Real Size: {mapWidthInRealTiles}x{mapHeightInRealTiles} chunks")
+
+if inputLayerWidthInTiles < CHUNK_WIDTH or inputLayerHeightInTiles < CHUNK_HEIGHT:
+print(f"Error: Input layer size {inputLayerWidthInTiles}x{inputLayerHeightInTiles} is smaller than chunk size {CHUNK_WIDTH}x{CHUNK_HEIGHT}.")
+sys.exit(1)
+
+# For each output chunk.
+worldWidth = 0
+worldHeight = 0
+chunksDone = set()
+
+for chunkY in range(mapHeightInRealTiles):
+for chunkX in range(mapWidthInRealTiles):
+# Top left X/Y based on real chunk size
+topLeftTileX = chunkX * CHUNK_WIDTH
+topLeftTileY = chunkY * CHUNK_HEIGHT
+
+# Top left coordinates based on input layer size
+inputTopLeftTileX = math.floor(float(topLeftTileX) / float(inputLayerWidthInTiles)) * inputLayerWidthInTiles
+inputTopLeftTileY = math.floor(float(topLeftTileY) / float(inputLayerHeightInTiles)) * inputLayerHeightInTiles
+
+chunkLayers = []
+
+# For each layer...
+for layerIndex, layer in enumerate(layers):
+foundChunk = None
+if 'chunks' not in layer or not isinstance(layer['chunks'], list):
+print(f"Error: Layer {layerIndex} in '{inputFile}' does not contain 'chunks' key.")
+sys.exit(1)
+
+chunks = layer['chunks']
+for chunk in chunks:
+if 'x' not in chunk or 'y' not in chunk:
+print(f"Error: Chunk in layer {layerIndex} does not contain 'x' or 'y' key.")
+sys.exit(1)
+
+if chunk['x'] == topLeftTileX and chunk['y'] == topLeftTileY:
+foundChunk = chunk
+break
+
+if foundChunk is None:
+chunkLayers.append(None)
+continue
+
+# Is this chunk layer just empty?
+layerEmpty = True
+for tile in foundChunk.get('data', []):
+if tile == 0:
+continue
+layerEmpty = False
+break
+
+if layerEmpty:
+chunkLayers.append(None)
+continue
+
+chunkLayers.append(foundChunk)
+
+# Now we have a chunkLayers list with the found chunks for each layer.
+if all(chunk is None for chunk in chunkLayers) or len(chunkLayers) == 0:
+print(f"Warning: No valid chunks found for chunk at ({chunkX}, {chunkY}). Skipping.")
+continue
+
+if len(chunkLayers) > 2:
+print(f"Error: Expected 2 layers for chunk at ({chunkX}, {chunkY}), found {len(chunkLayers)}.")
sys.exit(1)


-# Data sent to the world header file
-# worldWidth = 0
-# worldHeight = 0
-# chunksDone = []
-# entityIdNext = 1
+layerBase = chunkLayers[0]
+layerBaseData = []
+# for y in range(CHUNK_HEIGHT):
+# for x in range(CHUNK_WIDTH):
+# # Calculate the tile index in the chunk
+# tileIndex = y * CHUNK_WIDTH + x
+# if layerBase is not None and tileIndex < len(layerBase.get('data', [])):
+# tileId = layerBase['data'][tileIndex]
+# layerBaseData.append(tileId)
+# else:
+# layerBaseData.append(0)

-# # For each chunk file
-# for chunkFile in os.listdir(CHUNK_DATA_DIR):
-# data = json.load(open(os.path.join(CHUNK_DATA_DIR, chunkFile)))
-# print(f"Processing chunk: {chunkFile}")
+# This is a valid chunk.
+worldWidth = max(worldWidth, chunkX + 1)
+worldHeight = max(worldHeight, chunkY + 1)
+chunksDone.add((chunkX, chunkY))

-# if not 'chunk' in data:
-# print(f"Error: Chunk file '{chunkFile}' does not contain 'chunk' key.")
-# exit(1)
+chunkHeaderPath = os.path.join(chunksDir, f"chunk_{chunkX}_{chunkY}.h")
+with open(chunkHeaderPath, 'w') as f:
+f.write(f"// Generated chunk header for chunk at position ({chunkX}, {chunkY})\n")
+f.write(f"// Generated at {now}\n")
+f.write("#pragma once\n")
+f.write("#include \"world/chunkdata.h\"\n\n")
+f.write(f"static const chunkdata_t CHUNK_{chunkX}_{chunkY} = {{\n")
+f.write(f" .layerBase = {{\n")
+for byte in layerBase.get('data', []):
+f.write(f" 0x{byte:02x}, \n")
+f.write(" },\n\n")
+f.write(f" .layerOverlay = {{}},\n")
+f.write(f" .entities = {{}},\n")
+f.write("};\n\n")
+
-# if not 'position' in data['chunk']:
-# print(f"Error: Chunk file '{chunkFile}' does not contain 'position' key.")
-# exit(1)
-
-# # Position must be array of two integers
-# position = data['chunk']['position']
-# if not isinstance(position, list) or len(position) != 2:
-# print(f"Error: Chunk file '{chunkFile}' has invalid 'position' format.")
-# exit(1)
-# if not all(isinstance(x, int) for x in position):
-# print(f"Error: Chunk file '{chunkFile}' invalid 'position' values.")
-# exit(1)
-
-# x, y = position
-
-# # Make sure that the filename "chunk_{x}_{y}.json" matches the position
-# expectedFilename = f"chunk_{x}_{y}.json"
-# if chunkFile != expectedFilename:
-# print(f"Error: Chunk file '{chunkFile}' should be '{expectedFilename}'.")
-# exit(1)
-
-# # Chunk should not be already processed
-# if (x, y) in chunksDone:
-# print(f"Error: Chunk at position ({x}, {y}) is already processed. Skipping.")
-# exit(1)
-# chunksDone.append((x, y))
-
-# worldWidth = max(worldWidth, x + 1)
-# worldHeight = max(worldHeight, y + 1)
-
-# # Read in base layer data
-# if 'baseLayer' not in data['chunk']:
-# print(f"Error: Chunk file '{chunkFile}' does not contain 'baseLayer' key.")
-# exit(1)
-# baseLayer = data['chunk']['baseLayer']
-
-# # Base layer should exactly CHUNK_WIDTH * CHUNK_HEIGHT elements
-# if len(baseLayer) != CHUNK_HEIGHT:
-# print(f"Error: Chunk file '{chunkFile}' has invalid 'baseLayer' length.")
-# exit(1)
-# for row in baseLayer:
-# if len(row) != CHUNK_WIDTH:
-# print(f"Error: Chunk file '{chunkFile}' has invalid 'baseLayer' row length.")
-# exit(1)
-
-# # Read in overlay layer data
-# if 'overlayLayer' not in data['chunk']:
-# print(f"Error: Chunk file '{chunkFile}' does not contain 'overlayLayer' key.")
-# exit(1)
-
-# overlayLayer = data['chunk']['overlayLayer']
-
-# # Overlay layer should exactly CHUNK_WIDTH * CHUNK_HEIGHT elements
-# if len(overlayLayer) != CHUNK_HEIGHT:
-# print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' length.")
-# exit(1)
-# for row in overlayLayer:
-# if len(row) != CHUNK_WIDTH:
-# print(f"Error: Chunk file '{chunkFile}' has invalid 'overlayLayer' row length.")
-# exit(1)
-
-# # Read in entities
-# entities = data['chunk'].get('entities', [])
-
-# # Now we generate a chunk header file
-# chunk_header_path = os.path.join(chunksDir, f"chunk_{x}_{y}.h")
-# with open(chunk_header_path, 'w') as f:
-# f.write(f"// Generated chunk header for chunk at position ({x}, {y})\n")
-# f.write(f"// Generated at {now}\n")
-# f.write("#pragma once\n")
-# f.write("#include \"world/chunkdata.h\"\n\n")
-
-# f.write(f"static const chunkdata_t CHUNK_{x}_{y} = {{\n")
-
-# f.write(f" .layerBase = {{\n")
-# for row in baseLayer:
-# f.write(" ")
-# for column in row:
-# f.write(f"0x{column:02x}, ")
-# f.write("\n")
-# f.write(" },\n\n")
-
-# f.write(f" .layerOverlay = {{\n")
-# for row in overlayLayer:
-# f.write(" ")
-# for column in row:
-# f.write(f"0x{column:02x}, ")
-# f.write("\n")
-# f.write(" },\n\n")
-
# f.write(f" .entities = {{\n")
# for entity in entities:
@@ -221,30 +222,30 @@ for layer in layers:



-# # Output header file.
-# header_path = os.path.join(worldDir, "world.h")
-# with open(header_path, 'w') as f:
-# f.write(f"// Generated chunks file. Generated at {now}\n\n")
-# f.write("#pragma once\n")
-# f.write("#include \"dusk.h\"\n")
+# Output header file.
+header_path = os.path.join(worldDir, "world.h")
+with open(header_path, 'w') as f:
+f.write(f"// Generated chunks file. Generated at {now}\n\n")
+f.write("#pragma once\n")
+f.write("#include \"dusk.h\"\n")

-# # Now, for each chunk, include its header file
-# for (x, y) in chunksDone:
-# chunk_header = f"world/chunk/chunk_{x}_{y}.h"
-# f.write(f"#include \"{chunk_header}\"\n")
+# Now, for each chunk, include its header file
+for (x, y) in chunksDone:
+chunk_header = f"world/chunk/chunk_{x}_{y}.h"
+f.write(f"#include \"{chunk_header}\"\n")

-# f.write("\n")
-# f.write(f"#define WORLD_WIDTH {worldWidth}\n")
-# f.write(f"#define WORLD_HEIGHT {worldHeight}\n\n")
-# f.write(f"static const chunkdata_t* WORLD_CHUNKS[] = {{\n")
-# for i in range(worldHeight):
-# f.write(" ")
-# for j in range(worldWidth):
-# if (j, i) in chunksDone:
-# f.write(f"&CHUNK_{j}_{i}, ")
-# else:
-# f.write("NULL, ")
-# f.write("\n")
-# f.write("};\n\n")
+f.write("\n")
+f.write(f"#define WORLD_WIDTH {worldWidth}\n")
+f.write(f"#define WORLD_HEIGHT {worldHeight}\n\n")
+f.write(f"static const chunkdata_t* WORLD_CHUNKS[] = {{\n")
+for i in range(worldHeight):
+f.write(" ")
+for j in range(worldWidth):
+if (j, i) in chunksDone:
+f.write(f"&CHUNK_{j}_{i}, ")
+else:
+f.write("NULL, ")
+f.write("\n")
+f.write("};\n\n")

-# print(f"chunks.h generated at: {header_path}")
+print(f"chunks.h generated at: {header_path}")