Prepping map stuff
This commit is contained in:
@@ -1,143 +1,124 @@
|
||||
import struct
|
||||
import sys
|
||||
import os
|
||||
import json
|
||||
from args import args
|
||||
from xml.etree import ElementTree as ET
|
||||
from processtileset import processTileset
|
||||
from assetcache import assetCache, assetGetCache
|
||||
from assethelpers import getAssetRelativePath
|
||||
|
||||
# Fixed chunk geometry: a chunk is 16 x 16 tiles on a floor, 32 floors deep.
CHUNK_WIDTH = 16
CHUNK_HEIGHT = 16
CHUNK_DEPTH = 32
CHUNK_TILE_COUNT = CHUNK_WIDTH * CHUNK_HEIGHT * CHUNK_DEPTH
TILE_SIZE = 16.0


def createQuadForTile(model, tileIndex, x=0, y=0, z=0):
    """Append a flat quad for the tile at grid position (x, y, z) to *model*.

    The quad is emitted as two triangles (six unshared vertices) with a
    checkerboard color based on (x + y) parity.  Only ground-level tiles
    (z == 0) produce geometry; any other depth is a no-op.  The model's
    'vertices', 'indices', 'vertexCount' and 'indexCount' entries are
    updated in place.  ``tileIndex`` is currently unused.
    """
    # Tiles above/below the ground plane contribute no geometry.
    if z != 0:
        return

    # Checkerboard tint: white on even (x + y), black on odd.
    tint = (255, 255, 255) if (x + y) % 2 == 0 else (0, 0, 0)

    # World-space quad extents, scaled by the tile size.
    left = x * TILE_SIZE
    bottom = y * TILE_SIZE
    depth = z * TILE_SIZE
    right = left + TILE_SIZE
    top = bottom + TILE_SIZE

    # Two CCW triangles: (BL, BR, TR) and (BL, TR, TL), each vertex paired
    # with its UV corner.
    corners = [
        ((left, bottom, depth), (0, 0)),
        ((right, bottom, depth), (1, 0)),
        ((right, top, depth), (1, 1)),
        ((left, bottom, depth), (0, 0)),
        ((right, top, depth), (1, 1)),
        ((left, top, depth), (0, 1)),
    ]

    # Indices are purely sequential since vertices are not shared.
    next_index = len(model['vertices'])
    for position, uv in corners:
        model['vertices'].append({'position': position, 'color': tint, 'uv': uv})
        model['indices'].append(next_index)
        next_index += 1

    model['vertexCount'] = len(model['vertices'])
    model['indexCount'] = len(model['indices'])
|
||||
|
||||
def processMap(asset):
    """Convert a Tiled TMX map asset into the engine's binary map output.

    Parses the TMX file at ``asset['path']``, validates it (orthogonal
    orientation, required attributes, CSV-encoded layer data), resolves and
    processes referenced tilesets, builds a binary payload, writes it under
    ``args.output_assets``, and caches/returns a descriptor dict.

    NOTE(review): this function currently mixes two serialization passes
    (a 'DRM' header built from the TMX data and a 'DMF' header built from a
    JSON read of the same path) — it appears to be mid-refactor; see the
    inline review notes below.
    """
    # Return the previously computed result if this asset was already built.
    cache = assetGetCache(asset['path'])
    if cache is not None:
        return cache

    # Load the TMX file
    tree = ET.parse(asset['path'])
    root = tree.getroot()

    # Root needs to be "map" element.
    if root.tag != 'map':
        print(f"Error: TMX file {asset['path']} does not have a <map> root element")
        sys.exit(1)

    # Root needs to be orientation="orthogonal"
    if 'orientation' not in root.attrib or root.attrib['orientation'] != 'orthogonal':
        print(f"Error: TMX file {asset['path']} does not have orientation='orthogonal'")
        sys.exit(1)

    # Extract width, height, tilewidth, tileheight attributes
    if 'width' not in root.attrib or 'height' not in root.attrib or 'tilewidth' not in root.attrib or 'tileheight' not in root.attrib:
        print(f"Error: TMX file {asset['path']} is missing required attributes (width, height, tilewidth, tileheight)")
        sys.exit(1)

    # Map dimensions are in tiles; tile dimensions are in pixels.
    # NOTE(review): tileWidth/tileHeight are parsed but never used below.
    mapWidth = int(root.attrib['width'])
    mapHeight = int(root.attrib['height'])
    tileWidth = int(root.attrib['tilewidth'])
    tileHeight = int(root.attrib['tileheight'])

    # Find all tileset elements
    tilesets = []
    for tilesetElement in root.findall('tileset'):
        # Tileset must have a source attribute (embedded tilesets are not
        # supported — only external .tsx references).
        if 'source' not in tilesetElement.attrib:
            print(f"Error: <tileset> element in {asset['path']} is missing a source attribute")
            sys.exit(1)
        # Must have a firstgid attribute
        if 'firstgid' not in tilesetElement.attrib:
            print(f"Error: <tileset> element in {asset['path']} is missing a firstgid attribute")
            sys.exit(1)

        firstGid = int(tilesetElement.attrib['firstgid'])
        source = tilesetElement.attrib['source']

        # Get source path relative to the tmx file's working directory.
        # Needs normalizing also since ".." is often used.
        source = os.path.normpath(os.path.join(os.path.dirname(asset['path']), source))
        # Recursively process the referenced tileset asset.
        tileset = processTileset({ 'path': source, 'type': 'tileset', 'options': {} })

        tilesets.append({
            'firstGid': firstGid,
            'source': source,
            'tileset': tileset
        })

    # Sort tilesets by firstGid, highest first
    # (presumably so a GID can be matched by scanning for the first
    # firstGid <= gid — TODO confirm against the runtime loader).
    tilesets.sort(key=lambda x: x['firstGid'], reverse=True)

    # Layer types
    # objectLayers = [] # Not implemented
    tileLayers = []
    for layerElement in root.findall('layer'):
        # Assume tile layer for now
        # Must have id, name, width, height attributes
        if 'id' not in layerElement.attrib or 'name' not in layerElement.attrib or 'width' not in layerElement.attrib or 'height' not in layerElement.attrib:
            print(f"Error: <layer> element in {asset['path']} is missing required attributes (id, name, width, height)")
            sys.exit(1)

        # NOTE(review): `id` shadows the builtin of the same name.
        id = int(layerElement.attrib['id'])
        name = layerElement.attrib['name']
        width = int(layerElement.attrib['width'])
        height = int(layerElement.attrib['height'])

        # Need exactly one data element
        dataElements = layerElement.findall('data')
        if len(dataElements) != 1:
            print(f"Error: <layer> element in {asset['path']} must have exactly one <data> child element")
            sys.exit(1)

        # Get text, remove whitespace, split by comma and convert to int
        dataElement = dataElements[0]
        if dataElement.attrib.get('encoding', '') != 'csv':
            print(f"Error: <data> element in {asset['path']} must have encoding='csv'")
            sys.exit(1)

        # NOTE(review): isdigit() on the raw (possibly whitespace-padded)
        # token silently drops entries like " 5" — the count check below
        # would then fail with a confusing message.
        dataText = dataElement.text.strip()
        data = [int(gid) for gid in dataText.split(',') if gid.strip().isdigit()]

        # Should be exactly width * height entries
        if len(data) != width * height:
            print(f"Error: <data> element in {asset['path']} has {len(data)} entries but expected {width * height} (width * height)")
            sys.exit(1)

        tileLayers.append({
            'id': id,
            'name': name,
            'width': width,
            'height': height,
            'data': data,
        })

    # Now we have our layers all parsed out.
    # First serialization pass: 'DRM' payload built from the parsed TMX.
    data = bytearray()
    data += b'DRM' # Dusk RPG Map
    data += mapWidth.to_bytes(4, 'little') # Map width in tiles
    data += mapHeight.to_bytes(4, 'little') # Map height in tiles
    data += len(tilesets).to_bytes(4, 'little') # Number of tilesets
    data += len(tileLayers).to_bytes(4, 'little') # Number of layers
    # Read input file as JSON
    # NOTE(review): asset['path'] was parsed as XML above — json.load on the
    # same file will raise. This looks like a leftover from an earlier
    # JSON-based map format; confirm which input format is intended.
    with open(asset['path'], 'r') as f:
        inData = json.load(f)

    # For each layer...
    for layer in tileLayers:
        for gid in layer['data']:
            data += gid.to_bytes(4, 'little') # Tileset index
    # NOTE(review): depends on the JSON read above; schema assumed to carry a
    # flat 'tiles' array — verify.
    tileIndexes = inData['tiles']

    # For each tileset
    for tileset in tilesets:
        data += tileset['firstGid'].to_bytes(4, 'little') # First GID
        data += tileset['tileset']['tilesetIndex'].to_bytes(4, 'little') # Tileset index
    # Create output object 'map' with default tile indexes and models array
    # NOTE(review): `map` shadows the builtin.
    map = {
        'tiles': [0] * CHUNK_TILE_COUNT,
        'models': []
    }

    # Create a simple 3D model object
    model = {
        'vertices': [],
        'indices': [],
        'vertexCount': 0,
        'indexCount': 0
    }

    # Append the model to map.models
    map['models'].append(model)

    for i, tile in enumerate(tileIndexes):
        # Calculate x, y, z from i (x fastest, then y, then z/floor).
        x = i % CHUNK_WIDTH
        y = (i // CHUNK_WIDTH) % CHUNK_HEIGHT
        z = i // (CHUNK_WIDTH * CHUNK_HEIGHT)
        createQuadForTile(model, tile, x, y, z)

    # Generate binary buffer for efficient output
    # Second serialization pass: 'DMF' payload built from the JSON-derived map.
    buffer = bytearray()
    buffer.extend(b'DMF')# Header
    buffer.extend(len(map['tiles']).to_bytes(4, 'little')) # Number of tiles
    buffer.extend(len(map['models']).to_bytes(1, 'little')) # Number of models

    # Buffer tile data as array of uint8_t
    for tileIndex in map['tiles']:
        buffer.append(tileIndex.to_bytes(1, 'little')[0])

    # For each model
    for model in map['models']:
        # Write vertex count and index count
        # (index count intentionally omitted for now — indices are sequential).
        buffer.extend(model['vertexCount'].to_bytes(4, 'little'))
        # buffer.extend(model['indexCount'].to_bytes(4, 'little'))
        # For each vertex
        for vertex in model['vertices']:
            # This is not tightly packed in memory.
            # R G B A U V X Y Z
            # Color is 4 bytes (RGBA)
            # Rest is floats
            r, g, b = vertex['color']
            a = 255 # alpha is always opaque
            buffer.extend(r.to_bytes(1, 'little'))
            buffer.extend(g.to_bytes(1, 'little'))
            buffer.extend(b.to_bytes(1, 'little'))
            buffer.extend(a.to_bytes(1, 'little'))
            u, v = vertex['uv']
            buffer.extend(bytearray(struct.pack('<f', u)))
            buffer.extend(bytearray(struct.pack('<f', v)))
            x, y, z = vertex['position']
            buffer.extend(bytearray(struct.pack('<f', x)))
            buffer.extend(bytearray(struct.pack('<f', y)))
            buffer.extend(bytearray(struct.pack('<f', z)))

    # Write out map file
    relative = getAssetRelativePath(asset['path'])
    fileNameWithoutExt = os.path.splitext(os.path.basename(asset['path']))[0]
    # NOTE(review): outputFileRelative is assigned twice — the '.drm' value is
    # immediately overwritten by '.dmf'. The first assignment is dead code.
    outputFileRelative = os.path.join(os.path.dirname(relative), f"{fileNameWithoutExt}.drm")
    outputFileRelative = os.path.join(os.path.dirname(relative), f"{fileNameWithoutExt}.dmf")
    outputFilePath = os.path.join(args.output_assets, outputFileRelative)
    os.makedirs(os.path.dirname(outputFilePath), exist_ok=True)
    with open(outputFilePath, "wb") as f:
        # NOTE(review): both the 'DRM' payload and the 'DMF' payload are
        # written back-to-back into one file — presumably only one is meant
        # to survive the refactor; confirm against the runtime loader.
        f.write(data)

        f.write(buffer)

    # Descriptor returned to the asset pipeline (and cached).
    outMap = {
        'mapPath': outputFileRelative,
        'files': [ outputFilePath ],
        'width': mapWidth,
        'height': mapHeight,
        'map': map
    }

    return assetCache(asset['path'], outMap)
|
||||
Reference in New Issue
Block a user