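"""Sync requested novafarma art assets into the LDtk project file.

Finds each requested PNG under the Godot project, reads its pixel size with
the macOS `sips` tool, and upserts matching tileset and entity definitions
(plus the layer defs and per-level layer instances they need) in
AutoLayers_2_stamps.ldtk.
"""
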
import os
import json
import shutil
import uuid
import subprocess

# Configuration
PROJECT_ROOT = '/Users/davidkotnik/repos/novafarma'
GODOT_ROOT = os.path.join(PROJECT_ROOT, 'godot')
LDTK_FILE = os.path.join(PROJECT_ROOT, 'AutoLayers_2_stamps.ldtk')

# Ensure destination for missing assets exists
dest_ground = os.path.join(GODOT_ROOT, 'world', 'GROUND')
os.makedirs(dest_ground, exist_ok=True)

# 1. Fix missing grass_placeholder if needed
src_grass = os.path.join(PROJECT_ROOT, 'assets', 'terrain', 'grass_placeholder.png')
dst_grass = os.path.join(dest_ground, 'grass_placeholder.png')
if os.path.exists(src_grass) and not os.path.exists(dst_grass):
    shutil.copy2(src_grass, dst_grass)

# User requested assets mapping
requested_assets = {
    "Ground": [
        "grass_placeholder.png", "stone.png", "dirt_path.png",
        "dirt_path_corner_bottomleft.png", "dirt_path_corner_bottomright.png"
    ],
    "Crops": [
        "cannabis_s32_stage1_seeds.png", "cannabis_s32_stage2_sprout.png",
        "cannabis_s32_stage3_young.png", "cannabis_s32_stage4_growing.png",
        "cannabis_s32_stage5_ready.png", "cannabis_s32_stage6_harvested.png"
    ],
    "Tools": [
        "shovel.png", "watering_can.png", "stone_hoe.png", "hoe.png"
    ],
    "Props": [
        "blue_patch.png", "mixed_patch.png", "red_patch.png", "white_patch.png", "yellow_patch.png",
        "bush_flowering.png", "bush_green.png", "bushes_set2.png",
        "fallen_log.png", "mushroom_small.png", "mushrooms_large.png",
        "rock_large.png", "rock_medium.png", "rock_small.png", "rocks_set2.png",
        "tall_grass.png", "tall_grass_set1.png", "tall_grass_set2.png",
        "tree_stump.png", "oak_summer.png", "pine.png", "willow.png"
    ],
    "Entities": [
        "kai_idle_down_v2", "kai_idle_right_v2", "kai_idle_up_v2",
        "kai_walk_down_01_v2", "kai_walk_down_02_v2", "kai_walk_down_03_v2", "kai_walk_down_04_v2",
        "kai_walk_right_01_v2", "kai_walk_right_02_v2", "kai_walk_right_03_v2", "kai_walk_right_04_v2",
        "kai_walk_up_01_v2", "kai_walk_up_02_v2", "kai_walk_up_03_v2", "kai_walk_up_04_v2",
        "kai_harvest_frame1", "kai_harvest_frame2", "kai_harvest_frame3", "kai_harvest_frame4",
        "kai_plant_frame1", "kai_plant_frame2", "kai_plant_frame3", "kai_plant_frame4",
        "kai_water_frame1", "kai_water_frame2", "kai_water_frame3", "kai_water_frame4",
        "kai_shelter_wooden_hut", "kai_shelter_sleeping_bag"
    ]
}

def find_file(filename_part, strict=True):
    # Strict: exact filename match. Non-strict: substring match against .png files
    # (used for Entities, whose names are listed without an extension).
    for root, dirs, files in os.walk(GODOT_ROOT):
        for file in files:
            if strict:
                if file == filename_part:
                    return os.path.join(root, file)
            else:
                if filename_part in file and file.endswith('.png'):
                    return os.path.join(root, file)
    return None

def get_image_size(path):
    try:
        # Use sips on macOS. Output format:
        #   /path/to/file:
        #     pixelWidth: 1024
        #     pixelHeight: 1024
        out = subprocess.check_output(['sips', '-g', 'pixelWidth', '-g', 'pixelHeight', path])
        lines = out.decode('utf-8').splitlines()
        w = 32
        h = 32
        for line in lines:
            line = line.strip()
            if line.startswith('pixelWidth:'):
                w = int(line.split(':')[1].strip())
            elif line.startswith('pixelHeight:'):
                h = int(line.split(':')[1].strip())
        return w, h
    except Exception as e:
        print(f"Error getting size for {path}: {e}")
        return 32, 32

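# Optional cross-platform sketch (not called anywhere in this script): if
# Pillow is installed, image dimensions can be read without the macOS-only
# `sips` binary. The helper name and its use of Pillow are assumptions, not
# part of the original workflow.
def get_image_size_pillow(path):
    from PIL import Image  # deferred import; only needed if this helper is used
    with Image.open(path) as img:
        return img.size  # (width, height)
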
# Load LDtk
with open(LDTK_FILE, 'r') as f:
    ldtk_data = json.load(f)

defs_layers = ldtk_data['defs']['layers']
defs_entities = ldtk_data['defs']['entities']
defs_tilesets = ldtk_data['defs']['tilesets']

# Setup Layers (Ensure existence and order)
next_layer_uid = 600

# Ensure the Entities layer def exists (uid 201)
ex_ent = next((l for l in defs_layers if l['identifier'] == 'Entities'), None)
if not ex_ent:
    ex_ent = {
        "__type": "Entities", "identifier": "Entities", "type": "Entities", "uid": 201,
        "gridSize": 32, "displayOpacity": 1, "pxOffsetX": 0, "pxOffsetY": 0,
        "requiredTags": [], "excludedTags": [], "intGridValues": [], "autoRuleGroups": [],
        "autoSourceLayerDefUid": None, "tilesetDefUid": None, "tilePivotX": 0, "tilePivotY": 0
    }
    defs_layers.append(ex_ent)

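# Create any missing Props/Crops/Ground layers as entity layers cloned from the
# Entities def; each gets its own uid and a requiredTags entry matching the
# layer name (mirroring the per-category tags set on the entity defs below).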
for lname in ["Props", "Crops", "Ground"]:
    if not any(l['identifier'] == lname for l in defs_layers):
        new_l = ex_ent.copy()
        new_l['identifier'] = lname
        new_l['uid'] = next_layer_uid
        next_layer_uid += 1
        new_l['requiredTags'] = [lname]  # Tag requirement
        defs_layers.append(new_l)

# Keep only the layers we know about, in a fixed order
final_layers = []
for name in ["Entities", "Props", "Crops", "Ground", "Terrain_Visuals", "Terrain_Control"]:
    l = next((x for x in defs_layers if x['identifier'] == name), None)
    if l:
        final_layers.append(l)

ldtk_data['defs']['layers'] = final_layers

# UID allocation for new entity and tileset defs
ent_start_uid = 1000
ts_start_uid = 2000
existing_ent_uids = {e['uid'] for e in defs_entities}
existing_ts_uids = {t['uid'] for t in defs_tilesets}

def get_new_uid(existing_uids, start):
    # Linear probe upward from `start` until an unused uid is found
    while start in existing_uids:
        start += 1
    return start

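# For each requested asset: locate the PNG, read its pixel size, then upsert a
# single-tile tileset def and a matching entity def. Tools are tagged as Props;
# Entities names (listed without an extension) fall back to a substring search.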
# Process Assets
for category, items in requested_assets.items():
    strict_search = (category != "Entities")
    tag = category
    if category == "Tools":
        tag = "Props"

    for item_name in items:
        full_path = find_file(item_name, strict=strict_search)
        if not full_path:
            if category == "Entities":
                # Retry with the last underscore-separated chunk removed (e.g. "_v2")
                full_path = find_file(item_name.rsplit('_', 1)[0], strict=False)
            if not full_path:
                continue

        rel_path = os.path.relpath(full_path, PROJECT_ROOT)
        w, h = get_image_size(full_path)

        # Tileset: reuse an existing def by relPath, otherwise create a single-tile def
        existing_ts = next((t for t in defs_tilesets if t['relPath'] == rel_path), None)
        if existing_ts:
            ts_uid = existing_ts['uid']
            # Update dimensions even if it exists, in case they were wrong
            existing_ts['pxWid'] = w
            existing_ts['pxHei'] = h
        else:
            ts_uid = get_new_uid(existing_ts_uids, ts_start_uid)
            existing_ts_uids.add(ts_uid)
            new_ts = {
                "__cWid": 1, "__cHei": 1,
                "identifier": "TS_" + os.path.basename(full_path).replace('.png', '').replace(' ', '_'),
                "uid": ts_uid, "relPath": rel_path, "embedAtlas": None,
                "pxWid": w, "pxHei": h, "tileGridSize": max(w, h),
                "spacing": 0, "padding": 0, "tags": [], "tagsSourceEnumUid": None,
                "enumTags": [], "customData": [], "savedSelections": [], "cachedPixelData": None
            }
            defs_tilesets.append(new_ts)

        # Entity def: identifier derived from the filename, truncated to 30 chars
        ent_identifier = os.path.basename(full_path).replace('.png', '').replace(' ', '_').capitalize()
        if len(ent_identifier) > 30:
            ent_identifier = ent_identifier[:30]

        # Find existing to update, or create new
        existing_ent = next((e for e in defs_entities if e['identifier'] == ent_identifier), None)
        if existing_ent:
            ent_uid = existing_ent['uid']
            # Update stale fields
            existing_ent['width'] = w
            existing_ent['height'] = h
            existing_ent['tags'] = [tag]
            existing_ent['tilesetId'] = ts_uid
            existing_ent['tileRect'] = {"tilesetUid": ts_uid, "x": 0, "y": 0, "w": w, "h": h}
        else:
            ent_uid = get_new_uid(existing_ent_uids, ent_start_uid)
            existing_ent_uids.add(ent_uid)
            new_ent = {
                "identifier": ent_identifier, "uid": ent_uid, "tags": [tag],
                "exportToToc": False, "allowOutOfBounds": False, "doc": None,
                "width": w, "height": h,
                "resizableX": False, "resizableY": False,
                "minWidth": None, "maxWidth": None, "minHeight": None, "maxHeight": None,
                "keepAspectRatio": True, "tileOpacity": 1, "fillOpacity": 0.08, "lineOpacity": 0,
                "hollow": False, "color": "#94D0FF", "renderMode": "Tile", "showName": True,
                "tilesetId": ts_uid, "tileRenderMode": "FitInside",
                "tileRect": {"tilesetUid": ts_uid, "x": 0, "y": 0, "w": w, "h": h},
                "uiTileRect": None, "nineSliceBorders": [], "maxCount": 0, "limitScope": "PerLevel",
                "limitBehavior": "MoveLastOne", "pivotX": 0.5, "pivotY": 1, "fieldDefs": []
            }
            defs_entities.append(new_ent)

ldtk_data['defs']['entities'] = defs_entities
ldtk_data['defs']['tilesets'] = defs_tilesets

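# Rebuild each level's layerInstances to match the final layer defs: existing
# instances are kept (matched by layerDefUid), and empty entity-layer instances
# are created for any layer def the level does not have yet.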
# Update Levels Layer Instances
for level in ldtk_data['levels']:
    old_insts = level['layerInstances']
    new_insts = []
    for ldef in final_layers:
        existing = next((i for i in old_insts if i['layerDefUid'] == ldef['uid']), None)
        if existing:
            new_insts.append(existing)
        else:
            new_insts.append({
                "__identifier": ldef['identifier'], "__type": "Entities",
                "__cWid": level['pxWid'] // 32, "__cHei": level['pxHei'] // 32,
                "__gridSize": 32, "__opacity": 1, "__pxTotalOffsetX": 0, "__pxTotalOffsetY": 0,
                "__tilesetDefUid": None, "__tilesetRelPath": None,
                "iid": str(uuid.uuid4()), "levelId": level['uid'], "layerDefUid": ldef['uid'],
                "pxOffsetX": 0, "pxOffsetY": 0, "visible": True, "optionalRules": [],
                "intGridCsv": [], "autoLayerTiles": [], "seed": 0, "overrideTilesetUid": None,
                "gridTiles": [], "entityInstances": []
            })
    level['layerInstances'] = new_insts

with open(LDTK_FILE, 'w') as f:
    json.dump(ldtk_data, f, indent=2)

print("LDtk updated with sips-verified asset sizes.")