🧹 Massive Asset Reorganization: Cleaned filenames, removed duplicates, and sorted into UI/Items/Env/Chars/Animals folders.
This commit is contained in:
160
scripts/deep_analyze_assets.py
Normal file
160
scripts/deep_analyze_assets.py
Normal file
@@ -0,0 +1,160 @@
|
||||
import os
|
||||
import sys
|
||||
from PIL import Image, ImageStat
|
||||
import hashlib
|
||||
|
||||
# TARGET DIRS
# Main reference image folder ("glavna referenca" = "main reference" — TODO confirm).
REF_DIR = os.path.abspath("assets/slike/glavna_referenca")
# Item sprites; their blueprints live in a subfolder and mirror item filenames.
ITEMS_DIR = os.path.abspath("assets/slike/items")
BLUEPRINTS_DIR = os.path.join(ITEMS_DIR, "blueprints")
||||
def dhash(image, hash_size=8):
    """Calculate a 'difference hash' (dHash) for visual duplicate detection.

    The image is grayscaled and shrunk to (hash_size+1) x hash_size pixels,
    then each pixel is compared to its right neighbour; the resulting
    hash_size*hash_size bits are packed LSB-first per byte and returned as a
    lowercase hex string (16 chars for the default hash_size=8).

    Args:
        image: a PIL.Image.Image (or compatible object exposing
            convert/resize/getpixel).
        hash_size: edge length of the comparison grid; default 8 -> 64 bits.

    Returns:
        str: hex digest of the difference bits.
    """
    # Resample with filter value 1, which is LANCZOS/ANTIALIAS in every
    # Pillow release.  The previous try/except on Image.ANTIALIAS (removed
    # in Pillow 10) silently fell back to the *default* filter, so the same
    # picture hashed differently depending on the installed Pillow version.
    image = image.convert("L").resize((hash_size + 1, hash_size), 1)

    # Bit i is True when the pixel is brighter than its right neighbour.
    bits = []
    for row in range(hash_size):
        for col in range(hash_size):
            bits.append(image.getpixel((col, row)) > image.getpixel((col + 1, row)))

    # Pack bits LSB-first into bytes and render zero-padded hex.  This keeps
    # the exact encoding of the original implementation so any previously
    # recorded hashes remain comparable.
    hex_chunks = []
    value = 0
    for index, bit in enumerate(bits):
        if bit:
            value |= 1 << (index % 8)
        if index % 8 == 7:
            hex_chunks.append(format(value, "02x"))
            value = 0

    return "".join(hex_chunks)
||||
def is_style32_compliant(image):
    """Heuristic "Style 32" check: does the image read as dark/noir?

    Brightness is the mean of the per-band channel means (0-255 scale);
    the image counts as noir when that value is below 180.

    Returns:
        tuple: (is_noir: bool, brightness: float)
    """
    # Per-band statistics need RGB(A) data, so normalise any other mode.
    if image.mode not in ('RGB', 'RGBA'):
        image = image.convert('RGB')

    band_means = ImageStat.Stat(image).mean
    brightness = sum(band_means) / len(band_means)

    # NOTE(review): an earlier comment suggested a < 150 cutoff for
    # "Dark Noir"; the implemented threshold is 180 — confirm intent.
    return brightness < 180, brightness
||||
|
||||
def _check_blueprints():
    """Return the list of item image filenames with no same-named blueprint."""
    print("🔵 Checking Blueprints Consistency...")
    missing_bps = []
    if os.path.exists(ITEMS_DIR) and os.path.exists(BLUEPRINTS_DIR):
        # Flattened scan of items (the blueprints subfolder is not an item).
        item_files = []
        for root, _dirs, files in os.walk(ITEMS_DIR):
            if "blueprints" in root:
                continue
            item_files.extend(
                name for name in files if name.lower().endswith(('.png', '.jpg'))
            )

        # Blueprint names, lowercased for case-insensitive matching.
        bp_files = set()
        for _root, _dirs, files in os.walk(BLUEPRINTS_DIR):
            bp_files.update(name.lower() for name in files)

        missing_bps = [item for item in item_files if item.lower() not in bp_files]

    if missing_bps:
        print(f" ⚠️ MISSING BLUEPRINTS FOUND: {len(missing_bps)}")
        # Print only the first 5 as a sample.
        for m in missing_bps[:5]:
            print(f" - {m}")
    else:
        print(" ✅ All items have matching blueprints!")
    return missing_bps


def _scan_reference_images():
    """Walk REF_DIR, dHash every image, and delete visually redundant copies."""
    print("\n👁️ Scanning References (Visual Dupes & Style)...")

    seen_hashes = {}       # dhash hex -> first filepath seen with that hash
    duplicates_found = []  # (redundant filepath, kept filepath)
    analyzed_count = 0

    for root, _dirs, files in os.walk(REF_DIR):
        for filename in files:
            if filename.startswith(".") or not filename.lower().endswith(('.png', '.jpg', '.jpeg')):
                continue

            filepath = os.path.join(root, filename)
            analyzed_count += 1

            try:
                with Image.open(filepath) as img:
                    # STYLE CHECK — currently advisory only; result unused
                    # until the brightness threshold is tuned.
                    is_style32_compliant(img)

                    # DUPLICATE CHECK — the first file seen for a hash is
                    # the canonical copy; later matches are redundant.
                    h = dhash(img)
                    if h in seen_hashes:
                        duplicates_found.append((filepath, seen_hashes[h]))
                    else:
                        seen_hashes[h] = filepath
            except Exception as e:
                # Best effort: a corrupt/unreadable image must not abort the
                # scan, but log it instead of swallowing silently.
                print(f" ⚠️ Could not analyze {filepath}: {e}")

            if analyzed_count % 100 == 0:
                print(f" ...scanned {analyzed_count} images...", end='\r')

    print(f"\n ✅ Analyzed {analyzed_count} images.")
    _delete_duplicates(duplicates_found)


def _delete_duplicates(duplicates_found):
    """Report each duplicate pair and delete the redundant (later-found) copy."""
    if not duplicates_found:
        print("\n✨ No visual duplicates found in Reference.")
        return

    print(f"\n🗑️ VISUAL DUPLICATES DETECTED: {len(duplicates_found)}")
    for new, old in duplicates_found:
        print(f" ❌ Duplicate: {os.path.basename(new)}")
        print(f" Matches: {os.path.basename(old)}")
        # Always delete 'new': 'old' is the canonical seen_hashes entry.
        # (The original if/else deleted 'new' on both branches — only the
        # log message differed, which is preserved here.)
        try:
            smaller = os.path.getsize(new) < os.path.getsize(old)
            os.remove(new)
            if smaller:
                print(" -> DELETED (smaller file)")
            else:
                print(" -> DELETED (redundant copy)")
        except OSError:
            print(" -> Error deleting")


def _report_missing_blueprints(missing_bps):
    """Persist the missing-blueprint list for the generator script to consume."""
    if not missing_bps:
        return
    with open("scripts/missing_blueprints_list.txt", "w") as f:
        for item in missing_bps:
            f.write(item + "\n")
    print("\n📝 Saved list of missing blueprints to 'scripts/missing_blueprints_list.txt'. Run generator next!")


def scan_assets():
    """Run the full asset audit.

    Side effects: prints a console report, DELETES redundant duplicate
    images under REF_DIR, and writes scripts/missing_blueprints_list.txt
    when items lack blueprints.
    """
    print("🕵️♂️ DEEP ASSET SCAN INITIATED...\n")

    # --- 1. BLUEPRINT CHECK ---
    missing_bps = _check_blueprints()

    # --- 2. VISUAL DUPLICATE & STYLE SCAN ---
    _scan_reference_images()

    # --- 3. REPORT MISSING BLUEPRINTS IN A FILE ---
    _report_missing_blueprints(missing_bps)
||||
# Entry point: run the full asset scan when executed as a script.
if __name__ == "__main__":
    scan_assets()
|
||||
Reference in New Issue
Block a user