#!/usr/bin/env python3
# fika_unified_generator_and_notifier.py
#
# Single script that:
#   - Scans client files (BepInEx/plugins + BepInEx/patchers)
#   - Scans server files (SPT/user/mods)
#   - Applies hard-coded + text-file-based includes/excludes
#   - Writes two manifests with SHA1 + URL + protected_prefixes
#   - Posts a summary of changes to Discord
#   - Keeps a hash cache + change-state + rotating logs

import os
import sys
import time
import json
import hashlib
import subprocess
from pathlib import Path
from urllib.parse import quote



# -------------------------------
# CONSTANTS / PATHS
# -------------------------------

BASE_DIR = Path(__file__).resolve().parent

# Where the database is hosted on the web (for URL generation)
BASE_URL = "https://files.vmvproductions.net/downloads/FikaSPT4_DB/"

# Manifest output locations (manifests/*.json)
CLIENT_MANIFEST_PATH = BASE_DIR / "manifests" / "client_mods_manifest.json"
SERVER_MANIFEST_PATH = BASE_DIR / "manifests" / "server_mods_manifest.json"

# Text files for extra roots
INCLUDE_LIST = BASE_DIR / "include_list.txt"                # extra CLIENT roots
INCLUDE_FIKA_CLIENT = BASE_DIR / "include_fika_client.txt"  # extra SERVER roots

# Text files for protected/excluded prefixes
EXCLUDE_LIST = BASE_DIR / "exclude_list.txt"                # CLIENT protected prefixes
EXCLUDE_FIKA_CLIENT = BASE_DIR / "exclude_fika_client.txt"  # SERVER protected prefixes

# Hard-coded scan roots
CLIENT_ROOTS = [
    "BepInEx/patchers",
    "BepInEx/plugins",
]

SERVER_ROOTS = [
    "SPT/user/mods",
]

# Hard-coded excludes → also become protected_prefixes
CLIENT_EXCLUDES = [
    "BepInEx/plugins/spt",
    "BepInEx/plugins/Fika",
    "BepInEx/patchers/spt-prepatch.dll",
]

SERVER_EXCLUDES = [
    "SPT/user/mods/fika-server",
]

# State / cache files
STATE_FILE = BASE_DIR / "fika_changelog_cache.json"
HASH_CACHE_FILE = BASE_DIR / "fika_hash_cache.json"

# Logging
LOG_DIR = BASE_DIR / "logs"
BASE_LOG_NAME = "fika_unified.log"
LOG_FILE = LOG_DIR / BASE_LOG_NAME
MAX_LOG_SIZE = 512 * 1024  # 512 KB
MAX_LOG_FILES = 5

# Discord webhook
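# NOTE: this webhook token sits in plain text; if the script is ever shared,
# move it into an environment variable or an untracked config file.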
WEBHOOK_URL = "https://discord.com/api/webhooks/1429795721560854638/jE04Pl-PICQQU3-A0Pywlw7eE5Le4JkV9EvqHsDP9hM1YbfskaTcV3KE4e08GVFAveO1"

# Global hash cache (filled in main())
HASH_CACHE = {}

# -------------------------------
# LOGGING HELPERS
# -------------------------------

def rotate_logs():
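    """Rotate fika_unified.log -> .1 -> ... -> .5 once it reaches MAX_LOG_SIZE; the oldest copy is deleted."""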
    LOG_DIR.mkdir(parents=True, exist_ok=True)
    if not LOG_FILE.exists():
        return
    if LOG_FILE.stat().st_size < MAX_LOG_SIZE:
        return

    oldest = LOG_DIR / f"{BASE_LOG_NAME}.{MAX_LOG_FILES}"
    if oldest.exists():
        oldest.unlink()

    for i in range(MAX_LOG_FILES - 1, 0, -1):
        src = LOG_DIR / f"{BASE_LOG_NAME}.{i}"
        dst = LOG_DIR / f"{BASE_LOG_NAME}.{i + 1}"
        if src.exists():
            src.rename(dst)

    rotated = LOG_DIR / f"{BASE_LOG_NAME}.1"
    LOG_FILE.rename(rotated)


def log(msg: str):
    LOG_DIR.mkdir(parents=True, exist_ok=True)
    ts = time.strftime("[%Y-%m-%d %H:%M:%S]")
    line = f"{ts} {msg}"
    print(line, flush=True)
    with LOG_FILE.open("a", encoding="utf-8") as f:
        f.write(line + "\n")

# -------------------------------
# JSON HELPERS
# -------------------------------

def load_json(path: Path, default):
    try:
        with path.open("r", encoding="utf-8") as f:
            return json.load(f)
    except FileNotFoundError:
        return default
    except Exception as e:
        log(f"⚠️ Failed to read JSON from {path.name}: {e} — using default.")
        return default


def save_json(path: Path, data):
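    """Write JSON via a temp file and replace(), so a crash mid-write cannot leave a truncated target."""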
    path.parent.mkdir(parents=True, exist_ok=True)
    tmp = path.with_suffix(path.suffix + ".tmp")
    with tmp.open("w", encoding="utf-8") as f:
        json.dump(data, f, indent=2, ensure_ascii=False)
    tmp.replace(path)

# -------------------------------
# PATTERN / WALK HELPERS
# -------------------------------

def filter_protected_removals(removed_list, protected_prefixes):
    """
    Drop "removed" entries that fall under a protected prefix.

    Files under a protected prefix are never scanned, so adding a new
    exclude makes them vanish from the current map and they would be
    reported as removals. Those removals are artificial and are
    filtered out here.
    """
    out = []
    for rel in removed_list:
        rl = rel.lower()
        if any(rl == p or rl.startswith(p + "/") for p in protected_prefixes):
            continue  # artificial removal caused by a protected prefix
        out.append(rel)
    return out

def load_patterns(path: Path):
    """
    Read non-empty, non-comment lines from a text file and normalize
    to POSIX-style separators.
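
    Illustrative file contents (paths are made-up examples):
        # extra roots, one per line
        BepInEx/plugins/SomeExtraMod
        SPT/user/mods/another-mod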
    """
    if not path.exists():
        return []
    lines = []
    with path.open("r", encoding="utf-8") as f:
        for raw in f:
            line = raw.strip()
            if not line or line.startswith("#"):
                continue
            lines.append(line.replace("\\", "/"))
    return lines


def is_prefix_match(rel_path: str, prefix: str) -> bool:
    """True if rel_path is exactly prefix or directly under it."""
    prefix = prefix.rstrip("/")
    if rel_path == prefix:
        return True
    return rel_path.startswith(prefix + "/")


def should_exclude(rel_path: str, excludes):
    return any(is_prefix_match(rel_path, p) for p in excludes)


def quick_hash(path: Path, rel_key: str) -> str:
    """
    SHA1 hash with caching by relative key.
    Cache entry: { "hash": ..., "size": ..., "mtime": ... }
    """
    global HASH_CACHE
    st = path.stat()
    size = st.st_size
    mtime = int(st.st_mtime)

    cached = HASH_CACHE.get(rel_key)
    if cached and cached.get("size") == size and cached.get("mtime") == mtime:
        return cached["hash"]

    h = hashlib.sha1()
    with path.open("rb") as f:
        while True:
            chunk = f.read(1024 * 1024)
            if not chunk:
                break
            h.update(chunk)
    digest = h.hexdigest()
    HASH_CACHE[rel_key] = {"hash": digest, "size": size, "mtime": mtime}
    return digest


def walk_roots(root_rel_paths, exclude_prefixes):
    """
    Walk the given root-relative paths (dirs or files) and yield
    (rel_path, hash) pairs, preserving ORIGINAL CASE for output.
    All exclusion/prefix logic is done using lowercase copies.
    """
    seen = set()

    for root_rel in root_rel_paths:
        root_rel = root_rel.strip()
        if not root_rel:
            continue

        root_path = BASE_DIR / root_rel
        if not root_path.exists():
            log(f"⚠️ Included path not found: {root_rel}")
            continue

        if root_path.is_file():
            rel_norm = root_rel.replace("\\", "/")
            rel_lower = rel_norm.lower()

            if rel_lower in seen or should_exclude(rel_lower, exclude_prefixes):
                continue

            seen.add(rel_lower)
            digest = quick_hash(root_path, rel_lower)
            yield rel_norm, digest

        else:
            for dirpath, dirnames, filenames in os.walk(root_path):
                # as_posix() already yields forward slashes
                dir_rel_norm = Path(dirpath).relative_to(BASE_DIR).as_posix()

                # prune excluded dirs
                pruned = []
                for d in dirnames:
                    sub_rel = f"{dir_rel_norm}/{d}" if dir_rel_norm != "." else d
                    sub_rel_lower = sub_rel.lower()
                    if any(is_prefix_match(sub_rel_lower, p) for p in exclude_prefixes):
                        continue
                    pruned.append(d)
                dirnames[:] = pruned

                for fname in filenames:
                    rel_norm = (Path(dirpath) / fname).relative_to(BASE_DIR).as_posix()
                    rel_lower = rel_norm.lower()

                    if rel_lower in seen:
                        continue
                    if should_exclude(rel_lower, exclude_prefixes):
                        continue

                    seen.add(rel_lower)
                    digest = quick_hash(Path(dirpath) / fname, rel_lower)
                    yield rel_norm, digest
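
# Sketch of intended use (roots are relative to BASE_DIR, excludes lowercased):
#   for rel, digest in walk_roots(["BepInEx/plugins"], ["bepinex/plugins/fika"]):
#       ...  # rel keeps its original case; digest is the SHA1 hex string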



# -------------------------------
# MANIFEST BUILDERS
# -------------------------------

def build_manifest(label, hard_roots, include_file, hard_excludes, exclude_file, out_path):
    """
    Shared builder for both manifests:
      - roots: hard-coded roots plus extra roots from include_file
      - excludes: hard-coded excludes plus prefixes from exclude_file
      - writes {generated_at, files, protected_prefixes} to out_path
      - protected_prefixes tell the GUI which paths to ignore/prune-around
    """
    extra_roots = load_patterns(include_file)
    all_roots = list(dict.fromkeys(hard_roots + extra_roots))

    # Normalize all excludes to lowercase, POSIX style
    extra_excludes = load_patterns(exclude_file)
    all_excludes = [p.lower().rstrip("/\\") for p in (hard_excludes + extra_excludes)]

    log(f"🧭 {label} scan roots: {all_roots}")
    if all_excludes:
        log(f"🧹 {label} protected/excluded prefixes: {all_excludes}")

    files = []
    mapping = {}

    for rel, digest in walk_roots(all_roots, all_excludes):
        files.append({
            "name": rel,
            "hash": digest,
            "url": BASE_URL + quote(rel, safe="/"),
        })
        mapping[rel] = digest

    files.sort(key=lambda x: x["name"])

    manifest = {
        "generated_at": time.strftime("%a %b %d %H:%M:%S %Z %Y"),
        "files": files,
        "protected_prefixes": sorted(set(all_excludes)),
    }
    save_json(out_path, manifest)
    log(f"📝 {label} manifest written: {out_path} ({len(files)} files)")
    return manifest, mapping


def build_client_manifest():
    """Client manifest: BepInEx roots + include_list.txt, minus CLIENT_EXCLUDES + exclude_list.txt."""
    return build_manifest(
        "Client", CLIENT_ROOTS, INCLUDE_LIST,
        CLIENT_EXCLUDES, EXCLUDE_LIST, CLIENT_MANIFEST_PATH,
    )


def build_server_manifest():
    """Server manifest: SPT/user/mods + include_fika_client.txt, minus SERVER_EXCLUDES + exclude_fika_client.txt."""
    return build_manifest(
        "Server", SERVER_ROOTS, INCLUDE_FIKA_CLIENT,
        SERVER_EXCLUDES, EXCLUDE_FIKA_CLIENT, SERVER_MANIFEST_PATH,
    )
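
# A written manifest looks roughly like this (the entry is illustrative):
#   {
#     "generated_at": "Mon Jan 01 00:00:00 UTC 2024",
#     "files": [
#       {
#         "name": "BepInEx/plugins/SomeMod.dll",
#         "hash": "3f8e9c0a…",
#         "url": "https://files.vmvproductions.net/downloads/FikaSPT4_DB/BepInEx/plugins/SomeMod.dll"
#       }
#     ],
#     "protected_prefixes": [
#       "bepinex/patchers/spt-prepatch.dll",
#       "bepinex/plugins/fika",
#       "bepinex/plugins/spt"
#     ]
#   }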

# -------------------------------
# CHANGE STATE / DIFF
# -------------------------------

def load_last_state():
    data = load_json(STATE_FILE, {})
    client_prev = data.get("client", {})
    server_prev = data.get("server", {})
    return client_prev, server_prev


def save_state(client_map, server_map):
    data = {
        "client": client_map,
        "server": server_map,
    }
    save_json(STATE_FILE, data)


def diff_maps(current, previous):
    cur_keys = set(current.keys())
    prev_keys = set(previous.keys())
    added = sorted(cur_keys - prev_keys)
    removed = sorted(prev_keys - cur_keys)
    updated = sorted(
        k for k in cur_keys & prev_keys
        if current[k] != previous[k]
    )
    return added, updated, removed
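
# e.g. diff_maps({"a.dll": "h1", "b.dll": "h2"}, {"b.dll": "h0", "c.dll": "h3"})
#      -> (["a.dll"], ["b.dll"], ["c.dll"])   # (added, updated, removed)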

# -------------------------------
# DISCORD NOTIFY
# -------------------------------

def send_discord_message(content: str):
    """
    Post to the Discord webhook via curl instead of 'requests', so it works
    on a stock Unraid install with no extra Python packages.
    """
    if not WEBHOOK_URL:
        log("⚠️ WEBHOOK_URL not set — skipping Discord post.")
        return

    try:
        payload = json.dumps({"content": content})

        result = subprocess.run(
            [
                "curl",
                "-s",
                "-H", "Content-Type: application/json",
                "-X", "POST",
                "-d", payload,
                WEBHOOK_URL,
            ],
            capture_output=True,
            text=True
        )

        if result.returncode != 0:
            log(f"⚠️ curl failed with code {result.returncode}: {result.stderr.strip()}")
        else:
            log("📨 Posted changelog to Discord via curl.")

    except Exception as e:
        log(f"⚠️ Failed to send webhook via curl: {e}")


def format_section(title, added, updated, removed, prefix=""):
    """
    Build the Discord-formatted changelog section for one side (Client or Server).
    """
    lines = [f"\n— {title} —"]
    max_list = 40  # max individual entries per section

    def add_block(label, marker, items):
        if not items:
            return
        lines.append(f"{label} ({len(items)}):")
        for f in items[:max_list]:
            lines.append(f"  {marker} {prefix}{f}")
        if len(items) > max_list:
            lines.append(f"  … and {len(items) - max_list} more")

    add_block("➕ Added", "+", added)
    add_block("🟠 Updated", "~", updated)
    add_block("➖ Removed", "-", removed)

    return lines
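
# Rendered roughly as (inside the code block posted to Discord):
#   — Client changes —
#   ➕ Added (1):
#     + BepInEx/plugins/SomeMod.dll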


def post_changelog(client_diff, server_diff):
    c_added, c_updated, c_removed = client_diff
    s_added, s_updated, s_removed = server_diff

    if not (c_added or c_updated or c_removed or s_added or s_updated or s_removed):
        log("✔️ No new file changes detected — skipping Discord post.")
        return

    lines = ["Update summary for Fika Mods:"]

    if c_added or c_updated or c_removed:
        lines.extend(format_section("Client changes", c_added, c_updated, c_removed))

    if s_added or s_updated or s_removed:
        lines.extend(format_section("Server changes", s_added, s_updated, s_removed))

    msg = "```\n" + "\n".join(lines) + "\n```"
    send_discord_message(msg)

# -------------------------------
# MAIN
# -------------------------------

def main():
    global HASH_CACHE

    rotate_logs()
    log("🚀 Running Fika unified generator...")

    # Load hash cache
    HASH_CACHE = load_json(HASH_CACHE_FILE, {})
    log(f"📚 Loaded hash cache with {len(HASH_CACHE)} entries.")

    # Build manifests
    client_manifest, client_map = build_client_manifest()
    server_manifest, server_map = build_server_manifest()

    # Save hash cache
    save_json(HASH_CACHE_FILE, HASH_CACHE)
    log(f"💾 Hash cache updated with {len(HASH_CACHE)} entries.")

    # Compare against last state
    prev_client, prev_server = load_last_state()
    client_diff_raw = diff_maps(client_map, prev_client)
    server_diff_raw = diff_maps(server_map, prev_server)

    # Extract protected prefixes from manifests
    client_protected = client_manifest.get("protected_prefixes", [])
    server_protected = server_manifest.get("protected_prefixes", [])

    # Filter "removed" entries that fall inside protected prefixes
    client_removed_filtered = filter_protected_removals(
        client_diff_raw[2], client_protected
    )
    server_removed_filtered = filter_protected_removals(
        server_diff_raw[2], server_protected
    )

    # Reassemble diffs with filtered removals
    client_diff = (
        client_diff_raw[0],  # added
        client_diff_raw[1],  # updated
        client_removed_filtered,
    )
    server_diff = (
        server_diff_raw[0],
        server_diff_raw[1],
        server_removed_filtered,
    )

    log(
        f"🧾 Client changes — Added: {len(client_diff[0])}, "
        f"Updated: {len(client_diff[1])}, Removed: {len(client_diff[2])}"
    )
    log(
        f"🧾 Server changes — Added: {len(server_diff[0])}, "
        f"Updated: {len(server_diff[1])}, Removed: {len(server_diff[2])}"
    )

    # Discord
    post_changelog(client_diff, server_diff)

    # Persist new state
    save_state(client_map, server_map)
    log("💾 Change-state saved.")
    log("✅ Fika unified generator completed.")
    log("------------------------------------------------------------")


if __name__ == "__main__":
    main()
