﻿#!/usr/bin/env python3
import os
import hashlib
import json
import time

# ==============================
# CONFIG
# ==============================
# Directory holding the mod files: the directory this script itself lives in.
SOURCE_DIR = os.path.dirname(os.path.abspath(__file__))
# Public base URL the files are served from; each relative path is appended
# to this to build the download URL written into the manifest.
BASE_URL   = "https://files.vmvproductions.net/downloads/ATSModDB/"
# Manifest written for clients (one entry per file: name, hash, url).
OUTPUT_FILE = "filelist.json"
# Local size/mtime/sha256 cache used to skip re-hashing unchanged files.
CACHE_FILE  = ".hashcache.json"

# Filenames never published in the manifest: the tool's own outputs and
# (presumably) this script's filename — TODO confirm it matches on disk.
IGNORE = {
    OUTPUT_FILE,
    CACHE_FILE,
    "generate_filelist.py",
}

# ==============================

def sha256_file(path):
    """Return the SHA-256 hex digest of the file at *path*.

    The file is streamed in 1 MiB chunks so arbitrarily large archives
    can be hashed without loading them fully into memory.
    """
    digest = hashlib.sha256()
    with open(path, "rb") as handle:
        while chunk := handle.read(1024 * 1024):
            digest.update(chunk)
    return digest.hexdigest()

def load_cache():
    """Load the hash cache from CACHE_FILE.

    Returns the parsed JSON object, or {} when the file is absent,
    unreadable, or corrupt — a broken cache must never abort generation;
    every file simply gets re-hashed.
    """
    if os.path.exists(CACHE_FILE):
        try:
            with open(CACHE_FILE, "r", encoding="utf-8") as f:
                return json.load(f)
        # Narrowed from a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit. OSError covers read failures;
        # ValueError covers json.JSONDecodeError (its subclass).
        except (OSError, ValueError):
            pass
    return {}

def save_cache(cache):
    """Persist *cache* to CACHE_FILE as pretty-printed (indent=2) JSON."""
    serialized = json.dumps(cache, indent=2)
    with open(CACHE_FILE, "w", encoding="utf-8") as out:
        out.write(serialized)

def main():
    """Walk SOURCE_DIR for mod archives (.scs/.zip), hash them, and write
    the download manifest.

    Unchanged files (same size and mtime as the cached entry) reuse their
    cached SHA-256; everything else is re-hashed. Side effects: rewrites
    CACHE_FILE and OUTPUT_FILE inside SOURCE_DIR.
    """
    cache = load_cache()
    new_cache = {}
    file_entries = []

    for root, _dirs, files in os.walk(SOURCE_DIR, followlinks=False):
        for filename in files:
            # Skip the tool's own artifacts first. (Previously this check
            # ran after the extension filter, making it unreachable: no
            # IGNORE entry ends in .scs/.zip. Checked first so it holds
            # even if IGNORE ever gains an archive name.)
            if filename in IGNORE:
                continue

            # Only valid mod archive types are published.
            if not filename.lower().endswith((".scs", ".zip")):
                continue

            local_path = os.path.join(root, filename)
            # Manifest paths are relative with forward slashes (URL-safe).
            rel = os.path.relpath(local_path, SOURCE_DIR).replace("\\", "/")

            try:
                stat = os.stat(local_path)
            except FileNotFoundError:
                # File vanished between walk() and stat() — e.g. deleted
                # concurrently. Best effort: report and move on.
                print(f"Skipping missing file: {rel}")
                continue

            size = stat.st_size
            mtime = int(stat.st_mtime)

            # Reuse the cached digest only when size and mtime match AND
            # the cached entry actually carries a hash; a malformed entry
            # previously leaked None into the manifest.
            file_hash = None
            cached = cache.get(rel)
            if cached and cached.get("size") == size and cached.get("mtime") == mtime:
                file_hash = cached.get("sha256")
            if not file_hash:
                print(f"Hashing: {rel} ...")
                file_hash = sha256_file(local_path)

            new_cache[rel] = {
                "size": size,
                "mtime": mtime,
                "sha256": file_hash,
            }

            file_entries.append({
                "name": rel,
                "hash": file_hash,
                "url": f"{BASE_URL}{rel}",
            })

    save_cache(new_cache)

    manifest = {
        "generated_at": time.asctime(),
        "files": file_entries,
    }

    output_path = os.path.join(SOURCE_DIR, OUTPUT_FILE)
    with open(output_path, "w", encoding="utf-8") as f:
        json.dump(manifest, f, indent=2)

    # Summary feedback (was commented out in the original).
    print(f"\nGenerated {OUTPUT_FILE} with {len(file_entries)} entries.")

# Script entry point: regenerate the manifest when run directly.
if __name__ == "__main__":
    main()
