#!/usr/bin/env python3
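"""Manual bulk loader for stock indicator JSON files.

Reads *.json files from a queue directory, upserts each event into the
stock_indicator_state table, and moves the file to a processed or error
directory depending on the outcome. Run with --incoming/--processed/--error
to override the default directories.
"""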

import argparse
import json
import os
import shutil
from datetime import datetime

import mysql.connector  # third-party: pip install mysql-connector-python

# ---------------- CONFIG ----------------

DEFAULT_INCOMING = "/var/www/html/lumenita/trader/msg/queue"
DEFAULT_PROCESSED = "/var/www/html/lumenita/trader/msg/processed"
DEFAULT_ERROR = "/var/www/html/lumenita/trader/msg/err"

DB_CONFIG = {
    "host": "localhost",
    "user": "trader_user",
    "password": "M@keD$Money",
    "database": "trader"
}

SECRET_KEY = "Money_Printer"

# ----------------------------------------
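
# Each queue file is expected to contain a single JSON object. Illustrative
# example (field values here are made up; see parse_event for the contract):
#
#   {
#       "secret": "Money_Printer",
#       "symbol": "AAPL",
#       "price": 189.25,
#       "trigger": "rsi_oversold",
#       "time": "2024-01-15 09:30:00"
#   }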


def get_db_connection():
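    """Open a new MySQL connection using the module-level DB_CONFIG."""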
    return mysql.connector.connect(**DB_CONFIG)


def parse_event(json_data, file_path):
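    """Validate a decoded payload and extract one indicator event.

    Rejects payloads whose "secret" does not match SECRET_KEY, and requires
    a symbol, a numeric price, and an indicator name (from "trigger" or,
    failing that, "type"). If the payload carries no "time" field, the
    file's mtime is used as the event time instead.

    Returns a (symbol, indicator, price, event_time) tuple.
    """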
    if json_data.get("secret") != SECRET_KEY:
        raise ValueError("Invalid secret")

    symbol = json_data.get("symbol")
    if not symbol:
        raise ValueError("Missing symbol")

    raw_price = json_data.get("price")
    if raw_price is None:
        raise ValueError("Missing price")
    price = float(raw_price)

    indicator = json_data.get("trigger") or json_data.get("type")
    if not indicator:
        raise ValueError("Missing indicator (trigger/type)")

    if "time" in json_data:
        event_time = datetime.strptime(
            json_data["time"],
            "%Y-%m-%d %H:%M:%S"
        )
    else:
        event_time = datetime.fromtimestamp(
            os.path.getmtime(file_path)
        )

    return symbol, indicator, price, event_time


def upsert_event(cursor, data, source_file):
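    """Insert the event, keeping only the newest state per (symbol, indicator).

    Assumes the table carries a UNIQUE key on (symbol, indicator) so that
    ON DUPLICATE KEY UPDATE fires for repeat events. A sketch of the assumed
    schema (column types are guesses, not taken from the real database):

        CREATE TABLE stock_indicator_state (
            symbol      VARCHAR(16)  NOT NULL,
            indicator   VARCHAR(32)  NOT NULL,
            price       DECIMAL(12,4),
            event_time  DATETIME,
            source_file VARCHAR(255),
            UNIQUE KEY uq_symbol_indicator (symbol, indicator)
        );

    Rows only ever move forward in time: an event older than the stored
    event_time leaves price, event_time, and source_file untouched.
    """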
    sql = """
        INSERT INTO stock_indicator_state
        (symbol, indicator, price, event_time, source_file)
        VALUES (%s, %s, %s, %s, %s)
        ON DUPLICATE KEY UPDATE
            -- Assignments are evaluated left to right, so compare against
            -- the stored event_time first and only update it last.
            price = IF(
                VALUES(event_time) >= event_time,
                VALUES(price),
                price
            ),
            source_file = IF(
                VALUES(event_time) >= event_time,
                VALUES(source_file),
                source_file
            ),
            event_time = GREATEST(event_time, VALUES(event_time))
    """
    cursor.execute(sql, (*data, source_file))


def process_file(file_path):
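    """Load one JSON file and upsert its event, committing per file.

    Opens a fresh connection for every file, which keeps each file's
    commit independent at the cost of some connection overhead.
    """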
    with open(file_path, "r") as f:
        json_data = json.load(f)

    data = parse_event(json_data, file_path)

    # Close the cursor and connection even if the upsert or commit raises,
    # so a bad file cannot leak database connections.
    conn = get_db_connection()
    try:
        cursor = conn.cursor()
        try:
            upsert_event(cursor, data, os.path.basename(file_path))
            conn.commit()
        finally:
            cursor.close()
    finally:
        conn.close()


def process_directory(incoming, processed, error):
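    """Process every *.json file in `incoming`, in sorted filename order.

    Successfully loaded files are moved to `processed`; files that fail
    parsing, validation, or the database write are moved to `error` so
    they can be inspected and replayed.
    """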
    os.makedirs(processed, exist_ok=True)
    os.makedirs(error, exist_ok=True)

    files = sorted(
        f for f in os.listdir(incoming)
        if f.lower().endswith(".json")
    )

    print(f"Processing {len(files)} files from {incoming}")

    for filename in files:
        file_path = os.path.join(incoming, filename)

        if not os.path.isfile(file_path):
            continue

        try:
            process_file(file_path)

            shutil.move(
                file_path,
                os.path.join(processed, filename)
            )

            print(f"OK   {filename}")

        except Exception as e:
            print(f"FAIL {filename}: {e}")

            shutil.move(
                file_path,
                os.path.join(error, filename)
            )


def main():
    parser = argparse.ArgumentParser(
        description="Manual bulk loader for indicator JSON files"
    )

    parser.add_argument(
        "--incoming",
        default=DEFAULT_INCOMING,
        help="Directory containing incoming JSON files"
    )

    parser.add_argument(
        "--processed",
        default=DEFAULT_PROCESSED,
        help="Directory to move processed files to"
    )

    parser.add_argument(
        "--error",
        default=DEFAULT_ERROR,
        help="Directory to move failed files to"
    )

    args = parser.parse_args()

    process_directory(
        args.incoming,
        args.processed,
        args.error
    )


if __name__ == "__main__":
    main()
