#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Glass curtain wall adhesive detection - backend
Features:
- Start/stop capture (sudo ./main) running in /home/pi/acc_module/AI0
- Capture process stdout/stderr -> /home/pi/capture.log, real-time push to front-end via /log_ws
- Tail newest CSV in /home/pi/acc_data (pattern ai0_accel_*.csv) and push samples to /ws
- Camera MJPEG stream at /video_feed (OpenCV)
- Static frontend served from /static, index at /
"""
import os
import glob
import time
import json
import threading
import asyncio
import subprocess
import signal
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Request
from fastapi.responses import StreamingResponse, FileResponse, JSONResponse
from fastapi.staticfiles import StaticFiles

import cv2

# ---------------- CONFIG ----------------
CSV_DIR = "/home/pi/acc_data"
CSV_GLOB = "ai0_accel_*.csv"
POLL_INTERVAL = 0.05

# capture executable settings
CAPTURE_CWD = "/home/pi/acc_module/AI0"   # where your ./main resides
CAPTURE_EXE = "./main"                    # executed in CAPTURE_CWD
CAPTURE_CMD = ["sudo", "./main"]          # uses sudo; configure sudoers for nopass if needed
CAPTURE_LOG = "/home/pi/capture.log"

# camera
CAMERA_INDEX = 0

# queue sizes
SAMPLE_QUEUE_MAX = 8000
LOG_QUEUE_MAX = 2000
# ----------------------------------------

# FastAPI application; the title (Chinese) appears in the auto-generated docs UI.
app = FastAPI(title="玻璃幕墙结构胶检测系统")
# Frontend assets are served from ./static relative to the working directory.
app.mount("/static", StaticFiles(directory="static"), name="static")

@app.get("/")
def index():
    """Serve the frontend entry page from the static directory."""
    index_path = os.path.join("static", "index.html")
    return FileResponse(index_path)

# ---- queues ----
# Module-level asyncio queues bridging the tailer threads to the websocket
# coroutines. Bounded so a slow or absent websocket consumer cannot grow
# memory without limit.
sample_queue: asyncio.Queue = asyncio.Queue(maxsize=SAMPLE_QUEUE_MAX)
log_queue: asyncio.Queue = asyncio.Queue(maxsize=LOG_QUEUE_MAX)

# ---- CSV tail logic ----
def find_latest_csv():
    """Return the most recently modified CSV matching CSV_GLOB in CSV_DIR.

    Returns:
        The newest file path by mtime, or None when no file matches.
    """
    files = glob.glob(os.path.join(CSV_DIR, CSV_GLOB))
    # max() with a key is a single O(n) pass; sorting the whole list just to
    # take the first element was O(n log n). default=None covers no matches.
    return max(files, key=os.path.getmtime, default=None)

def tail_csv_file(path, loop, queue: asyncio.Queue, stop_event: threading.Event):
    """Follow *path* (tail -f style) and push parsed samples onto *queue*.

    Runs in a worker thread. Each CSV line ``t,volt,accel_g,accel_mps2`` is
    parsed into a dict and submitted to the asyncio *loop* thread-safely.
    Returns when *stop_event* is set, or immediately if the file cannot open.
    """
    try:
        f = open(path, "r", encoding="utf-8", errors="ignore")
    except Exception as e:
        print("[tail_csv] open error:", e)
        return
    with f:  # guarantees the handle is closed even on unexpected errors
        # Start at EOF: only samples written after we attach are forwarded.
        f.seek(0, os.SEEK_END)
        print("[tail_csv] tailing:", path)
        while not stop_event.is_set():
            line = f.readline()
            if not line:
                # No new data yet; short sleep keeps stop_event responsive.
                time.sleep(POLL_INTERVAL)
                continue
            line = line.strip()
            if not line:
                continue
            parts = [p.strip() for p in line.split(",") if p.strip() != '']
            if len(parts) < 4:
                # Too few columns (header or truncated write) - skip.
                continue
            try:
                sample = {
                    "t": float(parts[0]),
                    "volt": float(parts[1]),
                    "accel_g": float(parts[2]),
                    "accel_mps2": float(parts[3]),
                }
            except Exception as e:
                # parsing error - skip this line
                print("[tail_csv] parse error:", e, "line:", line)
                continue
            # run_coroutine_threadsafe is the documented way to submit a
            # coroutine to an event loop from another thread; scheduling
            # create_task via call_soon_threadsafe can leave the coroutine
            # un-awaited if the loop is shutting down.
            asyncio.run_coroutine_threadsafe(queue.put(sample), loop)
    print("[tail_csv] stopped:", path)

async def csv_watcher_task():
    """Poll CSV_DIR once a second and (re)start a tail thread for the newest file.

    When the newest CSV changes (a new capture started), the previous tailer
    thread is signalled to stop before a new one is launched.
    """
    stop_event = None
    tail_thread = None
    current = None
    # get_running_loop() is the correct call inside a coroutine;
    # get_event_loop() is deprecated for this use since Python 3.10.
    loop = asyncio.get_running_loop()
    while True:
        latest = find_latest_csv()
        if latest != current:
            # Newest file changed: stop the previous tailer before switching.
            if tail_thread and stop_event:
                stop_event.set()
                tail_thread.join(timeout=1.0)
            current = latest
            if current:
                stop_event = threading.Event()
                tail_thread = threading.Thread(
                    target=tail_csv_file,
                    args=(current, loop, sample_queue, stop_event),
                    daemon=True,
                )
                tail_thread.start()
                print("[csv_watcher] started tail for:", current)
            else:
                print("[csv_watcher] no csv found")
        await asyncio.sleep(1.0)

# ---- capture subprocess control ----
def _is_capture_running():
    """Return True if the tracked capture subprocess exists and is still alive."""
    proc = getattr(app.state, "capture_proc", None)
    if proc is None:
        return False
    # poll() is None while the process is still running
    return proc.poll() is None

@app.post("/start_capture")
async def start_capture(req: Request):
    """
    Start the capture process using CAPTURE_CMD in CAPTURE_CWD.

    The subprocess is started with stdin=PIPE so stop_capture can later send
    a newline (the executable's 'press any key' behavior). stdout/stderr are
    appended to CAPTURE_LOG, which is truncated first.
    """
    if _is_capture_running():
        return JSONResponse({"status":"already_running"}, status_code=200)

    # ensure CAPTURE_CWD exists
    if not os.path.isdir(CAPTURE_CWD):
        return JSONResponse({"status":"failed", "reason": f"CAPTURE_CWD not found: {CAPTURE_CWD}"}, status_code=500)

    # clear previous log (best effort; a stale log is not fatal)
    try:
        open(CAPTURE_LOG, "w").close()
    except Exception as e:
        print("[start_capture] clear log failed:", e)

    try:
        logf = open(CAPTURE_LOG, "ab")
    except Exception as e:
        return JSONResponse({"status":"failed", "reason": f"open log failed: {e}"}, status_code=500)

    try:
        # Start process with stdin PIPE so we can send a newline later.
        proc = subprocess.Popen(CAPTURE_CMD, cwd=CAPTURE_CWD, stdout=logf,
                                stderr=subprocess.STDOUT, stdin=subprocess.PIPE)
    except Exception as e:
        return JSONResponse({"status":"failed", "reason": str(e)}, status_code=500)
    finally:
        # The child owns its own duplicate of the log fd after Popen, so the
        # parent's handle must be closed here - the original code leaked one
        # open file object on every start.
        logf.close()

    app.state.capture_proc = proc
    print("[start_capture] started pid:", proc.pid)
    return JSONResponse({"status":"started", "pid": proc.pid}, status_code=200)

@app.post("/stop_capture")
async def stop_capture():
    """
    Stop capture:
    - First try to send newline to stdin (simulate 'press any key')
    - Wait briefly; if still running, send terminate; then kill if necessary.

    Always returns HTTP 200; the JSON "status" field distinguishes
    not_running / already_exited / stopped. app.state.capture_proc is
    cleared on every path that reaches the end.
    """
    proc = getattr(app.state, "capture_proc", None)
    if not proc:
        return JSONResponse({"status":"not_running"}, status_code=200)

    # poll() returning non-None means the process already exited on its own.
    if proc.poll() is not None:
        app.state.capture_proc = None
        return JSONResponse({"status":"already_exited"}, status_code=200)

    # Try to send newline to stdin (graceful path: the executable exits on a key press)
    try:
        if proc.stdin:
            try:
                proc.stdin.write(b"\n")
                proc.stdin.flush()
                # give it short time to exit
                # NOTE: a TimeoutExpired from this wait() is caught by the
                # inner except below and only logged; the terminate/kill
                # fallback further down then takes over.
                proc.wait(timeout=2.0)
            except Exception as e:
                print("[stop_capture] write to stdin failed:", e)
    except Exception as e:
        print("[stop_capture] stdin handling error:", e)

    # If still running, try terminate; escalate to kill after 3 s.
    # NOTE(review): the child is launched via sudo, so terminate()/kill()
    # from an unprivileged backend may fail with PermissionError - confirm
    # on the target device.
    if proc.poll() is None:
        try:
            proc.terminate()
            try:
                proc.wait(timeout=3.0)
            except subprocess.TimeoutExpired:
                proc.kill()
        except Exception as e:
            print("[stop_capture] terminate/kill error:", e)

    app.state.capture_proc = None
    return JSONResponse({"status":"stopped"}, status_code=200)

# ---- log tailer and /log_ws ----
def log_tailer(loop, queue: asyncio.Queue, stop_event: threading.Event):
    """Follow CAPTURE_LOG in a worker thread and push each new line onto *queue*.

    Lines are submitted to the asyncio *loop* thread-safely. Returns when
    *stop_event* is set, or immediately if the log cannot be opened.
    """
    try:
        f = open(CAPTURE_LOG, "r", encoding="utf-8", errors="ignore")
    except Exception as e:
        print("[log_tailer] open log failed:", e)
        return
    with f:  # guarantees the handle is closed even on unexpected errors
        # Start at EOF: only lines written after we attach are forwarded.
        f.seek(0, os.SEEK_END)
        print("[log_tailer] tailing capture log:", CAPTURE_LOG)
        while not stop_event.is_set():
            line = f.readline()
            if not line:
                time.sleep(0.2)
                continue
            # Documented thread -> event-loop bridge; see tail note on
            # asyncio.run_coroutine_threadsafe.
            asyncio.run_coroutine_threadsafe(queue.put(line.rstrip("\n")), loop)
    print("[log_tailer] stopped")

@app.websocket("/log_ws")
async def log_ws(websocket: WebSocket):
    """Push capture-log lines to a websocket client in real time.

    A per-connection tailer thread follows CAPTURE_LOG into the shared
    log_queue; the thread is stopped on every exit path, not only on a
    clean WebSocketDisconnect.
    """
    await websocket.accept()
    print("[log_ws] client connected:", websocket.client)
    stop_event = threading.Event()
    loop = asyncio.get_running_loop()
    t = threading.Thread(target=log_tailer, args=(loop, log_queue, stop_event), daemon=True)
    t.start()
    try:
        while True:
            line = await log_queue.get()
            # send raw line
            await websocket.send_text(line)
    except WebSocketDisconnect:
        print("[log_ws] disconnected:", websocket.client)
    finally:
        # The original only stopped the tailer on WebSocketDisconnect; any
        # other send error leaked the thread. finally covers all exits.
        stop_event.set()
        t.join(timeout=1.0)

# ---- sample websocket /ws ----
@app.websocket("/ws")
async def websocket_samples(websocket: WebSocket):
    """Stream acceleration samples to a websocket client as JSON text frames."""
    await websocket.accept()
    print("[ws] client connected:", websocket.client)
    # NOTE(review): sample_queue is a single shared queue - with more than
    # one connected client, each sample is delivered to only one of them.
    try:
        while True:
            payload = await sample_queue.get()
            text = json.dumps(payload, ensure_ascii=False)
            await websocket.send_text(text)
    except WebSocketDisconnect:
        print("[ws] disconnected:", websocket.client)

# ---- camera mjpeg route ----
def gen_frames():
    """Yield an MJPEG multipart byte stream from the configured camera.

    If the camera cannot be opened, yields empty chunks forever so the HTTP
    response stays open (the client simply renders nothing).
    """
    cap = cv2.VideoCapture(CAMERA_INDEX)
    if not cap.isOpened():
        print("[camera] cannot open index", CAMERA_INDEX)
        cap.release()
        while True:
            # produce empty bytes so client has response (won't render)
            time.sleep(0.5)
            yield b''
    try:
        while True:
            success, frame = cap.read()
            if not success:
                time.sleep(0.05)
                continue
            ret, buf = cv2.imencode('.jpg', frame)
            if not ret:
                continue
            frame_bytes = buf.tobytes()
            yield (b'--frame\r\n' b'Content-Type: image/jpeg\r\n\r\n' + frame_bytes + b'\r\n')
    finally:
        # Runs when the generator is closed (client disconnect). The original
        # cap.release() placed after `while True` was unreachable, so the
        # camera was never released.
        cap.release()

@app.get("/video_feed")
def video_feed():
    """Expose the camera as an MJPEG stream (multipart/x-mixed-replace)."""
    frame_stream = gen_frames()
    return StreamingResponse(
        frame_stream,
        media_type='multipart/x-mixed-replace; boundary=frame',
    )

# ---- startup tasks ----
@app.on_event("startup")
async def on_startup():
    """Initialize process-tracking state and launch the CSV watcher task."""
    if not hasattr(app.state, "capture_proc"):
        app.state.capture_proc = None
    # Keep a reference to the task: the event loop holds only weak references
    # to tasks, so an unreferenced background task may be garbage-collected
    # mid-flight (per the asyncio.create_task documentation).
    app.state.csv_watcher = asyncio.create_task(csv_watcher_task())
    print("backend started, watching csv dir:", CSV_DIR)
