#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Glass curtain wall adhesive detection - backend (tail CSV)
- Start/stop capture via POST /start_capture and /stop_capture
- Spawn CAPTURE_CMD with stdout=PIPE and stderr=PIPE, write raw stdout/stderr into capture.log
- Detect "Output file: <path>" printed by capture program and tail that CSV for appended sample lines
- Parse sample lines and push JSON samples into sample_queue (for /ws)
- /log_ws streams raw capture log lines, /ws streams parsed samples
- Camera MJPEG stream at /video_feed retained
"""
import os
import time
import json
import threading
import asyncio
import subprocess
import traceback
from datetime import datetime
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Request
from fastapi.responses import StreamingResponse, FileResponse, JSONResponse
from fastapi.staticfiles import StaticFiles

# Optional: import cv2 when /video_feed is used
try:
    import cv2
except Exception:
    cv2 = None

# ---------------- CONFIG ----------------
# Directory where the capture program's CSV output files accumulate.
CSV_DIR = "/home/pi/acc_data"
# Working directory the capture subprocess is launched from.
CAPTURE_CWD = "/home/pi/acc_module/AI0"
# Prefer absolute path for the capture executable; avoid sudo unless configured in sudoers
CAPTURE_CMD = ["/home/pi/acc_module/AI0/main"]
# Append-only log file mirroring the capture process stdout/stderr.
CAPTURE_LOG = "/home/pi/capture.log"
# OpenCV camera index used by /video_feed.
CAMERA_INDEX = 0

# queue sizes (puts are scheduled via run_coroutine_threadsafe; a full
# queue makes the scheduled put wait, it does not drop data)
SAMPLE_QUEUE_MAX = 8000
LOG_QUEUE_MAX = 2000

# ----------------------------------------

app = FastAPI(title="玻璃幕墙结构胶检测系统")
# static directory: make sure your index.html is at ./static/index.html
app.mount("/static", StaticFiles(directory="static"), name="static")

@app.get("/")
def index():
    """Serve the single-page frontend from ./static/index.html."""
    return FileResponse(os.path.join("static", "index.html"))

# async queues shared with websockets:
#   sample_queue - parsed sample dicts, drained by /ws
#   log_queue    - raw capture log lines, drained by /log_ws
sample_queue: asyncio.Queue = asyncio.Queue(maxsize=SAMPLE_QUEUE_MAX)
log_queue: asyncio.Queue = asyncio.Queue(maxsize=LOG_QUEUE_MAX)

# ---------- parser ----------
def try_parse_sample_from_line(line: str):
    """
    Best-effort parser for one sample line from the device CSV or stdout.

    Recognized layouts:
    - t,volt,accel_g,accel_mps2
    - t,ax,ay,az            -> accel_g derived from the vector magnitude
    - t,volt,ax,ay,az       -> volt present, axes follow
    - t,volt,accel_g        -> exactly three fields

    Returns a dict {"t", "volt", "accel_g", "accel_mps2"} or None when the
    line is blank, informational, or not numeric.
    """
    import math

    text = line.strip()
    if not text:
        return None

    # Informational/status lines from the capture program are not samples.
    info_keywords = ("output file", "opening device", "start", "press any key", "sampling rate", "done", "stopping", "total points")
    lowered = text.lower()
    for keyword in info_keywords:
        if keyword in lowered:
            return None

    # Prefer comma-separated tokens (semicolons normalized to commas);
    # fall back to whitespace splitting when too few fields appear.
    tokens = [tok.strip() for tok in text.replace(';', ',').split(',') if tok.strip() != '']
    if len(tokens) < 3:
        tokens = text.split()

    # Convert tokens to floats, salvaging tokens with stray characters.
    values = []
    for tok in tokens:
        try:
            values.append(float(tok))
        except Exception:
            salvage = ''.join(ch for ch in tok if ch.isdigit() or ch in ".-+eE")
            if not salvage:
                continue
            try:
                values.append(float(salvage))
            except Exception:
                continue
        if len(values) >= 6:
            break  # more fields than any known layout uses

    if not values:
        return None

    if len(values) >= 4:
        t = values[0]
        second = values[1]
        third = values[2]
        fourth = values[3]
        # Heuristic: a comparatively large 4th field looks like accel in
        # m/s^2, i.e. the t,volt,accel_g,accel_mps2 layout.
        if abs(fourth) > abs(third) * 4 or abs(fourth) > 1.0:
            return {"t": float(t), "volt": float(second), "accel_g": float(third), "accel_mps2": float(fourth)}
        # Otherwise treat trailing fields as axis data and use magnitude.
        ax = values[2]
        ay = values[3] if len(values) > 3 else 0.0
        az = values[4] if len(values) > 4 else 0.0
        magnitude = math.sqrt(ax * ax + ay * ay + az * az)
        g_value = magnitude / 9.80665 if magnitude > 0 else 0.0
        return {"t": float(t), "volt": float(second), "accel_g": g_value, "accel_mps2": g_value * 9.80665}

    if len(values) == 3:
        t, second, accel = values
        return {"t": float(t), "volt": float(second), "accel_g": float(accel), "accel_mps2": float(accel) * 9.80665}

    return None

# ---------- capture reader with CSV tailing ----------
def capture_reader(proc: subprocess.Popen, loop: asyncio.AbstractEventLoop, stop_event: threading.Event, csv_file_path: str):
    """
    Thread body that shepherds one capture subprocess until EOF or stop.

    - Mirrors proc stdout/stderr into CAPTURE_LOG and log_queue.
    - Watches stdout for an "Output file: <path>" line; once that file
      exists, starts a tail thread that follows the CSV, parses appended
      lines with try_parse_sample_from_line and pushes sample dicts into
      sample_queue.
    - Both queues belong to `loop`, so puts from these worker threads are
      scheduled with asyncio.run_coroutine_threadsafe.

    Args:
        proc: running capture subprocess; stdout/stderr are binary PIPEs.
        loop: event loop that owns sample_queue / log_queue.
        stop_event: cooperative shutdown flag, also set by /stop_capture.
        csv_file_path: placeholder CSV path chosen at start time; only used
            in the final console message — the tailed path is the one the
            capture program reports on stdout.
    """
    try:
        logf = open(CAPTURE_LOG, "a", encoding="utf-8", errors="ignore")
    except Exception as e:
        print("[capture_reader] open capture.log failed:", e)
        logf = None  # write_log below degrades to a no-op

    def write_log(line: str):
        # Timestamped append to capture.log; I/O errors are swallowed so a
        # broken log file never kills the reader.
        prefix = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        try:
            if logf:
                logf.write(f"{prefix} {line}\n")
                logf.flush()
        except Exception:
            pass

    # tail-follow function for CSV ("tail -f" style: seek to EOF, then poll)
    def tail_csv_and_push(csv_path_local: str):
        try:
            f = open(csv_path_local, "r", encoding="utf-8", errors="ignore")
        except Exception as e:
            write_log(f"[tail_csv] open failed {csv_path_local}: {e}")
            return
        try:
            # Start at EOF: stream only samples appended after this point.
            f.seek(0, os.SEEK_END)
        except Exception:
            pass
        write_log(f"[tail_csv] started tailing {csv_path_local}")
        while not stop_event.is_set():
            line = f.readline()
            if not line:
                time.sleep(0.05)  # no new data yet; poll at ~20 Hz
                continue
            line = line.rstrip("\r\n")
            if not line:
                continue
            # skip header lines if they contain non-numeric or 't,volt'
            if any(tok.lower() in line.lower() for tok in ("t,volt", "time,volt")):
                # skip header
                continue
            sample = None
            try:
                sample = try_parse_sample_from_line(line)
            except Exception as e:
                write_log(f"[tail_csv][parser_error] {e}")
            if sample:
                try:
                    # Hand the parsed sample over to the event loop's queue.
                    asyncio.run_coroutine_threadsafe(sample_queue.put(sample), loop)
                except Exception:
                    pass
            write_log("[CSV] " + line)
        try:
            f.close()
        except:
            pass
        write_log(f"[tail_csv] stopped tailing {csv_path_local}")

    csv_being_tailed = None  # CSV path reported by the capture program, once seen
    tail_thread = None

    try:
        stdout = proc.stdout
        stderr = proc.stderr
        # Start separate readers for stderr to ensure we capture errors
        def read_stream(stream, tag):
            # Decode each binary line, mirror it to capture.log and log_queue.
            for raw in iter(stream.readline, b''):
                if stop_event.is_set():
                    break
                try:
                    line = raw.decode("utf-8", errors="ignore").rstrip("\r\n")
                except Exception:
                    line = raw.decode("latin1", errors="ignore").rstrip("\r\n")
                write_log(f"[{tag}] {line}")
                try:
                    asyncio.run_coroutine_threadsafe(log_queue.put(f"[{tag}] {line}"), loop)
                except Exception:
                    pass

        stderr_thread = threading.Thread(target=read_stream, args=(stderr, "STDERR"), daemon=True)
        stderr_thread.start()

        # read stdout lines (text) on this thread; loop ends at process EOF
        for raw in iter(stdout.readline, b''):
            if stop_event.is_set():
                break
            try:
                line = raw.decode("utf-8", errors="ignore").rstrip("\r\n")
            except Exception:
                line = raw.decode("latin1", errors="ignore").rstrip("\r\n")
            write_log("[STDOUT] " + line)
            try:
                asyncio.run_coroutine_threadsafe(log_queue.put("[STDOUT] " + line), loop)
            except Exception:
                pass

            # detect Output file line
            if "output file" in line.lower():
                # extract path (text after the first colon)
                try:
                    parts = line.split(":", 1)
                    csv_path_reported = parts[1].strip() if len(parts) > 1 else None
                    if csv_path_reported:
                        # sometimes program prints quotes or extra text; try clean
                        csv_path_reported = csv_path_reported.strip().strip('"').strip("'")
                        # NOTE(review): if the program prints this line before
                        # creating the file, the exists() check fails and the
                        # tail never starts — confirm against the capture binary.
                        if os.path.exists(csv_path_reported):
                            csv_being_tailed = csv_path_reported
                            if tail_thread is None or not tail_thread.is_alive():
                                tail_thread = threading.Thread(target=tail_csv_and_push, args=(csv_being_tailed,), daemon=True)
                                tail_thread.start()
                                write_log(f"[capture_reader] started tail thread for {csv_being_tailed}")
                except Exception as e:
                    write_log(f"[capture_reader] parse Output file failed: {e}")

            # in case the device prints sample lines to stdout directly, also attempt parse and push
            try:
                sample = try_parse_sample_from_line(line)
                if sample:
                    try:
                        asyncio.run_coroutine_threadsafe(sample_queue.put(sample), loop)
                    except Exception:
                        pass
            except Exception:
                pass

        # EOF reached
    except Exception as e:
        write_log(f"[capture_reader] exception: {e}\n{traceback.format_exc()}")
    finally:
        # Signal the tail thread, give it a moment, then close the log file.
        try:
            stop_event.set()
            if tail_thread and tail_thread.is_alive():
                tail_thread.join(timeout=1.0)
        except Exception:
            pass
        try:
            if logf:
                logf.close()
        except:
            pass
        print("[capture_reader] stopped, csv:", csv_file_path)

# ---- start/stop capture endpoints ----
def _is_capture_running():
    """Return True when a capture subprocess exists and has not exited."""
    proc = getattr(app.state, "capture_proc", None)
    if proc is None:
        return False
    return proc.poll() is None

@app.post("/start_capture")
async def start_capture(req: Request):
    """Start the capture subprocess plus its reader thread.

    Refuses to start while a capture is already running. Returns JSON with
    a status field and, on success, the pid and the placeholder CSV path
    (the real output path is reported by the program on stdout and picked
    up by capture_reader).
    """
    if _is_capture_running():
        return JSONResponse({"status":"already_running", "pid": app.state.capture_proc.pid}, status_code=200)

    if not os.path.isdir(CAPTURE_CWD):
        return JSONResponse({"status":"failed", "reason": f"CAPTURE_CWD not found: {CAPTURE_CWD}"}, status_code=500)

    try:
        os.makedirs(CSV_DIR, exist_ok=True)
    except Exception as e:
        return JSONResponse({"status":"failed", "reason": f"cannot create CSV_DIR: {e}"}, status_code=500)

    # Placeholder name only; the capture binary chooses its own output file.
    ts = datetime.now().strftime("%Y%m%d_%H%M%S")
    csv_name = f"ai0_accel_{ts}.csv"
    csv_path_placeholder = os.path.join(CSV_DIR, csv_name)

    try:
        proc = subprocess.Popen(
            CAPTURE_CMD,
            cwd=CAPTURE_CWD,
            stdout=subprocess.PIPE,
            stderr=subprocess.PIPE,
            stdin=subprocess.PIPE,   # /stop_capture sends a newline here
            bufsize=0                # unbuffered pipes so lines arrive promptly
        )
    except Exception as e:
        return JSONResponse({"status":"failed", "reason": f"start process failed: {e}. Note: if using sudo, configure sudoers or run as root."}, status_code=500)

    app.state.capture_proc = proc
    app.state.capture_stop_event = threading.Event()
    app.state.capture_csv_path = csv_path_placeholder
    # We are inside an async handler, so a running loop is guaranteed.
    # get_event_loop() is deprecated for this use since Python 3.10.
    loop = asyncio.get_running_loop()

    thr = threading.Thread(target=capture_reader, args=(proc, loop, app.state.capture_stop_event, csv_path_placeholder), daemon=True)
    app.state.capture_thread = thr
    thr.start()

    write_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    # write a short log entry (best effort)
    try:
        with open(CAPTURE_LOG, "a", encoding="utf-8", errors="ignore") as lf:
            lf.write(f"{write_time} [start_capture] started pid={proc.pid} placeholder_csv={csv_path_placeholder}\n")
    except Exception:
        pass

    return JSONResponse({"status":"started", "pid": proc.pid, "csv": csv_path_placeholder}, status_code=200)

@app.post("/stop_capture")
async def stop_capture():
    """Stop the capture subprocess and its reader thread.

    Sequence: nudge the program via stdin, escalate to terminate/kill,
    signal the reader thread, then clear app.state bookkeeping and log.
    Returns JSON with a status field and the placeholder CSV path.
    """
    proc = getattr(app.state, "capture_proc", None)
    if not proc:
        return JSONResponse({"status":"not_running"}, status_code=200)

    # Process already exited on its own: just drop the stale handle.
    if proc.poll() is not None:
        app.state.capture_proc = None
        return JSONResponse({"status":"already_exited"}, status_code=200)

    # Best-effort graceful stop: send a newline to the program's stdin
    # (the capture binary stops on a key press).
    try:
        if proc.stdin:
            try:
                proc.stdin.write(b"\n")
                proc.stdin.flush()
            except Exception:
                pass
    except Exception:
        pass

    # NOTE(review): there is no grace period between the stdin nudge and
    # this poll, so the process almost always gets SIGTERM immediately;
    # kill after a 3s wait is the last resort.
    if proc.poll() is None:
        try:
            proc.terminate()
            try:
                proc.wait(timeout=3.0)
            except subprocess.TimeoutExpired:
                proc.kill()
        except Exception:
            pass

    # Tell the reader/tail threads to stop and give them a moment to exit.
    stop_event = getattr(app.state, "capture_stop_event", None)
    if stop_event:
        stop_event.set()
    thr = getattr(app.state, "capture_thread", None)
    if thr and thr.is_alive():
        thr.join(timeout=1.0)

    csv_path = getattr(app.state, "capture_csv_path", None)
    app.state.capture_proc = None
    app.state.capture_thread = None
    app.state.capture_stop_event = None

    # Append a stop marker to the capture log (best effort).
    try:
        with open(CAPTURE_LOG, "a", encoding="utf-8", errors="ignore") as lf:
            lf.write(f"{datetime.now().strftime('%Y-%m-%d %H:%M:%S')} [stop_capture] stopped csv={csv_path}\n")
    except Exception:
        pass

    return JSONResponse({"status":"stopped", "csv": csv_path}, status_code=200)

# ---- /ws samples (push parsed sample JSON to client) ----
@app.websocket("/ws")
async def websocket_samples(websocket: WebSocket):
    """Push parsed sample dicts from sample_queue to the client as JSON text."""
    await websocket.accept()
    print("[ws] client connected:", websocket.client)
    try:
        while True:
            item = await sample_queue.get()
            payload = json.dumps(item, ensure_ascii=False)
            await websocket.send_text(payload)
    except WebSocketDisconnect:
        print("[ws] disconnected:", websocket.client)

# ---- /log_ws: send raw capture log lines ----
@app.websocket("/log_ws")
async def log_ws(websocket: WebSocket):
    """Stream raw capture-log lines from log_queue to the client."""
    await websocket.accept()
    print("[log_ws] client connected:", websocket.client)
    try:
        while True:
            entry = await log_queue.get()
            await websocket.send_text(entry)
    except WebSocketDisconnect:
        print("[log_ws] disconnected:", websocket.client)

# ---- camera mjpeg route ----
def gen_frames():
    """Generator yielding an MJPEG multipart byte stream from the camera.

    If OpenCV is missing or the camera cannot be opened, yields empty
    chunks forever so the HTTP connection stays open (placeholder
    behavior, unchanged from the original).

    The capture loop runs until the client disconnects, which raises
    GeneratorExit inside the generator; the try/finally guarantees the
    camera is released in that case. (Previously cap.release() sat after
    an infinite loop and was unreachable — a resource leak on disconnect.)
    """
    if cv2 is None:
        while True:
            time.sleep(0.5)
            yield b''
    cap = cv2.VideoCapture(CAMERA_INDEX)
    if not cap.isOpened():
        print("[camera] cannot open index", CAMERA_INDEX)
        cap.release()  # no-op on an unopened capture, but explicit
        while True:
            time.sleep(0.5)
            yield b''
    try:
        while True:
            success, frame = cap.read()
            if not success:
                time.sleep(0.05)  # transient read failure; retry shortly
                continue
            ret, buf = cv2.imencode('.jpg', frame)
            if not ret:
                continue
            frame_bytes = buf.tobytes()
            yield (b'--frame\r\n' b'Content-Type: image/jpeg\r\n\r\n' + frame_bytes + b'\r\n')
    finally:
        # Runs on GeneratorExit (client disconnect) or any error.
        cap.release()

@app.get("/video_feed")
def video_feed():
    """Serve the camera as an MJPEG stream (multipart/x-mixed-replace)."""
    mime = 'multipart/x-mixed-replace; boundary=frame'
    return StreamingResponse(gen_frames(), media_type=mime)

# ---- startup tidy ----
@app.on_event("startup")
async def on_startup():
    """Ensure capture bookkeeping exists and log a backend-start marker."""
    if not hasattr(app.state, "capture_proc"):
        app.state.capture_proc = None
    stamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
    # Best-effort marker in the capture log; never fail startup over it.
    try:
        with open(CAPTURE_LOG, "a", encoding="utf-8", errors="ignore") as lf:
            lf.write(f"{stamp} [backend] started\n")
    except Exception:
        pass
