#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Glass curtain wall adhesive detection - backend (modified)
- Start/stop capture via POST /start_capture and /stop_capture
- When started: spawn CAPTURE_CMD with stdout=PIPE, read lines, write to capture.log,
  parse sample columns and push JSON samples into sample_queue (for /ws).
- Create a new CSV file for each run and append parsed samples into it.
- /log_ws streams raw capture log lines, /ws streams parsed samples.
- Camera MJPEG stream at /video_feed retained.
"""
import os
import time
import json
import threading
import asyncio
import subprocess
from datetime import datetime
from fastapi import FastAPI, WebSocket, WebSocketDisconnect, Request
from fastapi.responses import StreamingResponse, FileResponse, JSONResponse
from fastapi.staticfiles import StaticFiles

import cv2

# ---------------- CONFIG ----------------
CSV_DIR = "/home/pi/acc_data"            # output directory for CSV files (created at capture start if missing)
CAPTURE_CWD = "/home/pi/acc_module/AI0"  # where your ./main resides
CAPTURE_EXE = "./main"
CAPTURE_CMD = ["sudo", "./main"]         # if using sudo, configure /etc/sudoers with NOPASSWD or run the backend as root
CAPTURE_LOG = "/home/pi/capture.log"
CAMERA_INDEX = 0

# queue sizes
SAMPLE_QUEUE_MAX = 8000
LOG_QUEUE_MAX = 2000

POLL_INTERVAL = 0.05                     # currently unused
# ----------------------------------------

app = FastAPI(title="Glass Curtain Wall Structural Adhesive Detection System")
# static directory: make sure your index.html is at ./static/index.html
app.mount("/static", StaticFiles(directory="static"), name="static")

@app.get("/")
def index():
    return FileResponse(os.path.join("static", "index.html"))

# async queues shared with websockets
sample_queue: asyncio.Queue = asyncio.Queue(maxsize=SAMPLE_QUEUE_MAX)
log_queue: asyncio.Queue = asyncio.Queue(maxsize=LOG_QUEUE_MAX)
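# NOTE: these queues are created at import time. On Python >= 3.10 an asyncio.Queue only
# binds to the event loop when it is first awaited, so sharing them with uvicorn's loop is
# fine; on older interpreters they may bind to a different loop at construction, in which
# case creating them inside the startup handler is safer.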

# ---- helper: parse a capture stdout line into sample dict (if possible) ----
def try_parse_sample_from_line(line: str):
    """
    Expect a CSV-like line or whitespace-separated values containing at least:
    t, volt, accel_g, accel_mps2 (like your old csv)
    Return dict or None if cannot parse.
    """
    s = line.strip()
    if not s:
        return None
    # try CSV split first (';' is also accepted as a separator)
    parts = [p.strip() for p in s.replace(';', ',').split(',') if p.strip() != '']
    if len(parts) < 4:
        # fall back to whitespace split (split() already drops empty fields)
        parts = s.split()
    if len(parts) < 4:
        return None
    try:
        t = float(parts[0])
        volt = float(parts[1])
        accel_g = float(parts[2])
        accel_mps2 = float(parts[3])
        return {"t": t, "volt": volt, "accel_g": accel_g, "accel_mps2": accel_mps2}
    except Exception:
        return None
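
# Illustrative behaviour of try_parse_sample_from_line (the numbers below are made up,
# not real sensor output):
#   try_parse_sample_from_line("0.002,1.2503,0.1275,1.2507")
#       -> {"t": 0.002, "volt": 1.2503, "accel_g": 0.1275, "accel_mps2": 1.2507}
#   try_parse_sample_from_line("0.002 1.2503 0.1275 1.2507")   # whitespace form also works
#   try_parse_sample_from_line("AI0 capture started")          # -> None (not a sample line)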

# ---- capture process reader thread (reads stdout, writes log, csv, pushes queues) ----
def capture_reader(proc: subprocess.Popen, loop: asyncio.AbstractEventLoop, stop_event: threading.Event, csv_file_path: str):
    """
    Read lines from proc.stdout until stop_event is set or proc ends.
    Write raw lines to capture.log (append) and to log_queue.
    Parse sample lines and append to csv_file and push to sample_queue.
    """
    # open capture.log for appending (text mode)
    try:
        logf = open(CAPTURE_LOG, "a", encoding="utf-8", errors="ignore")
    except Exception as e:
        print("[capture_reader] open capture.log failed:", e)
        logf = None

    csvf = None
    try:
        os.makedirs(os.path.dirname(csv_file_path), exist_ok=True)
        csvf = open(csv_file_path, "a", encoding="utf-8", newline="")
        # If file was empty, write header
        if os.path.getsize(csv_file_path) == 0:
            csvf.write("t,volt,accel_g,accel_mps2\n")
            csvf.flush()
    except Exception as e:
        print("[capture_reader] open csv failed:", e)
        csvf = None

    # line-wise reading
    try:
        stdout = proc.stdout
        # proc.stdout is a binary pipe (Popen was not given text=True), so readline() returns bytes
        for raw in iter(stdout.readline, b''):
            if stop_event.is_set():
                break
            # errors="ignore" makes this decode safe for any byte sequence
            line = raw.decode("utf-8", errors="ignore").rstrip("\r\n")
            # write to capture.log
            if logf:
                try:
                    logf.write(line + "\n")
                    logf.flush()
                except Exception:
                    pass
            # hand the raw line to the event loop's log_queue from this worker thread
            try:
                asyncio.run_coroutine_threadsafe(log_queue.put(line), loop)
            except Exception:
                pass

            # try parse sample and push to sample_queue + write to csv
            sample = try_parse_sample_from_line(line)
            if sample:
                # write csv
                if csvf:
                    try:
                        csvf.write("{t},{volt},{accel_g},{accel_mps2}\n".format(**sample))
                        csvf.flush()
                    except Exception:
                        pass
                # hand the parsed sample to the event loop's sample_queue
                try:
                    asyncio.run_coroutine_threadsafe(sample_queue.put(sample), loop)
                except Exception:
                    pass

        # proc.stdout EOF reached
    except Exception as e:
        print("[capture_reader] exception:", e)
    finally:
        if logf:
            try:
                logf.close()
            except Exception:
                pass
        if csvf:
            try:
                csvf.close()
            except Exception:
                pass
        print("[capture_reader] stopped, csv:", csv_file_path)

# ---- start/stop capture endpoints ----
def _is_capture_running():
    p = getattr(app.state, "capture_proc", None)
    return (p is not None) and (p.poll() is None)

@app.post("/start_capture")
async def start_capture(req: Request):
    """
    Start capture subprocess, create new CSV file and spawn reader thread.
    Response: {"status":"started","pid":..,"csv": "<path>"} or {"status":"already_running"}
    """
    if _is_capture_running():
        return JSONResponse({"status":"already_running", "pid": app.state.capture_proc.pid}, status_code=200)

    # ensure CAPTURE_CWD exists
    if not os.path.isdir(CAPTURE_CWD):
        return JSONResponse({"status":"failed", "reason": f"CAPTURE_CWD not found: {CAPTURE_CWD}"}, status_code=500)

    # ensure CSV_DIR exists
    try:
        os.makedirs(CSV_DIR, exist_ok=True)
    except Exception as e:
        return JSONResponse({"status":"failed", "reason": f"cannot create CSV_DIR: {e}"}, status_code=500)

    # prepare csv filename
    ts = datetime.now().strftime("%Y%m%d_%H%M%S")
    csv_name = f"ai0_accel_{ts}.csv"
    csv_path = os.path.join(CSV_DIR, csv_name)

    # start process with stdout=PIPE so we can parse output in-memory.
    try:
        proc = subprocess.Popen(CAPTURE_CMD, cwd=CAPTURE_CWD, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, stdin=subprocess.PIPE)
    except Exception as e:
        return JSONResponse({"status":"failed", "reason": f"start process failed: {e}. Note: if using sudo, configure sudoers or run as root."}, status_code=500)

    # record in app.state
    app.state.capture_proc = proc
    app.state.capture_stop_event = threading.Event()
    # the running loop, needed so the reader thread can hand items back to it
    loop = asyncio.get_running_loop()

    # spawn reader thread
    t = threading.Thread(target=capture_reader, args=(proc, loop, app.state.capture_stop_event, csv_path), daemon=True)
    app.state.capture_thread = t
    app.state.capture_csv_path = csv_path
    t.start()

    print("[start_capture] started pid:", proc.pid, "csv:", csv_path)
    return JSONResponse({"status":"started", "pid": proc.pid, "csv": csv_path}, status_code=200)

@app.post("/stop_capture")
async def stop_capture():
    """
    Stop capture process and reader thread, close csv file.
    """
    proc = getattr(app.state, "capture_proc", None)
    if not proc:
        return JSONResponse({"status":"not_running"}, status_code=200)

    if proc.poll() is not None:
        # already exited
        app.state.capture_proc = None
        return JSONResponse({"status":"already_exited"}, status_code=200)

    # first, try sending a newline to stdin (simulates 'press any key' in the capture program)
    try:
        if proc.stdin:
            proc.stdin.write(b"\n")
            proc.stdin.flush()
            # give the program a short window to exit on its own
            try:
                proc.wait(timeout=2.0)
            except subprocess.TimeoutExpired:
                pass
    except Exception as e:
        print("[stop_capture] stdin send error:", e)

    # if still running, try terminate then kill
    if proc.poll() is None:
        try:
            proc.terminate()
            try:
                proc.wait(timeout=3.0)
            except subprocess.TimeoutExpired:
                proc.kill()
        except Exception as e:
            print("[stop_capture] terminate/kill error:", e)

    # signal reader to stop and join
    stop_event = getattr(app.state, "capture_stop_event", None)
    if stop_event:
        stop_event.set()
    thread = getattr(app.state, "capture_thread", None)
    if thread and thread.is_alive():
        thread.join(timeout=1.0)

    # clear state (keep csv path as record)
    csv_path = getattr(app.state, "capture_csv_path", None)
    app.state.capture_proc = None
    app.state.capture_thread = None
    app.state.capture_stop_event = None

    print("[stop_capture] stopped, csv:", csv_path)
    return JSONResponse({"status":"stopped", "csv": csv_path}, status_code=200)

# ---- /ws samples (push parsed sample JSON to the client) ----
# Note: each queue item is delivered to exactly one consumer, so with several /ws clients
# connected the samples are distributed between them rather than broadcast.
@app.websocket("/ws")
async def websocket_samples(websocket: WebSocket):
    await websocket.accept()
    print("[ws] client connected:", websocket.client)
    try:
        while True:
            sample = await sample_queue.get()
            await websocket.send_text(json.dumps(sample, ensure_ascii=False))
    except WebSocketDisconnect:
        print("[ws] disconnected:", websocket.client)
    except Exception as e:
        # send_text on a connection that dropped without a close handshake can raise
        # something other than WebSocketDisconnect
        print("[ws] connection closed:", websocket.client, e)

# ---- /log_ws: send raw capture log lines ----
@app.websocket("/log_ws")
async def log_ws(websocket: WebSocket):
    await websocket.accept()
    print("[log_ws] client connected:", websocket.client)
    try:
        while True:
            line = await log_queue.get()
            await websocket.send_text(line)
    except WebSocketDisconnect:
        print("[log_ws] disconnected:", websocket.client)

# ---- camera mjpeg route (unchanged) ----
def gen_frames():
    cap = cv2.VideoCapture(CAMERA_INDEX)
    if not cap.isOpened():
        print("[camera] cannot open index", CAMERA_INDEX)
        # end the stream instead of looping forever on a dead camera
        return
    try:
        while True:
            success, frame = cap.read()
            if not success:
                time.sleep(0.05)
                continue
            ret, buf = cv2.imencode('.jpg', frame)
            if not ret:
                continue
            frame_bytes = buf.tobytes()
            yield (b'--frame\r\n' b'Content-Type: image/jpeg\r\n\r\n' + frame_bytes + b'\r\n')
    finally:
        # release the camera when the client disconnects and the generator is closed
        cap.release()

@app.get("/video_feed")
def video_feed():
    return StreamingResponse(gen_frames(), media_type='multipart/x-mixed-replace; boundary=frame')
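
# The stream is meant to be consumed by the frontend, e.g. an <img> element in
# static/index.html whose src points at /video_feed; common browsers render
# multipart/x-mixed-replace MJPEG streams in <img> directly.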

# ---- startup tidy ----
@app.on_event("startup")
async def on_startup():
    # ensure state fields exist
    if not hasattr(app.state, "capture_proc"):
        app.state.capture_proc = None
    print("backend started")
