Add per-request AI logging, DB batch queue, WS entity updates, and UI polish
- log_thread.py: thread-safe ContextVar bridge so executor threads can log
individual LLM calls and archive searches back to the event loop
- ai_log.py: init_thread_logging(), notify_entity_update(); WS now pushes
entity_update messages when book data changes after any plugin or batch run
- batch.py: replace batch_pending.json with batch_queue SQLite table;
run_batch_consumer() reads queue dynamically so new books can be added
while batch is running; add_to_queue() deduplicates
- migrate.py: fix _migrate_v1 (clear-on-startup bug); add _migrate_v2 for
batch_queue table
- _client.py / archive.py / identification.py: wrap each LLM API call and
archive search with log_thread start/finish entries
- api.py: POST /api/batch returns {already_running, added}; notify_entity_update
after identify pipeline
- models.default.yaml: strengthen ai_identify confidence-scoring instructions;
warn against placeholder data
- detail-render.js: book log entries show clickable ID + spine thumbnail;
book spine/title images open full-screen popup
- events.js: batch-start handles already_running+added; open-img-popup action
- init.js: entity_update WS handler; image popup close listeners
- overlays.css / index.html: full-screen image popup overlay
- eslint.config.js: add new globals; fix no-redeclare/no-unused-vars for
multi-file global architecture; all lint errors resolved
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
115
src/db.py
115
src/db.py
@@ -5,11 +5,13 @@ No file I/O, no config, no business logic. All SQL lives here.
|
||||
|
||||
import json
|
||||
import sqlite3
|
||||
import time
|
||||
import uuid
|
||||
from collections.abc import Iterator
|
||||
from contextlib import contextmanager
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from mashumaro.codecs import BasicDecoder
|
||||
|
||||
@@ -67,7 +69,24 @@ CREATE TABLE IF NOT EXISTS books (
|
||||
title_confidence REAL DEFAULT 0,
|
||||
analyzed_at TEXT,
|
||||
created_at TEXT NOT NULL,
|
||||
candidates TEXT DEFAULT NULL
|
||||
candidates TEXT DEFAULT NULL,
|
||||
ai_blocks TEXT DEFAULT NULL
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS ai_log (
|
||||
id TEXT PRIMARY KEY,
|
||||
ts REAL NOT NULL,
|
||||
plugin_id TEXT NOT NULL,
|
||||
entity_type TEXT NOT NULL,
|
||||
entity_id TEXT NOT NULL,
|
||||
model TEXT NOT NULL,
|
||||
request TEXT NOT NULL DEFAULT '',
|
||||
status TEXT NOT NULL DEFAULT 'running',
|
||||
response TEXT NOT NULL DEFAULT '',
|
||||
duration_ms INTEGER NOT NULL DEFAULT 0
|
||||
);
|
||||
CREATE TABLE IF NOT EXISTS batch_queue (
|
||||
book_id TEXT PRIMARY KEY,
|
||||
added_at REAL NOT NULL
|
||||
);
|
||||
"""
|
||||
|
||||
@@ -413,11 +432,12 @@ def create_book(db: sqlite3.Connection, shelf_id: str) -> BookRow:
|
||||
"analyzed_at": None,
|
||||
"created_at": now(),
|
||||
"candidates": None,
|
||||
"ai_blocks": None,
|
||||
}
|
||||
db.execute(
|
||||
"INSERT INTO books VALUES(:id,:shelf_id,:position,:image_filename,:title,:author,:year,:isbn,:publisher,"
|
||||
":notes,:raw_text,:ai_title,:ai_author,:ai_year,:ai_isbn,:ai_publisher,:identification_status,"
|
||||
":title_confidence,:analyzed_at,:created_at,:candidates)",
|
||||
":title_confidence,:analyzed_at,:created_at,:candidates,:ai_blocks)",
|
||||
data,
|
||||
)
|
||||
return _book_dec.decode(data)
|
||||
@@ -494,6 +514,10 @@ def set_book_candidates(db: sqlite3.Connection, book_id: str, candidates_json: s
|
||||
db.execute("UPDATE books SET candidates=? WHERE id=?", [candidates_json, book_id])
|
||||
|
||||
|
||||
def set_book_ai_blocks(db: sqlite3.Connection, book_id: str, ai_blocks_json: str) -> None:
    """Store the serialized AI-blocks JSON on the given book row.

    Args:
        db: Open database connection (must be writable).
        book_id: ID of the book row to update; a non-existent ID is a no-op.
        ai_blocks_json: JSON string to write into the ``ai_blocks`` column.
    """
    params = [ai_blocks_json, book_id]
    db.execute("UPDATE books SET ai_blocks=? WHERE id=?", params)
|
||||
|
||||
|
||||
def get_book_rank(db: sqlite3.Connection, book_id: str) -> int:
|
||||
"""0-based rank of book among its siblings sorted by position."""
|
||||
row = db.execute("SELECT shelf_id FROM books WHERE id=?", [book_id]).fetchone()
|
||||
@@ -513,3 +537,90 @@ def get_unidentified_book_ids(db: sqlite3.Connection) -> list[str]:
|
||||
def reorder_entities(db: sqlite3.Connection, table: str, ids: list[str]) -> None:
    """Rewrite positions so the rows in ``table`` follow the order of ``ids``.

    Positions are 1-based; IDs not present in the table are silently skipped
    by the UPDATE.

    NOTE: ``table`` is interpolated into the SQL string, so callers must only
    pass trusted, hard-coded table names — never user input.
    """
    ranked = [(rank, entity_id) for rank, entity_id in enumerate(ids, 1)]
    db.executemany(f"UPDATE {table} SET position=? WHERE id=?", ranked)
|
||||
|
||||
|
||||
# ── AI log ────────────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
def insert_ai_log_entry(
    db: sqlite3.Connection,
    entry_id: str,
    ts: float,
    plugin_id: str,
    entity_type: str,
    entity_id: str,
    model: str,
    request: str,
) -> None:
    """Insert a new AI log entry with status='running'.

    The ``status``, ``response`` and ``duration_ms`` columns fall back to
    their schema defaults; a later update records the final outcome.
    Re-inserting an existing ``entry_id`` is a no-op (INSERT OR IGNORE),
    so the first insert for a given id wins.
    """
    sql = (
        "INSERT OR IGNORE INTO ai_log"
        " (id, ts, plugin_id, entity_type, entity_id, model, request) VALUES (?,?,?,?,?,?,?)"
    )
    row = [entry_id, ts, plugin_id, entity_type, entity_id, model, request]
    db.execute(sql, row)
|
||||
|
||||
|
||||
def update_ai_log_entry(db: sqlite3.Connection, entry_id: str, status: str, response: str, duration_ms: int) -> None:
    """Record the final outcome of a previously inserted AI log entry.

    Args:
        db: Open database connection (must be writable).
        entry_id: ID of the log row; an unknown ID is a no-op.
        status: Final status string (replaces the initial 'running').
        response: Serialized response payload.
        duration_ms: Elapsed call time in milliseconds.
    """
    sql = "UPDATE ai_log SET status=?, response=?, duration_ms=? WHERE id=?"
    db.execute(sql, [status, response, duration_ms, entry_id])
|
||||
|
||||
|
||||
def get_ai_log_entries(db: sqlite3.Connection, limit: int) -> list[dict[str, Any]]:
    """Return the most recent AI log entries, oldest first.

    Args:
        db: Open database connection; its row_factory must produce mapping
            rows (e.g. sqlite3.Row) so each row converts cleanly to a dict.
        limit: Maximum number of entries to return.

    Returns:
        Up to ``limit`` entries as plain dicts, ordered oldest first.
    """
    # Tie-break equal timestamps on rowid (insertion order): entries logged
    # back-to-back can share the same time.time() value, and ordering by ts
    # alone would make their relative order unspecified.
    rows = db.execute(
        "SELECT id, ts, plugin_id, entity_type, entity_id, model, request, status, response, duration_ms"
        " FROM ai_log ORDER BY ts DESC, rowid DESC LIMIT ?",
        [limit],
    ).fetchall()
    return [dict(r) for r in reversed(rows)]
|
||||
|
||||
|
||||
# ── Batch queue ────────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
def add_to_batch_queue(db: sqlite3.Connection, book_ids: list[str]) -> None:
    """Insert book IDs into the batch queue, ignoring duplicates.

    All IDs in one call share the same ``added_at`` timestamp.

    Args:
        db: Open database connection (must be writable).
        book_ids: Book IDs to enqueue; an empty list is a no-op.
    """
    added_at = time.time()
    rows = ((book_id, added_at) for book_id in book_ids)
    db.executemany("INSERT OR IGNORE INTO batch_queue (book_id, added_at) VALUES (?,?)", rows)
|
||||
|
||||
|
||||
def remove_from_batch_queue(db: sqlite3.Connection, book_id: str) -> None:
    """Remove a single book ID from the batch queue.

    Args:
        db: Open database connection (must be writable).
        book_id: Book ID to dequeue; an ID not in the queue is a no-op.
    """
    sql = "DELETE FROM batch_queue WHERE book_id=?"
    db.execute(sql, (book_id,))
|
||||
|
||||
|
||||
def get_batch_queue(db: sqlite3.Connection) -> list[str]:
    """Return all queued book IDs ordered by insertion time (oldest first).

    Args:
        db: Open database connection.

    Returns:
        List of book ID strings; empty when nothing is queued.
    """
    cursor = db.execute("SELECT book_id FROM batch_queue ORDER BY added_at")
    return [str(book_id) for (book_id,) in cursor]
|
||||
|
||||
|
||||
def clear_batch_queue(db: sqlite3.Connection) -> None:
    """Drop every entry from the batch queue, leaving it empty.

    Args:
        db: Open database connection (must be writable).
    """
    db.execute("DELETE FROM batch_queue")
|
||||
|
||||
Reference in New Issue
Block a user