Add per-request AI logging, DB batch queue, WS entity updates, and UI polish
- log_thread.py: thread-safe ContextVar bridge so executor threads can log
individual LLM calls and archive searches back to the event loop
- ai_log.py: init_thread_logging(), notify_entity_update(); WS now pushes
entity_update messages when book data changes after any plugin or batch run
- batch.py: replace batch_pending.json with batch_queue SQLite table;
run_batch_consumer() reads queue dynamically so new books can be added
while batch is running; add_to_queue() deduplicates
- migrate.py: fix _migrate_v1 (clear-on-startup bug); add _migrate_v2 for
batch_queue table
- _client.py / archive.py / identification.py: wrap each LLM API call and
archive search with log_thread start/finish entries
- api.py: POST /api/batch returns {already_running, added}; notify_entity_update
after identify pipeline
- models.default.yaml: strengthen ai_identify confidence-scoring instructions;
warn against placeholder data
- detail-render.js: book log entries show clickable ID + spine thumbnail;
book spine/title images open full-screen popup
- events.js: batch-start handles already_running+added; open-img-popup action
- init.js: entity_update WS handler; image popup close listeners
- overlays.css / index.html: full-screen image popup overlay
- eslint.config.js: add new globals; fix no-redeclare/no-unused-vars for
multi-file global architecture; all lint errors resolved
Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
72
src/migrate.py
Normal file
72
src/migrate.py
Normal file
@@ -0,0 +1,72 @@
|
||||
"""Database migration functions.
|
||||
|
||||
Each migration is idempotent and safe to run on a database that has already been migrated.
|
||||
Run via run_migration() called from app startup after init_db().
|
||||
"""
|
||||
|
||||
import sqlite3
|
||||
|
||||
from db import DB_PATH
|
||||
|
||||
|
||||
def run_migration() -> None:
    """Apply all pending schema migrations in order.

    Currently applies:
    - v1: Add ai_blocks column to books; clear AI-derived data while preserving user data.
    - v2: Add batch_queue table for persistent batch processing queue.

    Migrations are idempotent — running them on an already-migrated database is a no-op.
    All steps run in one transaction: any failure rolls everything back and re-raises.
    """
    conn = sqlite3.connect(DB_PATH)
    conn.row_factory = sqlite3.Row
    conn.execute("PRAGMA foreign_keys = ON")
    try:
        # Run each versioned migration in order; commit only if all succeed.
        for step in (_migrate_v1, _migrate_v2):
            step(conn)
        conn.commit()
    except Exception:
        conn.rollback()
        raise
    finally:
        conn.close()
|
||||
|
||||
|
||||
def _migrate_v1(c: sqlite3.Connection) -> None:
|
||||
"""Add ai_blocks column and clear stale AI data from all books (first run only).
|
||||
|
||||
- Adds ai_blocks TEXT DEFAULT NULL column if it does not exist.
|
||||
- On first run only (when the column is absent): clears raw_text, ai_*, title_confidence,
|
||||
analyzed_at, candidates, ai_blocks from all books (these are regenerated by the new pipeline).
|
||||
- For user_approved books: copies user fields back to ai_* so that
|
||||
compute_status() still returns 'user_approved' after the ai_* clear.
|
||||
|
||||
This migration assumes the database already has the base books schema.
|
||||
It is a no-op if ai_blocks already exists.
|
||||
"""
|
||||
cols = {row["name"] for row in c.execute("PRAGMA table_info(books)")}
|
||||
if "ai_blocks" not in cols:
|
||||
c.execute("ALTER TABLE books ADD COLUMN ai_blocks TEXT DEFAULT NULL")
|
||||
|
||||
# Clear AI-derived fields only when first adding the column.
|
||||
c.execute(
|
||||
"UPDATE books SET "
|
||||
"raw_text='', ai_title='', ai_author='', ai_year='', ai_isbn='', ai_publisher='', "
|
||||
"title_confidence=0, analyzed_at=NULL, candidates=NULL, ai_blocks=NULL"
|
||||
)
|
||||
|
||||
# For user_approved books, restore ai_* = user fields so status stays user_approved.
|
||||
c.execute(
|
||||
"UPDATE books SET "
|
||||
"ai_title=title, ai_author=author, ai_year=year, ai_isbn=isbn, ai_publisher=publisher "
|
||||
"WHERE identification_status='user_approved'"
|
||||
)
|
||||
|
||||
|
||||
def _migrate_v2(c: sqlite3.Connection) -> None:
|
||||
"""Add batch_queue table for persistent batch processing queue.
|
||||
|
||||
Replaces data/batch_pending.json with a DB table so batch state survives
|
||||
across restarts alongside all other persistent data.
|
||||
"""
|
||||
c.execute("CREATE TABLE IF NOT EXISTS batch_queue (" "book_id TEXT PRIMARY KEY," "added_at REAL NOT NULL" ")")
|
||||
Reference in New Issue
Block a user