feat: implement Event Log Projection & View (Step 8.2)

- Add migration 0008 for event_log_by_location table with cap trigger (DDL sketched below)
- Create EventLogProjection for location-scoped event summaries
- Add GET /event-log route with location_id filtering
- Create event log templates with timeline styling
- Register EventLogProjection in eggs, feed, and move routes
- Cap events at 500 per location (trigger removes oldest)
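
The tests added in this commit pin down most of what migration 0008 has to create: the column set, a json_valid check on summary, the idx_evlog_loc_ts index, and the trg_evlog_cap trigger. As a rough orientation aid, here is a minimal DDL sketch that would satisfy those tests; it assumes TEXT ULID keys, millisecond ts_utc timestamps, and an existing locations(id) table, and the actual 0008-event-log-by-location.sql in this commit may differ in its details.

```sql
-- Sketch only: inferred from the migration tests, not copied from the real
-- 0008-event-log-by-location.sql. Column types (TEXT ULIDs, INTEGER epoch-ms
-- timestamps) are assumptions.
CREATE TABLE event_log_by_location (
    event_id    TEXT PRIMARY KEY,
    location_id TEXT NOT NULL REFERENCES locations(id),
    ts_utc      INTEGER NOT NULL,
    type        TEXT NOT NULL,
    actor       TEXT NOT NULL,
    summary     TEXT NOT NULL CHECK (json_valid(summary))
);

-- Serves location-scoped, time-ordered reads (and the trigger's LIMIT query).
CREATE INDEX idx_evlog_loc_ts ON event_log_by_location(location_id, ts_utc);

-- Keep only the newest 500 rows per location; older rows are deleted on insert.
CREATE TRIGGER trg_evlog_cap
AFTER INSERT ON event_log_by_location
BEGIN
    DELETE FROM event_log_by_location
    WHERE location_id = NEW.location_id
      AND event_id NOT IN (
          SELECT event_id FROM event_log_by_location
          WHERE location_id = NEW.location_id
          ORDER BY ts_utc DESC
          LIMIT 500
      );
END;
```

The same (location_id, ts_utc) index presumably also backs the GET /event-log view, which would read along the lines of SELECT ... WHERE location_id = ? ORDER BY ts_utc DESC. Note that the foreign-key test below only passes if PRAGMA foreign_keys = ON is enabled on the connection, which the seeded_db fixture is assumed to do.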

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
commit bce4d099c9 (parent 8e155080e4), 2025-12-30 15:15:06 +00:00
14 changed files with 1355 additions and 10 deletions


@@ -0,0 +1,267 @@
# ABOUTME: Tests for the event_log_by_location migration (0008-event-log-by-location.sql).
# ABOUTME: Validates table schema, constraints, index, and cap trigger.
import json

import apsw
import pytest


class TestMigrationCreatesTable:
    """Tests that migration creates the event_log_by_location table."""

    def test_event_log_by_location_table_exists(self, seeded_db):
        """Migration creates event_log_by_location table."""
        result = seeded_db.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='event_log_by_location'"
        ).fetchone()
        assert result is not None
        assert result[0] == "event_log_by_location"


class TestEventLogByLocationTable:
    """Tests for event_log_by_location table schema and constraints."""

    @pytest.fixture
    def valid_location_id(self, seeded_db):
        """Get Strip 1 location ID from seeds."""
        row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
        return row[0]

    def test_insert_valid_event_log(self, seeded_db, valid_location_id):
        """Can insert valid event log entry."""
        event_id = "01ARZ3NDEKTSV4RRFFQ69G5FAA"
        summary = json.dumps({"eggs": 5, "species": "duck"})
        seeded_db.execute(
            """
            INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
            VALUES (?, ?, ?, ?, ?, ?)
            """,
            (event_id, valid_location_id, 1704067200000, "ProductCollected", "ppetru", summary),
        )
        result = seeded_db.execute(
            "SELECT event_id, location_id, ts_utc, type, actor, summary FROM event_log_by_location"
        ).fetchone()
        assert result[0] == event_id
        assert result[1] == valid_location_id
        assert result[2] == 1704067200000
        assert result[3] == "ProductCollected"
        assert result[4] == "ppetru"
        assert json.loads(result[5]) == {"eggs": 5, "species": "duck"}

    def test_event_id_is_primary_key(self, seeded_db, valid_location_id):
        """event_id is the primary key."""
        event_id = "01ARZ3NDEKTSV4RRFFQ69G5FAA"
        summary = json.dumps({"msg": "test"})
        seeded_db.execute(
            """
            INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
            VALUES (?, ?, ?, ?, ?, ?)
            """,
            (event_id, valid_location_id, 1704067200000, "ProductCollected", "ppetru", summary),
        )
        # Same event_id should fail
        with pytest.raises(apsw.ConstraintError):
            seeded_db.execute(
                """
                INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
                VALUES (?, ?, ?, ?, ?, ?)
                """,
                (event_id, valid_location_id, 1704067200001, "FeedGiven", "ines", summary),
            )

    def test_location_id_foreign_key(self, seeded_db):
        """location_id must reference existing location."""
        event_id = "01ARZ3NDEKTSV4RRFFQ69G5FAA"
        invalid_location = "01ARZ3NDEKTSV4RRFFQ69XXXXX"
        summary = json.dumps({"msg": "test"})
        with pytest.raises(apsw.ConstraintError):
            seeded_db.execute(
                """
                INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
                VALUES (?, ?, ?, ?, ?, ?)
                """,
                (event_id, invalid_location, 1704067200000, "ProductCollected", "ppetru", summary),
            )

    def test_summary_must_be_valid_json(self, seeded_db, valid_location_id):
        """summary must be valid JSON."""
        event_id = "01ARZ3NDEKTSV4RRFFQ69G5FAA"
        with pytest.raises(apsw.ConstraintError):
            seeded_db.execute(
                """
                INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
                VALUES (?, ?, ?, ?, ?, ?)
                """,
                (
                    event_id,
                    valid_location_id,
                    1704067200000,
                    "ProductCollected",
                    "ppetru",
                    "not json",
                ),
            )


class TestEventLogIndex:
    """Tests for event_log_by_location index."""

    def test_location_ts_index_exists(self, seeded_db):
        """Index idx_evlog_loc_ts exists."""
        result = seeded_db.execute(
            "SELECT name FROM sqlite_master WHERE type='index' AND name='idx_evlog_loc_ts'"
        ).fetchone()
        assert result is not None
        assert result[0] == "idx_evlog_loc_ts"


class TestEventLogCapTrigger:
    """Tests for the cap trigger that limits to 500 events per location."""

    @pytest.fixture
    def valid_location_id(self, seeded_db):
        """Get Strip 1 location ID from seeds."""
        row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
        return row[0]

    @pytest.fixture
    def strip2_location_id(self, seeded_db):
        """Get Strip 2 location ID from seeds."""
        row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 2'").fetchone()
        return row[0]

    def test_trigger_exists(self, seeded_db):
        """Trigger trg_evlog_cap exists."""
        result = seeded_db.execute(
            "SELECT name FROM sqlite_master WHERE type='trigger' AND name='trg_evlog_cap'"
        ).fetchone()
        assert result is not None
        assert result[0] == "trg_evlog_cap"

    def test_trigger_caps_at_500_per_location(self, seeded_db, valid_location_id):
        """Trigger removes oldest events when more than 500 are inserted."""
        summary = json.dumps({"msg": "test"})
        # Insert 501 events
        for i in range(501):
            event_id = f"01ARZ3NDEKTSV4RRFFQ69G{i:04d}A"
            seeded_db.execute(
                """
                INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
                VALUES (?, ?, ?, ?, ?, ?)
                """,
                (
                    event_id,
                    valid_location_id,
                    1704067200000 + i,
                    "ProductCollected",
                    "ppetru",
                    summary,
                ),
            )
        # Should have exactly 500
        count = seeded_db.execute(
            "SELECT COUNT(*) FROM event_log_by_location WHERE location_id = ?",
            (valid_location_id,),
        ).fetchone()[0]
        assert count == 500
        # The oldest event (i=0) should be deleted
        oldest_event = seeded_db.execute(
            """
            SELECT event_id FROM event_log_by_location
            WHERE location_id = ?
            ORDER BY ts_utc ASC
            LIMIT 1
            """,
            (valid_location_id,),
        ).fetchone()
        # Event 0 was deleted, so oldest should be event 1
        assert oldest_event[0] == "01ARZ3NDEKTSV4RRFFQ69G0001A"

    def test_trigger_scopes_to_location(self, seeded_db, valid_location_id, strip2_location_id):
        """Cap trigger only removes events from same location."""
        summary = json.dumps({"msg": "test"})
        # Insert 500 events at location 1
        for i in range(500):
            event_id = f"01ARZ3NDEKTSV4RRFFQ69G{i:04d}A"
            seeded_db.execute(
                """
                INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
                VALUES (?, ?, ?, ?, ?, ?)
                """,
                (
                    event_id,
                    valid_location_id,
                    1704067200000 + i,
                    "ProductCollected",
                    "ppetru",
                    summary,
                ),
            )
        # Insert 1 event at location 2
        seeded_db.execute(
            """
            INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
            VALUES (?, ?, ?, ?, ?, ?)
            """,
            (
                "01ARZ3NDEKTSV4RRFFQ69GXXXXA",
                strip2_location_id,
                1704067200000,
                "FeedGiven",
                "ppetru",
                summary,
            ),
        )
        # Location 1 should still have 500
        count1 = seeded_db.execute(
            "SELECT COUNT(*) FROM event_log_by_location WHERE location_id = ?",
            (valid_location_id,),
        ).fetchone()[0]
        assert count1 == 500
        # Location 2 should have 1
        count2 = seeded_db.execute(
            "SELECT COUNT(*) FROM event_log_by_location WHERE location_id = ?",
            (strip2_location_id,),
        ).fetchone()[0]
        assert count2 == 1
        # Insert one more at location 1 to trigger cap
        seeded_db.execute(
            """
            INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
            VALUES (?, ?, ?, ?, ?, ?)
            """,
            (
                "01ARZ3NDEKTSV4RRFFQ69GYYYYA",
                valid_location_id,
                1704067200501,
                "ProductCollected",
                "ppetru",
                summary,
            ),
        )
        # Location 1 should still have 500, location 2 should still have 1
        count1 = seeded_db.execute(
            "SELECT COUNT(*) FROM event_log_by_location WHERE location_id = ?",
            (valid_location_id,),
        ).fetchone()[0]
        assert count1 == 500
        count2 = seeded_db.execute(
            "SELECT COUNT(*) FROM event_log_by_location WHERE location_id = ?",
            (strip2_location_id,),
        ).fetchone()[0]
        assert count2 == 1