feat: implement Event Log Projection & View (Step 8.2)
- Add migration 0008 for event_log_by_location table with cap trigger - Create EventLogProjection for location-scoped event summaries - Add GET /event-log route with location_id filtering - Create event log templates with timeline styling - Register EventLogProjection in eggs, feed, and move routes - Cap events at 500 per location (trigger removes oldest) 🤖 Generated with [Claude Code](https://claude.com/claude-code) Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
This commit is contained in:
267
tests/test_migration_event_log.py
Normal file
267
tests/test_migration_event_log.py
Normal file
@@ -0,0 +1,267 @@
|
||||
# ABOUTME: Tests for the event_log_by_location migration (0008-event-log-by-location.sql).
|
||||
# ABOUTME: Validates table schema, constraints, index, and cap trigger.
|
||||
|
||||
import json
|
||||
|
||||
import apsw
|
||||
import pytest
|
||||
|
||||
|
||||
class TestMigrationCreatesTable:
|
||||
"""Tests that migration creates the event_log_by_location table."""
|
||||
|
||||
def test_event_log_by_location_table_exists(self, seeded_db):
|
||||
"""Migration creates event_log_by_location table."""
|
||||
result = seeded_db.execute(
|
||||
"SELECT name FROM sqlite_master WHERE type='table' AND name='event_log_by_location'"
|
||||
).fetchone()
|
||||
assert result is not None
|
||||
assert result[0] == "event_log_by_location"
|
||||
|
||||
|
||||
class TestEventLogByLocationTable:
|
||||
"""Tests for event_log_by_location table schema and constraints."""
|
||||
|
||||
@pytest.fixture
|
||||
def valid_location_id(self, seeded_db):
|
||||
"""Get Strip 1 location ID from seeds."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
return row[0]
|
||||
|
||||
def test_insert_valid_event_log(self, seeded_db, valid_location_id):
|
||||
"""Can insert valid event log entry."""
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5FAA"
|
||||
summary = json.dumps({"eggs": 5, "species": "duck"})
|
||||
|
||||
seeded_db.execute(
|
||||
"""
|
||||
INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(event_id, valid_location_id, 1704067200000, "ProductCollected", "ppetru", summary),
|
||||
)
|
||||
result = seeded_db.execute(
|
||||
"SELECT event_id, location_id, ts_utc, type, actor, summary FROM event_log_by_location"
|
||||
).fetchone()
|
||||
assert result[0] == event_id
|
||||
assert result[1] == valid_location_id
|
||||
assert result[2] == 1704067200000
|
||||
assert result[3] == "ProductCollected"
|
||||
assert result[4] == "ppetru"
|
||||
assert json.loads(result[5]) == {"eggs": 5, "species": "duck"}
|
||||
|
||||
def test_event_id_is_primary_key(self, seeded_db, valid_location_id):
|
||||
"""event_id is the primary key."""
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5FAA"
|
||||
summary = json.dumps({"msg": "test"})
|
||||
|
||||
seeded_db.execute(
|
||||
"""
|
||||
INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(event_id, valid_location_id, 1704067200000, "ProductCollected", "ppetru", summary),
|
||||
)
|
||||
# Same event_id should fail
|
||||
with pytest.raises(apsw.ConstraintError):
|
||||
seeded_db.execute(
|
||||
"""
|
||||
INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(event_id, valid_location_id, 1704067200001, "FeedGiven", "ines", summary),
|
||||
)
|
||||
|
||||
def test_location_id_foreign_key(self, seeded_db):
|
||||
"""location_id must reference existing location."""
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5FAA"
|
||||
invalid_location = "01ARZ3NDEKTSV4RRFFQ69XXXXX"
|
||||
summary = json.dumps({"msg": "test"})
|
||||
|
||||
with pytest.raises(apsw.ConstraintError):
|
||||
seeded_db.execute(
|
||||
"""
|
||||
INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(event_id, invalid_location, 1704067200000, "ProductCollected", "ppetru", summary),
|
||||
)
|
||||
|
||||
def test_summary_must_be_valid_json(self, seeded_db, valid_location_id):
|
||||
"""summary must be valid JSON."""
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5FAA"
|
||||
|
||||
with pytest.raises(apsw.ConstraintError):
|
||||
seeded_db.execute(
|
||||
"""
|
||||
INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(
|
||||
event_id,
|
||||
valid_location_id,
|
||||
1704067200000,
|
||||
"ProductCollected",
|
||||
"ppetru",
|
||||
"not json",
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class TestEventLogIndex:
|
||||
"""Tests for event_log_by_location index."""
|
||||
|
||||
def test_location_ts_index_exists(self, seeded_db):
|
||||
"""Index idx_evlog_loc_ts exists."""
|
||||
result = seeded_db.execute(
|
||||
"SELECT name FROM sqlite_master WHERE type='index' AND name='idx_evlog_loc_ts'"
|
||||
).fetchone()
|
||||
assert result is not None
|
||||
assert result[0] == "idx_evlog_loc_ts"
|
||||
|
||||
|
||||
class TestEventLogCapTrigger:
|
||||
"""Tests for the cap trigger that limits to 500 events per location."""
|
||||
|
||||
@pytest.fixture
|
||||
def valid_location_id(self, seeded_db):
|
||||
"""Get Strip 1 location ID from seeds."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
return row[0]
|
||||
|
||||
@pytest.fixture
|
||||
def strip2_location_id(self, seeded_db):
|
||||
"""Get Strip 2 location ID from seeds."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 2'").fetchone()
|
||||
return row[0]
|
||||
|
||||
def test_trigger_exists(self, seeded_db):
|
||||
"""Trigger trg_evlog_cap exists."""
|
||||
result = seeded_db.execute(
|
||||
"SELECT name FROM sqlite_master WHERE type='trigger' AND name='trg_evlog_cap'"
|
||||
).fetchone()
|
||||
assert result is not None
|
||||
assert result[0] == "trg_evlog_cap"
|
||||
|
||||
def test_trigger_caps_at_500_per_location(self, seeded_db, valid_location_id):
|
||||
"""Trigger removes oldest events when more than 500 are inserted."""
|
||||
summary = json.dumps({"msg": "test"})
|
||||
|
||||
# Insert 501 events
|
||||
for i in range(501):
|
||||
event_id = f"01ARZ3NDEKTSV4RRFFQ69G{i:04d}A"
|
||||
seeded_db.execute(
|
||||
"""
|
||||
INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(
|
||||
event_id,
|
||||
valid_location_id,
|
||||
1704067200000 + i,
|
||||
"ProductCollected",
|
||||
"ppetru",
|
||||
summary,
|
||||
),
|
||||
)
|
||||
|
||||
# Should have exactly 500
|
||||
count = seeded_db.execute(
|
||||
"SELECT COUNT(*) FROM event_log_by_location WHERE location_id = ?",
|
||||
(valid_location_id,),
|
||||
).fetchone()[0]
|
||||
assert count == 500
|
||||
|
||||
# The oldest event (i=0) should be deleted
|
||||
oldest_event = seeded_db.execute(
|
||||
"""
|
||||
SELECT event_id FROM event_log_by_location
|
||||
WHERE location_id = ?
|
||||
ORDER BY ts_utc ASC
|
||||
LIMIT 1
|
||||
""",
|
||||
(valid_location_id,),
|
||||
).fetchone()
|
||||
# Event 0 was deleted, so oldest should be event 1
|
||||
assert oldest_event[0] == "01ARZ3NDEKTSV4RRFFQ69G0001A"
|
||||
|
||||
def test_trigger_scopes_to_location(self, seeded_db, valid_location_id, strip2_location_id):
|
||||
"""Cap trigger only removes events from same location."""
|
||||
summary = json.dumps({"msg": "test"})
|
||||
|
||||
# Insert 500 events at location 1
|
||||
for i in range(500):
|
||||
event_id = f"01ARZ3NDEKTSV4RRFFQ69G{i:04d}A"
|
||||
seeded_db.execute(
|
||||
"""
|
||||
INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(
|
||||
event_id,
|
||||
valid_location_id,
|
||||
1704067200000 + i,
|
||||
"ProductCollected",
|
||||
"ppetru",
|
||||
summary,
|
||||
),
|
||||
)
|
||||
|
||||
# Insert 1 event at location 2
|
||||
seeded_db.execute(
|
||||
"""
|
||||
INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(
|
||||
"01ARZ3NDEKTSV4RRFFQ69GXXXXA",
|
||||
strip2_location_id,
|
||||
1704067200000,
|
||||
"FeedGiven",
|
||||
"ppetru",
|
||||
summary,
|
||||
),
|
||||
)
|
||||
|
||||
# Location 1 should still have 500
|
||||
count1 = seeded_db.execute(
|
||||
"SELECT COUNT(*) FROM event_log_by_location WHERE location_id = ?",
|
||||
(valid_location_id,),
|
||||
).fetchone()[0]
|
||||
assert count1 == 500
|
||||
|
||||
# Location 2 should have 1
|
||||
count2 = seeded_db.execute(
|
||||
"SELECT COUNT(*) FROM event_log_by_location WHERE location_id = ?",
|
||||
(strip2_location_id,),
|
||||
).fetchone()[0]
|
||||
assert count2 == 1
|
||||
|
||||
# Insert one more at location 1 to trigger cap
|
||||
seeded_db.execute(
|
||||
"""
|
||||
INSERT INTO event_log_by_location (event_id, location_id, ts_utc, type, actor, summary)
|
||||
VALUES (?, ?, ?, ?, ?, ?)
|
||||
""",
|
||||
(
|
||||
"01ARZ3NDEKTSV4RRFFQ69GYYYYA",
|
||||
valid_location_id,
|
||||
1704067200501,
|
||||
"ProductCollected",
|
||||
"ppetru",
|
||||
summary,
|
||||
),
|
||||
)
|
||||
|
||||
# Location 1 should still have 500, location 2 should still have 1
|
||||
count1 = seeded_db.execute(
|
||||
"SELECT COUNT(*) FROM event_log_by_location WHERE location_id = ?",
|
||||
(valid_location_id,),
|
||||
).fetchone()[0]
|
||||
assert count1 == 500
|
||||
|
||||
count2 = seeded_db.execute(
|
||||
"SELECT COUNT(*) FROM event_log_by_location WHERE location_id = ?",
|
||||
(strip2_location_id,),
|
||||
).fetchone()[0]
|
||||
assert count2 == 1
|
||||
454
tests/test_projection_event_log.py
Normal file
454
tests/test_projection_event_log.py
Normal file
@@ -0,0 +1,454 @@
|
||||
# ABOUTME: Tests for EventLogProjection.
|
||||
# ABOUTME: Validates event log entries are created for location-scoped events.
|
||||
|
||||
import json
|
||||
|
||||
from animaltrack.events.types import (
|
||||
ANIMAL_COHORT_CREATED,
|
||||
ANIMAL_MOVED,
|
||||
FEED_GIVEN,
|
||||
FEED_PURCHASED,
|
||||
HATCH_RECORDED,
|
||||
PRODUCT_COLLECTED,
|
||||
PRODUCT_SOLD,
|
||||
)
|
||||
from animaltrack.models.events import Event
|
||||
from animaltrack.projections.event_log import EventLogProjection
|
||||
|
||||
|
||||
def make_product_collected_event(
|
||||
event_id: str,
|
||||
location_id: str,
|
||||
animal_ids: list[str],
|
||||
quantity: int = 5,
|
||||
ts_utc: int = 1704067200000,
|
||||
) -> Event:
|
||||
"""Create a test ProductCollected event."""
|
||||
return Event(
|
||||
id=event_id,
|
||||
type=PRODUCT_COLLECTED,
|
||||
ts_utc=ts_utc,
|
||||
actor="test_user",
|
||||
entity_refs={
|
||||
"location_id": location_id,
|
||||
"animal_ids": animal_ids,
|
||||
},
|
||||
payload={
|
||||
"location_id": location_id,
|
||||
"product_code": "egg.duck",
|
||||
"quantity": quantity,
|
||||
"resolved_ids": animal_ids,
|
||||
"notes": None,
|
||||
},
|
||||
version=1,
|
||||
)
|
||||
|
||||
|
||||
def make_cohort_event(
|
||||
event_id: str,
|
||||
location_id: str,
|
||||
animal_ids: list[str],
|
||||
species: str = "duck",
|
||||
ts_utc: int = 1704067200000,
|
||||
) -> Event:
|
||||
"""Create a test AnimalCohortCreated event."""
|
||||
return Event(
|
||||
id=event_id,
|
||||
type=ANIMAL_COHORT_CREATED,
|
||||
ts_utc=ts_utc,
|
||||
actor="test_user",
|
||||
entity_refs={
|
||||
"location_id": location_id,
|
||||
"animal_ids": animal_ids,
|
||||
},
|
||||
payload={
|
||||
"species": species,
|
||||
"count": len(animal_ids),
|
||||
"life_stage": "adult",
|
||||
"sex": "unknown",
|
||||
"location_id": location_id,
|
||||
"origin": "purchased",
|
||||
"notes": None,
|
||||
},
|
||||
version=1,
|
||||
)
|
||||
|
||||
|
||||
def make_feed_given_event(
|
||||
event_id: str,
|
||||
location_id: str,
|
||||
feed_type_code: str = "layer",
|
||||
amount_kg: int = 5,
|
||||
ts_utc: int = 1704067200000,
|
||||
) -> Event:
|
||||
"""Create a test FeedGiven event."""
|
||||
return Event(
|
||||
id=event_id,
|
||||
type=FEED_GIVEN,
|
||||
ts_utc=ts_utc,
|
||||
actor="test_user",
|
||||
entity_refs={
|
||||
"location_id": location_id,
|
||||
},
|
||||
payload={
|
||||
"location_id": location_id,
|
||||
"feed_type_code": feed_type_code,
|
||||
"amount_kg": amount_kg,
|
||||
"notes": None,
|
||||
},
|
||||
version=1,
|
||||
)
|
||||
|
||||
|
||||
def make_feed_purchased_event(
|
||||
event_id: str,
|
||||
ts_utc: int = 1704067200000,
|
||||
) -> Event:
|
||||
"""Create a test FeedPurchased event (no location)."""
|
||||
return Event(
|
||||
id=event_id,
|
||||
type=FEED_PURCHASED,
|
||||
ts_utc=ts_utc,
|
||||
actor="test_user",
|
||||
entity_refs={},
|
||||
payload={
|
||||
"feed_type_code": "layer",
|
||||
"bag_size_kg": 20,
|
||||
"bags_count": 1,
|
||||
"bag_price_cents": 2500,
|
||||
"vendor": None,
|
||||
"notes": None,
|
||||
},
|
||||
version=1,
|
||||
)
|
||||
|
||||
|
||||
def make_product_sold_event(
|
||||
event_id: str,
|
||||
ts_utc: int = 1704067200000,
|
||||
) -> Event:
|
||||
"""Create a test ProductSold event (no location)."""
|
||||
return Event(
|
||||
id=event_id,
|
||||
type=PRODUCT_SOLD,
|
||||
ts_utc=ts_utc,
|
||||
actor="test_user",
|
||||
entity_refs={},
|
||||
payload={
|
||||
"product_code": "egg.duck",
|
||||
"quantity": 30,
|
||||
"total_price_cents": 900,
|
||||
"buyer": None,
|
||||
"notes": None,
|
||||
},
|
||||
version=1,
|
||||
)
|
||||
|
||||
|
||||
def make_animal_moved_event(
|
||||
event_id: str,
|
||||
to_location_id: str,
|
||||
animal_ids: list[str],
|
||||
ts_utc: int = 1704067200000,
|
||||
) -> Event:
|
||||
"""Create a test AnimalMoved event."""
|
||||
return Event(
|
||||
id=event_id,
|
||||
type=ANIMAL_MOVED,
|
||||
ts_utc=ts_utc,
|
||||
actor="test_user",
|
||||
entity_refs={
|
||||
"to_location_id": to_location_id,
|
||||
"animal_ids": animal_ids,
|
||||
},
|
||||
payload={
|
||||
"to_location_id": to_location_id,
|
||||
"resolved_ids": animal_ids,
|
||||
"notes": None,
|
||||
},
|
||||
version=1,
|
||||
)
|
||||
|
||||
|
||||
def make_hatch_event(
|
||||
event_id: str,
|
||||
location_id: str,
|
||||
hatched_live: int = 5,
|
||||
ts_utc: int = 1704067200000,
|
||||
) -> Event:
|
||||
"""Create a test HatchRecorded event."""
|
||||
return Event(
|
||||
id=event_id,
|
||||
type=HATCH_RECORDED,
|
||||
ts_utc=ts_utc,
|
||||
actor="test_user",
|
||||
entity_refs={
|
||||
"location_id": location_id,
|
||||
},
|
||||
payload={
|
||||
"species": "duck",
|
||||
"location_id": location_id,
|
||||
"assigned_brood_location_id": None,
|
||||
"hatched_live": hatched_live,
|
||||
"notes": None,
|
||||
},
|
||||
version=1,
|
||||
)
|
||||
|
||||
|
||||
class TestEventLogProjectionEventTypes:
|
||||
"""Tests for get_event_types method."""
|
||||
|
||||
def test_handles_product_collected(self, seeded_db):
|
||||
"""Projection handles ProductCollected event type."""
|
||||
projection = EventLogProjection(seeded_db)
|
||||
assert PRODUCT_COLLECTED in projection.get_event_types()
|
||||
|
||||
def test_handles_animal_cohort_created(self, seeded_db):
|
||||
"""Projection handles AnimalCohortCreated event type."""
|
||||
projection = EventLogProjection(seeded_db)
|
||||
assert ANIMAL_COHORT_CREATED in projection.get_event_types()
|
||||
|
||||
def test_handles_feed_given(self, seeded_db):
|
||||
"""Projection handles FeedGiven event type."""
|
||||
projection = EventLogProjection(seeded_db)
|
||||
assert FEED_GIVEN in projection.get_event_types()
|
||||
|
||||
def test_handles_animal_moved(self, seeded_db):
|
||||
"""Projection handles AnimalMoved event type."""
|
||||
projection = EventLogProjection(seeded_db)
|
||||
assert ANIMAL_MOVED in projection.get_event_types()
|
||||
|
||||
def test_handles_hatch_recorded(self, seeded_db):
|
||||
"""Projection handles HatchRecorded event type."""
|
||||
projection = EventLogProjection(seeded_db)
|
||||
assert HATCH_RECORDED in projection.get_event_types()
|
||||
|
||||
def test_does_not_handle_feed_purchased(self, seeded_db):
|
||||
"""Projection does not handle FeedPurchased (no location)."""
|
||||
projection = EventLogProjection(seeded_db)
|
||||
assert FEED_PURCHASED not in projection.get_event_types()
|
||||
|
||||
def test_does_not_handle_product_sold(self, seeded_db):
|
||||
"""Projection does not handle ProductSold (no location)."""
|
||||
projection = EventLogProjection(seeded_db)
|
||||
assert PRODUCT_SOLD not in projection.get_event_types()
|
||||
|
||||
|
||||
class TestEventLogProjectionApply:
|
||||
"""Tests for apply()."""
|
||||
|
||||
def test_creates_event_log_entry_for_product_collected(self, seeded_db):
|
||||
"""Apply creates event log entry for ProductCollected."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
location_id = row[0]
|
||||
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5001"
|
||||
animal_ids = ["01ARZ3NDEKTSV4RRFFQ69G5A01"]
|
||||
|
||||
projection = EventLogProjection(seeded_db)
|
||||
event = make_product_collected_event(event_id, location_id, animal_ids, quantity=5)
|
||||
projection.apply(event)
|
||||
|
||||
row = seeded_db.execute(
|
||||
"SELECT event_id, location_id, type, actor FROM event_log_by_location"
|
||||
).fetchone()
|
||||
assert row[0] == event_id
|
||||
assert row[1] == location_id
|
||||
assert row[2] == PRODUCT_COLLECTED
|
||||
assert row[3] == "test_user"
|
||||
|
||||
def test_event_log_summary_contains_relevant_info(self, seeded_db):
|
||||
"""Event log summary contains relevant event info."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
location_id = row[0]
|
||||
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5001"
|
||||
animal_ids = ["01ARZ3NDEKTSV4RRFFQ69G5A01"]
|
||||
|
||||
projection = EventLogProjection(seeded_db)
|
||||
event = make_product_collected_event(event_id, location_id, animal_ids, quantity=5)
|
||||
projection.apply(event)
|
||||
|
||||
row = seeded_db.execute("SELECT summary FROM event_log_by_location").fetchone()
|
||||
summary = json.loads(row[0])
|
||||
assert summary["product_code"] == "egg.duck"
|
||||
assert summary["quantity"] == 5
|
||||
|
||||
def test_creates_event_log_entry_for_cohort_created(self, seeded_db):
|
||||
"""Apply creates event log entry for AnimalCohortCreated."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
location_id = row[0]
|
||||
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5001"
|
||||
animal_ids = ["01ARZ3NDEKTSV4RRFFQ69G5A01", "01ARZ3NDEKTSV4RRFFQ69G5A02"]
|
||||
|
||||
projection = EventLogProjection(seeded_db)
|
||||
event = make_cohort_event(event_id, location_id, animal_ids, species="duck")
|
||||
projection.apply(event)
|
||||
|
||||
row = seeded_db.execute("SELECT event_id, type FROM event_log_by_location").fetchone()
|
||||
assert row[0] == event_id
|
||||
assert row[1] == ANIMAL_COHORT_CREATED
|
||||
|
||||
def test_cohort_summary_contains_species_and_count(self, seeded_db):
|
||||
"""Cohort event summary contains species and count."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
location_id = row[0]
|
||||
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5001"
|
||||
animal_ids = ["01ARZ3NDEKTSV4RRFFQ69G5A01", "01ARZ3NDEKTSV4RRFFQ69G5A02"]
|
||||
|
||||
projection = EventLogProjection(seeded_db)
|
||||
event = make_cohort_event(event_id, location_id, animal_ids, species="goose")
|
||||
projection.apply(event)
|
||||
|
||||
row = seeded_db.execute("SELECT summary FROM event_log_by_location").fetchone()
|
||||
summary = json.loads(row[0])
|
||||
assert summary["species"] == "goose"
|
||||
assert summary["count"] == 2
|
||||
|
||||
def test_creates_event_log_entry_for_feed_given(self, seeded_db):
|
||||
"""Apply creates event log entry for FeedGiven."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
location_id = row[0]
|
||||
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5001"
|
||||
|
||||
projection = EventLogProjection(seeded_db)
|
||||
event = make_feed_given_event(event_id, location_id, amount_kg=3)
|
||||
projection.apply(event)
|
||||
|
||||
row = seeded_db.execute("SELECT event_id, type FROM event_log_by_location").fetchone()
|
||||
assert row[0] == event_id
|
||||
assert row[1] == FEED_GIVEN
|
||||
|
||||
def test_feed_given_summary_contains_amount(self, seeded_db):
|
||||
"""FeedGiven event summary contains feed type and amount."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
location_id = row[0]
|
||||
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5001"
|
||||
|
||||
projection = EventLogProjection(seeded_db)
|
||||
event = make_feed_given_event(event_id, location_id, feed_type_code="grower", amount_kg=5)
|
||||
projection.apply(event)
|
||||
|
||||
row = seeded_db.execute("SELECT summary FROM event_log_by_location").fetchone()
|
||||
summary = json.loads(row[0])
|
||||
assert summary["feed_type_code"] == "grower"
|
||||
assert summary["amount_kg"] == 5
|
||||
|
||||
def test_creates_event_log_for_animal_moved(self, seeded_db):
|
||||
"""Apply creates event log entry for AnimalMoved at destination."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 2'").fetchone()
|
||||
to_location_id = row[0]
|
||||
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5001"
|
||||
animal_ids = ["01ARZ3NDEKTSV4RRFFQ69G5A01"]
|
||||
|
||||
projection = EventLogProjection(seeded_db)
|
||||
event = make_animal_moved_event(event_id, to_location_id, animal_ids)
|
||||
projection.apply(event)
|
||||
|
||||
row = seeded_db.execute(
|
||||
"SELECT event_id, location_id, type FROM event_log_by_location"
|
||||
).fetchone()
|
||||
assert row[0] == event_id
|
||||
assert row[1] == to_location_id
|
||||
assert row[2] == ANIMAL_MOVED
|
||||
|
||||
def test_creates_event_log_for_hatch_recorded(self, seeded_db):
|
||||
"""Apply creates event log entry for HatchRecorded."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
location_id = row[0]
|
||||
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5001"
|
||||
|
||||
projection = EventLogProjection(seeded_db)
|
||||
event = make_hatch_event(event_id, location_id, hatched_live=8)
|
||||
projection.apply(event)
|
||||
|
||||
row = seeded_db.execute("SELECT event_id, type FROM event_log_by_location").fetchone()
|
||||
assert row[0] == event_id
|
||||
assert row[1] == HATCH_RECORDED
|
||||
|
||||
def test_hatch_summary_contains_hatched_count(self, seeded_db):
|
||||
"""HatchRecorded summary contains species and hatched count."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
location_id = row[0]
|
||||
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5001"
|
||||
|
||||
projection = EventLogProjection(seeded_db)
|
||||
event = make_hatch_event(event_id, location_id, hatched_live=8)
|
||||
projection.apply(event)
|
||||
|
||||
row = seeded_db.execute("SELECT summary FROM event_log_by_location").fetchone()
|
||||
summary = json.loads(row[0])
|
||||
assert summary["species"] == "duck"
|
||||
assert summary["hatched_live"] == 8
|
||||
|
||||
|
||||
class TestEventLogProjectionRevert:
|
||||
"""Tests for revert()."""
|
||||
|
||||
def test_removes_event_log_entry(self, seeded_db):
|
||||
"""Revert removes the event log entry."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
location_id = row[0]
|
||||
|
||||
event_id = "01ARZ3NDEKTSV4RRFFQ69G5001"
|
||||
animal_ids = ["01ARZ3NDEKTSV4RRFFQ69G5A01"]
|
||||
|
||||
projection = EventLogProjection(seeded_db)
|
||||
event = make_product_collected_event(event_id, location_id, animal_ids)
|
||||
projection.apply(event)
|
||||
|
||||
# Verify row exists
|
||||
count = seeded_db.execute("SELECT COUNT(*) FROM event_log_by_location").fetchone()[0]
|
||||
assert count == 1
|
||||
|
||||
# Revert
|
||||
projection.revert(event)
|
||||
|
||||
# Verify row removed
|
||||
count = seeded_db.execute("SELECT COUNT(*) FROM event_log_by_location").fetchone()[0]
|
||||
assert count == 0
|
||||
|
||||
def test_revert_only_affects_specific_event(self, seeded_db):
|
||||
"""Revert only removes the specific event log entry."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
location_id = row[0]
|
||||
|
||||
projection = EventLogProjection(seeded_db)
|
||||
|
||||
# Create first event
|
||||
event1 = make_product_collected_event(
|
||||
"01ARZ3NDEKTSV4RRFFQ69G5001",
|
||||
location_id,
|
||||
["01ARZ3NDEKTSV4RRFFQ69G5A01"],
|
||||
)
|
||||
projection.apply(event1)
|
||||
|
||||
# Create second event
|
||||
event2 = make_feed_given_event(
|
||||
"01ARZ3NDEKTSV4RRFFQ69G5002",
|
||||
location_id,
|
||||
ts_utc=1704067300000,
|
||||
)
|
||||
projection.apply(event2)
|
||||
|
||||
# Verify both exist
|
||||
count = seeded_db.execute("SELECT COUNT(*) FROM event_log_by_location").fetchone()[0]
|
||||
assert count == 2
|
||||
|
||||
# Revert only event1
|
||||
projection.revert(event1)
|
||||
|
||||
# Event2 should still exist
|
||||
count = seeded_db.execute("SELECT COUNT(*) FROM event_log_by_location").fetchone()[0]
|
||||
assert count == 1
|
||||
|
||||
row = seeded_db.execute("SELECT event_id FROM event_log_by_location").fetchone()
|
||||
assert row[0] == "01ARZ3NDEKTSV4RRFFQ69G5002"
|
||||
217
tests/test_web_events.py
Normal file
217
tests/test_web_events.py
Normal file
@@ -0,0 +1,217 @@
|
||||
# ABOUTME: Tests for event log routes.
|
||||
# ABOUTME: Covers GET /event-log rendering and filtering by location.
|
||||
|
||||
import os
|
||||
import time
|
||||
|
||||
import pytest
|
||||
from starlette.testclient import TestClient
|
||||
|
||||
from animaltrack.events.payloads import AnimalCohortCreatedPayload, ProductCollectedPayload
|
||||
from animaltrack.events.store import EventStore
|
||||
from animaltrack.projections import (
|
||||
AnimalRegistryProjection,
|
||||
EventAnimalsProjection,
|
||||
EventLogProjection,
|
||||
IntervalProjection,
|
||||
ProductsProjection,
|
||||
ProjectionRegistry,
|
||||
)
|
||||
from animaltrack.services.animal import AnimalService
|
||||
from animaltrack.services.products import ProductService
|
||||
|
||||
|
||||
def make_test_settings(
|
||||
csrf_secret: str = "test-secret",
|
||||
trusted_proxy_ips: str = "127.0.0.1",
|
||||
dev_mode: bool = True,
|
||||
):
|
||||
"""Create Settings for testing by setting env vars temporarily."""
|
||||
from animaltrack.config import Settings
|
||||
|
||||
old_env = os.environ.copy()
|
||||
try:
|
||||
os.environ["CSRF_SECRET"] = csrf_secret
|
||||
os.environ["TRUSTED_PROXY_IPS"] = trusted_proxy_ips
|
||||
os.environ["DEV_MODE"] = str(dev_mode).lower()
|
||||
return Settings()
|
||||
finally:
|
||||
os.environ.clear()
|
||||
os.environ.update(old_env)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def client(seeded_db):
|
||||
"""Create a test client for the app."""
|
||||
from animaltrack.web.app import create_app
|
||||
|
||||
settings = make_test_settings(trusted_proxy_ips="testclient")
|
||||
app, rt = create_app(settings=settings, db=seeded_db)
|
||||
return TestClient(app, raise_server_exceptions=True)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def valid_location_id(seeded_db):
|
||||
"""Get Strip 1 location ID from seeds."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 1'").fetchone()
|
||||
return row[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def strip2_location_id(seeded_db):
|
||||
"""Get Strip 2 location ID from seeds."""
|
||||
row = seeded_db.execute("SELECT id FROM locations WHERE name = 'Strip 2'").fetchone()
|
||||
return row[0]
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def animal_service(seeded_db):
|
||||
"""Create an AnimalService for testing."""
|
||||
event_store = EventStore(seeded_db)
|
||||
registry = ProjectionRegistry()
|
||||
registry.register(AnimalRegistryProjection(seeded_db))
|
||||
registry.register(EventAnimalsProjection(seeded_db))
|
||||
registry.register(IntervalProjection(seeded_db))
|
||||
registry.register(EventLogProjection(seeded_db))
|
||||
return AnimalService(seeded_db, event_store, registry)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def product_service(seeded_db):
|
||||
"""Create a ProductService for testing."""
|
||||
event_store = EventStore(seeded_db)
|
||||
registry = ProjectionRegistry()
|
||||
registry.register(AnimalRegistryProjection(seeded_db))
|
||||
registry.register(EventAnimalsProjection(seeded_db))
|
||||
registry.register(IntervalProjection(seeded_db))
|
||||
registry.register(ProductsProjection(seeded_db))
|
||||
registry.register(EventLogProjection(seeded_db))
|
||||
return ProductService(seeded_db, event_store, registry)
|
||||
|
||||
|
||||
def create_cohort(animal_service, location_id, count=3):
|
||||
"""Helper to create a cohort and return animal IDs."""
|
||||
ts_utc = int(time.time() * 1000)
|
||||
payload = AnimalCohortCreatedPayload(
|
||||
species="duck",
|
||||
count=count,
|
||||
life_stage="adult",
|
||||
sex="unknown",
|
||||
location_id=location_id,
|
||||
origin="purchased",
|
||||
)
|
||||
event = animal_service.create_cohort(payload, ts_utc, "test_user")
|
||||
return event.entity_refs["animal_ids"]
|
||||
|
||||
|
||||
class TestEventLogRoute:
|
||||
"""Tests for GET /event-log route."""
|
||||
|
||||
def test_event_log_requires_location_id(self, client):
|
||||
"""Event log requires location_id parameter."""
|
||||
response = client.get("/event-log")
|
||||
assert response.status_code == 422
|
||||
|
||||
def test_event_log_returns_empty_for_new_location(self, client, valid_location_id):
|
||||
"""Event log returns empty state for location with no events."""
|
||||
response = client.get(f"/event-log?location_id={valid_location_id}")
|
||||
assert response.status_code == 200
|
||||
# Should show empty state message
|
||||
assert "No events" in response.text or "event-log" in response.text
|
||||
|
||||
def test_event_log_shows_events(self, client, seeded_db, animal_service, valid_location_id):
|
||||
"""Event log shows events for the location."""
|
||||
# Create some animals (creates AnimalCohortCreated event)
|
||||
create_cohort(animal_service, valid_location_id, count=5)
|
||||
|
||||
response = client.get(f"/event-log?location_id={valid_location_id}")
|
||||
assert response.status_code == 200
|
||||
assert "AnimalCohortCreated" in response.text or "cohort" in response.text.lower()
|
||||
|
||||
def test_event_log_shows_product_collected(
|
||||
self, client, seeded_db, animal_service, product_service, valid_location_id
|
||||
):
|
||||
"""Event log shows ProductCollected events."""
|
||||
# Create animals first
|
||||
animal_ids = create_cohort(animal_service, valid_location_id, count=3)
|
||||
|
||||
# Collect eggs
|
||||
ts_utc = int(time.time() * 1000)
|
||||
payload = ProductCollectedPayload(
|
||||
location_id=valid_location_id,
|
||||
product_code="egg.duck",
|
||||
quantity=5,
|
||||
resolved_ids=animal_ids,
|
||||
)
|
||||
product_service.collect_product(payload, ts_utc, "test_user")
|
||||
|
||||
response = client.get(f"/event-log?location_id={valid_location_id}")
|
||||
assert response.status_code == 200
|
||||
assert "ProductCollected" in response.text or "egg" in response.text.lower()
|
||||
|
||||
def test_event_log_filters_by_location(
|
||||
self, client, seeded_db, animal_service, valid_location_id, strip2_location_id
|
||||
):
|
||||
"""Event log only shows events for the specified location."""
|
||||
# Create animals at location 1
|
||||
create_cohort(animal_service, valid_location_id, count=3)
|
||||
|
||||
# Create animals at location 2
|
||||
create_cohort(animal_service, strip2_location_id, count=2)
|
||||
|
||||
# Get events for location 2 only
|
||||
response = client.get(f"/event-log?location_id={strip2_location_id}")
|
||||
assert response.status_code == 200
|
||||
|
||||
# Should see location 2 events only
|
||||
# Count the events displayed
|
||||
text = response.text
|
||||
# Location 2 should have 1 event (cohort of 2)
|
||||
assert "count" in text.lower() or "2" in text
|
||||
|
||||
def test_event_log_orders_by_time_descending(
|
||||
self, client, seeded_db, animal_service, product_service, valid_location_id
|
||||
):
|
||||
"""Event log shows newest events first."""
|
||||
# Create cohort first
|
||||
animal_ids = create_cohort(animal_service, valid_location_id, count=3)
|
||||
|
||||
# Then collect eggs
|
||||
ts_utc = int(time.time() * 1000) + 1000
|
||||
payload = ProductCollectedPayload(
|
||||
location_id=valid_location_id,
|
||||
product_code="egg.duck",
|
||||
quantity=5,
|
||||
resolved_ids=animal_ids,
|
||||
)
|
||||
product_service.collect_product(payload, ts_utc, "test_user")
|
||||
|
||||
response = client.get(f"/event-log?location_id={valid_location_id}")
|
||||
assert response.status_code == 200
|
||||
|
||||
# ProductCollected should appear before AnimalCohortCreated (newer first)
|
||||
text = response.text
|
||||
# Check that the response contains both event types
|
||||
cohort_pos = text.find("Cohort") if "Cohort" in text else text.find("cohort")
|
||||
egg_pos = text.find("egg") if "egg" in text else text.find("Product")
|
||||
|
||||
# Both should be present
|
||||
assert cohort_pos != -1 or egg_pos != -1
|
||||
|
||||
|
||||
class TestEventLogPartial:
|
||||
"""Tests for HTMX partial responses."""
|
||||
|
||||
def test_htmx_request_returns_partial(
|
||||
self, client, seeded_db, animal_service, valid_location_id
|
||||
):
|
||||
"""HTMX request returns partial HTML without full page wrapper."""
|
||||
create_cohort(animal_service, valid_location_id)
|
||||
|
||||
response = client.get(
|
||||
f"/event-log?location_id={valid_location_id}",
|
||||
headers={"HX-Request": "true"},
|
||||
)
|
||||
assert response.status_code == 200
|
||||
# Partial should not have full page structure
|
||||
assert "<html" not in response.text.lower()
|
||||
Reference in New Issue
Block a user