Compare commits
10 Commits (9cd890b936 ... e86af247da)

| SHA1 |
|---|
| e86af247da |
| 9fbda655f5 |
| 628d5cc6e6 |
| cccd76a44c |
| 0125bc4aaa |
| d19e5b7120 |
| 85a4c6bc7b |
| c214454d67 |
| 9709a78dc6 |
| a1fed4ebcd |
@@ -1,5 +1,5 @@
 # ABOUTME: Command-line interface for AnimalTrack.
-# ABOUTME: Provides migrate, seed, and serve commands.
+# ABOUTME: Provides migrate, seed, serve, and rebuild-projections commands.
 
 import argparse
 import sys
@@ -29,6 +29,12 @@ def main():
     serve_parser.add_argument("--port", type=int, default=3366, help="Port to listen on")
     serve_parser.add_argument("--host", type=str, default="0.0.0.0", help="Host to bind to")
 
+    # rebuild-projections command
+    subparsers.add_parser(
+        "rebuild-projections",
+        help="Rebuild all projections by replaying the event log",
+    )
+
     args = parser.parse_args()
 
     if args.command is None:
@@ -113,6 +119,92 @@ def main():
         # Start server
         print(f"Starting server on {args.host}:{args.port}...")
         uvicorn.run(app, host=args.host, port=args.port)
+    elif args.command == "rebuild-projections":
+        import json
+        import os
+
+        from animaltrack.db import get_db
+        from animaltrack.events.processor import process_event
+        from animaltrack.migrations import run_migrations
+        from animaltrack.models.events import Event
+        from animaltrack.projections import ProjectionRegistry
+        from animaltrack.projections.animal_registry import AnimalRegistryProjection
+        from animaltrack.projections.event_animals import EventAnimalsProjection
+        from animaltrack.projections.event_log import EventLogProjection
+        from animaltrack.projections.feed import FeedInventoryProjection
+        from animaltrack.projections.intervals import IntervalProjection
+        from animaltrack.projections.products import ProductsProjection
+
+        # Get db_path directly - this command doesn't need web settings
+        db_path = os.environ.get("DB_PATH", "animaltrack.db")
+
+        # Run migrations first
+        print("Running migrations...")
+        success = run_migrations(
+            db_path=db_path,
+            migrations_dir="migrations",
+            verbose=False,
+        )
+        if not success:
+            print("Migration failed", file=sys.stderr)
+            sys.exit(1)
+
+        db = get_db(db_path)
+
+        # Projection tables to truncate
+        projection_tables = [
+            "animal_registry",
+            "live_animals_by_location",
+            "animal_location_intervals",
+            "animal_attr_intervals",
+            "event_animals",
+            "event_log_by_location",
+            "feed_inventory",
+        ]
+
+        print("Truncating projection tables...")
+        for table in projection_tables:
+            db.execute(f"DELETE FROM {table}")
+        print(f" Truncated {len(projection_tables)} tables")
+
+        # Register all projections
+        registry = ProjectionRegistry()
+        registry.register(AnimalRegistryProjection(db))
+        registry.register(IntervalProjection(db))
+        registry.register(EventAnimalsProjection(db))
+        registry.register(ProductsProjection(db))
+        registry.register(FeedInventoryProjection(db))
+        registry.register(EventLogProjection(db))
+
+        # Get all non-tombstoned events in order
+        print("Fetching events...")
+        rows = list(
+            db.execute(
+                """SELECT id, type, ts_utc, actor, version, payload, entity_refs
+                FROM events
+                WHERE id NOT IN (SELECT target_event_id FROM event_tombstones)
+                ORDER BY ts_utc, id"""
+            ).fetchall()
+        )
+        print(f" Found {len(rows)} events to replay")
+
+        # Replay events through projections
+        print("Replaying events...")
+        for i, row in enumerate(rows):
+            event = Event(
+                id=row[0],
+                type=row[1],
+                ts_utc=row[2],
+                actor=row[3],
+                version=row[4],
+                payload=json.loads(row[5]),
+                entity_refs=json.loads(row[6]),
+            )
+            process_event(event, registry)
+            if (i + 1) % 100 == 0:
+                print(f" Processed {i + 1}/{len(rows)} events...")
+
+        print(f"Rebuild complete: processed {len(rows)} events")
+
 
 
 if __name__ == "__main__":
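Note: the new command reads DB_PATH from the environment (falling back to animaltrack.db) and runs migrations before replaying, so it can be pointed at any database file. A minimal invocation sketch, mirroring how the tests later in this compare drive it (your deployment environment may need more variables):

```python
# Invocation sketch only - mirrors tests/test_cli_rebuild.py rather than defining a new API.
import os
import subprocess
import sys

env = os.environ.copy()
env["DB_PATH"] = "animaltrack.db"  # the command falls back to this default anyway

subprocess.run(
    [sys.executable, "-m", "animaltrack.cli", "rebuild-projections"],
    check=True,
    env=env,
)
```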
@@ -185,39 +185,30 @@ def _validate_subset(
     # Find selected IDs that no longer match the filter
     invalid_ids = selected_set - resolved_set
 
+    # Compute valid selected IDs (those that still match the filter)
+    valid_selected = [sid for sid in selected_ids if sid in resolved_set]
+
     if not invalid_ids:
-        # All selected IDs are valid - compute hash from selected IDs
-        subset_hash = compute_roster_hash(selected_ids, context.from_location_id)
-
-        # Verify hash matches what client sent
-        if subset_hash == context.roster_hash:
-            return SelectionValidationResult(
-                valid=True,
-                resolved_ids=selected_ids,
-                roster_hash=context.roster_hash,
-                diff=None,
-            )
-
-    # Some selected IDs are no longer valid, or hash mismatch
-    # Compute diff: removed = invalid_ids, added = none
-    diff = SelectionDiff(
-        added=[],
-        removed=sorted(invalid_ids),
-        server_count=len(resolved_ids),
-        client_count=len(selected_ids),
-    )
-
-    if context.confirmed and not invalid_ids:
-        # Client confirmed, and all IDs are still valid
+        # All selected IDs are still in the filter resolution - valid
+        # No hash comparison needed: we validate IDs directly
         return SelectionValidationResult(
             valid=True,
             resolved_ids=selected_ids,
-            roster_hash=context.roster_hash,
-            diff=diff,
+            roster_hash=compute_roster_hash(selected_ids, context.from_location_id),
+            diff=None,
         )
 
-    # Invalid - return with valid selected IDs (those that still match)
-    valid_selected = [sid for sid in selected_ids if sid in resolved_set]
+    # Some selected IDs are no longer valid
+    # Compute diff: removed = invalid_ids, added = none
+    # In subset mode, server_count reflects valid selected count, not full filter
+    diff = SelectionDiff(
+        added=[],
+        removed=sorted(invalid_ids),
+        server_count=len(valid_selected),
+        client_count=len(selected_ids),
+    )
+
+    # Return invalid with valid selected IDs (those that still match)
     new_hash = compute_roster_hash(valid_selected, context.from_location_id)
 
     return SelectionValidationResult(
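Note: the subset path now reduces to plain set arithmetic instead of a client-supplied hash check. A small worked example with made-up IDs (not data from the repository) showing what the branch above computes:

```python
# Hypothetical IDs for illustration only.
resolved_set = {"a1", "a2", "a3"}          # what the filter currently resolves to
selected_ids = ["a1", "a4"]                # what the client had selected
selected_set = set(selected_ids)

invalid_ids = selected_set - resolved_set  # {"a4"}: selected but no longer matching
valid_selected = [sid for sid in selected_ids if sid in resolved_set]  # ["a1"]

# diff.removed == sorted(invalid_ids); in subset mode server_count == len(valid_selected)
assert sorted(invalid_ids) == ["a4"]
assert len(valid_selected) == 1
```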
@@ -3,6 +3,7 @@
 
 from __future__ import annotations
 
+import logging
 from pathlib import Path
 
 from fasthtml.common import Beforeware, Meta, fast_app, setup_toasts
@@ -19,6 +20,7 @@ from animaltrack.web.middleware import (
     csrf_before,
     request_id_before,
 )
+from animaltrack.web.responses import error_toast
 from animaltrack.web.routes import (
     actions_router,
     animals_router,
@@ -124,11 +126,13 @@ def create_app(
     # So static_path should be the parent of static_base
     static_path_for_fasthtml = str(static_base.parent) if static_base.exists() else "."
 
-    # Configure HTMX to swap 422 responses for validation errors
+    # Configure HTMX to swap certain error responses so UI feedback is visible
+    # 409 (Conflict) returns confirmation dialogs for selection mismatches
+    # 422 (Validation Error) returns forms with error messages
     # Without this, hx-boost ignores non-2xx responses and errors appear to do nothing
     htmx_config = Meta(
         name="htmx-config",
-        content='{"responseHandling":[{"code":"204","swap":false},{"code":"[23]..","swap":true},{"code":"422","swap":true},{"code":"[45]..","swap":false,"error":true}]}',
+        content='{"responseHandling":[{"code":"204","swap":false},{"code":"[23]..","swap":true},{"code":"409","swap":true},{"code":"422","swap":true},{"code":"[45]..","swap":false,"error":true}]}',
     )
 
     # Create FastHTML app with HTMX extensions, MonsterUI theme, and static path
@@ -157,6 +161,13 @@ def create_app(
     app.add_exception_handler(AuthenticationError, authentication_error_handler)
     app.add_exception_handler(AuthorizationError, authorization_error_handler)
 
+    # Global handler for unhandled exceptions - log and show toast
+    async def unhandled_error_handler(request, exc):
+        logging.exception("Unhandled exception in %s %s", request.method, request.url.path)
+        return error_toast("An unexpected error occurred. Please try again.", status_code=500)
+
+    app.add_exception_handler(Exception, unhandled_error_handler)
+
     # Register routes using APIRouter pattern
     health_router.to_app(app)
     actions_router.to_app(app)
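Note: the htmx-config content is a single-line JSON string; a restatement of the new rules for readability (a sketch only — the literal string in the hunk above is authoritative):

```python
import json

# Assumed restatement of the meta content above, annotated rule by rule.
response_handling = [
    {"code": "204", "swap": False},                      # no content: leave the DOM alone
    {"code": "[23]..", "swap": True},                    # 2xx/3xx: normal swaps
    {"code": "409", "swap": True},                       # conflict: swap in the confirmation dialog
    {"code": "422", "swap": True},                       # validation error: swap the form with messages
    {"code": "[45]..", "swap": False, "error": True},    # other 4xx/5xx: treat as errors
]
print(json.dumps({"responseHandling": response_handling}))
```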
@@ -206,15 +206,11 @@ async def animal_cohort(request: Request, session):
         )
 
     # Success: re-render fresh form
-    return HTMLResponse(
-        content=to_xml(
-            render_page(
-                request,
-                cohort_form(locations, species_list),
-                title="Create Cohort - AnimalTrack",
-                active_nav=None,
-            )
-        ),
+    return render_page(
+        request,
+        cohort_form(locations, species_list),
+        title="Create Cohort - AnimalTrack",
+        active_nav=None,
     )
 
 
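Note: the same mechanical refactor repeats in the remaining hunks of this file and in the eggs, feed, and move routes further down: the `return HTMLResponse(content=to_xml(render_page(...)))` wrapper collapses to `return render_page(...)`, with the page component, title, and active_nav passed through unchanged and serialization left to the framework.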
@@ -282,7 +278,11 @@ async def hatch_recorded(request: Request, session):
     # Extract form data
     species = form.get("species", "")
     location_id = form.get("location_id", "")
-    assigned_brood_location_id = form.get("assigned_brood_location_id", "") or None
+    # "__none__" is a sentinel value used because FastHTML omits empty string attributes
+    brood_location_raw = form.get("assigned_brood_location_id", "")
+    assigned_brood_location_id = (
+        None if brood_location_raw in ("", "__none__") else brood_location_raw
+    )
     hatched_live_str = form.get("hatched_live", "0")
     notes = form.get("notes", "") or None
     nonce = form.get("nonce")
@@ -349,15 +349,11 @@ async def hatch_recorded(request: Request, session):
         )
 
     # Success: re-render fresh form
-    return HTMLResponse(
-        content=to_xml(
-            render_page(
-                request,
-                hatch_form(locations, species_list),
-                title="Record Hatch - AnimalTrack",
-                active_nav=None,
-            )
-        ),
+    return render_page(
+        request,
+        hatch_form(locations, species_list),
+        title="Record Hatch - AnimalTrack",
+        active_nav=None,
     )
 
 
@@ -694,15 +690,11 @@ async def animal_tag_add(request: Request, session):
         )
 
     # Success: re-render fresh form
-    return HTMLResponse(
-        content=to_xml(
-            render_page(
-                request,
-                tag_add_form(),
-                title="Add Tag - AnimalTrack",
-                active_nav=None,
-            )
-        ),
+    return render_page(
+        request,
+        tag_add_form(),
+        title="Add Tag - AnimalTrack",
+        active_nav=None,
     )
 
 
@@ -947,15 +939,11 @@ async def animal_tag_end(request: Request, session):
         )
 
     # Success: re-render fresh form
-    return HTMLResponse(
-        content=to_xml(
-            render_page(
-                request,
-                tag_end_form(),
-                title="End Tag - AnimalTrack",
-                active_nav=None,
-            )
-        ),
+    return render_page(
+        request,
+        tag_end_form(),
+        title="End Tag - AnimalTrack",
+        active_nav=None,
     )
 
 
@@ -1183,15 +1171,11 @@ async def animal_attrs(request: Request, session):
         )
 
     # Success: re-render fresh form
-    return HTMLResponse(
-        content=to_xml(
-            render_page(
-                request,
-                attrs_form(),
-                title="Update Attributes - AnimalTrack",
-                active_nav=None,
-            )
-        ),
+    return render_page(
+        request,
+        attrs_form(),
+        title="Update Attributes - AnimalTrack",
+        active_nav=None,
     )
 
 
@@ -1468,22 +1452,18 @@ async def animal_outcome(request: Request, session):
     product_repo = ProductRepository(db)
     products = [(p.code, p.name) for p in product_repo.list_all() if p.active]
 
-    return HTMLResponse(
-        content=to_xml(
-            render_page(
-                request,
-                outcome_form(
-                    filter_str="",
-                    resolved_ids=[],
-                    roster_hash="",
-                    ts_utc=int(time.time() * 1000),
-                    resolved_count=0,
-                    products=products,
-                ),
-                title="Record Outcome - AnimalTrack",
-                active_nav=None,
-            )
+    return render_page(
+        request,
+        outcome_form(
+            filter_str="",
+            resolved_ids=[],
+            roster_hash="",
+            ts_utc=int(time.time() * 1000),
+            resolved_count=0,
+            products=products,
         ),
+        title="Record Outcome - AnimalTrack",
+        active_nav=None,
     )
 
 
@@ -1692,21 +1672,17 @@ async def animal_status_correct(req: Request, session):
         )
 
     # Success: re-render fresh form
-    return HTMLResponse(
-        content=to_xml(
-            render_page(
-                req,
-                status_correct_form(
-                    filter_str="",
-                    resolved_ids=[],
-                    roster_hash="",
-                    ts_utc=int(time.time() * 1000),
-                    resolved_count=0,
-                ),
-                title="Correct Status - AnimalTrack",
-                active_nav=None,
-            )
+    return render_page(
+        req,
+        status_correct_form(
+            filter_str="",
+            resolved_ids=[],
+            roster_hash="",
+            ts_utc=int(time.time() * 1000),
+            resolved_count=0,
         ),
+        title="Correct Status - AnimalTrack",
+        active_nav=None,
     )
 
 
@@ -209,22 +209,18 @@ async def product_collected(request: Request, session):
         )
 
     # Success: re-render form with location sticking, qty cleared
-    return HTMLResponse(
-        content=to_xml(
-            render_page(
-                request,
-                eggs_page(
-                    locations,
-                    products,
-                    active_tab="harvest",
-                    selected_location_id=location_id,
-                    harvest_action=product_collected,
-                    sell_action=product_sold,
-                ),
-                title="Eggs - AnimalTrack",
-                active_nav="eggs",
-            )
+    return render_page(
+        request,
+        eggs_page(
+            locations,
+            products,
+            active_tab="harvest",
+            selected_location_id=location_id,
+            harvest_action=product_collected,
+            sell_action=product_sold,
         ),
+        title="Eggs - AnimalTrack",
+        active_nav="eggs",
     )
 
 
@@ -320,22 +316,18 @@ async def product_sold(request: Request, session):
         )
 
     # Success: re-render form with product sticking
-    return HTMLResponse(
-        content=to_xml(
-            render_page(
-                request,
-                eggs_page(
-                    locations,
-                    products,
-                    active_tab="sell",
-                    selected_product_code=product_code,
-                    harvest_action=product_collected,
-                    sell_action=product_sold,
-                ),
-                title="Eggs - AnimalTrack",
-                active_nav="eggs",
-            )
+    return render_page(
+        request,
+        eggs_page(
+            locations,
+            products,
+            active_tab="sell",
+            selected_product_code=product_code,
+            harvest_action=product_collected,
+            sell_action=product_sold,
        ),
+        title="Eggs - AnimalTrack",
+        active_nav="eggs",
    )
 
 
@@ -19,6 +19,13 @@ from animaltrack.events.exceptions import (
 )
 from animaltrack.events.store import EventStore
 from animaltrack.models.reference import UserRole
+from animaltrack.projections import ProjectionRegistry
+from animaltrack.projections.animal_registry import AnimalRegistryProjection
+from animaltrack.projections.event_animals import EventAnimalsProjection
+from animaltrack.projections.event_log import EventLogProjection
+from animaltrack.projections.feed import FeedInventoryProjection
+from animaltrack.projections.intervals import IntervalProjection
+from animaltrack.projections.products import ProductsProjection
 from animaltrack.repositories.locations import LocationRepository
 from animaltrack.repositories.user_defaults import UserDefaultsRepository
 from animaltrack.web.templates import render_page
@@ -193,8 +200,10 @@ def event_log_index(request: Request):
     location_id = request.query_params.get("location_id", "")
     event_type = request.query_params.get("event_type", "")
 
-    # "all" means show all events (no location filter)
+    # "all" means no filter for both location and event type
     show_all = location_id == "all" or location_id == ""
+    if event_type == "all":
+        event_type = ""
 
     # If no query param and not explicitly "all", try user defaults
     if not location_id and not event_type and username:
@@ -207,21 +216,13 @@ def event_log_index(request: Request):
     location_repo = LocationRepository(db)
     locations = location_repo.list_active()
 
-    # Find location name if we have a specific location_id
-    location_name = None
-    if location_id and location_id != "all":
-        for loc in locations:
-            if loc.id == location_id:
-                location_name = loc.name
-                break
-
     # Get events based on filter
     events = []
     if show_all or not location_id:
         # Show all events (from main events table)
         events = get_all_events(db, event_type=event_type or None)
-    elif location_id and location_name:
-        # Show events for specific location
+    elif location_id:
+        # Show events for specific location (location_name only used for header display)
         events = get_event_log(db, location_id)
         # Filter by event type if specified
         if event_type:
@@ -335,7 +336,7 @@ async def event_delete(request: Request, event_id: str):
     if not auth:
         return JSONResponse({"error": "Not authenticated"}, status_code=401)
 
-    if auth.role != UserRole.admin:
+    if auth.role != UserRole.ADMIN:
         return JSONResponse({"error": "Admin role required"}, status_code=403)
 
     # Parse form data
@@ -343,9 +344,15 @@ async def event_delete(request: Request, event_id: str):
     reason = form.get("reason", "")
     cascade = form.get("cascade", "false") == "true"
 
-    # Get event store and registry
+    # Get event store and registry with all projections
    event_store = EventStore(db)
-    registry = request.app.state.registry
+    registry = ProjectionRegistry()
+    registry.register(AnimalRegistryProjection(db))
+    registry.register(IntervalProjection(db))
+    registry.register(EventAnimalsProjection(db))
+    registry.register(ProductsProjection(db))
+    registry.register(FeedInventoryProjection(db))
+    registry.register(EventLogProjection(db))
 
     try:
         # Check for dependent events first
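Note: this same six-projection registration list now appears three times in the compare (here, in the CLI rebuild command, and in the new tests). A small helper along these lines could keep them in sync — a sketch only, no such helper exists in this change set:

```python
# Hypothetical helper, not part of the repository.
from animaltrack.projections import ProjectionRegistry
from animaltrack.projections.animal_registry import AnimalRegistryProjection
from animaltrack.projections.event_animals import EventAnimalsProjection
from animaltrack.projections.event_log import EventLogProjection
from animaltrack.projections.feed import FeedInventoryProjection
from animaltrack.projections.intervals import IntervalProjection
from animaltrack.projections.products import ProductsProjection


def build_full_registry(db) -> ProjectionRegistry:
    """Register every projection against the given database handle."""
    registry = ProjectionRegistry()
    for projection_cls in (
        AnimalRegistryProjection,
        IntervalProjection,
        EventAnimalsProjection,
        ProductsProjection,
        FeedInventoryProjection,
        EventLogProjection,
    ):
        registry.register(projection_cls(db))
    return registry
```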
@@ -245,25 +245,21 @@ async def feed_given(request: Request, session):
         )
 
     # Success: re-render form with location/type sticking, amount reset
-    return HTMLResponse(
-        content=str(
-            render_page(
-                request,
-                feed_page(
-                    locations,
-                    feed_types,
-                    active_tab="give",
-                    selected_location_id=location_id,
-                    selected_feed_type_code=feed_type_code,
-                    default_amount_kg=default_amount_kg,
-                    balance_warning=balance_warning,
-                    give_action=feed_given,
-                    purchase_action=feed_purchased,
-                ),
-                title="Feed - AnimalTrack",
-                active_nav="feed",
-            )
+    return render_page(
+        request,
+        feed_page(
+            locations,
+            feed_types,
+            active_tab="give",
+            selected_location_id=location_id,
+            selected_feed_type_code=feed_type_code,
+            default_amount_kg=default_amount_kg,
+            balance_warning=balance_warning,
+            give_action=feed_given,
+            purchase_action=feed_purchased,
         ),
+        title="Feed - AnimalTrack",
+        active_nav="feed",
     )
 
 
@@ -404,21 +400,17 @@ async def feed_purchased(request: Request, session):
         )
 
     # Success: re-render form with fields cleared
-    return HTMLResponse(
-        content=str(
-            render_page(
-                request,
-                feed_page(
-                    locations,
-                    feed_types,
-                    active_tab="purchase",
-                    give_action=feed_given,
-                    purchase_action=feed_purchased,
-                ),
-                title="Feed - AnimalTrack",
-                active_nav="feed",
-            )
+    return render_page(
+        request,
+        feed_page(
+            locations,
+            feed_types,
+            active_tab="purchase",
+            give_action=feed_given,
+            purchase_action=feed_purchased,
        ),
+        title="Feed - AnimalTrack",
+        active_nav="feed",
    )
 
 
@@ -1,5 +1,5 @@
-# ABOUTME: Routes for Location management functionality (admin-only).
-# ABOUTME: Handles GET /locations and POST /actions/location-* routes.
+# ABOUTME: Routes for Location management functionality.
+# ABOUTME: Handles GET /locations, GET /locations/{id}, and POST /actions/location-* routes.
 
 from __future__ import annotations
 
@@ -19,6 +19,7 @@ from animaltrack.services.location import LocationService, ValidationError
 from animaltrack.web.auth import require_role
 from animaltrack.web.responses import success_toast
 from animaltrack.web.templates import render_page
+from animaltrack.web.templates.location_detail import location_detail_panel
 from animaltrack.web.templates.locations import location_list, rename_form
 
 # APIRouter for multi-file route organization
@@ -33,8 +34,79 @@ def _get_location_service(db) -> LocationService:
     return LocationService(db, event_store, registry)
 
 
+def _get_recent_events(db, location_id: str, limit: int = 10) -> list[dict]:
+    """Get recent events for a location from the event log projection."""
+    rows = db.execute(
+        """
+        SELECT event_id, location_id, ts_utc, type, actor, summary
+        FROM event_log_by_location
+        WHERE location_id = ?
+        ORDER BY ts_utc DESC
+        LIMIT ?
+        """,
+        (location_id, limit),
+    ).fetchall()
+
+    events = []
+    for row in rows:
+        events.append(
+            {
+                "event_id": row[0],
+                "location_id": row[1],
+                "ts_utc": row[2],
+                "type": row[3],
+                "actor": row[4],
+            }
+        )
+    return events
+
+
+def _get_live_animal_count(db, location_id: str) -> int:
+    """Get count of live animals at a location."""
+    row = db.execute(
+        """
+        SELECT COUNT(*) FROM live_animals_by_location
+        WHERE location_id = ?
+        """,
+        (location_id,),
+    ).fetchone()
+    return row[0] if row else 0
+
+
 # =============================================================================
-# GET /locations - Location List
+# GET /locations/{id} - Location Detail (Public)
+# =============================================================================
+
+
+@ar("/locations/{location_id}")
+async def location_detail(req: Request, location_id: str):
+    """GET /locations/{id} - Public location detail page."""
+    db = req.app.state.db
+
+    # Handle admin rename route - check if it's the special /rename path
+    # This is handled by a separate route, so we don't need to worry about it here
+
+    location = LocationRepository(db).get(location_id)
+
+    if location is None:
+        return HTMLResponse(content="Location not found", status_code=404)
+
+    # Get recent events at this location
+    recent_events = _get_recent_events(db, location_id)
+
+    # Get live animal count
+    animal_count = _get_live_animal_count(db, location_id)
+
+    return render_page(
+        req,
+        location_detail_panel(location, recent_events, animal_count),
+        title=f"{location.name} - AnimalTrack",
+        active_nav=None,
+    )
+
+
+# =============================================================================
+# GET /locations - Location List (Admin)
 # =============================================================================
 
 
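Note: unlike the admin list view, this detail route has no require_role guard, so it should be reachable anonymously. A hedged smoke check — it assumes the app module path, that create_app() can be called without arguments here, and a made-up location ID; none of those are confirmed by this compare:

```python
# Hypothetical smoke test; module path, create_app() signature, and the ID are assumptions.
from starlette.testclient import TestClient

from animaltrack.web.app import create_app

client = TestClient(create_app())
response = client.get("/locations/01HYLOC123")  # made-up ULID
assert response.status_code in (200, 404)  # 404 when the ID does not exist
```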
@@ -299,18 +299,14 @@ async def animal_move(request: Request, session):
         )
 
     # Success: re-render fresh form (nothing sticks per spec)
-    return HTMLResponse(
-        content=to_xml(
-            render_page(
-                request,
-                move_form(
-                    locations,
-                    action=animal_move,
-                ),
-                title="Move - AnimalTrack",
-                active_nav="move",
-            )
+    return render_page(
+        request,
+        move_form(
+            locations,
+            action=animal_move,
        ),
+        title="Move - AnimalTrack",
+        active_nav="move",
    )
 
 
@@ -327,10 +327,12 @@ def hatch_form(
     )
 
     # Build brood location options (optional)
+    # Note: We use "__none__" as a sentinel value instead of "" because FastHTML
+    # omits empty string attributes, causing browsers to submit the text content.
     brood_location_options = [
         Option(
             "Same as hatch location",
-            value="",
+            value="__none__",
             selected=not selected_brood_location,
         )
     ]
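Note: the sentinel only works because both sides agree on it — the form option above emits "__none__" and the hatch_recorded handler earlier in this compare maps it (and the empty string) back to None. A self-contained sketch of that round trip (hypothetical helper name; the handler is the authoritative version):

```python
SENTINEL = "__none__"


def parse_brood_location(raw: str) -> str | None:
    """Treat the sentinel and the empty string as 'no explicit brood location'."""
    return None if raw in ("", SENTINEL) else raw


assert parse_brood_location(SENTINEL) is None
assert parse_brood_location("") is None
assert parse_brood_location("01HYLOC123") == "01HYLOC123"
```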
@@ -98,8 +98,6 @@ def animal_checkbox_list(
         *resolved_id_fields,
         # Hidden field to indicate subset selection mode
         Input(type="hidden", name="subset_mode", value="true"),
-        # Hidden field for roster_hash - will be updated via JS
-        Input(type="hidden", name="roster_hash", id="roster-hash-field"),
         # Script for selection management
         selection_script(len(animals)),
         id="animal-selection-list",
@@ -126,8 +124,6 @@ def selection_script(total_count: int) -> Div:
         if (countText) {{
             countText.textContent = checked + ' of {total_count} selected';
         }}
-        // Trigger hash recomputation if needed
-        computeSelectionHash();
     }}
 
     function selectAllAnimals(selectAll) {{
@@ -137,39 +133,6 @@ def selection_script(total_count: int) -> Div:
         }});
         updateSelectionCount();
     }}
-
-    function getSelectedIds() {{
-        var checkboxes = document.querySelectorAll('#animal-selection-list input[name="selected_ids"]:checked');
-        return Array.from(checkboxes).map(cb => cb.value);
-    }}
-
-    function computeSelectionHash() {{
-        // Get selected IDs and compute hash via API
-        var selectedIds = getSelectedIds();
-        var fromLocationId = document.querySelector('input[name="from_location_id"]');
-        var fromLoc = fromLocationId ? fromLocationId.value : '';
-
-        fetch('/api/compute-hash', {{
-            method: 'POST',
-            headers: {{'Content-Type': 'application/json'}},
-            body: JSON.stringify({{
-                selected_ids: selectedIds,
-                from_location_id: fromLoc
-            }})
-        }})
-        .then(response => response.json())
-        .then(data => {{
-            var hashField = document.getElementById('roster-hash-field');
-            if (hashField) {{
-                hashField.value = data.roster_hash;
-            }}
-        }});
-    }}
-
-    // Initialize hash on load
-    document.addEventListener('DOMContentLoaded', function() {{
-        computeSelectionHash();
-    }});
 
     """)
 
@@ -70,7 +70,7 @@ def event_detail_panel(
         # Affected animals
         affected_animals_section(affected_animals),
         # Delete button (admin only, not for tombstoned events)
-        delete_section(event.id) if user_role == UserRole.admin and not is_tombstoned else None,
+        delete_section(event.id) if user_role == UserRole.ADMIN and not is_tombstoned else None,
         id="event-panel-content",
         cls="bg-[#141413] h-full overflow-y-auto",
     )
@@ -159,12 +159,26 @@ def render_payload_items(
     elif event_type == "AnimalMoved":
         from_loc = payload.get("from_location_id", "")
         to_loc = payload.get("to_location_id", "")
-        from_name = location_names.get(from_loc, from_loc[:8] + "..." if from_loc else "")
-        to_name = location_names.get(to_loc, to_loc[:8] + "..." if to_loc else "")
-        if from_name:
-            items.append(payload_item("From", from_name))
-        if to_name:
-            items.append(payload_item("To", to_name))
+        if from_loc:
+            from_name = location_names.get(from_loc, from_loc[:8] + "...")
+            items.append(
+                payload_item_with_link(
+                    "From",
+                    from_name,
+                    f"/locations/{from_loc}",
+                    f"ID: {from_loc}",
+                )
+            )
+        if to_loc:
+            to_name = location_names.get(to_loc, to_loc[:8] + "...")
+            items.append(
+                payload_item_with_link(
+                    "To",
+                    to_name,
+                    f"/locations/{to_loc}",
+                    f"ID: {to_loc}",
+                )
+            )
 
     elif event_type == "AnimalTagged":
         if "tag" in payload:
@@ -250,6 +264,48 @@ def payload_item(label: str, value: str) -> Div:
     )
 
 
+def payload_item_with_link(label: str, text: str, href: str, title: str) -> Div:
+    """Payload item with a clickable link."""
+    return Div(
+        Span(label + ":", cls="text-stone-500 text-sm min-w-[100px]"),
+        A(
+            text,
+            href=href,
+            title=title,
+            cls="text-amber-500 hover:underline text-sm",
+        ),
+        cls="flex gap-2",
+    )
+
+
+def location_display(
+    location_id: str,
+    location_names: dict[str, str],
+    as_link: bool = True,
+):
+    """Render a location ID with name and tooltip.
+
+    Args:
+        location_id: The location ULID.
+        location_names: Map of location IDs to names.
+        as_link: Whether to render as a link (default True).
+
+    Returns:
+        A or Span element displaying the location name with ID tooltip.
+    """
+    name = location_names.get(location_id, location_id[:8] + "...")
+    tooltip = f"ID: {location_id}"
+
+    if as_link:
+        return A(
+            name,
+            href=f"/locations/{location_id}",
+            title=tooltip,
+            cls="text-amber-500 hover:underline text-sm",
+        )
+    return Span(name, title=tooltip, cls="text-stone-300 text-sm")
+
+
 def entity_refs_section(
     entity_refs: dict[str, Any],
     location_names: dict[str, str],
@@ -264,15 +320,27 @@ def entity_refs_section(
         if key == "animal_ids":
             continue
 
-        display_value = value
-        # Resolve location names
-        if key.endswith("_location_id") or key == "location_id":
-            display_value = location_names.get(value, value[:8] + "..." if value else "")
+        # Handle location references with links and tooltips
+        if (key.endswith("_location_id") or key == "location_id") and isinstance(value, str):
+            loc_name = location_names.get(value, value[:8] + "...")
+            items.append(
+                payload_item_with_link(
+                    key.replace("_", " ").title(),
+                    loc_name,
+                    f"/locations/{value}",
+                    f"ID: {value}",
+                )
+            )
+            continue
 
+        # Handle lists
         if isinstance(value, list):
             display_value = f"{len(value)} items"
+        # Handle long strings
         elif isinstance(value, str) and len(value) > 20:
             display_value = value[:8] + "..."
+        else:
+            display_value = value
 
         items.append(payload_item(key.replace("_", " ").title(), str(display_value)))
 
@@ -409,7 +477,12 @@ def delete_script() -> Script:
                 body: 'reason=Deleted via UI'
             });
 
-            const data = await response.json();
+            // Try to parse JSON, but handle non-JSON responses gracefully
+            let data = {};
+            const contentType = response.headers.get('content-type');
+            if (contentType && contentType.includes('application/json')) {
+                data = await response.json();
+            }
 
             if (response.ok) {
                 statusEl.innerHTML = '<span class="text-green-400">Event deleted successfully!</span>';
@@ -422,7 +495,8 @@ def delete_script() -> Script:
                 statusEl.innerHTML = '<span class="text-red-400">' + data.message + '</span>';
                 deleteBtn.disabled = false;
             } else {
-                statusEl.innerHTML = '<span class="text-red-400">Error: ' + (data.error || 'Unknown error') + '</span>';
+                const errorMsg = data.error || 'Server error (' + response.status + ')';
+                statusEl.innerHTML = '<span class="text-red-400">Error: ' + errorMsg + '</span>';
                 deleteBtn.disabled = false;
             }
         } catch (err) {
@@ -179,7 +179,7 @@ EVENT_TYPES = [
 
 def event_type_selector(selected_event_type: str = "") -> Any:
     """Render event type filter dropdown."""
-    options = [Option("All types", value="", selected=not selected_event_type)]
+    options = [Option("All types", value="all", selected=selected_event_type in ("", "all"))]
     for event_type in EVENT_TYPES:
         options.append(
             Option(event_type, value=event_type, selected=event_type == selected_event_type)
@@ -231,6 +231,6 @@ def event_log_panel(
             event_log_list(events),
             id="event-log-content",
         ),
-        cls="bg-white rounded-lg shadow p-4",
+        cls="bg-[#141413] rounded-lg shadow p-4",
         id="event-log",
     )
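Note: the dropdown's default option now posts value="all" instead of an empty string, and event_log_index (earlier in this compare) maps "all" back to "no filter". A minimal sketch of that normalization, with a hypothetical helper name:

```python
def normalize_event_type(raw: str) -> str | None:
    """'all' and '' both mean 'do not filter by event type'."""
    return None if raw in ("", "all") else raw


assert normalize_event_type("all") is None
assert normalize_event_type("") is None
assert normalize_event_type("AnimalMoved") == "AnimalMoved"
```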
src/animaltrack/web/templates/location_detail.py (new file, 122 lines)
@@ -0,0 +1,122 @@
+# ABOUTME: Template for location detail page.
+# ABOUTME: Shows location information, status, and recent events.
+
+from datetime import UTC, datetime
+from typing import Any
+
+from fasthtml.common import H1, H2, A, Div, Li, P, Span, Ul
+
+from animaltrack.models.reference import Location
+
+
+def format_timestamp(ts_utc: int) -> str:
+    """Format timestamp for display."""
+    dt = datetime.fromtimestamp(ts_utc / 1000, tz=UTC)
+    return dt.strftime("%Y-%m-%d %H:%M")
+
+
+def location_detail_panel(
+    location: Location,
+    recent_events: list[dict[str, Any]] | None = None,
+    animal_count: int = 0,
+) -> Div:
+    """Location detail page content.
+
+    Args:
+        location: The location to display.
+        recent_events: Optional list of recent events at this location.
+        animal_count: Number of live animals currently at this location.
+
+    Returns:
+        Div containing the location detail page.
+    """
+    if recent_events is None:
+        recent_events = []
+
+    status_badge = (
+        Span("Active", cls="text-sm bg-green-900/50 text-green-300 px-2 py-1 rounded")
+        if location.active
+        else Span("Archived", cls="text-sm bg-stone-700 text-stone-400 px-2 py-1 rounded")
+    )
+
+    return Div(
+        # Header
+        Div(
+            H1(location.name, cls="text-2xl font-bold text-stone-100"),
+            status_badge,
+            cls="flex items-center gap-4 mb-6",
+        ),
+        # Info card
+        Div(
+            info_row("Location ID", location.id, monospace=True),
+            info_row("Created", format_timestamp(location.created_at_utc)),
+            info_row("Last Updated", format_timestamp(location.updated_at_utc)),
+            info_row("Live Animals", str(animal_count)),
+            cls="bg-stone-900/50 rounded-lg p-4 space-y-2 mb-6",
+        ),
+        # Recent events section
+        recent_events_section(recent_events) if recent_events else Div(),
+        # Back link
+        Div(
+            A(
+                "← Back to Event Log",
+                href="/event-log",
+                cls="text-amber-500 hover:underline",
+            ),
+            cls="mt-6",
+        ),
+        cls="max-w-2xl",
+    )
+
+
+def info_row(label: str, value: str, monospace: bool = False) -> Div:
+    """Single info row with label and value."""
+    value_cls = "text-stone-200"
+    if monospace:
+        value_cls += " font-mono text-sm"
+    return Div(
+        Span(label + ":", cls="text-stone-500 min-w-[120px]"),
+        Span(value, cls=value_cls),
+        cls="flex gap-4",
+    )
+
+
+def recent_events_section(events: list[dict[str, Any]]) -> Div:
+    """Section showing recent events at this location."""
+    event_items = []
+    for event in events[:10]:  # Limit to 10 most recent
+        event_items.append(
+            Li(
+                A(
+                    Span(
+                        event.get("type", "Unknown"),
+                        cls="text-amber-500 hover:underline",
+                    ),
+                    Span(
+                        f" - {format_timestamp(event.get('ts_utc', 0))}",
+                        cls="text-stone-500 text-sm",
+                    ),
+                    href=f"/events/{event.get('event_id')}",
+                    hx_get=f"/events/{event.get('event_id')}",
+                    hx_target="#event-panel",
+                    hx_swap="innerHTML",
+                ),
+                cls="py-1",
+            )
+        )
+
+    if not event_items:
+        return Div(
+            H2("Recent Events", cls="text-lg font-semibold text-stone-300 mb-2"),
+            P("No events recorded at this location.", cls="text-stone-500"),
+            cls="mt-4",
+        )
+
+    return Div(
+        H2(
+            f"Recent Events ({len(events)})",
+            cls="text-lg font-semibold text-stone-300 mb-2",
+        ),
+        Ul(*event_items, cls="space-y-1"),
+        cls="mt-4",
+    )
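Note: the panel is a plain FastHTML component, so it can be rendered in isolation for eyeballing. A sketch, assuming Location can be constructed with just the fields the template reads (id, name, active, created_at_utc, updated_at_utc) — the real model may require more:

```python
# Hypothetical rendering sketch; the Location constructor arguments are assumptions.
from fasthtml.common import to_xml

from animaltrack.models.reference import Location
from animaltrack.web.templates.location_detail import location_detail_panel

location = Location(
    id="01HYLOC123",
    name="Strip 1",
    active=True,
    created_at_utc=1700000000000,
    updated_at_utc=1700000000000,
)
print(to_xml(location_detail_panel(location, recent_events=[], animal_count=0)))
```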
tests/test_cli_rebuild.py (new file, 251 lines)
@@ -0,0 +1,251 @@
+# ABOUTME: Tests for rebuild-projections CLI command.
+# ABOUTME: Verifies projection tables are truncated and events are replayed correctly.
+
+import os
+import subprocess
+import sys
+from pathlib import Path
+
+from animaltrack.db import get_db
+from animaltrack.events.enums import LifeStage, Origin
+from animaltrack.events.payloads import AnimalCohortCreatedPayload
+from animaltrack.events.store import EventStore
+from animaltrack.migrations import run_migrations
+from animaltrack.projections import ProjectionRegistry
+from animaltrack.projections.animal_registry import AnimalRegistryProjection
+from animaltrack.projections.event_animals import EventAnimalsProjection
+from animaltrack.projections.event_log import EventLogProjection
+from animaltrack.projections.feed import FeedInventoryProjection
+from animaltrack.projections.intervals import IntervalProjection
+from animaltrack.projections.products import ProductsProjection
+from animaltrack.seeds import run_seeds
+from animaltrack.services.animal import AnimalService
+
+PROJECT_ROOT = Path(__file__).parent.parent
+
+
+class TestRebuildProjectionsCLI:
+    """Tests for rebuild-projections command."""
+
+    def test_rebuild_command_success(self, tmp_path):
+        """Should rebuild projections via CLI and exit 0."""
+        db_path = tmp_path / "test.db"
+
+        env = os.environ.copy()
+        env["DB_PATH"] = str(db_path)
+        env["CSRF_SECRET"] = "test-secret-for-csrf"
+        env["PYTHONPATH"] = str(PROJECT_ROOT / "src")
+
+        # First seed the database
+        result = subprocess.run(
+            [sys.executable, "-m", "animaltrack.cli", "seed"],
+            capture_output=True,
+            text=True,
+            env=env,
+            cwd=str(PROJECT_ROOT),
+        )
+        assert result.returncode == 0, f"Seed failed: {result.stderr}"
+
+        # Then rebuild projections
+        result = subprocess.run(
+            [sys.executable, "-m", "animaltrack.cli", "rebuild-projections"],
+            capture_output=True,
+            text=True,
+            env=env,
+            cwd=str(PROJECT_ROOT),
+        )
+
+        assert result.returncode == 0, f"Rebuild failed: {result.stderr}"
+        assert "Truncating projection tables" in result.stdout
+        assert "Rebuild complete" in result.stdout
+
+    def test_rebuild_with_events(self, tmp_path):
+        """Should correctly replay events and update projections."""
+        db_path = tmp_path / "test.db"
+
+        # Set up database with migrations and seeds
+        run_migrations(str(db_path), "migrations", verbose=False)
+        db = get_db(str(db_path))
+        run_seeds(db)
+
+        # Create some events via AnimalService
+        event_store = EventStore(db)
+        registry = ProjectionRegistry()
+        registry.register(AnimalRegistryProjection(db))
+        registry.register(IntervalProjection(db))
+        registry.register(EventAnimalsProjection(db))
+        registry.register(ProductsProjection(db))
+        registry.register(FeedInventoryProjection(db))
+        registry.register(EventLogProjection(db))
+
+        animal_service = AnimalService(db, event_store, registry)
+
+        # Create a cohort
+        import time
+
+        ts_utc = int(time.time() * 1000)
+        location = db.execute("SELECT id FROM locations LIMIT 1").fetchone()[0]
+
+        payload = AnimalCohortCreatedPayload(
+            species="duck",
+            count=5,
+            origin=Origin.PURCHASED,
+            life_stage=LifeStage.ADULT,
+            location_id=location,
+        )
+        animal_service.create_cohort(
+            payload=payload,
+            ts_utc=ts_utc,
+            actor="test",
+        )
+
+        # Verify animal_registry has entries
+        count_before = db.execute("SELECT COUNT(*) FROM animal_registry").fetchone()[0]
+        assert count_before == 5
+
+        # Clear projections manually (simulating corruption)
+        db.execute("DELETE FROM animal_registry")
+        db.execute("DELETE FROM live_animals_by_location")
+        count_cleared = db.execute("SELECT COUNT(*) FROM animal_registry").fetchone()[0]
+        assert count_cleared == 0
+
+        # Now run rebuild via CLI
+        env = os.environ.copy()
+        env["DB_PATH"] = str(db_path)
+        env["CSRF_SECRET"] = "test-secret-for-csrf"
+        env["PYTHONPATH"] = str(PROJECT_ROOT / "src")
+
+        result = subprocess.run(
+            [sys.executable, "-m", "animaltrack.cli", "rebuild-projections"],
+            capture_output=True,
+            text=True,
+            env=env,
+            cwd=str(PROJECT_ROOT),
+        )
+
+        assert result.returncode == 0, f"Rebuild failed: {result.stderr}"
+        # Verify events were processed (seed data may add additional events)
+        assert "events to replay" in result.stdout
+        assert "Rebuild complete" in result.stdout
+
+        # Verify projections are restored
+        db2 = get_db(str(db_path))
+        count_after = db2.execute("SELECT COUNT(*) FROM animal_registry").fetchone()[0]
+        # Should have at least our 5 animals restored
+        assert count_after >= 5
+
+    def test_rebuild_skips_tombstoned_events(self, tmp_path):
+        """Should not replay events that have been tombstoned."""
+        db_path = tmp_path / "test.db"
+
+        # Set up database
+        run_migrations(str(db_path), "migrations", verbose=False)
+        db = get_db(str(db_path))
+        run_seeds(db)
+
+        # Count animals from seed data
+        seed_animal_count = db.execute("SELECT COUNT(*) FROM animal_registry").fetchone()[0]
+
+        # Create events via AnimalService
+        event_store = EventStore(db)
+        registry = ProjectionRegistry()
+        registry.register(AnimalRegistryProjection(db))
+        registry.register(IntervalProjection(db))
+        registry.register(EventAnimalsProjection(db))
+        registry.register(ProductsProjection(db))
+        registry.register(FeedInventoryProjection(db))
+        registry.register(EventLogProjection(db))
+
+        animal_service = AnimalService(db, event_store, registry)
+
+        import time
+
+        ts_utc = int(time.time() * 1000)
+        location = db.execute("SELECT id FROM locations LIMIT 1").fetchone()[0]
+
+        # Create two cohorts
+        payload1 = AnimalCohortCreatedPayload(
+            species="duck",
+            count=3,
+            origin=Origin.PURCHASED,
+            life_stage=LifeStage.ADULT,
+            location_id=location,
+        )
+        animal_service.create_cohort(
+            payload=payload1,
+            ts_utc=ts_utc,
+            actor="test",
+        )
+        payload2 = AnimalCohortCreatedPayload(
+            species="duck",
+            count=2,
+            origin=Origin.PURCHASED,
+            life_stage=LifeStage.ADULT,
+            location_id=location,
+        )
+        event2 = animal_service.create_cohort(
+            payload=payload2,
+            ts_utc=ts_utc + 1000,
+            actor="test",
+        )
+
+        # Verify we have seed animals + 5 new animals
+        count_before = db.execute("SELECT COUNT(*) FROM animal_registry").fetchone()[0]
+        assert count_before == seed_animal_count + 5
+
+        # Tombstone the second event (manually, to simulate what delete_event does)
+        from ulid import ULID
+
+        tombstone_id = str(ULID())
+        db.execute(
+            """INSERT INTO event_tombstones (id, ts_utc, actor, target_event_id, reason)
+            VALUES (?, ?, ?, ?, ?)""",
+            (tombstone_id, ts_utc + 2000, "test", event2.id, "test deletion"),
+        )
+
+        # Run rebuild via CLI
+        env = os.environ.copy()
+        env["DB_PATH"] = str(db_path)
+        env["CSRF_SECRET"] = "test-secret-for-csrf"
+        env["PYTHONPATH"] = str(PROJECT_ROOT / "src")
+
+        result = subprocess.run(
+            [sys.executable, "-m", "animaltrack.cli", "rebuild-projections"],
+            capture_output=True,
+            text=True,
+            env=env,
+            cwd=str(PROJECT_ROOT),
+        )
+
+        assert result.returncode == 0, f"Rebuild failed: {result.stderr}"
+        # Verify rebuild completed
+        assert "events to replay" in result.stdout
+        assert "Rebuild complete" in result.stdout
+
+        # Verify only seed animals + 3 from first cohort (second cohort tombstoned)
+        db2 = get_db(str(db_path))
+        count_after = db2.execute("SELECT COUNT(*) FROM animal_registry").fetchone()[0]
+        # Should have 2 fewer animals (the tombstoned cohort had count=2)
+        assert count_after == seed_animal_count + 3
+
+    def test_rebuild_empty_event_log(self, tmp_path):
+        """Should handle empty event log gracefully."""
+        db_path = tmp_path / "test.db"
+
+        env = os.environ.copy()
+        env["DB_PATH"] = str(db_path)
+        env["CSRF_SECRET"] = "test-secret-for-csrf"
+        env["PYTHONPATH"] = str(PROJECT_ROOT / "src")
+
+        # Just run migrations (no seeds, no events)
+        result = subprocess.run(
+            [sys.executable, "-m", "animaltrack.cli", "rebuild-projections"],
+            capture_output=True,
+            text=True,
+            env=env,
+            cwd=str(PROJECT_ROOT),
+        )
+
+        assert result.returncode == 0, f"Rebuild failed: {result.stderr}"
+        assert "Found 0 events to replay" in result.stdout
+        assert "Rebuild complete: processed 0 events" in result.stdout
@@ -456,3 +456,86 @@ class TestSelectionMismatchError:

assert error.result is result
assert error.result.diff is diff


# ============================================================================
# Tests for validate_selection - subset mode
# ============================================================================


class TestValidateSelectionSubsetMode:
"""Tests for validate_selection with subset_mode=True."""

def test_subset_mode_returns_valid_when_all_selected_match(
self, seeded_db, animal_service, strip1_location_id
):
"""validate_selection returns valid=True when all selected IDs are in filter."""
# Create cohort of 5 animals
ts_utc = int(time.time() * 1000)
payload = make_cohort_payload(strip1_location_id, count=5)
event = animal_service.create_cohort(payload, ts_utc, "test_user")
all_ids = event.entity_refs["animal_ids"]

# User selects only 2 of them
selected_ids = all_ids[:2]
subset_hash = compute_roster_hash(selected_ids, None)

ctx = SelectionContext(
filter="species:duck",
resolved_ids=all_ids, # Full filter resolution
roster_hash=subset_hash, # Hash of selected subset
ts_utc=ts_utc,
from_location_id=None,
subset_mode=True,
selected_ids=selected_ids,
)

result = validate_selection(seeded_db, ctx)

assert result.valid is True
assert result.resolved_ids == selected_ids
assert result.diff is None

def test_subset_mode_diff_server_count_is_valid_selected_count(
self, seeded_db, animal_service, strip1_location_id, strip2_location_id
):
"""In subset mode, diff.server_count should be count of valid selected IDs, not full filter."""
# Create cohort of 5 animals
ts_create = int(time.time() * 1000)
payload = make_cohort_payload(strip1_location_id, count=5)
event = animal_service.create_cohort(payload, ts_create, "test_user")
all_ids = event.entity_refs["animal_ids"]

# User selects 2 animals
selected_ids = all_ids[:2]
subset_hash = compute_roster_hash(selected_ids, None)

# Move one selected animal away (makes it invalid for the filter)
ts_move = ts_create + 1000
move_payload = AnimalMovedPayload(
resolved_ids=[selected_ids[0]],
from_location_id=strip1_location_id,
to_location_id=strip2_location_id,
)
animal_service.move_animals(move_payload, ts_move, "test_user")

# Now validate at ts_move - one of the selected animals is no longer at Strip 1
ctx = SelectionContext(
filter="location:'Strip 1'",
resolved_ids=all_ids, # Full filter resolution at creation time
roster_hash=subset_hash,
ts_utc=ts_move, # Validate at move time
from_location_id=None,
subset_mode=True,
selected_ids=selected_ids,
)

result = validate_selection(seeded_db, ctx)

assert result.valid is False
assert result.diff is not None
# BUG: diff.server_count is currently len(resolved_ids) = 4 (5 minus moved)
# SHOULD BE: len(valid_selected) = 1 (2 selected minus 1 moved)
assert result.diff.server_count == 1 # Only 1 valid selected animal remains
assert result.diff.client_count == 2 # User selected 2
assert selected_ids[0] in result.diff.removed # The moved animal is invalid
@@ -149,7 +149,7 @@ class TestCohortCreationSuccess:
assert count_after == count_before + 3

def test_cohort_success_returns_toast(self, client, seeded_db, location_strip1_id):
"""Successful cohort creation stores toast in session."""
"""Successful cohort creation renders toast in response body."""
resp = client.post(
"/actions/animal-cohort",
data={
@@ -164,20 +164,8 @@ class TestCohortCreationSuccess:
)

assert resp.status_code == 200
# Toast is stored in session cookie (FastHTML's add_toast mechanism)
# Toast is injected into response body by FastHTML's toast middleware
# The session cookie contains base64-encoded toast data with "toasts" key
assert "Created 2 duck" in resp.text
assert "set-cookie" in resp.headers
session_cookie = resp.headers["set-cookie"]
assert "session_=" in session_cookie
# Base64 decode contains toast message (eyJ0b2FzdHMi... = {"toasts"...)
import base64

# Extract base64 portion from cookie value
cookie_value = session_cookie.split("session_=")[1].split(";")[0]
# FastHTML uses itsdangerous, so format is base64.timestamp.signature
base64_data = cookie_value.split(".")[0]
decoded = base64.b64decode(base64_data).decode()
assert "Created 2 duck" in decoded


class TestCohortCreationValidation:
@@ -374,8 +362,36 @@ class TestHatchRecordingSuccess:

assert count_at_nursery >= 3

def test_hatch_with_sentinel_brood_location_value(self, client, seeded_db, location_strip1_id):
"""POST with __none__ sentinel value for brood location works correctly.

The form uses "__none__" as a sentinel value because FastHTML omits empty
string attributes, which would cause browsers to submit the option text
content instead.
"""
resp = client.post(
"/actions/hatch-recorded",
data={
"species": "duck",
"location_id": location_strip1_id,
"assigned_brood_location_id": "__none__",
"hatched_live": "2",
"nonce": "test-hatch-nonce-sentinel",
},
)

assert resp.status_code == 200

# Verify hatchlings are at hatch location (not a separate brood location)
count_at_location = seeded_db.execute(
"SELECT COUNT(*) FROM animal_registry WHERE location_id = ? AND life_stage = 'hatchling'",
(location_strip1_id,),
).fetchone()[0]

assert count_at_location >= 2

def test_hatch_success_returns_toast(self, client, seeded_db, location_strip1_id):
"""Successful hatch recording stores toast in session."""
"""Successful hatch recording renders toast in response body."""
resp = client.post(
"/actions/hatch-recorded",
data={
@@ -387,16 +403,8 @@ class TestHatchRecordingSuccess:
)

assert resp.status_code == 200
# Toast is stored in session cookie (FastHTML's add_toast mechanism)
# Toast is injected into response body by FastHTML's toast middleware
assert "set-cookie" in resp.headers
assert "Recorded 2 hatchling" in resp.text
session_cookie = resp.headers["set-cookie"]
assert "session_=" in session_cookie
import base64

cookie_value = session_cookie.split("session_=")[1].split(";")[0]
base64_data = cookie_value.split(".")[0]
decoded = base64.b64decode(base64_data).decode()
assert "Recorded 2 hatchling" in decoded


class TestHatchRecordingValidation:
@@ -729,8 +737,7 @@ class TestTagAddSuccess:
assert tag_count >= len(animals_for_tagging)

def test_tag_add_success_returns_toast(self, client, seeded_db, animals_for_tagging):
"""Successful tag add stores toast in session."""
"""Successful tag add renders toast in response body."""
import base64
import time

from animaltrack.selection import compute_roster_hash
@@ -751,14 +758,8 @@ class TestTagAddSuccess:
)

assert resp.status_code == 200
# Toast is stored in session cookie
# Toast is injected into response body by FastHTML's toast middleware
assert "set-cookie" in resp.headers
assert "Tagged" in resp.text and "test-tag-toast" in resp.text
session_cookie = resp.headers["set-cookie"]
assert "session_=" in session_cookie
cookie_value = session_cookie.split("session_=")[1].split(";")[0]
base64_data = cookie_value.split(".")[0]
decoded = base64.b64decode(base64_data).decode()
assert "Tagged" in decoded and "test-tag-toast" in decoded


class TestTagAddValidation:
@@ -925,8 +926,7 @@ class TestTagEndSuccess:
assert open_after == 0

def test_tag_end_success_returns_toast(self, client, seeded_db, tagged_animals):
"""Successful tag end stores toast in session."""
"""Successful tag end renders toast in response body."""
import base64
import time

from animaltrack.selection import compute_roster_hash
@@ -947,14 +947,8 @@ class TestTagEndSuccess:
)

assert resp.status_code == 200
# Toast is stored in session cookie
# Toast is injected into response body by FastHTML's toast middleware
assert "set-cookie" in resp.headers
assert "Ended tag" in resp.text and "test-end-tag" in resp.text
session_cookie = resp.headers["set-cookie"]
assert "session_=" in session_cookie
cookie_value = session_cookie.split("session_=")[1].split(";")[0]
base64_data = cookie_value.split(".")[0]
decoded = base64.b64decode(base64_data).decode()
assert "Ended tag" in decoded and "test-end-tag" in decoded


class TestTagEndValidation:
@@ -1103,8 +1097,7 @@ class TestAttrsSuccess:
assert adult_count == len(animals_for_tagging)

def test_attrs_success_returns_toast(self, client, seeded_db, animals_for_tagging):
"""Successful attrs update stores toast in session."""
"""Successful attrs update renders toast in response body."""
import base64
import time

from animaltrack.selection import compute_roster_hash
@@ -1125,14 +1118,8 @@ class TestAttrsSuccess:
)

assert resp.status_code == 200
# Toast is stored in session cookie
# Toast is injected into response body by FastHTML's toast middleware
assert "set-cookie" in resp.headers
assert "Updated attributes" in resp.text
session_cookie = resp.headers["set-cookie"]
assert "session_=" in session_cookie
cookie_value = session_cookie.split("session_=")[1].split(";")[0]
base64_data = cookie_value.split(".")[0]
decoded = base64.b64decode(base64_data).decode()
assert "Updated attributes" in decoded


class TestAttrsValidation:
@@ -1280,8 +1267,7 @@ class TestOutcomeSuccess:
assert harvested_count == len(animals_for_tagging)

def test_outcome_success_returns_toast(self, client, seeded_db, animals_for_tagging):
"""Successful outcome recording stores toast in session."""
"""Successful outcome recording renders toast in response body."""
import base64
import time

from animaltrack.selection import compute_roster_hash
@@ -1302,14 +1288,8 @@ class TestOutcomeSuccess:
)

assert resp.status_code == 200
# Toast is stored in session cookie
# Toast is injected into response body by FastHTML's toast middleware
assert "set-cookie" in resp.headers
assert "Recorded sold" in resp.text
session_cookie = resp.headers["set-cookie"]
assert "session_=" in session_cookie
cookie_value = session_cookie.split("session_=")[1].split(";")[0]
base64_data = cookie_value.split(".")[0]
decoded = base64.b64decode(base64_data).decode()
assert "Recorded sold" in decoded


class TestOutcomeValidation:
279
tests/test_web_events_delete.py
Normal file
@@ -0,0 +1,279 @@
# ABOUTME: Tests for event delete with projection verification.
# ABOUTME: Verifies that deleting events properly reverts projections.

import time

from animaltrack.db import get_db
from animaltrack.events.delete import delete_event
from animaltrack.events.enums import LifeStage, Origin, Outcome
from animaltrack.events.payloads import AnimalCohortCreatedPayload, AnimalOutcomePayload
from animaltrack.events.store import EventStore
from animaltrack.migrations import run_migrations
from animaltrack.projections import ProjectionRegistry
from animaltrack.projections.animal_registry import AnimalRegistryProjection
from animaltrack.projections.event_animals import EventAnimalsProjection
from animaltrack.projections.event_log import EventLogProjection
from animaltrack.projections.feed import FeedInventoryProjection
from animaltrack.projections.intervals import IntervalProjection
from animaltrack.projections.products import ProductsProjection
from animaltrack.seeds import run_seeds
from animaltrack.services.animal import AnimalService


class TestEventDeleteProjections:
"""Tests for delete_event with projection updates."""

def test_delete_animal_outcome_reverts_status(self, tmp_path):
"""Deleting AnimalOutcome should revert animals to alive status."""
db_path = tmp_path / "test.db"

# Set up database
run_migrations(str(db_path), "migrations", verbose=False)
db = get_db(str(db_path))
run_seeds(db)

# Create projections and services
event_store = EventStore(db)
registry = ProjectionRegistry()
registry.register(AnimalRegistryProjection(db))
registry.register(IntervalProjection(db))
registry.register(EventAnimalsProjection(db))
registry.register(ProductsProjection(db))
registry.register(FeedInventoryProjection(db))
registry.register(EventLogProjection(db))

animal_service = AnimalService(db, event_store, registry)

ts_utc = int(time.time() * 1000)
location = db.execute("SELECT id FROM locations LIMIT 1").fetchone()[0]

# Create a cohort
cohort_payload = AnimalCohortCreatedPayload(
species="duck",
count=3,
origin=Origin.PURCHASED,
life_stage=LifeStage.ADULT,
location_id=location,
)
cohort_event = animal_service.create_cohort(
payload=cohort_payload,
ts_utc=ts_utc,
actor="test",
)

# Get animal IDs
animal_ids = cohort_event.entity_refs["animal_ids"]

# Verify all animals are alive
for aid in animal_ids:
row = db.execute(
"SELECT status FROM animal_registry WHERE animal_id = ?",
(aid,),
).fetchone()
assert row[0] == "alive"

# Record outcome (sold)
outcome_payload = AnimalOutcomePayload(
outcome=Outcome.SOLD,
resolved_ids=animal_ids,
)
outcome_event = animal_service.record_outcome(
payload=outcome_payload,
ts_utc=ts_utc + 1000,
actor="test",
)

# Verify animals are now "sold"
for aid in animal_ids:
row = db.execute(
"SELECT status FROM animal_registry WHERE animal_id = ?",
(aid,),
).fetchone()
assert row[0] == "sold", f"Animal {aid} should be sold, got {row[0]}"

# Delete the outcome event
deleted_ids = delete_event(
db=db,
event_store=event_store,
event_id=outcome_event.id,
actor="test",
role="admin",
cascade=False,
reason="test deletion",
registry=registry,
)

assert len(deleted_ids) == 1
assert outcome_event.id in deleted_ids

# Verify animals are back to "alive"
for aid in animal_ids:
row = db.execute(
"SELECT status FROM animal_registry WHERE animal_id = ?",
(aid,),
).fetchone()
assert row[0] == "alive", f"Animal {aid} should be alive after delete, got {row[0]}"

def test_delete_without_registry_does_not_revert(self, tmp_path):
"""Without registry projections, delete won't revert status (bug demo)."""
db_path = tmp_path / "test.db"

# Set up database
run_migrations(str(db_path), "migrations", verbose=False)
db = get_db(str(db_path))
run_seeds(db)

# Create projections and services
event_store = EventStore(db)
registry = ProjectionRegistry()
registry.register(AnimalRegistryProjection(db))
registry.register(IntervalProjection(db))
registry.register(EventAnimalsProjection(db))
registry.register(ProductsProjection(db))
registry.register(FeedInventoryProjection(db))
registry.register(EventLogProjection(db))

animal_service = AnimalService(db, event_store, registry)

ts_utc = int(time.time() * 1000)
location = db.execute("SELECT id FROM locations LIMIT 1").fetchone()[0]

# Create a cohort
cohort_payload = AnimalCohortCreatedPayload(
species="duck",
count=2,
origin=Origin.PURCHASED,
life_stage=LifeStage.ADULT,
location_id=location,
)
cohort_event = animal_service.create_cohort(
payload=cohort_payload,
ts_utc=ts_utc,
actor="test",
)

animal_ids = cohort_event.entity_refs["animal_ids"]

# Record outcome (sold)
outcome_payload = AnimalOutcomePayload(
outcome=Outcome.SOLD,
resolved_ids=animal_ids,
)
outcome_event = animal_service.record_outcome(
payload=outcome_payload,
ts_utc=ts_utc + 1000,
actor="test",
)

# Verify animals are "sold"
for aid in animal_ids:
row = db.execute(
"SELECT status FROM animal_registry WHERE animal_id = ?",
(aid,),
).fetchone()
assert row[0] == "sold"

# Delete with EMPTY registry (simulating the bug)
empty_registry = ProjectionRegistry() # No projections registered!
deleted_ids = delete_event(
db=db,
event_store=event_store,
event_id=outcome_event.id,
actor="test",
role="admin",
cascade=False,
reason="test deletion",
registry=empty_registry,
)

assert len(deleted_ids) == 1

# Bug: Animals are still "sold" because projections weren't reverted
for aid in animal_ids:
row = db.execute(
"SELECT status FROM animal_registry WHERE animal_id = ?",
(aid,),
).fetchone()
# This demonstrates the bug - with empty registry, status is not reverted
assert row[0] == "sold", "Without projections, animal should stay sold"

def test_delete_death_outcome_reverts_to_alive(self, tmp_path):
"""Deleting death outcome should revert animals to alive status."""
db_path = tmp_path / "test.db"

# Set up database
run_migrations(str(db_path), "migrations", verbose=False)
db = get_db(str(db_path))
run_seeds(db)

# Create projections and services
event_store = EventStore(db)
registry = ProjectionRegistry()
registry.register(AnimalRegistryProjection(db))
registry.register(IntervalProjection(db))
registry.register(EventAnimalsProjection(db))
registry.register(ProductsProjection(db))
registry.register(FeedInventoryProjection(db))
registry.register(EventLogProjection(db))

animal_service = AnimalService(db, event_store, registry)

ts_utc = int(time.time() * 1000)
location = db.execute("SELECT id FROM locations LIMIT 1").fetchone()[0]

# Create a cohort
cohort_payload = AnimalCohortCreatedPayload(
species="duck",
count=2,
origin=Origin.PURCHASED,
life_stage=LifeStage.ADULT,
location_id=location,
)
cohort_event = animal_service.create_cohort(
payload=cohort_payload,
ts_utc=ts_utc,
actor="test",
)

animal_ids = cohort_event.entity_refs["animal_ids"]

# Record death
outcome_payload = AnimalOutcomePayload(
outcome=Outcome.DEATH,
resolved_ids=animal_ids,
)
outcome_event = animal_service.record_outcome(
payload=outcome_payload,
ts_utc=ts_utc + 1000,
actor="test",
)

# Verify animals are "dead"
for aid in animal_ids:
row = db.execute(
"SELECT status FROM animal_registry WHERE animal_id = ?",
(aid,),
).fetchone()
assert row[0] == "dead"

# Delete the outcome event with proper registry
deleted_ids = delete_event(
db=db,
event_store=event_store,
event_id=outcome_event.id,
actor="test",
role="admin",
cascade=False,
reason="test deletion",
registry=registry,
)

assert len(deleted_ids) == 1

# Verify animals are back to alive
for aid in animal_ids:
row = db.execute(
"SELECT status FROM animal_registry WHERE animal_id = ?",
(aid,),
).fetchone()
assert row[0] == "alive", f"Animal {aid} should be alive, got {row[0]}"
@@ -198,7 +198,7 @@ class TestMoveAnimalSuccess:
location_strip2_id,
ducks_at_strip1,
):
"""Successful move returns session cookie with toast."""
"""Successful move renders toast in response body."""
ts_utc = int(time.time() * 1000)
filter_str = 'location:"Strip 1"'
filter_ast = parse_filter(filter_str)
@@ -219,16 +219,8 @@ class TestMoveAnimalSuccess:
)

assert resp.status_code == 200
assert "set-cookie" in resp.headers
# Toast is injected into response body by FastHTML's toast middleware
session_cookie = resp.headers["set-cookie"]
assert "Moved 5 animals to Strip 2" in resp.text
assert "session_=" in session_cookie
# Base64 decode contains toast message
import base64

cookie_value = session_cookie.split("session_=")[1].split(";")[0]
base64_data = cookie_value.split(".")[0]
decoded = base64.b64decode(base64_data).decode()
assert "Moved 5 animals to Strip 2" in decoded

def test_move_success_resets_form(
self,