"""Event removal tracking for "Recently Removed" dashboard widget."""
import json
import logging
from datetime import datetime, timedelta
from pathlib import Path
from typing import List, Dict, Any, Optional
# Module-level logger, named after this module per logging convention.
# BUG FIX: was `logging.getLogger(name)`, which raises NameError at import
# time because `name` is undefined — the dunder `__name__` was intended.
logger = logging.getLogger(__name__)

# Default location of the append-only JSONL removal log.
REMOVAL_LOG_PATH = Path("/home/hoffmann_admin/.openclaw/workspace-socrates/hoffdesk-api/data/removed_events.jsonl")
# Ensure the data directory exists at import time so early writers don't fail.
# NOTE(review): import-time filesystem side effect; RemovalTracker.__init__
# repeats this for custom paths, so this only serves non-instance consumers.
REMOVAL_LOG_PATH.parent.mkdir(parents=True, exist_ok=True)

# Entries older than this many days are purged by cleanup_old_entries().
MAX_RETENTION_DAYS = 30
class RemovalTracker:
    """Track calendar event removals for undo/audit purposes.

    Removals are appended to a JSONL file (one JSON object per line).
    All file operations are best-effort: failures are logged, never raised,
    so a broken log file cannot break the calling workflow.
    """

    def __init__(self, log_path: Optional[Path] = None):
        """Initialize the tracker.

        Args:
            log_path: Override for the log file location; defaults to
                REMOVAL_LOG_PATH. The parent directory is created if missing.
        """
        self.log_path = log_path or REMOVAL_LOG_PATH
        self.log_path.parent.mkdir(parents=True, exist_ok=True)

    async def log_removal(
        self,
        uid: str,
        summary: str,
        start_time: str,
        removed_by: str = "telegram_callback",
        source: str = "newsletter",
        reason: Optional[str] = None
    ) -> None:
        """Log an event removal by appending one JSON line to the log file.

        Args:
            uid: Event UID that was removed
            summary: Event title/summary
            start_time: When the event was scheduled
            removed_by: Who/what triggered removal (telegram_callback, manual, etc.)
            source: Where the event came from (newsletter, appointment, family, etc.)
            reason: Optional reason for removal

        Failures are logged and swallowed (best-effort audit trail).
        """
        entry = {
            "uid": uid,
            "summary": summary,
            "start_time": start_time,
            # Naive local timestamp, matching what get_recent_removals()
            # compares against with datetime.now().
            "removed_at": datetime.now().isoformat(),
            "removed_by": removed_by,
            "source": source,
            "reason": reason,
            "can_undo": False  # Radicale doesn't support undo after DELETE
        }
        try:
            # Explicit UTF-8 so non-ASCII event summaries round-trip
            # regardless of the platform's default locale encoding.
            with open(self.log_path, "a", encoding="utf-8") as f:
                f.write(json.dumps(entry) + "\n")
            # Lazy %-style args: no string formatting unless INFO is enabled.
            logger.info("Logged removal: %s (%s)", summary, uid)
        except Exception as e:
            # Deliberate best-effort: never let audit logging break a removal.
            logger.error("Failed to log removal: %s", e)

    def get_recent_removals(
        self,
        hours: int = 24,
        limit: int = 20,
        source: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """Get recently removed events, newest first.

        Args:
            hours: How far back to look
            limit: Max results
            source: Filter by source type (optional)

        Returns:
            List of removal records (empty on missing file or read error).
            Malformed lines and entries without a parseable "removed_at"
            are silently skipped.
        """
        if not self.log_path.exists():
            return []
        cutoff = datetime.now() - timedelta(hours=hours)
        removals = []
        try:
            with open(self.log_path, "r", encoding="utf-8") as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        continue
                    try:
                        entry = json.loads(line)
                        # fromisoformat("") raises ValueError, so entries
                        # missing "removed_at" fall through to the skip below.
                        removed_at = datetime.fromisoformat(entry.get("removed_at", ""))
                        if removed_at >= cutoff and (source is None or entry.get("source") == source):
                            removals.append(entry)
                    except (json.JSONDecodeError, ValueError):
                        continue  # skip malformed lines; keep reading
        except Exception as e:
            logger.error("Failed to read removal log: %s", e)
            return []
        # Sort by removal time, newest first (ISO-8601 sorts lexicographically)
        removals.sort(key=lambda x: x.get("removed_at", ""), reverse=True)
        return removals[:limit]

    def cleanup_old_entries(self) -> int:
        """Remove entries older than MAX_RETENTION_DAYS by rewriting the log.

        Returns:
            Number of entries removed (0 on missing file or any I/O error).
        """
        if not self.log_path.exists():
            return 0
        cutoff = datetime.now() - timedelta(days=MAX_RETENTION_DAYS)
        kept = []
        removed_count = 0
        try:
            with open(self.log_path, "r", encoding="utf-8") as f:
                for line in f:
                    line = line.strip()
                    if not line:
                        continue
                    try:
                        entry = json.loads(line)
                        removed_at = datetime.fromisoformat(entry.get("removed_at", ""))
                        if removed_at >= cutoff:
                            kept.append(line)
                        else:
                            removed_count += 1
                    except (json.JSONDecodeError, ValueError):
                        kept.append(line)  # Keep malformed lines for inspection
            # Rewrite in place. NOTE(review): not atomic — a crash mid-write
            # loses the log; consider write-to-temp + rename if that matters.
            with open(self.log_path, "w", encoding="utf-8") as f:
                for line in kept:
                    f.write(line + "\n")
            if removed_count > 0:
                logger.info("Cleaned up %d old removal entries", removed_count)
            return removed_count
        except Exception as e:
            logger.error("Failed to cleanup removal log: %s", e)
            return 0