Modules: triage, health_scanner, feedback_loop, memory_hygiene,
roadmap, validate_output, enhanced_search, auto_handoff
+ composite_scorer, intent_classifier
CLI: 'cortex <module> <command>' unified entry point
Tests: 157/169 passing (12 assertion mismatches from rename)
Docker: python:3.11-slim based
341 lines
12 KiB
Python
#!/usr/bin/env python3
"""Tests for triage.py and memory_hygiene.py — 35+ tests."""

import json
import os
import shutil
import sys
import tempfile
import unittest
from datetime import datetime, timedelta
from pathlib import Path
from unittest.mock import patch

# Make the cortex package importable when the suite is run from this directory.
sys.path.insert(0, os.path.dirname(__file__))

from cortex.triage import score_task, rank_tasks, analyze_message, TriageScore
import cortex.memory_hygiene as mh


class TestTriageUrgency(unittest.TestCase):
    """Exercise the urgency dimension of triage scoring."""

    def test_high_urgency_german(self):
        score = score_task("Das muss JETZT sofort gefixt werden!")
        self.assertGreater(score.urgency, 0.7)

    def test_high_urgency_english(self):
        score = score_task("URGENT: system is down, critical error!")
        self.assertGreater(score.urgency, 0.7)

    def test_low_urgency(self):
        score = score_task("Update the documentation when you have time")
        self.assertLess(score.urgency, 0.3)

    def test_emergency(self):
        score = score_task("Notfall! Alles kaputt!")
        self.assertGreater(score.urgency, 0.8)

    def test_deadline_urgency(self):
        score = score_task("Deadline today, needs to be done ASAP")
        self.assertGreater(score.urgency, 0.5)

    def test_no_urgency(self):
        # A task with no urgency keywords must score exactly zero.
        score = score_task("Nice to have feature for later")
        self.assertEqual(score.urgency, 0.0)


class TestTriageImportance(unittest.TestCase):
    """Exercise the importance dimension of triage scoring."""

    def test_production_security(self):
        score = score_task("Production security breach detected")
        self.assertGreater(score.importance, 0.7)

    def test_customer_money(self):
        score = score_task("Kunde verliert Geld wegen Fehler")
        self.assertGreater(score.importance, 0.6)

    def test_regulatory(self):
        score = score_task("BaFin compliance audit next week")
        self.assertGreater(score.importance, 0.8)

    def test_low_importance(self):
        score = score_task("Change button color on internal tool")
        self.assertLess(score.importance, 0.3)

    def test_gateway_dns(self):
        score = score_task("Gateway DNS resolution failing")
        self.assertGreater(score.importance, 0.5)

    def test_albert_request(self):
        score = score_task("Albert wants this done")
        self.assertGreater(score.importance, 0.4)


class TestTriageEffort(unittest.TestCase):
    """Exercise the effort-estimation dimension."""

    def test_quick_fix(self):
        score = score_task("Quick fix: change config value")
        self.assertLess(score.effort, 0.4)

    def test_major_refactor(self):
        score = score_task("Complete refactor of the migration system")
        self.assertGreater(score.effort, 0.6)

    def test_simple_german(self):
        score = score_task("Einfach schnell den Typo fixen")
        self.assertLess(score.effort, 0.3)

    def test_default_effort(self):
        # With no effort signals at all the estimate should sit near the
        # neutral midpoint of 0.5.
        score = score_task("Do something")
        self.assertAlmostEqual(score.effort, 0.5, places=1)


class TestTriageReadiness(unittest.TestCase):
    """Exercise readiness / blocked-dependency detection."""

    def test_blocked(self):
        score = score_task("Blocked: waiting for API key from vendor")
        self.assertLess(score.readiness, 0.5)

    def test_german_blocked(self):
        score = score_task("Brauche erst die Credentials, warte auf Albert")
        self.assertLess(score.readiness, 0.5)

    def test_ready(self):
        # No blocking language at all -> fully ready.
        score = score_task("Fix the login page CSS")
        self.assertEqual(score.readiness, 1.0)

    def test_depends_on(self):
        score = score_task("Depends on database migration finishing first")
        self.assertLess(score.readiness, 0.5)


class TestTriagePriority(unittest.TestCase):
    """Exercise the combined priority score."""

    def test_critical_scores_high(self):
        score = score_task("URGENT: Production is down! Critical security breach!")
        self.assertGreater(score.priority, 0.6)

    def test_blocked_lowers_priority(self):
        # The same task should rank lower once a blocker is mentioned.
        unblocked = score_task("Fix production error")
        blocked = score_task("Fix production error, blocked by vendor")
        self.assertGreater(unblocked.priority, blocked.priority)

    def test_easy_beats_hard_all_else_equal(self):
        # Both mention the gateway (same importance signal); only the
        # effort wording differs, so the cheaper task should win.
        easy = score_task("Quick config fix for gateway")
        hard = score_task("Complete refactor redesign for gateway")
        self.assertGreater(easy.priority, hard.priority)


class TestTriageRanking(unittest.TestCase):
    """Exercise ranking of multiple tasks at once."""

    def test_rank_order(self):
        tasks = [
            "Write docs",
            "URGENT: Production down!",
            "Update OpenClaw config",
        ]
        ranked = rank_tasks(tasks)
        self.assertEqual(len(ranked), 3)
        self.assertEqual(ranked[0].text, "URGENT: Production down!")

    def test_rank_preserves_all(self):
        # Ranking must never drop tasks.
        ranked = rank_tasks(["a", "b", "c", "d"])
        self.assertEqual(len(ranked), 4)

    def test_rank_descending(self):
        ranked = rank_tasks(["Fix docs", "CRITICAL production down", "Nice to have"])
        # Each entry's priority must be >= the next one's.
        for higher, lower in zip(ranked, ranked[1:]):
            self.assertGreaterEqual(higher.priority, lower.priority)


class TestTriageAnalyze(unittest.TestCase):
    """Exercise full message analysis (signals + classification)."""

    def test_analyze_returns_signals(self):
        report = analyze_message("JETZT sofort Production fixen!")
        urgency_hits = report["signals"]["urgency"]
        self.assertIn("jetzt", urgency_hits)
        self.assertIn("sofort", urgency_hits)
        self.assertIn("production", report["signals"]["importance"])

    def test_analyze_classification_critical(self):
        report = analyze_message("URGENT critical production security down!")
        self.assertIn("CRITICAL", report["classification"])

    def test_analyze_classification_low(self):
        report = analyze_message("Maybe someday update the readme")
        self.assertIn("LOW", report["classification"])


class TestMemoryDuplicates(unittest.TestCase):
    """Exercise duplicate-paragraph detection across memory files."""

    def setUp(self):
        # Point the module at a throwaway directory for the duration of the test.
        self.tmpdir = Path(tempfile.mkdtemp())
        self.saved_dir = mh.MEMORY_DIR
        mh.MEMORY_DIR = self.tmpdir

    def tearDown(self):
        mh.MEMORY_DIR = self.saved_dir
        shutil.rmtree(self.tmpdir)

    def _write(self, name, text):
        # Small helper: create a file inside the temp memory dir.
        (self.tmpdir / name).write_text(text)

    def test_finds_exact_duplicates(self):
        para = "This is a sufficiently long paragraph that should be detected as duplicate content across files."
        self._write("a.md", f"# A\n\n{para}\n\nOther stuff.")
        self._write("b.md", f"# B\n\n{para}\n\nDifferent stuff.")
        self.assertGreater(len(mh.find_duplicates(min_length=20)), 0)

    def test_no_false_positives(self):
        self._write("a.md", "# Unique content A\n\nCompletely different paragraph about apples.")
        self._write("b.md", "# Unique content B\n\nTotally unrelated text about oranges.")
        self.assertEqual(len(mh.find_duplicates(min_length=20)), 0)

    def test_short_paragraphs_ignored(self):
        # Identical but tiny paragraphs fall below min_length and are skipped.
        self._write("a.md", "Hi\n\nHi")
        self._write("b.md", "Hi\n\nHi")
        self.assertEqual(len(mh.find_duplicates(min_length=50)), 0)


class TestMemoryStaleness(unittest.TestCase):
    """Exercise staleness detection on memory files."""

    def setUp(self):
        # Redirect the module to a throwaway directory.
        self.tmpdir = Path(tempfile.mkdtemp())
        self.saved_dir = mh.MEMORY_DIR
        mh.MEMORY_DIR = self.tmpdir

    def tearDown(self):
        mh.MEMORY_DIR = self.saved_dir
        shutil.rmtree(self.tmpdir)

    def test_old_todo(self):
        path = self.tmpdir / "notes.md"
        path.write_text("# Notes\n\nTODO: fix this thing\n")
        # Backdate the file's mtime by 60 days so the TODO counts as stale.
        stamp = (datetime.now() - timedelta(days=60)).timestamp()
        os.utime(path, (stamp, stamp))
        reasons = [entry["reason"] for entry in mh.find_stale()]
        self.assertTrue(any("TODO" in reason for reason in reasons))

    def test_old_date(self):
        path = self.tmpdir / "info.md"
        old_date = (datetime.now() - timedelta(days=120)).strftime("%Y-%m-%d")
        path.write_text(f"Meeting on {old_date} decided X.\n")
        self.assertGreater(len(mh.find_stale()), 0)

    def test_fresh_content_ok(self):
        path = self.tmpdir / "fresh.md"
        today = datetime.now().strftime("%Y-%m-%d")
        path.write_text(f"Updated {today}: everything is fine.\n")
        # A file dated today must not be flagged for age.
        date_stale = [entry for entry in mh.find_stale()
                      if "days old" in entry.get("reason", "")]
        self.assertEqual(len(date_stale), 0)


class TestMemoryOrphans(unittest.TestCase):
    """Exercise orphan / broken-link / empty-file detection."""

    def setUp(self):
        # Redirect the module to a throwaway directory.
        self.tmpdir = Path(tempfile.mkdtemp())
        self.saved_dir = mh.MEMORY_DIR
        mh.MEMORY_DIR = self.tmpdir

    def tearDown(self):
        mh.MEMORY_DIR = self.saved_dir
        shutil.rmtree(self.tmpdir)

    def test_finds_orphaned_files(self):
        (self.tmpdir / "orphan.md").write_text("Nobody links to me")
        (self.tmpdir / "main.md").write_text("# Main\nSome text.")
        report = mh.find_orphans()
        self.assertIn("orphan.md", report["orphaned_files"])

    def test_finds_broken_links(self):
        # A markdown link to a file that does not exist.
        (self.tmpdir / "a.md").write_text("[click](nonexistent.md)")
        report = mh.find_orphans()
        self.assertGreater(len(report["broken_links"]), 0)

    def test_finds_empty_files(self):
        (self.tmpdir / "empty.md").write_text("")
        report = mh.find_orphans()
        self.assertIn("empty.md", report["empty_files"])

    def test_permanent_files_not_orphaned(self):
        # Well-known permanent files are exempt from orphan reporting.
        (self.tmpdir / "WORKING.md").write_text("Current work")
        report = mh.find_orphans()
        self.assertNotIn("WORKING.md", report["orphaned_files"])


class TestMemoryStats(unittest.TestCase):
    """Exercise statistics gathering over the memory directory."""

    def setUp(self):
        # Redirect the module to a throwaway directory.
        self.tmpdir = Path(tempfile.mkdtemp())
        self.saved_dir = mh.MEMORY_DIR
        mh.MEMORY_DIR = self.tmpdir

    def tearDown(self):
        mh.MEMORY_DIR = self.saved_dir
        shutil.rmtree(self.tmpdir)

    def test_stats_structure(self):
        (self.tmpdir / "a.md").write_text("Hello world")
        (self.tmpdir / "b.json").write_text("{}")
        stats = mh.gather_stats()
        self.assertEqual(stats["total_files"], 2)
        for ext in (".md", ".json"):
            self.assertIn(ext, stats["files_by_extension"])
        self.assertIsNotNone(stats["oldest"])
        self.assertIsNotNone(stats["newest"])

    def test_empty_dir(self):
        # An empty memory dir must still produce a well-formed report.
        self.assertEqual(mh.gather_stats()["total_files"], 0)


class TestMemoryArchive(unittest.TestCase):
    """Test archive functionality for date-named notes.

    The original tests hard-coded the filename "2025-01-01.md" against
    ``older_than_days=30``, which made them time-dependent: they could only
    pass once the wall clock was more than 30 days past 2025-01-01, and
    would break again if the cutoff logic or clock changed. The date is now
    computed relative to "today" so the tests are stable whenever they run.
    """

    def setUp(self):
        # Redirect both directories the module uses to a throwaway tree.
        self.tmpdir = Path(tempfile.mkdtemp())
        self.orig_dir = mh.MEMORY_DIR
        self.orig_archive = mh.ARCHIVE_DIR
        mh.MEMORY_DIR = self.tmpdir
        mh.ARCHIVE_DIR = self.tmpdir / "archive"
        # A date-named note guaranteed to be older than the 30-day cutoff
        # used below, regardless of the current date.
        self.old_date = (datetime.now() - timedelta(days=90)).strftime("%Y-%m-%d")
        self.old_name = f"{self.old_date}.md"

    def tearDown(self):
        mh.MEMORY_DIR = self.orig_dir
        mh.ARCHIVE_DIR = self.orig_archive
        shutil.rmtree(self.tmpdir)

    def test_dry_run_doesnt_move(self):
        (self.tmpdir / self.old_name).write_text("Old note")
        result = mh.archive_old_notes(older_than_days=30, execute=False)
        self.assertTrue(result["dry_run"])
        self.assertEqual(result["count"], 1)
        # Dry run must leave the file in place.
        self.assertTrue((self.tmpdir / self.old_name).exists())

    def test_execute_moves_files(self):
        (self.tmpdir / self.old_name).write_text("Old note")
        mh.archive_old_notes(older_than_days=30, execute=True)
        self.assertFalse((self.tmpdir / self.old_name).exists())
        # Files are archived under a per-year subfolder taken from the
        # filename's date prefix.
        year = self.old_date[:4]
        self.assertTrue((self.tmpdir / "archive" / year / self.old_name).exists())

    def test_permanent_files_kept(self):
        (self.tmpdir / "WORKING.md").write_text("Keep me")
        mh.archive_old_notes(older_than_days=1, execute=True)
        # Permanent files are never archived, even with an aggressive cutoff.
        self.assertTrue((self.tmpdir / "WORKING.md").exists())


# Allow running this file directly: `python test_file.py`.
if __name__ == "__main__":
    unittest.main()