"""Integration tests for the complete hook system."""
|
|
|
|
import json
|
|
import os
|
|
import tempfile
|
|
from pathlib import Path
|
|
from unittest.mock import patch
|
|
|
|
import pytest
|
|
|
|
|
|
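# These tests drive code_quality_guard.main() end to end: hook input is
# injected by patching json.load (main() reads JSON from stdin), and the
# hook's JSON response is captured from the patched print/stdout. The
# response shapes follow the Claude Code hook protocol:
# hookSpecificOutput.permissionDecision for PreToolUse, decision/reason
# for PostToolUse, and exit code 2 for a blocking result.
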

class TestHookIntegration:
    """Test complete hook integration scenarios."""

    def test_main_entry_pretooluse(self):
        """Ensure main dispatches to PreToolUse."""
        from code_quality_guard import main

        hook_input = {
            "tool_name": "Write",
            "tool_input": {
                "file_path": "test.py",
                "content": "def test(): pass",
            },
        }

        with patch("sys.stdin") as mock_stdin, patch("builtins.print"):
            # Stub both read() and line iteration so main() can consume
            # stdin either way.
            mock_stdin.read.return_value = json.dumps(hook_input)
            mock_stdin.__iter__.return_value = [json.dumps(hook_input)]

            with patch("json.load", return_value=hook_input), patch(
                "code_quality_guard.pretooluse_hook",
                return_value={
                    "hookSpecificOutput": {
                        "hookEventName": "PreToolUse",
                        "permissionDecision": "allow",
                    },
                },
            ) as mock_pre:
                main()
                mock_pre.assert_called_once()

    def test_main_entry_posttooluse(self):
        """Ensure main dispatches to PostToolUse."""
        from code_quality_guard import main

        hook_input = {
            "tool_name": "Write",
            "tool_output": {
                "file_path": "test.py",
                "status": "success",
            },
        }

        with patch("sys.stdin") as mock_stdin, patch("builtins.print"):
            mock_stdin.read.return_value = json.dumps(hook_input)
            mock_stdin.__iter__.return_value = [json.dumps(hook_input)]

            with patch("json.load", return_value=hook_input), patch(
                "code_quality_guard.posttooluse_hook",
                return_value={
                    "hookSpecificOutput": {
                        "hookEventName": "PostToolUse",
                    },
                    "decision": "approve",
                },
            ) as mock_post:
                main()
                mock_post.assert_called_once()

    def test_main_invalid_json(self):
        """Invalid JSON falls back to allow."""
        from code_quality_guard import main

        with patch("sys.stdin"), patch("builtins.print") as mock_print, patch(
            "sys.stdout.write",
        ) as mock_write:
            with patch(
                "json.load",
                side_effect=json.JSONDecodeError("test", "test", 0),
            ):
                main()

            # The fallback response may be emitted via print() or written
            # directly to stdout; accept either path.
            printed = (
                mock_print.call_args[0][0]
                if mock_print.call_args
                else mock_write.call_args[0][0]
            )
            response = json.loads(printed)
            assert response["hookSpecificOutput"]["permissionDecision"] == "allow"

    def test_full_flow_clean_code(self, clean_code):
        """Clean code should pass both hook stages."""
        from code_quality_guard import main

        # Stage 1: PreToolUse should allow the clean content.
        pre_input = {
            "tool_name": "Write",
            "tool_input": {
                "file_path": f"{tempfile.gettempdir()}/clean.py",
                "content": clean_code,
            },
        }

        with patch("sys.stdin"), patch("builtins.print") as mock_print:
            with patch("json.load", return_value=pre_input), patch(
                "code_quality_guard.analyze_code_quality",
                return_value={},
            ):
                main()

        response = json.loads(mock_print.call_args[0][0])
        assert response["hookSpecificOutput"]["permissionDecision"] == "allow"

        # Stage 2: PostToolUse on the written file should approve and,
        # with QUALITY_SHOW_SUCCESS set, report that checks passed.
        test_file = Path(f"{tempfile.gettempdir()}/clean.py")
        test_file.write_text(clean_code)

        post_input = {
            "tool_name": "Write",
            "tool_output": {
                "file_path": str(test_file),
                "status": "success",
            },
        }

        os.environ["QUALITY_SHOW_SUCCESS"] = "true"
        try:
            with patch("sys.stdin"), patch("builtins.print") as mock_print:
                with patch("json.load", return_value=post_input):
                    main()

            response = json.loads(mock_print.call_args[0][0])
            assert response.get("decision") == "approve"
            assert "passed" in response.get("systemMessage", "").lower()
        finally:
            os.environ.pop("QUALITY_SHOW_SUCCESS", None)
            test_file.unlink(missing_ok=True)

    def test_environment_configuration_flow(self):
        """Environment settings change enforcement."""
        from code_quality_guard import main

        env_overrides = {
            "QUALITY_ENFORCEMENT": "strict",
            "QUALITY_COMPLEXITY_THRESHOLD": "5",
            "QUALITY_DUP_ENABLED": "false",
            "QUALITY_COMPLEXITY_ENABLED": "true",
            "QUALITY_MODERN_ENABLED": "false",
        }
        os.environ.update(env_overrides)

        complex_code = """
def complex_func(a, b, c):
    if a:
        if b:
            if c:
                return 1
            else:
                return 2
        else:
            return 3
    else:
        return 4
"""

        hook_input = {
            "tool_name": "Write",
            "tool_input": {
                "file_path": "complex.py",
                "content": complex_code,
            },
        }

        try:
            with patch("sys.stdin"), patch("builtins.print") as mock_print:
                with patch("json.load", return_value=hook_input), patch(
                    "code_quality_guard.analyze_code_quality",
                    return_value={
                        "complexity": {
                            "summary": {"average_cyclomatic_complexity": 8},
                            "distribution": {"High": 1},
                        },
                    },
                ):
                    # Strict mode with a threshold of 5 must deny: the
                    # mocked analysis reports an average complexity of 8.
                    with pytest.raises(SystemExit) as exc_info:
                        main()
                    assert exc_info.value.code == 2

            response = json.loads(mock_print.call_args[0][0])
            assert (
                response["hookSpecificOutput"]["permissionDecision"]
                == "deny"
            )
        finally:
            for key in env_overrides:
                os.environ.pop(key, None)

    def test_skip_patterns_integration(self):
        """Skip patterns should bypass checks."""
        from code_quality_guard import main

        # test_*.py files are expected to match a skip pattern, so even
        # unparseable content should be allowed through.
        hook_input = {
            "tool_name": "Write",
            "tool_input": {
                "file_path": "test_something.py",
                "content": "bad code with issues",
            },
        }

        with patch("sys.stdin"), patch("builtins.print") as mock_print:
            with patch("json.load", return_value=hook_input):
                main()

        response = json.loads(mock_print.call_args[0][0])
        assert response["hookSpecificOutput"]["permissionDecision"] == "allow"

    def test_state_tracking_flow(self, temp_python_file):
        """State tracking should flag regressions."""
        from code_quality_guard import main

        os.environ["QUALITY_STATE_TRACKING"] = "true"
        try:
            pre_input = {
                "tool_name": "Write",
                "tool_input": {
                    "file_path": str(temp_python_file),
                    "content": (
                        "def func1(): pass\n"
                        "def func2(): pass\n"
                        "def func3(): pass"
                    ),
                },
            }

            with patch("sys.stdin"), patch("builtins.print") as mock_print:
                with patch("json.load", return_value=pre_input), patch(
                    "code_quality_guard.analyze_code_quality",
                    return_value={},
                ):
                    main()

            # Rewrite the file with func2 and func3 removed: the recorded
            # state should register this as a regression.
            temp_python_file.write_text("def func1(): pass")

            post_input = {
                "tool_name": "Write",
                "tool_output": {
                    "file_path": str(temp_python_file),
                    "status": "success",
                },
            }

            with patch("sys.stdin"), patch("builtins.print") as mock_print:
                with patch("json.load", return_value=post_input):
                    main()

            response = json.loads(mock_print.call_args[0][0])
            assert response["decision"] == "block"
            assert "reduced" in response["reason"].lower()
        finally:
            os.environ.pop("QUALITY_STATE_TRACKING", None)

    def test_cross_tool_handling(self):
        """Supported tools should respond with allow."""
        from code_quality_guard import main

        tools = ["Write", "Edit", "MultiEdit", "Read", "Bash", "Task"]

        for tool in tools:
            if tool in {"Write", "Edit", "MultiEdit"}:
                hook_input = {
                    "tool_name": tool,
                    "tool_input": {
                        "file_path": "test.py",
                        "content": "def test(): pass",
                    },
                }
            else:
                hook_input = {"tool_name": tool, "tool_input": {}}

            with patch("sys.stdin"), patch("builtins.print") as mock_print:
                with patch("json.load", return_value=hook_input), patch(
                    "code_quality_guard.analyze_code_quality",
                    return_value={},
                ):
                    main()

            response = json.loads(mock_print.call_args[0][0])
            assert response["hookSpecificOutput"]["permissionDecision"] == "allow"

    def test_enforcement_mode_progression(self, complex_code):
        """Strict/warn/permissive modes map to deny/ask/allow."""
        from code_quality_guard import main

        hook_input = {
            "tool_name": "Write",
            "tool_input": {
                "file_path": "complex.py",
                "content": complex_code,
            },
        }

        scenarios = [
            ("strict", "deny"),
            ("warn", "ask"),
            ("permissive", "allow"),
        ]

        for mode, expected in scenarios:
            os.environ["QUALITY_ENFORCEMENT"] = mode
            os.environ["QUALITY_COMPLEXITY_THRESHOLD"] = "10"
            try:
                with patch("sys.stdin"), patch("builtins.print") as mock_print:
                    with patch("json.load", return_value=hook_input), patch(
                        "code_quality_guard.analyze_code_quality",
                        return_value={
                            "complexity": {
                                "summary": {"average_cyclomatic_complexity": 25},
                                "distribution": {"High": 1},
                            },
                        },
                    ):
                        # Both deny and ask are delivered as blocking
                        # results, so main() exits with code 2; allow
                        # returns normally.
                        if expected in {"deny", "ask"}:
                            with pytest.raises(SystemExit) as exc_info:
                                main()
                            assert exc_info.value.code == 2
                        else:
                            main()

                response = json.loads(mock_print.call_args[0][0])
                assert (
                    response["hookSpecificOutput"]["permissionDecision"]
                    == expected
                )
            finally:
                os.environ.pop("QUALITY_ENFORCEMENT", None)
                os.environ.pop("QUALITY_COMPLEXITY_THRESHOLD", None)
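

# To run these tests: `pytest -q` from the directory containing this file.
# The clean_code, complex_code, and temp_python_file fixtures are not
# defined in this module, so they are presumably supplied by a
# conftest.py (or a shared fixtures plugin) alongside it.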