Extend hook suppression checks

2025-11-22 05:10:58 +00:00
parent 7c07018831
commit 79346ea083
2 changed files with 48 additions and 24 deletions

@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
 [project]
 name = "claude-scripts"
-version = "0.1.2"
+version = "0.1.3"
 description = "A comprehensive Python code quality analysis toolkit for detecting duplicates, complexity metrics, and modernization opportunities"
 authors = [{name = "Travis Vasceannie", email = "travis.vas@gmail.com"}]
 readme = "README.md"

@@ -1255,35 +1255,54 @@ def _detect_any_usage(content: str) -> list[str]:
     ]
-def _detect_type_ignore_usage(content: str) -> list[str]:
-    """Detect forbidden # type: ignore usage in proposed content."""
+def _detect_suppression_comments(content: str) -> list[str]:
+    """Detect forbidden suppression directives (type ignore, noqa, pyright)."""
-    pattern = re.compile(r"#\s*type:\s*ignore(?:\b|\[)", re.IGNORECASE)
-    lines_with_type_ignore: set[int] = set()
+    suppression_patterns: dict[str, re.Pattern[str]] = {
+        "type: ignore": re.compile(r"#\s*type:\s*ignore(?:\b|\[)", re.IGNORECASE),
+        "pyright: ignore": re.compile(
+            r"#\s*pyright:\s*ignore(?:\b|\[)?",
+            re.IGNORECASE,
+        ),
+        "pyright report disable": re.compile(
+            r"#\s*pyright:\s*report[A-Za-z0-9_]+\s*=\s*ignore",
+            re.IGNORECASE,
+        ),
+        "noqa": re.compile(r"#\s*noqa\b(?::[A-Z0-9 ,_-]+)?", re.IGNORECASE),
+    }
+    lines_by_rule: dict[str, set[int]] = {name: set() for name in suppression_patterns}
     try:
         for token_type, token_string, start, _, _ in tokenize.generate_tokens(
             StringIO(content).readline,
         ):
-            if token_type == tokenize.COMMENT and pattern.search(token_string):
-                lines_with_type_ignore.add(start[0])
+            if token_type != tokenize.COMMENT:
+                continue
+            for name, pattern in suppression_patterns.items():
+                if pattern.search(token_string):
+                    lines_by_rule[name].add(start[0])
     except tokenize.TokenError:
         for index, line in enumerate(content.splitlines(), start=1):
-            if pattern.search(line):
-                lines_with_type_ignore.add(index)
+            for name, pattern in suppression_patterns.items():
+                if pattern.search(line):
+                    lines_by_rule[name].add(index)
-    if not lines_with_type_ignore:
-        return []
+    issues: list[str] = []
+    for name, lines in lines_by_rule.items():
+        if not lines:
+            continue
+        sorted_lines = sorted(lines)
+        display_lines = ", ".join(str(num) for num in sorted_lines[:5])
+        if len(sorted_lines) > 5:
+            display_lines += ", …"
-    sorted_lines = sorted(lines_with_type_ignore)
-    display_lines = ", ".join(str(num) for num in sorted_lines[:5])
-    if len(sorted_lines) > 5:
-        display_lines += ", …"
+        guidance = "remove the suppression and address the underlying issue"
+        issues.append(
+            f"⚠️ Forbidden {name} directive at line(s) {display_lines}; {guidance}",
+        )
-    return [
-        "⚠️ Forbidden # type: ignore usage at line(s) "
-        f"{display_lines}; remove the suppression and fix typing issues instead"
-    ]
+    return issues
 
 def _detect_old_typing_patterns(content: str) -> list[str]:
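For context (not part of the diff), a standalone sketch of how the new rules classify a few suppression comments. The patterns are copied from the hunk above; the sample lines are hypothetical, and the sketch uses the simpler line scan from the fallback path rather than tokenizing.

# Standalone sketch: patterns copied from the diff; sample lines are hypothetical.
import re

suppression_patterns: dict[str, re.Pattern[str]] = {
    "type: ignore": re.compile(r"#\s*type:\s*ignore(?:\b|\[)", re.IGNORECASE),
    "pyright: ignore": re.compile(r"#\s*pyright:\s*ignore(?:\b|\[)?", re.IGNORECASE),
    "pyright report disable": re.compile(
        r"#\s*pyright:\s*report[A-Za-z0-9_]+\s*=\s*ignore", re.IGNORECASE
    ),
    "noqa": re.compile(r"#\s*noqa\b(?::[A-Z0-9 ,_-]+)?", re.IGNORECASE),
}

sample = "\n".join([
    "x = f()  # type: ignore[assignment]",
    "y = g()  # pyright: ignore[reportArgumentType]",
    "import os, sys  # noqa: E401",
    "# pyright: reportMissingImports=ignore",
])

# Line-based scan (the hook's fallback when tokenization fails).
for name, pattern in suppression_patterns.items():
    hits = [i for i, line in enumerate(sample.splitlines(), start=1) if pattern.search(line)]
    print(f"{name}: line(s) {hits}")
# Expected: each rule reports exactly one of the four sample lines.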
@@ -1616,12 +1635,17 @@ def pretooluse_hook(hook_data: JsonObject, config: QualityConfig) -> JsonObject:
     enable_type_checks = tool_name == "Write"
 
-    # Always run core quality checks (Any, type: ignore, old typing, duplicates) regardless of skip patterns
+    # Always run core quality checks (Any, suppression directives, old typing, duplicates) regardless of skip patterns
     any_usage_issues = _detect_any_usage(content)
-    type_ignore_issues = _detect_type_ignore_usage(content)
+    suppression_issues = _detect_suppression_comments(content)
     old_typing_issues = _detect_old_typing_patterns(content)
     suffix_duplication_issues = _detect_suffix_duplication(file_path, content)
-    precheck_issues = any_usage_issues + type_ignore_issues + old_typing_issues + suffix_duplication_issues
+    precheck_issues = (
+        any_usage_issues
+        + suppression_issues
+        + old_typing_issues
+        + suffix_duplication_issues
+    )
 
     # Run test quality checks if enabled and file is a test file
     if run_test_checks:
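A minimal illustration of the aggregation above, using hypothetical detector outputs; only the suppression message mirrors the format introduced in this commit, the other strings are placeholders.

# Hypothetical detector results; in the hook these come from the _detect_* helpers.
any_usage_issues: list[str] = ["<Any-usage issue>"]
suppression_issues: list[str] = [
    "⚠️ Forbidden noqa directive at line(s) 7; "
    "remove the suppression and address the underlying issue",
]
old_typing_issues: list[str] = []
suffix_duplication_issues: list[str] = []

# Same concatenation as in pretooluse_hook: fixed order, empty lists drop out.
precheck_issues = (
    any_usage_issues
    + suppression_issues
    + old_typing_issues
    + suffix_duplication_issues
)
assert precheck_issues == ["<Any-usage issue>", suppression_issues[0]]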
@@ -1717,9 +1741,9 @@ def posttooluse_hook(
     # Run full file quality checks on the entire content
     if file_content:
         any_usage_issues = _detect_any_usage(file_content)
-        type_ignore_issues = _detect_type_ignore_usage(file_content)
+        suppression_issues = _detect_suppression_comments(file_content)
         issues.extend(any_usage_issues)
-        issues.extend(type_ignore_issues)
+        issues.extend(suppression_issues)
 
     # Check state changes if tracking enabled
     if config.state_tracking_enabled: