Refactor: move hooks to quality package

- Move Claude Code hooks under src/quality/hooks (rename modules)
- Add a project-local installer for Claude Code hooks
- Introduce internal_duplicate_detector and code_quality_guard
- Update tests to reference new module paths and guard API
- Bump package version to 0.1.1 and adjust packaging
2025-10-26 22:15:04 +00:00
parent 812378c0e1
commit 4ac9b1c5e1
40 changed files with 3908 additions and 1004 deletions
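For orientation, the relocated modules resolve to import paths like the following (a sketch inferred from the diffs below, not part of the commit; the `internal_duplicate_detector` path is assumed from the commit message):

```python
# Sketch only; paths inferred from the diffs in this commit.
from quality.hooks import Guards, code_quality_guard  # hook package re-exports
from quality.hooks.install import install, main       # project-local installer (also a console script)

# Assumed location for the new detector, based on the commit message bullet.
from quality.hooks.internal_duplicate_detector import detect_internal_duplicates
```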

View File

@@ -51,6 +51,12 @@ claude-quality full-analysis src/ --output report.json
# Create exceptions template
claude-quality create-exceptions-template --output-path .quality-exceptions.yaml
# Install Claude Code hook for this repo
python -m quality.hooks.install --project . --create-alias
# Or via the CLI entry-point
claude-quality-hook-install --project . --create-alias
```
## Architecture Overview
@@ -411,4 +417,4 @@ def fake_client() -> object:
- ✅ No duplicate logic; helpers or composition extracted.
- ✅ Tests use pytest, fixtures in conftest.py, and parameterization/marks.
- ✅ Tests avoid importing from tests and avoid control flow that reduces clarity; use parametrization instead (see the sketch after this checklist).
- ✅ Third-party Any is contained via allowed strategies.
- ✅ Third-party Any is contained via allowed strategies.
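A minimal illustration of the parametrization convention above (hypothetical test, not from this repository):

```python
import pytest


@pytest.mark.parametrize(
    ("raw", "expected"),
    [("foo", "FOO"), ("Bar", "BAR"), ("", "")],
)
def test_upper(raw: str, expected: str) -> None:
    # Parametrized cases replace loops or if/else branches inside the test body.
    assert raw.upper() == expected
```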

View File

@@ -13,10 +13,14 @@ A comprehensive Python code quality analysis toolkit for detecting duplicates, c
## Installation
The package is published to a private Gitea mirror, which is the default install source. Install it via:
```bash
pip install claude-scripts
pip install --index-url https://git.sidepiece.rip/api/packages/vasceannie/pypi claude-scripts==0.1.1
```
If you need a PyPI fallback, append `--extra-index-url https://pypi.org/simple`.
## Usage
### Command Line Interface
@@ -43,6 +47,19 @@ claude-quality modernization src/ --include-type-hints --format console
claude-quality full-analysis src/ --format json --output report.json
```
### Install Claude Code Hook
After installing the package, configure the Claude Code quality hook for your project:
```bash
python -m quality.hooks.install --project . --create-alias
# Or via the packaged CLI entry-point
claude-quality-hook-install --project . --create-alias
```
This command writes `.claude/settings.json`, adds a helper script at `.claude/configure-quality.sh`, and registers the hook with Claude Code using `python3 -m quality.hooks.cli`.
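The same behaviour is available programmatically through the installer module, which can be handy in bootstrap scripts; a minimal sketch based on the `install()` API added in this commit:

```python
from pathlib import Path

from quality.hooks.install import install

# Writes .claude/settings.json, configure-quality.sh, and a README into the project,
# merging with any existing settings; the shell alias step is skipped here.
result = install(Path("."), create_alias=False)
print(result.settings_path, result.added_events)
```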
### Configuration
Create a configuration file to customize analysis parameters:

View File

@@ -1 +0,0 @@
cli.py

View File

@@ -4,7 +4,7 @@ build-backend = "hatchling.build"
[project]
name = "claude-scripts"
version = "0.1.0"
version = "0.1.1"
description = "A comprehensive Python code quality analysis toolkit for detecting duplicates, complexity metrics, and modernization opportunities"
authors = [{name = "Your Name", email = "your.email@example.com"}]
readme = "README.md"
@@ -51,6 +51,7 @@ Documentation = "https://github.com/yourusername/claude-scripts#readme"
[project.scripts]
claude-quality = "quality.cli.main:cli"
claude-quality-hook-install = "quality.hooks.install:main"
[tool.hatch.build.targets.sdist]
exclude = [
@@ -64,6 +65,10 @@ exclude = [
[tool.hatch.build.targets.wheel]
packages = ["src/quality"]
include = [
"src/quality/hooks/claude-code-settings.json",
"src/quality/hooks/logs/status_line.json",
]
[tool.ruff]
target-version = "py312"
@@ -120,7 +125,7 @@ minversion = "7.0"
addopts = [
"-ra",
"--strict-markers",
"--cov=code_quality_guard",
"--cov=quality.hooks.code_quality_guard",
"--cov-branch",
"--cov-report=term-missing:skip-covered",
"--cov-report=html",
@@ -162,4 +167,5 @@ dev = [
"pyrefly>=0.2.0",
"pytest>=8.4.2",
"mypy>=1.18.1",
"twine>=6.2.0",
]

View File

@@ -1,325 +1,110 @@
#!/bin/bash
# Setup script to make the code quality hook globally accessible from ~/repos projects
# This script creates a global Claude Code configuration that references the hook
# Setup script to install the Claude Code quality hooks as a project-local
# configuration inside .claude/ without mutating any global Claude settings.
set -e
set -euo pipefail
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
HOOK_DIR="$SCRIPT_DIR/hooks"
HOOK_SCRIPT="$HOOK_DIR/code_quality_guard.py"
GLOBAL_CONFIG_DIR="$HOME/.claude"
GLOBAL_CONFIG_FILE="$GLOBAL_CONFIG_DIR/claude-code-settings.json"
# Colors for output
# Colors for formatted output
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color
echo -e "${YELLOW}Setting up global Claude Code quality hook...${NC}"
echo -e "${YELLOW}Configuring project-local Claude Code quality hook...${NC}"
# Check if hook script exists
if [ ! -f "$HOOK_SCRIPT" ]; then
echo -e "${RED}Error: Hook script not found at $HOOK_SCRIPT${NC}"
exit 1
fi
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
PROJECT_DIR="$SCRIPT_DIR"
DEFAULT_MIRROR="https://git.sidepiece.rip/api/packages/vasceannie/pypi/simple"
CLAUDE_SCRIPTS_VERSION="${CLAUDE_SCRIPTS_VERSION:-0.1.1}"
CLAUDE_SCRIPTS_PYPI_INDEX="${CLAUDE_SCRIPTS_PYPI_INDEX:-$DEFAULT_MIRROR}"
CLAUDE_SCRIPTS_EXTRA_INDEX_URL="${CLAUDE_SCRIPTS_EXTRA_INDEX_URL:-}"
# Create Claude config directory if it doesn't exist
if [ ! -d "$GLOBAL_CONFIG_DIR" ]; then
echo "Creating Claude configuration directory at $GLOBAL_CONFIG_DIR"
mkdir -p "$GLOBAL_CONFIG_DIR"
fi
# Backup existing global config if it exists
if [ -f "$GLOBAL_CONFIG_FILE" ]; then
BACKUP_FILE="${GLOBAL_CONFIG_FILE}.backup.$(date +%Y%m%d_%H%M%S)"
echo "Backing up existing configuration to $BACKUP_FILE"
cp "$GLOBAL_CONFIG_FILE" "$BACKUP_FILE"
fi
# Create the global configuration
cat > "$GLOBAL_CONFIG_FILE" << EOF
{
"hooks": {
"PreToolUse": [
{
"matcher": "Write|Edit|MultiEdit",
"hooks": [
{
"type": "command",
"command": "cd $HOOK_DIR && python code_quality_guard.py"
}
]
}
],
"PostToolUse": [
{
"matcher": "Write|Edit|MultiEdit",
"hooks": [
{
"type": "command",
"command": "cd $HOOK_DIR && python code_quality_guard.py"
}
]
}
]
}
}
EOF
echo -e "${GREEN}✓ Global Claude Code configuration created at $GLOBAL_CONFIG_FILE${NC}"
# Create a convenience script to configure quality settings
QUALITY_CONFIG_SCRIPT="$HOME/.claude/configure-quality.sh"
cat > "$QUALITY_CONFIG_SCRIPT" << 'EOF'
#!/bin/bash
# Convenience script to configure code quality hook settings
# Usage: source ~/.claude/configure-quality.sh [preset]
case "${1:-default}" in
strict)
export QUALITY_ENFORCEMENT="strict"
export QUALITY_COMPLEXITY_THRESHOLD="10"
export QUALITY_DUP_THRESHOLD="0.7"
export QUALITY_DUP_ENABLED="true"
export QUALITY_COMPLEXITY_ENABLED="true"
export QUALITY_MODERN_ENABLED="true"
export QUALITY_TYPE_HINTS="true"
echo "✓ Strict quality mode enabled"
;;
moderate)
export QUALITY_ENFORCEMENT="warn"
export QUALITY_COMPLEXITY_THRESHOLD="15"
export QUALITY_DUP_THRESHOLD="0.8"
export QUALITY_DUP_ENABLED="true"
export QUALITY_COMPLEXITY_ENABLED="true"
export QUALITY_MODERN_ENABLED="true"
export QUALITY_TYPE_HINTS="false"
echo "✓ Moderate quality mode enabled"
;;
permissive)
export QUALITY_ENFORCEMENT="permissive"
export QUALITY_COMPLEXITY_THRESHOLD="20"
export QUALITY_DUP_THRESHOLD="0.9"
export QUALITY_DUP_ENABLED="true"
export QUALITY_COMPLEXITY_ENABLED="true"
export QUALITY_MODERN_ENABLED="false"
export QUALITY_TYPE_HINTS="false"
echo "✓ Permissive quality mode enabled"
;;
disabled)
export QUALITY_ENFORCEMENT="permissive"
export QUALITY_DUP_ENABLED="false"
export QUALITY_COMPLEXITY_ENABLED="false"
export QUALITY_MODERN_ENABLED="false"
echo "✓ Quality checks disabled"
;;
custom)
echo "Configure custom quality settings:"
read -p "Enforcement mode (strict/warn/permissive): " QUALITY_ENFORCEMENT
read -p "Complexity threshold (10-30): " QUALITY_COMPLEXITY_THRESHOLD
read -p "Duplicate threshold (0.5-1.0): " QUALITY_DUP_THRESHOLD
read -p "Enable duplicate detection? (true/false): " QUALITY_DUP_ENABLED
read -p "Enable complexity checks? (true/false): " QUALITY_COMPLEXITY_ENABLED
read -p "Enable modernization checks? (true/false): " QUALITY_MODERN_ENABLED
read -p "Require type hints? (true/false): " QUALITY_TYPE_HINTS
export QUALITY_ENFORCEMENT
export QUALITY_COMPLEXITY_THRESHOLD
export QUALITY_DUP_THRESHOLD
export QUALITY_DUP_ENABLED
export QUALITY_COMPLEXITY_ENABLED
export QUALITY_MODERN_ENABLED
export QUALITY_TYPE_HINTS
echo "✓ Custom quality settings configured"
;;
status)
echo "Current quality settings:"
echo " QUALITY_ENFORCEMENT: ${QUALITY_ENFORCEMENT:-strict}"
echo " QUALITY_COMPLEXITY_THRESHOLD: ${QUALITY_COMPLEXITY_THRESHOLD:-10}"
echo " QUALITY_DUP_THRESHOLD: ${QUALITY_DUP_THRESHOLD:-0.7}"
echo " QUALITY_DUP_ENABLED: ${QUALITY_DUP_ENABLED:-true}"
echo " QUALITY_COMPLEXITY_ENABLED: ${QUALITY_COMPLEXITY_ENABLED:-true}"
echo " QUALITY_MODERN_ENABLED: ${QUALITY_MODERN_ENABLED:-true}"
echo " QUALITY_TYPE_HINTS: ${QUALITY_TYPE_HINTS:-false}"
install_claude_scripts_if_missing() {
if command -v claude-quality >/dev/null 2>&1; then
return 0
;;
*)
# Default settings
export QUALITY_ENFORCEMENT="strict"
export QUALITY_COMPLEXITY_THRESHOLD="10"
export QUALITY_DUP_THRESHOLD="0.7"
export QUALITY_DUP_ENABLED="true"
export QUALITY_COMPLEXITY_ENABLED="true"
export QUALITY_MODERN_ENABLED="true"
export QUALITY_TYPE_HINTS="false"
echo "✓ Default quality settings applied"
echo ""
echo "Available presets:"
echo " strict - Strict quality enforcement (default)"
echo " moderate - Moderate quality checks with warnings"
echo " permissive - Permissive mode with suggestions"
echo " disabled - Disable all quality checks"
echo " custom - Configure custom settings"
echo " status - Show current settings"
echo ""
echo "Usage: source ~/.claude/configure-quality.sh [preset]"
;;
esac
# Enable post-tool features for better feedback
export QUALITY_STATE_TRACKING="true"
export QUALITY_CROSS_FILE_CHECK="true"
export QUALITY_VERIFY_NAMING="true"
export QUALITY_SHOW_SUCCESS="false" # Keep quiet unless there are issues
EOF
chmod +x "$QUALITY_CONFIG_SCRIPT"
echo -e "${GREEN}✓ Quality configuration script created at $QUALITY_CONFIG_SCRIPT${NC}"
# Add convenience alias to shell configuration
SHELL_RC=""
if [ -f "$HOME/.bashrc" ]; then
SHELL_RC="$HOME/.bashrc"
elif [ -f "$HOME/.zshrc" ]; then
SHELL_RC="$HOME/.zshrc"
fi
if [ -n "$SHELL_RC" ]; then
# Check if alias already exists
if ! grep -q "alias claude-quality" "$SHELL_RC" 2>/dev/null; then
echo "" >> "$SHELL_RC"
echo "# Claude Code quality configuration" >> "$SHELL_RC"
echo "alias claude-quality='source ~/.claude/configure-quality.sh'" >> "$SHELL_RC"
echo -e "${GREEN}✓ Added 'claude-quality' alias to $SHELL_RC${NC}"
fi
fi
# Test the hook installation
echo ""
echo -e "${YELLOW}Testing hook installation...${NC}"
cd "$HOOK_DIR"
TEST_OUTPUT=$(echo '{"tool_name":"Read","tool_input":{}}' | python code_quality_guard.py 2>&1)
if echo "$TEST_OUTPUT" | grep -q '"decision"'; then
echo -e "${GREEN}✓ Hook is working correctly${NC}"
else
echo -e "${RED}✗ Hook test failed. Output:${NC}"
echo "$TEST_OUTPUT"
echo -e "${YELLOW}claude-quality not found. Installing claude-scripts==${CLAUDE_SCRIPTS_VERSION} via ${CLAUDE_SCRIPTS_PYPI_INDEX}...${NC}"
if ! command -v python3 >/dev/null 2>&1; then
echo -e "${RED}Error: python3 is required to install claude-scripts${NC}"
return 1
fi
install_args=(python3 -m pip install --upgrade)
install_args+=(--index-url "$CLAUDE_SCRIPTS_PYPI_INDEX")
if [ -n "$CLAUDE_SCRIPTS_EXTRA_INDEX_URL" ]; then
install_args+=(--extra-index-url "$CLAUDE_SCRIPTS_EXTRA_INDEX_URL")
fi
install_args+=("claude-scripts==${CLAUDE_SCRIPTS_VERSION}")
if "${install_args[@]}"; then
if command -v claude-quality >/dev/null 2>&1; then
echo -e "${GREEN}✓ claude-quality installed successfully${NC}"
return 0
fi
echo -e "${RED}Error: claude-quality command still not found after installation${NC}"
return 1
fi
echo -e "${RED}Error: Failed to install claude-scripts from mirror${NC}"
return 1
}
install_claude_scripts_if_missing
HOOK_DIR="$(python3 - <<'PY'
from importlib import import_module
from pathlib import Path
try:
module = import_module("quality.hooks")
except ModuleNotFoundError:
raise SystemExit("")
print(Path(module.__file__).resolve().parent)
PY
)"
if [ -z "$HOOK_DIR" ]; then
echo -e "${RED}Error: Unable to locate quality.hooks package. Ensure claude-scripts is installed.${NC}"
exit 1
fi
# Create a README for the global setup
cat > "$GLOBAL_CONFIG_DIR/README_QUALITY_HOOK.md" << EOF
# Claude Code Quality Hook
HOOK_ENTRY="$HOOK_DIR/cli.py"
HOOK_TEMPLATE="$HOOK_DIR/claude-code-settings.json"
The code quality hook is now globally configured for all projects in ~/repos.
if [ ! -d "$HOOK_DIR" ]; then
echo -e "${RED}Error: Hook directory not found at $HOOK_DIR${NC}"
exit 1
fi
## Configuration
if [ ! -f "$HOOK_ENTRY" ]; then
echo -e "${RED}Error: Hook entry script not found at $HOOK_ENTRY${NC}"
exit 1
fi
The hook automatically runs on PreToolUse and PostToolUse events for Write, Edit, and MultiEdit operations.
### Quick Configuration
Use the \`claude-quality\` command to quickly configure quality settings:
\`\`\`bash
# Apply a preset
source ~/.claude/configure-quality.sh strict # Strict enforcement
source ~/.claude/configure-quality.sh moderate # Moderate with warnings
source ~/.claude/configure-quality.sh permissive # Permissive suggestions
source ~/.claude/configure-quality.sh disabled # Disable checks
# Or use the alias
claude-quality strict
# Check current settings
claude-quality status
\`\`\`
### Environment Variables
You can also set these environment variables directly:
- \`QUALITY_ENFORCEMENT\`: strict/warn/permissive
- \`QUALITY_COMPLEXITY_THRESHOLD\`: Maximum cyclomatic complexity (default: 10)
- \`QUALITY_DUP_THRESHOLD\`: Duplicate similarity threshold 0-1 (default: 0.7)
- \`QUALITY_DUP_ENABLED\`: Enable duplicate detection (default: true)
- \`QUALITY_COMPLEXITY_ENABLED\`: Enable complexity checks (default: true)
- \`QUALITY_MODERN_ENABLED\`: Enable modernization checks (default: true)
- \`QUALITY_TYPE_HINTS\`: Require type hints (default: false)
- \`QUALITY_STATE_TRACKING\`: Track file state changes (default: true)
- \`QUALITY_CROSS_FILE_CHECK\`: Check cross-file duplicates (default: true)
- \`QUALITY_VERIFY_NAMING\`: Verify PEP8 naming (default: true)
- \`QUALITY_SHOW_SUCCESS\`: Show success messages (default: false)
### Per-Project Configuration
To override settings for a specific project, add a \`.quality.env\` file to the project root:
\`\`\`bash
# .quality.env
QUALITY_ENFORCEMENT=moderate
QUALITY_COMPLEXITY_THRESHOLD=15
\`\`\`
Then source it: \`source .quality.env\`
## Features
### PreToolUse Checks
- Internal duplicate detection within files
- Cyclomatic complexity analysis
- Code modernization suggestions
- Type hint requirements
### PostToolUse Checks
- State tracking (detects quality degradation)
- Cross-file duplicate detection
- PEP8 naming convention verification
## Enforcement Modes
- **strict**: Blocks (deny) code that fails quality checks
- **warn**: Asks for confirmation (ask) on quality issues
- **permissive**: Allows code with warnings
## Troubleshooting
If the hook is not working:
1. Check that claude-quality binary is installed: \`which claude-quality\`
2. Verify Python environment: \`python --version\`
3. Test the hook directly: \`echo '{"tool_name":"Read","tool_input":{}}' | python $HOOK_DIR/code_quality_guard.py\`
4. Check logs: Claude Code may show hook errors in its output
## Uninstalling
To remove the global hook:
1. Delete or rename ~/.claude/claude-code-settings.json
2. Remove the claude-quality alias from your shell RC file
EOF
if [ ! -f "$HOOK_TEMPLATE" ]; then
echo -e "${RED}Error: Hook settings template not found at $HOOK_TEMPLATE${NC}"
exit 1
fi
echo ""
echo -e "${GREEN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${GREEN}✓ Global code quality hook successfully installed!${NC}"
echo -e "${GREEN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo ""
echo "The hook is now active for all Claude Code sessions in ~/repos projects."
echo -e "${YELLOW}Running Python installer to configure project-local hook...${NC}"
if python3 -m quality.hooks.install --project "$PROJECT_DIR" --create-alias; then
echo ""
echo -e "${GREEN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
echo -e "${GREEN}✓ Project-local code quality hook successfully installed!${NC}"
echo -e "${GREEN}━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━${NC}"
else
echo -e "${RED}✗ Project-local installer reported an error. See output above.${NC}"
exit 1
fi
echo ""
echo "Quick start:"
echo -e " ${YELLOW}claude-quality strict${NC} # Enable strict quality enforcement"
echo -e " ${YELLOW}claude-quality moderate${NC} # Use moderate settings"
echo -e " ${YELLOW}claude-quality status${NC} # Check current settings"
echo ""
echo "For more information, see: ~/.claude/README_QUALITY_HOOK.md"
echo ""
echo -e "${YELLOW}Note: Restart your shell or run 'source $SHELL_RC' to use the claude-quality alias${NC}"
echo -e " ${YELLOW}claude-quality status${NC} # Inspect current environment settings"

View File

@@ -22,6 +22,7 @@ Architecture:
- Analyzers: Supporting analysis tools (duplicates, types, etc.)
"""
from . import code_quality_guard
from .facade import Guards
__all__ = ["Guards"]
__all__ = ["Guards", "code_quality_guard"]

File diff suppressed because it is too large.

View File

@@ -1,11 +1,10 @@
"""Guard implementations for Claude Code hook validation."""
"""Guard implementations and utilities for Claude Code hooks."""
from .bash_guard import BashCommandGuard
from .file_protection_guard import FileProtectionGuard
from .quality_guard import CodeQualityGuard
from .utils import (
from .utils import ( # noqa: F401 - re-export for convenience
AnalysisResultsDict,
Path,
QualityConfig,
analyze_code_quality,
check_code_issues,
@@ -30,11 +29,9 @@ from .utils import (
run_type_checker_with_config,
should_skip_file,
store_pre_state,
subprocess,
verify_naming_conventions,
)
# Type alias for backward compatibility
AnalysisResults = AnalysisResultsDict
__all__ = [
@@ -43,7 +40,6 @@ __all__ = [
"BashCommandGuard",
"CodeQualityGuard",
"FileProtectionGuard",
"Path",
"QualityConfig",
"analyze_code_quality",
"check_code_issues",
@@ -68,6 +64,5 @@ __all__ = [
"run_type_checker_with_config",
"should_skip_file",
"store_pre_state",
"subprocess",
"verify_naming_conventions",
]

View File

@@ -8,9 +8,8 @@ and code style violations.
import ast
import re
import sys
from contextlib import suppress
from pathlib import Path
from typing import TypedDict, TypeGuard
from typing import TypeGuard
# Setup path for imports
sys.path.insert(0, str(Path(__file__).parent.parent))
@@ -18,33 +17,19 @@ sys.path.insert(0, str(Path(__file__).parent.parent.parent / "src"))
from models import HookResponse
class _AnyUsageDict(TypedDict, total=False):
"""Type definition for Any usage detection results."""
line: int
element: str
context: str
suggested: str
class _DuplicateDict(TypedDict, total=False):
"""Type definition for duplicate detection results."""
type: str
similarity: float
locations: list[object]
# Optionally import analyzer modules (graceful degradation if not available)
message_enrichment_module: object = None
type_inference_module: object = None
with suppress(ImportError):
try:
from analyzers import message_enrichment as message_enrichment_module
except ImportError:
pass
with suppress(ImportError):
try:
from analyzers import type_inference as type_inference_module
except ImportError:
pass
def _is_dict_str_obj(value: object) -> TypeGuard[dict[str, object]]:
@@ -52,47 +37,46 @@ def _is_dict_str_obj(value: object) -> TypeGuard[dict[str, object]]:
return isinstance(value, dict)
def _is_any_usage_dict(value: object) -> TypeGuard[_AnyUsageDict]:
"""Type guard for Any usage dict results."""
return isinstance(value, dict)
def _safe_dict_get(d: object, key: str) -> object | None:
"""Safely get a value from a dict, narrowing through isinstance checks."""
if isinstance(d, dict):
result = d.get(key)
if result is not None:
return result
return None
def _is_duplicate_dict(value: object) -> TypeGuard[_DuplicateDict]:
"""Type guard for duplicate detection dict results."""
return isinstance(value, dict)
def _safe_get_int(d: object, key: str, default: int = 0) -> int:
"""Safely get an int value from a dict."""
val = _safe_dict_get(d, key)
if isinstance(val, int):
return val
return default
def _safe_iter_list(value: object) -> list[object]:
"""Safely iterate over a list-like object and yield items as objects.
def _safe_get_str(d: object, key: str, default: str = "") -> str:
"""Safely get a str value from a dict."""
val = _safe_dict_get(d, key)
if isinstance(val, str):
return val
return default
Converts untyped list results from third-party modules into properly
typed list[object] by using Protocol-based iteration.
"""
# Don't narrow to list - work with object directly
len_method = getattr(value, "__len__", None)
iter_method = getattr(value, "__iter__", None)
if not (callable(len_method) and callable(iter_method)):
return []
def _safe_get_float(d: object, key: str, default: float = 0.0) -> float:
"""Safely get a float value from a dict."""
val = _safe_dict_get(d, key)
if isinstance(val, (int, float)):
return float(val)
return default
result: list[object] = []
try:
iterator_obj: object = iter_method()
# Get next method from iterator
next_method = getattr(iterator_obj, "__next__", None)
if not callable(next_method):
return []
while True:
try:
item: object = next_method()
result.append(item)
except StopIteration:
break
except (TypeError, AttributeError):
return []
return result
def _safe_get_list(d: object, key: str) -> list[object]:
"""Safely get a list value from a dict."""
val = _safe_dict_get(d, key)
if isinstance(val, list):
# Cast list[Unknown] to list[object] after isinstance narrows the type
return list(val)
return []
class CodeQualityGuard:
@@ -113,32 +97,32 @@ class CodeQualityGuard:
self.dup_engine: object = None
self.complexity_analyzer: object = None
with suppress(ImportError):
try:
from quality.detection.engine import DuplicateDetectionEngine
from quality.complexity.analyzer import ComplexityAnalyzer
from quality.config.schemas import QualityConfig
from quality.detection.engine import DuplicateDetectionEngine
config = QualityConfig()
self.dup_engine = DuplicateDetectionEngine(config)
self.complexity_analyzer = ComplexityAnalyzer(
config.complexity,
config,
config.complexity, config
)
except ImportError:
# Quality package not available, analyzers remain None
pass
@staticmethod
def pretooluse(hook_data: dict[str, object]) -> HookResponse:
def pretooluse(self, hook_data: dict[str, object]) -> HookResponse:
"""Handle PreToolUse hook for quality analysis.
Currently provides pass-through validation. Full analysis happens
in posttooluse after code is written.
Args:
hook_data: Hook input data (unused; provided for hook signature).
hook_data: Hook input data containing tool_name and tool_input.
Returns:
Hook response with permission decision (always allow pre-write).
"""
_ = hook_data
return {
"hookSpecificOutput": {
"hookEventName": "PreToolUse",
@@ -146,8 +130,7 @@ class CodeQualityGuard:
},
}
@staticmethod
def _extract_content(hook_data: dict[str, object]) -> str:
def _extract_content(self, hook_data: dict[str, object]) -> str:
"""Extract code content from hook data.
Checks tool_input.content first, then hook_data.content.
@@ -170,8 +153,7 @@ class CodeQualityGuard:
return ""
@staticmethod
def _check_any_usage(content: str) -> list[str]:
def _check_any_usage(self, content: str) -> list[str]:
"""Check for typing.Any usage without justification.
Args:
@@ -184,7 +166,7 @@ class CodeQualityGuard:
if type_inference_module is None:
return violations
with suppress(Exception):
try:
helper = getattr(type_inference_module, "TypeInferenceHelper", None)
if helper is None:
return violations
@@ -195,27 +177,30 @@ class CodeQualityGuard:
any_usages = find_method(content)
for usage_item in any_usages:
if not _is_any_usage_dict(usage_item):
if not isinstance(usage_item, dict):
continue
line_num = usage_item.get("line", 0)
element = usage_item.get("element", "unknown")
context = usage_item.get("context", "")
suggested = usage_item.get("suggested", "")
# Cast to the expected type after isinstance check
usage_dict = usage_item
line_num = _safe_get_int(usage_dict, "line", 0)
element = _safe_get_str(usage_dict, "element", "unknown")
context = _safe_get_str(usage_dict, "context", "")
suggested = _safe_get_str(usage_dict, "suggested", "")
msg = (
f"❌ Line {line_num}: Found `Any` type in {context}\n"
f" Element: {element}\n"
f" Suggested: {suggested}\n"
" Why: Using specific types prevents bugs and "
"improves IDE support"
f" Why: Using specific types prevents bugs and improves IDE support"
)
violations.append(msg)
except Exception: # noqa: BLE001
pass
return violations
@staticmethod
def _check_type_suppression(content: str) -> list[str]:
def _check_type_suppression(self, content: str) -> list[str]:
"""Check for type: ignore and # noqa suppression comments.
Args:
@@ -234,22 +219,19 @@ class CodeQualityGuard:
msg = (
f"🚫 Line {line_num}: Found `# type: ignore` suppression\n"
f" Code: {code}\n"
" Why: Type suppression hides real type errors and "
"prevents proper typing\n"
" Fix: Use proper type annotations or "
"TypeGuard/Protocol instead"
f" Why: Type suppression hides real type errors and prevents proper typing\n"
f" Fix: Use proper type annotations or TypeGuard/Protocol instead"
)
violations.append(msg)
# Check for noqa comments
# Check for # noqa comments
if re.search(r"#\s*noqa", line):
code = line.split("#")[0].strip()
msg = (
f"⚠️ Line {line_num}: Found `# noqa` linting suppression\n"
f" Code: {code}\n"
" Why: Suppressing linting hides code quality issues\n"
" Fix: Address the linting issue directly or document "
"why it's necessary"
f" Why: Suppressing linting hides code quality issues\n"
f" Fix: Address the linting issue directly or document why it's necessary"
)
violations.append(msg)
@@ -278,23 +260,19 @@ class CodeQualityGuard:
line_num = getattr(node, "lineno", 0)
msg = (
f"⚠️ Line {line_num}: High complexity in `{node.name}` "
f"(complexity: {complexity}, "
f"threshold: {self.COMPLEXITY_THRESHOLD})\n"
" Refactoring suggestions:\n"
"Extract nested conditions into separate functions\n"
"Use guard clauses to reduce nesting\n"
"Replace complex conditionals with "
"polymorphism/strategy pattern\n"
" • Break into smaller, focused functions\n"
" Why: Complex code is harder to understand, "
"test, and maintain"
f"(complexity: {complexity}, threshold: {self.COMPLEXITY_THRESHOLD})\n"
f" Refactoring suggestions:\n"
f" • Extract nested conditions into separate functions\n"
f"Use guard clauses to reduce nesting\n"
f"Replace complex conditionals with polymorphism/strategy pattern\n"
f"Break into smaller, focused functions\n"
f" Why: Complex code is harder to understand, test, and maintain"
)
violations.append(msg)
return violations
@staticmethod
def _calculate_complexity(node: ast.AST) -> int:
def _calculate_complexity(self, node: ast.AST) -> int:
"""Calculate cyclomatic complexity for a function.
Args:
@@ -314,63 +292,6 @@ class CodeQualityGuard:
complexity += len(child.values) - 1
return complexity
@staticmethod
def _format_duplicate_violations(
duplicates: object,
content: str,
) -> list[str]:
"""Format duplicate detection results into violation messages.
Args:
duplicates: List of duplicate detection results.
content: Source code being analyzed.
Returns:
List of formatted violation messages.
"""
violations: list[str] = []
if message_enrichment_module is None:
return violations
formatter = getattr(
message_enrichment_module,
"EnhancedMessageFormatter",
None,
)
if formatter is None:
return violations
format_method = getattr(formatter, "format_duplicate_message", None)
if format_method is None:
return violations
# Convert to properly typed list
dup_list = _safe_iter_list(duplicates)
if not dup_list:
return violations
# Process each duplicate entry
for dup_obj in dup_list:
if not _is_duplicate_dict(dup_obj):
continue
# After TypeGuard, dup_obj is _DuplicateDict
dup_type = dup_obj.get("type", "unknown")
similarity = dup_obj.get("similarity", 0.0)
locations = dup_obj.get("locations", [])
msg_result: object = format_method(
dup_type,
similarity,
locations,
content,
include_refactoring=True,
)
if isinstance(msg_result, str):
violations.append(msg_result)
return violations
def _check_duplicates(self, content: str) -> list[str]:
"""Check for duplicate code blocks.
@@ -384,7 +305,7 @@ class CodeQualityGuard:
if self.dup_engine is None:
return violations
with suppress(Exception):
try:
ast_analyzer = getattr(self.dup_engine, "ast_analyzer", None)
if ast_analyzer is None:
return violations
@@ -398,21 +319,39 @@ class CodeQualityGuard:
return violations
detect_method = getattr(
self.dup_engine,
"detect_duplicates_in_blocks",
None,
self.dup_engine, "detect_duplicates_in_blocks", None
)
if detect_method is None:
return violations
duplicates = detect_method(code_blocks)
if duplicates:
violations.extend(
CodeQualityGuard._format_duplicate_violations(
duplicates,
content,
),
)
if duplicates and message_enrichment_module is not None:
formatter = getattr(message_enrichment_module, "EnhancedMessageFormatter", None)
if formatter is not None:
format_method = getattr(formatter, "format_duplicate_message", None)
if format_method is not None:
for dup in duplicates:
if not isinstance(dup, dict):
continue
# Cast after isinstance check
dup_dict = dup
dup_type = _safe_get_str(dup_dict, "type", "unknown")
similarity = _safe_get_float(dup_dict, "similarity", 0.0)
locations = _safe_get_list(dup_dict, "locations")
msg = format_method(
dup_type,
similarity,
locations,
content,
include_refactoring=True,
)
if isinstance(msg, str):
violations.append(msg)
except Exception: # noqa: BLE001
pass
return violations
@@ -454,8 +393,7 @@ class CodeQualityGuard:
"🚫 Code Quality Issues Detected\n\n"
+ "\n\n".join(violations)
+ "\n\n"
"📚 Learn more: Use specific types, remove suppressions, "
"reduce complexity"
"📚 Learn more: Use specific types, remove suppressions, reduce complexity"
)
return {
"hookSpecificOutput": {"hookEventName": "PostToolUse"},

View File

@@ -12,8 +12,6 @@ from collections.abc import Mapping
from pathlib import Path
from typing import TypedDict
from ..facade import Guards
# Import types from parent modules
from ..models import HookResponse
@@ -471,6 +469,8 @@ def verify_naming_conventions(content: str, config: object) -> list[str]:
def pretooluse_hook(hook_data: Mapping[str, object], config: object) -> HookResponse:
"""Wrapper for pretooluse using Guards facade."""
_ = config
from ..facade import Guards
guards = Guards()
return guards.handle_pretooluse(dict(hook_data))
@@ -478,5 +478,7 @@ def pretooluse_hook(hook_data: Mapping[str, object], config: object) -> HookResp
def posttooluse_hook(hook_data: Mapping[str, object], config: object) -> HookResponse:
"""Wrapper for posttooluse using Guards facade."""
_ = config
from ..facade import Guards
guards = Guards()
return guards.handle_posttooluse(dict(hook_data))
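Both wrappers delegate to the `Guards` facade that `quality.hooks` re-exports, so the facade can also be called directly. A minimal sketch, assuming a payload that follows the `tool_input.content` shape used by the handlers in this diff:

```python
from quality.hooks import Guards

guards = Guards()
payload = {
    "tool_name": "Write",
    "tool_input": {"file_path": "example.py", "content": "x: int = 1\n"},
}
# PreToolUse is a pass-through; PostToolUse runs the quality analysis.
pre = guards.handle_pretooluse(payload)
post = guards.handle_posttooluse(payload)
print(pre["hookSpecificOutput"]["hookEventName"])   # expected: "PreToolUse"
print(post["hookSpecificOutput"]["hookEventName"])  # expected: "PostToolUse"
```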

View File

@@ -0,0 +1,442 @@
"""Project-local installer for Claude Code quality hooks."""
from __future__ import annotations
import argparse
import compileall
import json
import os
import shutil
import stat
from dataclasses import dataclass
from importlib import resources
from pathlib import Path
from textwrap import dedent
HOOKS_ROOT = Path(__file__).resolve().parent
DEFAULT_TEMPLATE_NAME = "claude-code-settings.json"
@dataclass(frozen=True)
class InstallResult:
"""Summary of installation actions."""
settings_path: Path
helper_script_path: Path
readme_path: Path
added_events: list[str]
backup_path: Path | None
alias_path: Path | None
def _load_template() -> dict[str, object]:
"""Load the bundled hook template JSON."""
try:
template_text = resources.files("quality.hooks").joinpath(DEFAULT_TEMPLATE_NAME).read_text("utf-8")
except FileNotFoundError as exc:
message = f"Template {DEFAULT_TEMPLATE_NAME} not found in package resources"
raise FileNotFoundError(message) from exc
data = json.loads(template_text)
if not isinstance(data, dict):
message = "Hook template must be a JSON object"
raise ValueError(message)
return data
def _read_existing_settings(path: Path) -> dict[str, object]:
"""Read existing settings JSON, falling back to empty dict on failure."""
if not path.exists():
return {}
try:
with path.open("r", encoding="utf-8") as handle:
data = json.load(handle)
if isinstance(data, dict):
return data
except json.JSONDecodeError:
return {}
return {}
def _collect_commands(entry: dict[str, object]) -> list[str]:
"""Collect command strings from a hook entry."""
hooks = entry.get("hooks")
if not isinstance(hooks, list):
return []
commands: list[str] = []
for hook in hooks:
if isinstance(hook, dict):
command = hook.get("command")
if isinstance(command, str):
commands.append(command)
return commands
def _merge_hooks(settings: dict[str, object], template: dict[str, object]) -> list[str]:
"""Merge template hooks into existing settings, returning changed event names."""
hooks_section = settings.get("hooks")
if not isinstance(hooks_section, dict):
hooks_section = {}
settings["hooks"] = hooks_section
template_hooks = template.get("hooks")
if not isinstance(template_hooks, dict):
return []
changed_events: list[str] = []
for event_name, template_entries in template_hooks.items():
if not isinstance(event_name, str) or not isinstance(template_entries, list):
continue
existing_entries = hooks_section.get(event_name)
if not isinstance(existing_entries, list):
existing_entries = []
hooks_section[event_name] = existing_entries
existing_commands = {
command
for entry in existing_entries
if isinstance(entry, dict)
for command in _collect_commands(entry)
}
appended = False
for entry in template_entries:
if not isinstance(entry, dict):
continue
commands = _collect_commands(entry)
if not commands:
continue
if any(command in existing_commands for command in commands):
continue
existing_entries.append(entry)
existing_commands.update(commands)
appended = True
if appended:
changed_events.append(event_name)
return changed_events
def _write_settings(path: Path, data: dict[str, object]) -> None:
"""Write JSON settings with pretty formatting."""
with path.open("w", encoding="utf-8") as handle:
json.dump(data, handle, indent=2)
handle.write("\n")
def _ensure_directory(path: Path) -> None:
"""Ensure directory exists."""
path.mkdir(parents=True, exist_ok=True)
def _backup_file(path: Path) -> Path | None:
"""Create a timestamped backup of an existing file."""
if not path.exists():
return None
timestamp = os.getenv("CLAUDE_HOOK_BACKUP_TS")
if timestamp is None:
from datetime import datetime
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
backup_path = path.with_name(f"{path.name}.backup.{timestamp}")
shutil.copy2(path, backup_path)
return backup_path
def _write_helper_script(claude_dir: Path) -> Path:
"""Write the helper shell script for configuring presets."""
script_path = claude_dir / "configure-quality.sh"
script_content = dedent(
"""\
#!/bin/bash
# Convenience script to configure Claude quality hook settings.
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/configure-quality.sh" [preset]
export QUALITY_STATE_TRACKING="true"
export QUALITY_CROSS_FILE_CHECK="true"
export QUALITY_VERIFY_NAMING="true"
export QUALITY_SHOW_SUCCESS="false"
case "${1:-default}" in
strict)
export QUALITY_ENFORCEMENT="strict"
export QUALITY_COMPLEXITY_THRESHOLD="10"
export QUALITY_DUP_THRESHOLD="0.7"
export QUALITY_DUP_ENABLED="true"
export QUALITY_COMPLEXITY_ENABLED="true"
export QUALITY_MODERN_ENABLED="true"
export QUALITY_TYPE_HINTS="true"
echo "✓ Strict quality mode enabled"
;;
moderate)
export QUALITY_ENFORCEMENT="warn"
export QUALITY_COMPLEXITY_THRESHOLD="15"
export QUALITY_DUP_THRESHOLD="0.8"
export QUALITY_DUP_ENABLED="true"
export QUALITY_COMPLEXITY_ENABLED="true"
export QUALITY_MODERN_ENABLED="true"
export QUALITY_TYPE_HINTS="false"
echo "✓ Moderate quality mode enabled"
;;
permissive)
export QUALITY_ENFORCEMENT="permissive"
export QUALITY_COMPLEXITY_THRESHOLD="20"
export QUALITY_DUP_THRESHOLD="0.9"
export QUALITY_DUP_ENABLED="true"
export QUALITY_COMPLEXITY_ENABLED="true"
export QUALITY_MODERN_ENABLED="false"
export QUALITY_TYPE_HINTS="false"
echo "✓ Permissive quality mode enabled"
;;
disabled)
export QUALITY_ENFORCEMENT="permissive"
export QUALITY_DUP_ENABLED="false"
export QUALITY_COMPLEXITY_ENABLED="false"
export QUALITY_MODERN_ENABLED="false"
export QUALITY_TYPE_HINTS="false"
echo "✓ Quality checks disabled"
;;
custom)
echo "Configure custom quality settings:"
read -p "Enforcement mode (strict/warn/permissive): " QUALITY_ENFORCEMENT
read -p "Complexity threshold (10-30): " QUALITY_COMPLEXITY_THRESHOLD
read -p "Duplicate threshold (0.5-1.0): " QUALITY_DUP_THRESHOLD
read -p "Enable duplicate detection? (true/false): " QUALITY_DUP_ENABLED
read -p "Enable complexity checks? (true/false): " QUALITY_COMPLEXITY_ENABLED
read -p "Enable modernization checks? (true/false): " QUALITY_MODERN_ENABLED
read -p "Require type hints? (true/false): " QUALITY_TYPE_HINTS
export QUALITY_ENFORCEMENT
export QUALITY_COMPLEXITY_THRESHOLD
export QUALITY_DUP_THRESHOLD
export QUALITY_DUP_ENABLED
export QUALITY_COMPLEXITY_ENABLED
export QUALITY_MODERN_ENABLED
export QUALITY_TYPE_HINTS
echo "✓ Custom quality settings configured"
;;
status)
echo "Current quality settings:"
echo " QUALITY_ENFORCEMENT: ${QUALITY_ENFORCEMENT:-strict}"
echo " QUALITY_COMPLEXITY_THRESHOLD: ${QUALITY_COMPLEXITY_THRESHOLD:-10}"
echo " QUALITY_DUP_THRESHOLD: ${QUALITY_DUP_THRESHOLD:-0.7}"
echo " QUALITY_DUP_ENABLED: ${QUALITY_DUP_ENABLED:-true}"
echo " QUALITY_COMPLEXITY_ENABLED: ${QUALITY_COMPLEXITY_ENABLED:-true}"
echo " QUALITY_MODERN_ENABLED: ${QUALITY_MODERN_ENABLED:-true}"
echo " QUALITY_TYPE_HINTS: ${QUALITY_TYPE_HINTS:-false}"
return 0
;;
*)
export QUALITY_ENFORCEMENT="strict"
export QUALITY_COMPLEXITY_THRESHOLD="10"
export QUALITY_DUP_THRESHOLD="0.7"
export QUALITY_DUP_ENABLED="true"
export QUALITY_COMPLEXITY_ENABLED="true"
export QUALITY_MODERN_ENABLED="true"
export QUALITY_TYPE_HINTS="false"
echo "✓ Default quality settings applied"
echo ""
echo "Available presets: strict, moderate, permissive, disabled, custom, status"
echo "Usage: source ${BASH_SOURCE[0]} [preset]"
;;
esac
""",
)
script_path.write_text(script_content, encoding="utf-8")
script_path.chmod(script_path.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
return script_path
def _write_readme(claude_dir: Path, settings_path: Path, helper_script: Path) -> Path:
"""Write README documenting the hook configuration."""
readme_path = claude_dir / "README_QUALITY_HOOK.md"
readme_text = dedent(
f"""\
# Claude Code Quality Hook (Project Local)
The code quality hook is configured locally for this project.
- Settings file: {settings_path}
- Helper script: {helper_script}
- Hook entry point: python3 -m quality.hooks.cli
## Configuration
The hook runs on Claude Code PreToolUse, PostToolUse, and Stop events.
Apply presets with:
```bash
source {helper_script} strict
```
Environment variables recognised by the hook include:
- `QUALITY_ENFORCEMENT` (strict|warn|permissive)
- `QUALITY_COMPLEXITY_THRESHOLD`
- `QUALITY_DUP_THRESHOLD`
- `QUALITY_DUP_ENABLED`
- `QUALITY_COMPLEXITY_ENABLED`
- `QUALITY_MODERN_ENABLED`
- `QUALITY_TYPE_HINTS`
- `QUALITY_STATE_TRACKING`
- `QUALITY_CROSS_FILE_CHECK`
- `QUALITY_VERIFY_NAMING`
- `QUALITY_SHOW_SUCCESS`
## Maintenance
- Re-run the installer to refresh settings when claude-scripts updates.
- Remove the hook by deleting the entries for the quality checker from {settings_path}.
"""
)
readme_path.write_text(readme_text, encoding="utf-8")
return readme_path
def _default_shell_rc_paths() -> list[Path]:
"""Return candidate shell RC files."""
home = Path.home()
return [home / ".bashrc", home / ".zshrc"]
def _ensure_alias(helper_script: Path, explicit_path: Path | None = None) -> Path | None:
"""Add claude-quality alias to shell RC if missing."""
alias_line = f"alias claude-quality='source {helper_script}'"
candidates = [explicit_path] if explicit_path is not None else _default_shell_rc_paths()
for candidate in candidates:
if candidate is None:
continue
try:
existing = candidate.read_text(encoding="utf-8")
except FileNotFoundError:
candidate.parent.mkdir(parents=True, exist_ok=True)
candidate.write_text("", encoding="utf-8")
existing = ""
if alias_line in existing:
return candidate
with candidate.open("a", encoding="utf-8") as handle:
handle.write("\n# Claude Code quality configuration\n")
handle.write(f"{alias_line}\n")
return candidate
return None
def _compile_hooks() -> bool:
"""Compile hook sources to bytecode to surface syntax errors early."""
return compileall.compile_dir(str(HOOKS_ROOT), quiet=1)
def install(
project_path: Path,
*,
create_alias: bool = True,
alias_path: Path | None = None,
) -> InstallResult:
"""Perform installation and return summary."""
template = _load_template()
claude_dir = project_path / ".claude"
_ensure_directory(claude_dir)
settings_path = claude_dir / "settings.json"
backup_path = _backup_file(settings_path)
settings = _read_existing_settings(settings_path)
changed_events = _merge_hooks(settings, template)
if not settings and not changed_events:
# Template added no new events; still write template to ensure hooks exist.
settings = template
changed_events = list(template.get("hooks", {}).keys()) if isinstance(template.get("hooks"), dict) else []
_write_settings(settings_path, settings)
helper_script = _write_helper_script(claude_dir)
readme_path = _write_readme(claude_dir, settings_path, helper_script)
alias_file: Path | None = None
if create_alias:
alias_file = _ensure_alias(helper_script, alias_path)
if not _compile_hooks():
message = "Hook compilation failed; inspect Python files in quality.hooks."
raise RuntimeError(message)
return InstallResult(
settings_path=settings_path,
helper_script_path=helper_script,
readme_path=readme_path,
added_events=changed_events,
backup_path=backup_path,
alias_path=alias_file,
)
def build_parser() -> argparse.ArgumentParser:
"""Create CLI argument parser."""
parser = argparse.ArgumentParser(description="Install Claude Code quality hook for a project.")
parser.add_argument(
"--project",
type=Path,
default=Path.cwd(),
help="Project directory where .claude/ should be created (default: current directory)",
)
parser.add_argument(
"--create-alias",
action="store_true",
default=False,
help="Append claude-quality alias to shell configuration",
)
parser.add_argument(
"--alias-shellrc",
type=Path,
default=None,
help="Explicit shell RC file to update with the alias",
)
return parser
def main(argv: list[str] | None = None) -> int:
"""CLI entry point."""
parser = build_parser()
args = parser.parse_args(argv)
project_path = args.project.resolve()
create_alias = bool(args.create_alias)
alias_path = args.alias_shellrc.resolve() if args.alias_shellrc is not None else None
try:
result = install(project_path, create_alias=create_alias, alias_path=alias_path)
except (FileNotFoundError, ValueError, RuntimeError) as error:
print(f"{error}")
return 1
changed_text = ", ".join(result.added_events) if result.added_events else "none (already present)"
print(f"✓ Settings written to {result.settings_path}")
if result.backup_path is not None:
print(f" Backup created at {result.backup_path}")
print(f"✓ Helper script written to {result.helper_script_path}")
print(f"✓ README written to {result.readme_path}")
print(f"✓ Hook events added or confirmed: {changed_text}")
if result.alias_path is not None:
print(f"✓ Alias added to {result.alias_path}")
elif create_alias:
print("! No shell RC file updated (alias already present or no candidate found)")
return 0
if __name__ == "__main__":
raise SystemExit(main())

View File

@@ -0,0 +1,518 @@
"""Internal duplicate detection for analyzing code blocks within a single file.
Uses AST analysis and multiple similarity algorithms to detect redundant patterns.
"""
import ast
import difflib
import hashlib
import re
from collections import defaultdict
from dataclasses import dataclass
from typing import Any, TypedDict
COMMON_DUPLICATE_METHODS = {
"__init__",
"__enter__",
"__exit__",
"__aenter__",
"__aexit__",
}
@dataclass
class CodeBlock:
"""Represents a code block (function, method, or class)."""
name: str
type: str # 'function', 'method', 'class'
start_line: int
end_line: int
source: str
ast_node: ast.AST
complexity: int = 0
tokens: list[str] | None = None
def __post_init__(self):
if self.tokens is None:
self.tokens = self._tokenize()
def _tokenize(self) -> list[str]:
"""Extract meaningful tokens from source code."""
# Remove comments and docstrings
code = re.sub(r"#.*$", "", self.source, flags=re.MULTILINE)
code = re.sub(r'""".*?"""', "", code, flags=re.DOTALL)
code = re.sub(r"'''.*?'''", "", code, flags=re.DOTALL)
# Extract identifiers, keywords, operators
return re.findall(r"\b\w+\b|[=<>!+\-*/]+", code)
@dataclass
class DuplicateGroup:
"""Group of similar code blocks."""
blocks: list[CodeBlock]
similarity_score: float
pattern_type: str # 'exact', 'structural', 'semantic'
description: str
class InternalDuplicateDetector:
"""Detects duplicate and similar code blocks within a single file."""
def __init__(
self,
similarity_threshold: float = 0.7,
min_lines: int = 4,
min_tokens: int = 20,
):
self.similarity_threshold = similarity_threshold
self.min_lines = min_lines
self.min_tokens = min_tokens
self.duplicate_groups: list[DuplicateGroup] = []
def analyze_code(self, source_code: str) -> dict[str, Any]:
"""Analyze source code for internal duplicates."""
try:
tree = ast.parse(source_code)
except SyntaxError:
return {
"error": "Failed to parse code",
"duplicates": [],
"summary": {"total_duplicates": 0},
}
# Extract code blocks
blocks = self._extract_code_blocks(tree, source_code)
# Filter blocks by size
blocks = [
b
for b in blocks
if (b.end_line - b.start_line + 1) >= self.min_lines
and len(b.tokens) >= self.min_tokens
]
if len(blocks) < 2:
return {
"duplicates": [],
"summary": {
"total_duplicates": 0,
"blocks_analyzed": len(blocks),
},
}
# Find duplicates
duplicate_groups = []
# 1. Check for exact duplicates (normalized)
exact_groups = self._find_exact_duplicates(blocks)
duplicate_groups.extend(exact_groups)
# 2. Check for structural similarity
structural_groups = self._find_structural_duplicates(blocks)
duplicate_groups.extend(structural_groups)
# 3. Check for semantic patterns
pattern_groups = self._find_pattern_duplicates(blocks)
duplicate_groups.extend(pattern_groups)
filtered_groups = [
group
for group in duplicate_groups
if group.similarity_score >= self.similarity_threshold
and not self._should_ignore_group(group)
]
results = [
{
"type": group.pattern_type,
"similarity": group.similarity_score,
"description": group.description,
"locations": [
{
"name": block.name,
"type": block.type,
"lines": f"{block.start_line}-{block.end_line}",
}
for block in group.blocks
],
}
for group in filtered_groups
]
return {
"duplicates": results,
"summary": {
"total_duplicates": len(results),
"blocks_analyzed": len(blocks),
"duplicate_lines": sum(
sum(b.end_line - b.start_line + 1 for b in g.blocks)
for g in filtered_groups
),
},
}
def _extract_code_blocks(self, tree: ast.AST, source: str) -> list[CodeBlock]:
"""Extract functions, methods, and classes from AST."""
blocks = []
lines = source.split("\n")
def create_block(
node: ast.AST,
block_type: str,
lines: list[str],
) -> CodeBlock | None:
try:
start = node.lineno - 1
end = node.end_lineno - 1 if hasattr(node, "end_lineno") else start
source = "\n".join(lines[start : end + 1])
return CodeBlock(
name=node.name,
type=block_type,
start_line=node.lineno,
end_line=node.end_lineno
if hasattr(node, "end_lineno")
else node.lineno,
source=source,
ast_node=node,
complexity=calculate_complexity(node),
)
except Exception: # noqa: BLE001
return None
def calculate_complexity(node: ast.AST) -> int:
"""Simple cyclomatic complexity calculation."""
complexity = 1
for child in ast.walk(node):
if isinstance(
child,
(ast.If, ast.While, ast.For, ast.ExceptHandler),
):
complexity += 1
elif isinstance(child, ast.BoolOp):
complexity += len(child.values) - 1
return complexity
def extract_blocks_from_node(
node: ast.AST,
parent: ast.AST | None = None,
) -> None:
"""Recursively extract code blocks from AST nodes."""
if isinstance(node, ast.ClassDef):
if block := create_block(node, "class", lines):
blocks.append(block)
for item in node.body:
extract_blocks_from_node(item, node)
return
if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)):
block_type = (
"method" if isinstance(parent, ast.ClassDef) else "function"
)
if block := create_block(node, block_type, lines):
blocks.append(block)
for child in ast.iter_child_nodes(node):
extract_blocks_from_node(child, node)
extract_blocks_from_node(tree)
return blocks
def _find_exact_duplicates(self, blocks: list[CodeBlock]) -> list[DuplicateGroup]:
"""Find exact or near-exact duplicate blocks."""
groups = []
processed = set()
for i, block1 in enumerate(blocks):
if i in processed:
continue
similar = [block1]
norm1 = self._normalize_code(block1.source)
for j, block2 in enumerate(blocks[i + 1 :], i + 1):
if j in processed:
continue
norm2 = self._normalize_code(block2.source)
# Check if normalized versions are very similar
similarity = difflib.SequenceMatcher(None, norm1, norm2).ratio()
if similarity >= 0.85: # High threshold for "exact" duplicates
similar.append(block2)
processed.add(j)
if len(similar) > 1:
# Calculate actual similarity on normalized code
total_sim = 0
count = 0
for k in range(len(similar)):
for idx in range(k + 1, len(similar)):
norm_k = self._normalize_code(similar[k].source)
norm_idx = self._normalize_code(similar[idx].source)
sim = difflib.SequenceMatcher(None, norm_k, norm_idx).ratio()
total_sim += sim
count += 1
avg_similarity = total_sim / count if count > 0 else 1.0
groups.append(
DuplicateGroup(
blocks=similar,
similarity_score=avg_similarity,
pattern_type="exact",
description=f"Nearly identical {similar[0].type}s",
),
)
processed.add(i)
return groups
def _normalize_code(self, code: str) -> str:
"""Normalize code for comparison (replace variable names, etc.)."""
# Remove comments and docstrings
code = re.sub(r"#.*$", "", code, flags=re.MULTILINE)
code = re.sub(r'""".*?"""', "", code, flags=re.DOTALL)
code = re.sub(r"'''.*?'''", "", code, flags=re.DOTALL)
# Replace string literals
code = re.sub(r'"[^"]*"', '"STR"', code)
code = re.sub(r"'[^']*'", "'STR'", code)
# Replace numbers
code = re.sub(r"\b\d+\.?\d*\b", "NUM", code)
# Normalize whitespace
code = re.sub(r"\s+", " ", code)
return code.strip()
def _find_structural_duplicates(
self,
blocks: list[CodeBlock],
) -> list[DuplicateGroup]:
"""Find structurally similar blocks using AST comparison."""
groups = []
processed = set()
for i, block1 in enumerate(blocks):
if i in processed:
continue
similar_blocks = [block1]
for j, block2 in enumerate(blocks[i + 1 :], i + 1):
if j in processed:
continue
similarity = self._ast_similarity(block1.ast_node, block2.ast_node)
if similarity >= self.similarity_threshold:
similar_blocks.append(block2)
processed.add(j)
if len(similar_blocks) > 1:
# Calculate average similarity
total_sim = 0
count = 0
for k in range(len(similar_blocks)):
for idx in range(k + 1, len(similar_blocks)):
total_sim += self._ast_similarity(
similar_blocks[k].ast_node,
similar_blocks[idx].ast_node,
)
count += 1
avg_similarity = total_sim / count if count > 0 else 0
groups.append(
DuplicateGroup(
blocks=similar_blocks,
similarity_score=avg_similarity,
pattern_type="structural",
description=f"Structurally similar {similar_blocks[0].type}s",
),
)
processed.add(i)
return groups
def _ast_similarity(self, node1: ast.AST, node2: ast.AST) -> float:
"""Calculate structural similarity between two AST nodes."""
def get_structure(node: ast.AST) -> list[str]:
"""Extract structural pattern from AST node."""
structure = []
for child in ast.walk(node):
structure.append(child.__class__.__name__)
return structure
struct1 = get_structure(node1)
struct2 = get_structure(node2)
if not struct1 or not struct2:
return 0.0
# Use sequence matcher for structural similarity
matcher = difflib.SequenceMatcher(None, struct1, struct2)
return matcher.ratio()
def _find_pattern_duplicates(self, blocks: list[CodeBlock]) -> list[DuplicateGroup]:
"""Find blocks with similar patterns (e.g., similar loops, conditions)."""
groups = []
pattern_groups = defaultdict(list)
for block in blocks:
patterns = self._extract_patterns(block)
for pattern_type, pattern_hash in patterns:
pattern_groups[(pattern_type, pattern_hash)].append(block)
for (pattern_type, _), similar_blocks in pattern_groups.items():
if len(similar_blocks) > 1:
# Calculate token-based similarity
total_sim = 0
count = 0
for i in range(len(similar_blocks)):
for j in range(i + 1, len(similar_blocks)):
sim = self._token_similarity(
similar_blocks[i].tokens,
similar_blocks[j].tokens,
)
total_sim += sim
count += 1
avg_similarity = total_sim / count if count > 0 else 0.7
if avg_similarity >= self.similarity_threshold:
groups.append(
DuplicateGroup(
blocks=similar_blocks,
similarity_score=avg_similarity,
pattern_type="semantic",
description=f"Similar {pattern_type} patterns",
),
)
return groups
def _extract_patterns(self, block: CodeBlock) -> list[tuple[str, str]]:
"""Extract semantic patterns from code block."""
patterns = []
# Pattern: for-if combination
if "for " in block.source and "if " in block.source:
pattern = re.sub(r"\b\w+\b", "VAR", block.source)
pattern = re.sub(r"\s+", "", pattern)
patterns.append(
("loop-condition", hashlib.sha256(pattern.encode()).hexdigest()[:8]),
)
# Pattern: multiple similar operations
operations = re.findall(r"(\w+)\s*[=+\-*/]+\s*(\w+)", block.source)
if len(operations) > 2:
op_pattern = "".join(sorted(op[0] for op in operations))
patterns.append(
("repetitive-ops", hashlib.sha256(op_pattern.encode()).hexdigest()[:8]),
)
# Pattern: similar function calls
calls = re.findall(r"(\w+)\s*\([^)]*\)", block.source)
if len(calls) > 2:
call_pattern = "".join(sorted(set(calls)))
patterns.append(
(
"similar-calls",
hashlib.sha256(call_pattern.encode()).hexdigest()[:8],
),
)
return patterns
def _token_similarity(self, tokens1: list[str], tokens2: list[str]) -> float:
"""Calculate similarity between token sequences."""
if not tokens1 or not tokens2:
return 0.0
# Use Jaccard similarity on token sets
set1 = set(tokens1)
set2 = set(tokens2)
intersection = len(set1 & set2)
union = len(set1 | set2)
if union == 0:
return 0.0
jaccard = intersection / union
# Also consider sequence similarity
sequence_sim = difflib.SequenceMatcher(None, tokens1, tokens2).ratio()
# Weighted combination
return 0.6 * jaccard + 0.4 * sequence_sim
def _should_ignore_group(self, group: DuplicateGroup) -> bool:
"""Drop duplicate groups that match common boilerplate patterns."""
if not group.blocks:
return False
if all(block.name in COMMON_DUPLICATE_METHODS for block in group.blocks):
max_lines = max(
block.end_line - block.start_line + 1 for block in group.blocks
)
max_complexity = max(block.complexity for block in group.blocks)
# Allow simple lifecycle dunder methods to repeat across classes.
if max_lines <= 12 and max_complexity <= 3:
return True
# Exempt simple Arrange-Act-Assert style test functions
if all(block.name.startswith("test_") for block in group.blocks):
max_lines = max(block.end_line - block.start_line + 1 for block in group.blocks)
patterns = {"arrange", "act", "assert"}
if max_lines <= 20:
for block in group.blocks:
lower_source = block.source.lower()
if not all(pattern in lower_source for pattern in patterns):
break
else:
return True
return False
def detect_internal_duplicates(
source_code: str,
threshold: float = 0.7,
min_lines: int = 4,
) -> dict[str, Any]:
"""Main function to detect internal duplicates in code."""
detector = InternalDuplicateDetector(
similarity_threshold=threshold,
min_lines=min_lines,
)
return detector.analyze_code(source_code)
class DuplicateLocation(TypedDict):
"""Location information for a duplicate code block."""
name: str
lines: str
class Duplicate(TypedDict):
"""Duplicate code detection result."""
similarity: float
description: str
locations: list[DuplicateLocation]
class DuplicateResults(TypedDict):
"""Results from duplicate detection analysis."""
duplicates: list[Duplicate]
summary: dict[str, Any]
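For reference, the module's convenience entry point can be exercised like this (a sketch; the dotted path `quality.hooks.internal_duplicate_detector` is assumed from the commit message):

```python
from quality.hooks.internal_duplicate_detector import detect_internal_duplicates  # path assumed

SOURCE = '''
def parse_a(items):
    out = []
    for item in items:
        if item is None:
            continue
        text = item.strip().lower()
        out.append(text)
    return out

def parse_b(values):
    out = []
    for value in values:
        if value is None:
            continue
        text = value.strip().lower()
        out.append(text)
    return out
'''

results = detect_internal_duplicates(SOURCE, threshold=0.7, min_lines=4)
print(results["summary"])  # block counts plus the number of duplicate groups found
for group in results["duplicates"]:
    # Each group carries a pattern type, a similarity score, and block locations.
    print(group["type"], round(group["similarity"], 2), group["locations"])
```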

View File

@@ -23,13 +23,6 @@ class HookResponse(TypedDict, total=False):
systemMessage: str
class HookResponseRequired(TypedDict, total=True):
"""Hook response with required fields for testing."""
permissionDecision: str
reason: str
class ToolInput(TypedDict, total=False):
"""Tool input data within hook payload."""

View File

@@ -1,5 +1,6 @@
"""Comprehensive test suite covering all hook interaction scenarios."""
# ruff: noqa: SLF001
# pyright: reportPrivateUsage=false, reportPrivateImportUsage=false, reportPrivateLocalImportUsage=false, reportUnusedCallResult=false, reportUnknownArgumentType=false, reportUnknownVariableType=false, reportUnknownLambdaType=false, reportUnknownMemberType=false
from __future__ import annotations
@@ -13,7 +14,7 @@ from tempfile import gettempdir
import pytest
from hooks.guards import utils as guard
from quality.hooks import code_quality_guard as guard
class TestProjectStructureVariations:
@@ -31,15 +32,14 @@ class TestProjectStructureVariations:
test_file.write_text("# test")
# Should find project root
found_root = guard.find_project_root(str(test_file))
found_root = guard._find_project_root(str(test_file))
assert found_root == root
# Should create .tmp in root
tmp_dir = guard.get_project_tmp_dir(str(test_file))
tmp_dir = guard._get_project_tmp_dir(str(test_file))
assert tmp_dir == root / ".tmp"
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -54,14 +54,13 @@ class TestProjectStructureVariations:
test_file = root / "src/package/module.py"
test_file.write_text("# test")
found_root = guard.find_project_root(str(test_file))
found_root = guard._find_project_root(str(test_file))
assert found_root == root
venv_bin = guard.get_project_venv_bin(str(test_file))
venv_bin = guard._get_project_venv_bin(str(test_file))
assert venv_bin == root / ".venv/bin"
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -82,15 +81,14 @@ class TestProjectStructureVariations:
test_file.write_text("# test")
# Should find inner project root
found_root = guard.find_project_root(str(test_file))
found_root = guard._find_project_root(str(test_file))
assert found_root == inner
# Should use inner venv
venv_bin = guard.get_project_venv_bin(str(test_file))
venv_bin = guard._get_project_venv_bin(str(test_file))
assert venv_bin == inner / ".venv/bin"
finally:
import shutil
if outer.exists():
shutil.rmtree(outer)
@@ -104,12 +102,11 @@ class TestProjectStructureVariations:
# With no markers, searches all the way up
# (may find .git in home directory or elsewhere)
found_root = guard.find_project_root(str(test_file))
found_root = guard._find_project_root(str(test_file))
# Should at least not crash
assert isinstance(found_root, Path)
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -124,11 +121,10 @@ class TestProjectStructureVariations:
test_file = deep / "module.py"
test_file.write_text("# test")
found_root = guard.find_project_root(str(test_file))
found_root = guard._find_project_root(str(test_file))
assert found_root == root
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -149,12 +145,11 @@ class TestConfigurationInheritance:
test_file = root / "src/mod.py"
test_file.write_text("# test")
found_root = guard.find_project_root(str(test_file))
found_root = guard._find_project_root(str(test_file))
assert found_root == root
assert (found_root / "pyrightconfig.json").exists()
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -168,11 +163,10 @@ class TestConfigurationInheritance:
test_file = root / "main.py"
test_file.write_text("# test")
found_root = guard.find_project_root(str(test_file))
found_root = guard._find_project_root(str(test_file))
assert found_root == root
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -187,14 +181,13 @@ class TestConfigurationInheritance:
test_file = root / "main.py"
test_file.write_text("# test")
tmp_dir = guard.get_project_tmp_dir(str(test_file))
tmp_dir = guard._get_project_tmp_dir(str(test_file))
assert tmp_dir.exists()
gitignore_content = (root / ".gitignore").read_text()
assert ".tmp/" in gitignore_content
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -210,13 +203,12 @@ class TestConfigurationInheritance:
test_file = root / "main.py"
test_file.write_text("# test")
_ = guard.get_project_tmp_dir(str(test_file))
_ = guard._get_project_tmp_dir(str(test_file))
# Should not have been modified
assert (root / ".gitignore").read_text() == original
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -233,7 +225,7 @@ class TestVirtualEnvironmentEdgeCases:
test_file = root / "main.py"
test_file.write_text("# test")
venv_bin = guard.get_project_venv_bin(str(test_file))
venv_bin = guard._get_project_venv_bin(str(test_file))
# Should not be in the test project
assert str(root) not in str(venv_bin)
@@ -241,7 +233,6 @@ class TestVirtualEnvironmentEdgeCases:
assert venv_bin.name == "bin"
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -254,14 +245,13 @@ class TestVirtualEnvironmentEdgeCases:
test_file = root / "main.py"
test_file.write_text("# test")
venv_bin = guard.get_project_venv_bin(str(test_file))
venv_bin = guard._get_project_venv_bin(str(test_file))
# Should fallback since bin/ doesn't exist in project
assert str(root) not in str(venv_bin)
assert venv_bin.name == "bin"
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -294,10 +284,11 @@ class TestVirtualEnvironmentEdgeCases:
monkeypatch.setattr(guard.subprocess, "run", capture_run)
guard.run_type_checker_with_config(
guard._run_type_checker(
"basedpyright",
str(test_file),
guard.QualityConfig(),
original_file_path=str(test_file),
)
# PYTHONPATH should not be set (or not include src)
@@ -305,7 +296,6 @@ class TestVirtualEnvironmentEdgeCases:
assert "src" not in captured_env["PYTHONPATH"]
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -315,7 +305,13 @@ class TestTypeCheckerIntegration:
def test_all_tools_disabled(self) -> None:
"""All type checkers disabled returns no issues."""
issues = guard.run_type_checker("basedpyright", "test.py")
config = guard.QualityConfig(
basedpyright_enabled=False,
pyrefly_enabled=False,
sourcery_enabled=False,
)
issues = guard.run_type_checks("test.py", config)
assert issues == []
def test_tool_not_found_returns_warning(
@@ -326,7 +322,7 @@ class TestTypeCheckerIntegration:
monkeypatch.setattr(guard.Path, "exists", lambda _: False, raising=False)
monkeypatch.setattr(guard, "_ensure_tool_installed", lambda _: False)
success, message = guard.run_type_checker_with_config(
success, message = guard._run_type_checker(
"basedpyright",
"test.py",
guard.QualityConfig(),
@@ -347,7 +343,7 @@ class TestTypeCheckerIntegration:
monkeypatch.setattr(guard.subprocess, "run", timeout_run)
success, message = guard.run_type_checker_with_config(
success, message = guard._run_type_checker(
"basedpyright",
"test.py",
guard.QualityConfig(),
@@ -369,7 +365,7 @@ class TestTypeCheckerIntegration:
monkeypatch.setattr(guard.subprocess, "run", error_run)
success, message = guard.run_type_checker_with_config(
success, message = guard._run_type_checker(
"basedpyright",
"test.py",
guard.QualityConfig(),
@@ -387,7 +383,7 @@ class TestTypeCheckerIntegration:
monkeypatch.setattr(guard.Path, "exists", lambda _: False, raising=False)
monkeypatch.setattr(guard, "_ensure_tool_installed", lambda _: False)
success, message = guard.run_type_checker_with_config(
success, message = guard._run_type_checker(
"unknown_tool",
"test.py",
guard.QualityConfig(),
@@ -431,17 +427,17 @@ class TestWorkingDirectoryScenarios:
monkeypatch.setattr(guard.subprocess, "run", capture_run)
guard.run_type_checker_with_config(
guard._run_type_checker(
"basedpyright",
str(test_file),
guard.QualityConfig(),
original_file_path=str(test_file),
)
assert captured_cwd
assert captured_cwd[0] == root
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -451,7 +447,7 @@ class TestErrorConditions:
def test_invalid_syntax_in_content(self) -> None:
"""Invalid Python syntax is detected."""
issues = guard.detect_any_usage("def broken(:\n pass")
issues = guard._detect_any_usage("def broken(:\n pass")
# Should still check for Any even with syntax error
assert isinstance(issues, list)
@@ -460,7 +456,6 @@ class TestErrorConditions:
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""Permission error creating .tmp is handled."""
def raise_permission(*_args: object, **_kw: object) -> None:
message = "Cannot create directory"
raise PermissionError(message)
@@ -469,7 +464,7 @@ class TestErrorConditions:
# Should raise and be caught by caller
with pytest.raises(PermissionError):
guard.get_project_tmp_dir("/some/file.py")
guard._get_project_tmp_dir("/some/file.py")
def test_empty_file_content(self) -> None:
"""Empty file content is handled."""
@@ -481,11 +476,10 @@ class TestErrorConditions:
test_file.write_text("")
# Should not crash
tmp_dir = guard.get_project_tmp_dir(str(test_file))
tmp_dir = guard._get_project_tmp_dir(str(test_file))
assert tmp_dir.exists()
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -503,14 +497,13 @@ class TestFileLocationVariations:
test_file = root / "tests/test_module.py"
test_file.write_text("# test")
found_root = guard.find_project_root(str(test_file))
found_root = guard._find_project_root(str(test_file))
assert found_root == root
# Test file detection
assert guard.is_test_file(str(test_file))
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -524,11 +517,10 @@ class TestFileLocationVariations:
test_file = root / "main.py"
test_file.write_text("# test")
found_root = guard.find_project_root(str(test_file))
found_root = guard._find_project_root(str(test_file))
assert found_root == root
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -572,7 +564,6 @@ class TestTempFileManagement:
assert not temp_files
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -586,14 +577,13 @@ class TestTempFileManagement:
test_file = root / "src/mod.py"
test_file.write_text("# test")
tmp_dir = guard.get_project_tmp_dir(str(test_file))
tmp_dir = guard._get_project_tmp_dir(str(test_file))
# Should be in project, not /tmp
assert str(tmp_dir).startswith(str(root))
assert not str(tmp_dir).startswith(gettempdir())
finally:
import shutil
if root.exists():
shutil.rmtree(root)
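
The hunks above switch the suite to the underscore-prefixed helpers on `quality.hooks.code_quality_guard`. A minimal pytest sketch of the new spelling, assuming `pyrightconfig.json` is accepted as a project-root marker and using the `tmp_path` fixture instead of manual `shutil.rmtree` cleanup:

```python
from pathlib import Path

from quality.hooks import code_quality_guard as guard


def test_root_and_tmp_dir_discovery(tmp_path: Path) -> None:
    # Arrange: mark a project root and add a nested module file.
    (tmp_path / "pyrightconfig.json").write_text("{}")
    module = tmp_path / "src" / "pkg" / "module.py"
    module.parent.mkdir(parents=True)
    module.write_text("# module under test\n")

    # Act: resolve the root and the project-local scratch directory.
    root = guard._find_project_root(str(module))
    tmp_dir = guard._get_project_tmp_dir(str(module))

    # Assert: both stay inside the marked project, not the system tmp dir.
    assert root == tmp_path
    assert tmp_dir == tmp_path / ".tmp"
    assert tmp_dir.exists()
```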

View File

@@ -4,7 +4,7 @@ import os
import pytest
from tests.hooks.test_config_compatibility import QualityConfig
from quality.hooks import code_quality_guard as guard
class TestQualityConfig:
@@ -12,7 +12,7 @@ class TestQualityConfig:
def test_default_config(self):
"""Test default configuration values."""
config = QualityConfig()
config = guard.QualityConfig()
# Core settings
assert config.duplicate_threshold == 0.7
@@ -39,7 +39,7 @@ class TestQualityConfig:
def test_from_env_with_defaults(self):
"""Test loading config from environment with defaults."""
config = QualityConfig.from_env()
config = guard.QualityConfig.from_env()
# Should use defaults when env vars not set
assert config.duplicate_threshold == 0.7
@@ -62,7 +62,7 @@ class TestQualityConfig:
"QUALITY_SHOW_SUCCESS": "true",
}
config = QualityConfig.from_env()
config = guard.QualityConfig.from_env()
assert config.duplicate_threshold == 0.8
assert config.duplicate_enabled is False
@@ -79,7 +79,7 @@ class TestQualityConfig:
def test_from_env_with_invalid_boolean(self):
"""Test loading config with invalid boolean values."""
os.environ["QUALITY_DUP_ENABLED"] = "invalid"
config = QualityConfig.from_env()
config = guard.QualityConfig.from_env()
# Should default to False for invalid boolean
assert config.duplicate_enabled is False
@@ -89,14 +89,14 @@ class TestQualityConfig:
os.environ["QUALITY_DUP_THRESHOLD"] = "not_a_float"
with pytest.raises(ValueError, match="could not convert string to float"):
_ = QualityConfig.from_env()
_ = guard.QualityConfig.from_env()
def test_from_env_with_invalid_int(self):
"""Test loading config with invalid int values."""
os.environ["QUALITY_COMPLEXITY_THRESHOLD"] = "not_an_int"
with pytest.raises(ValueError, match="invalid literal"):
_ = QualityConfig.from_env()
_ = guard.QualityConfig.from_env()
def test_enforcement_modes(self):
"""Test different enforcement modes."""
@@ -104,34 +104,34 @@ class TestQualityConfig:
for mode in modes:
os.environ["QUALITY_ENFORCEMENT"] = mode
config = QualityConfig.from_env()
config = guard.QualityConfig.from_env()
assert config.enforcement_mode == mode
def test_skip_patterns_initialization(self):
"""Test skip patterns initialization."""
config = QualityConfig()
config = guard.QualityConfig(skip_patterns=None)
assert config.skip_patterns is not None
assert len(config.skip_patterns) > 0
custom_patterns = ["custom_test_", "/custom/"]
config = QualityConfig(skip_patterns=custom_patterns)
config = guard.QualityConfig(skip_patterns=custom_patterns)
assert config.skip_patterns == custom_patterns
def test_threshold_boundaries(self):
"""Test threshold boundary values."""
# Test minimum threshold
os.environ["QUALITY_DUP_THRESHOLD"] = "0.0"
config = QualityConfig.from_env()
config = guard.QualityConfig.from_env()
assert config.duplicate_threshold == 0.0
# Test maximum threshold
os.environ["QUALITY_DUP_THRESHOLD"] = "1.0"
config = QualityConfig.from_env()
config = guard.QualityConfig.from_env()
assert config.duplicate_threshold == 1.0
# Test complexity threshold
os.environ["QUALITY_COMPLEXITY_THRESHOLD"] = "1"
config = QualityConfig.from_env()
config = guard.QualityConfig.from_env()
assert config.complexity_threshold == 1
def test_config_combinations(self, monkeypatch: pytest.MonkeyPatch) -> None:
@@ -179,7 +179,7 @@ class TestQualityConfig:
with monkeypatch.context() as mp:
for key, value in env_values.items():
mp.setenv(key, value)
config = QualityConfig.from_env()
config = guard.QualityConfig.from_env()
for key, expected_value in expected_values.items():
assert getattr(config, key) == expected_value
@@ -191,5 +191,5 @@ class TestQualityConfig:
for value, expected_bool in zip(test_values, expected, strict=False):
os.environ["QUALITY_DUP_ENABLED"] = value
config = QualityConfig.from_env()
config = guard.QualityConfig.from_env()
assert config.duplicate_enabled == expected_bool
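
A short sketch of the environment-driven configuration exercised above; the override values are illustrative, and `"true"` is assumed to parse as a truthy boolean as in the tests.

```python
import os

from quality.hooks.code_quality_guard import QualityConfig

# Defaults apply when no QUALITY_* variables are set.
assert QualityConfig().duplicate_threshold == 0.7

overrides = {
    "QUALITY_DUP_THRESHOLD": "0.85",
    "QUALITY_ENFORCEMENT": "warn",
    "QUALITY_DUP_ENABLED": "true",
}
os.environ.update(overrides)
try:
    config = QualityConfig.from_env()
    assert config.duplicate_threshold == 0.85
    assert config.enforcement_mode == "warn"
    assert config.duplicate_enabled is True
finally:
    for key in overrides:
        os.environ.pop(key, None)
```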

View File

@@ -2,18 +2,14 @@
from __future__ import annotations
from hooks.analyzers import (
from quality.hooks.internal_duplicate_detector import (
Duplicate,
DuplicateResults,
detect_internal_duplicates,
)
def _run_detection(
code: str,
*,
threshold: float,
) -> tuple[DuplicateResults, list[Duplicate]]:
def _run_detection(code: str, *, threshold: float) -> tuple[DuplicateResults, list[Duplicate]]:
"""Run duplicate detection and return typed results."""
result = detect_internal_duplicates(code, threshold=threshold)

View File

@@ -4,16 +4,14 @@ from __future__ import annotations
import json
from types import SimpleNamespace
from typing import TYPE_CHECKING, cast
from typing import TYPE_CHECKING
from unittest.mock import patch
import pytest
from hooks.guards import posttooluse_hook, pretooluse_hook
from tests.hooks.test_config_compatibility import QualityConfig
from quality.hooks.code_quality_guard import QualityConfig, posttooluse_hook, pretooluse_hook
if TYPE_CHECKING:
from collections.abc import Iterator, Mapping
from collections.abc import Iterator
from pathlib import Path
@@ -109,7 +107,7 @@ def test_pretooluse_handles_platform_metadata(
file_path.parent.mkdir(parents=True, exist_ok=True)
content = "def sample() -> None:\n return None\n"
with patch("code_quality_guard.analyze_code_quality", return_value={}):
with patch("quality.hooks.code_quality_guard.analyze_code_quality", return_value={}):
response = pretooluse_hook(
_pre_request(
file_path,
@@ -123,7 +121,7 @@ def test_pretooluse_handles_platform_metadata(
config,
)
assert response.get("permissionDecision") == "allow"
assert response["permissionDecision"] == "allow"
def test_state_tracking_isolation_between_containers(
@@ -149,7 +147,7 @@ def beta():
file_b = container_b / "service.py"
# PreToolUse runs register the pre-state for each container/project pair.
with patch("code_quality_guard.analyze_code_quality", return_value={}):
with patch("quality.hooks.code_quality_guard.analyze_code_quality", return_value={}):
response_a_pre = pretooluse_hook(
_pre_request(
file_a,
@@ -171,8 +169,8 @@ def beta():
config,
)
assert response_a_pre.get("permissionDecision") == "allow"
assert response_b_pre.get("permissionDecision") == "allow"
assert response_a_pre["permissionDecision"] == "allow"
assert response_b_pre["permissionDecision"] == "allow"
# The first container writes fewer functions which should trigger a warning.
file_a.write_text(
@@ -239,7 +237,7 @@ def beta():
path_one.parent.mkdir(parents=True, exist_ok=True)
path_two.parent.mkdir(parents=True, exist_ok=True)
with patch("code_quality_guard.analyze_code_quality", return_value={}):
with patch("quality.hooks.code_quality_guard.analyze_code_quality", return_value={}):
pretooluse_hook(
_pre_request(
path_one,
@@ -325,7 +323,7 @@ def test_cross_file_duplicate_project_root_detection(
captured["cmd"] = cmd
return SimpleNamespace(returncode=0, stdout=json.dumps({"duplicates": []}))
with patch("code_quality_guard.subprocess.run", side_effect=fake_run):
with patch("quality.hooks.code_quality_guard.subprocess.run", side_effect=fake_run):
response = posttooluse_hook(
{
"tool_name": "Write",
@@ -354,7 +352,7 @@ def test_main_handles_permission_decisions_for_multiple_users(
monkeypatch: pytest.MonkeyPatch,
) -> None:
"""`main` should surface deny/ask decisions for different user contexts."""
from hooks.cli import main
from quality.hooks.code_quality_guard import main
hook_inputs = [
{
@@ -388,6 +386,8 @@ def test_main_handles_permission_decisions_for_multiple_users(
{
"hookSpecificOutput": {
"hookEventName": "PreToolUse",
"permissionDecision": "deny",
"permissionDecisionReason": "Tenant user-deny lacks write access",
},
"permissionDecision": "deny",
"reason": "Tenant user-deny lacks write access",
@@ -411,15 +411,15 @@ def test_main_handles_permission_decisions_for_multiple_users(
],
)
input_iter = cast("Iterator[Mapping[str, object]]", iter(hook_inputs))
input_iter: Iterator[dict[str, object]] = iter(hook_inputs)
def fake_json_load(_stream: object) -> Mapping[str, object]:
def fake_json_load(_stream: object) -> dict[str, object]:
return next(input_iter)
def fake_pretooluse(
_hook_data: dict[str, object],
_config: QualityConfig,
) -> Mapping[str, object]:
) -> dict[str, object]:
return next(responses)
exit_calls: list[tuple[str, int]] = []
@@ -434,8 +434,8 @@ def test_main_handles_permission_decisions_for_multiple_users(
printed.append(message)
monkeypatch.setattr("json.load", fake_json_load)
monkeypatch.setattr("code_quality_guard.pretooluse_hook", fake_pretooluse)
monkeypatch.setattr("code_quality_guard._exit_with_reason", fake_exit)
monkeypatch.setattr("quality.hooks.code_quality_guard.pretooluse_hook", fake_pretooluse)
monkeypatch.setattr("quality.hooks.code_quality_guard._exit_with_reason", fake_exit)
monkeypatch.setattr("builtins.print", fake_print)
# First tenant should produce a deny decision with exit code 2.
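
The expected payload above carries the decision both at the top level and under `hookSpecificOutput`. A minimal sketch, reusing the suite's pattern of mocking `analyze_code_quality`, of how a strict-mode deny surfaces from `pretooluse_hook`; the file path and complexity figures are illustrative:

```python
from unittest.mock import patch

from quality.hooks.code_quality_guard import QualityConfig, pretooluse_hook

hook_data = {
    "tool_name": "Write",
    "tool_input": {
        "file_path": "src/service.py",
        "content": "def run() -> None:\n    return None\n",
    },
}

high_complexity = {
    "complexity": {
        "summary": {"average_cyclomatic_complexity": 25},
        "distribution": {"High": 1},
    },
}

# Strict enforcement turns a failed check into an outright deny.
with patch(
    "quality.hooks.code_quality_guard.analyze_code_quality",
    return_value=high_complexity,
):
    response = pretooluse_hook(hook_data, QualityConfig(enforcement_mode="strict"))

assert response["permissionDecision"] == "deny"
assert "quality check failed" in str(response["reason"]).lower()
```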

View File

@@ -4,21 +4,17 @@ import os
import subprocess
from unittest.mock import MagicMock, patch
from hooks.guards import (
from quality.hooks.code_quality_guard import (
QualityConfig,
analyze_code_quality,
detect_internal_duplicates,
posttooluse_hook,
pretooluse_hook,
)
from hooks.models import HookResponse
def _perm(response: HookResponse) -> str | None:
permission_decision = response.get("hookSpecificOutput", {}).get(
"permissionDecision",
)
return permission_decision if isinstance(permission_decision, str) else None
def _perm(response: dict) -> str | None:
return response.get("hookSpecificOutput", {}).get("permissionDecision")
class TestEdgeCases:
@@ -39,7 +35,7 @@ class TestEdgeCases:
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
assert _perm(result) == "allow"
@@ -143,7 +139,7 @@ def greet_世界():
# Simulate rapid consecutive calls
results = []
for _ in range(5):
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
results.append(result)
@@ -267,7 +263,7 @@ def func_c():
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {
"complexity": {
"summary": {"average_cyclomatic_complexity": 1},
@@ -283,7 +279,7 @@ def func_c():
enforcement_mode="permissive", # Use permissive mode for high thresholds
)
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {
"complexity": {
"summary": {"average_cyclomatic_complexity": 50},
@@ -415,7 +411,7 @@ def infinite_recursion():
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
assert _perm(result) == "allow"
@@ -462,9 +458,7 @@ def process_data(data):
duplicates = detect_internal_duplicates(code_with_duplicates, threshold=0.8)
# Should detect duplicates without memory issues
assert "duplicates" in duplicates
duplicates_dict = duplicates.get("duplicates", {})
assert isinstance(duplicates_dict, list)
assert len(duplicates_dict) > 0
assert len(duplicates["duplicates"]) > 0
def test_special_python_constructs(self):
"""Test handling of special Python constructs."""

View File

@@ -5,17 +5,17 @@ import json
import shutil
import sys
import tempfile
from collections.abc import Callable
from datetime import UTC, datetime
from pathlib import Path
from typing import cast
from unittest.mock import MagicMock, patch
import pytest
from hooks.guards import (
from quality.hooks.code_quality_guard import (
AnalysisResults,
QualityConfig,
analyze_code_quality,
check_code_issues,
check_cross_file_duplicates,
check_state_changes,
get_claude_quality_command,
@@ -23,47 +23,6 @@ from hooks.guards import (
store_pre_state,
verify_naming_conventions,
)
from tests.hooks.test_config_compatibility import QualityConfig
def _process_analysis_results(results: AnalysisResults) -> tuple[bool, list[str]]:
"""Process analysis results into issue list."""
issues = []
# Check internal duplicates
if "internal_duplicates" in results:
internal_dup = results["internal_duplicates"]
duplicates = internal_dup.get("duplicates", [])
if isinstance(duplicates, list):
for dup in duplicates:
if isinstance(dup, dict):
similarity = dup.get("similarity", 0)
if isinstance(similarity, (int, float)) and similarity > 0.8:
description = dup.get("description", "Unknown")
issues.append(
f"Duplicate Code Detected: {description} {similarity:.0%}",
)
# Check complexity
if "complexity" in results:
complexity = results["complexity"]
summary = complexity.get("summary", {})
if isinstance(summary, dict):
avg_complexity = summary.get("average_cyclomatic_complexity", 0)
if isinstance(avg_complexity, (int, float)) and avg_complexity > 10:
issues.append("High complexity detected")
# Check modernization
if "modernization" in results:
modern = results["modernization"]
files = modern.get("files", {})
if isinstance(files, dict):
for file_data in files.values():
if isinstance(file_data, list) and len(file_data) > 0:
issues.append("Missing type hints")
has_issues = len(issues) > 0
return has_issues, issues
@pytest.fixture
@@ -75,7 +34,6 @@ def set_platform(monkeypatch: pytest.MonkeyPatch):
return _setter
class TestHelperFunctions:
"""Test helper functions in the hook."""
@@ -123,7 +81,7 @@ class TestHelperFunctions:
self,
tmp_path: Path,
monkeypatch: pytest.MonkeyPatch,
set_platform: Callable[[str], None],
set_platform,
platform_name: str,
scripts_dir: str,
executable_name: str,
@@ -135,14 +93,14 @@ class TestHelperFunctions:
executable = self._touch(tmp_path / ".venv" / scripts_dir / executable_name)
cmd = get_claude_quality_command()
cmd = get_claude_quality_command(repo_root=tmp_path)
assert cmd == [str(executable), "-m", "quality.cli.main"]
def test_get_claude_quality_command_python_and_python3(
self,
tmp_path: Path,
monkeypatch: pytest.MonkeyPatch,
set_platform: Callable[[str], None],
set_platform,
) -> None:
"""Prefer python when both python and python3 executables exist."""
@@ -152,7 +110,7 @@ class TestHelperFunctions:
python_path = self._touch(tmp_path / ".venv" / "bin" / "python")
python3_path = self._touch(tmp_path / ".venv" / "bin" / "python3")
cmd = get_claude_quality_command()
cmd = get_claude_quality_command(repo_root=tmp_path)
assert cmd == [str(python_path), "-m", "quality.cli.main"]
assert python3_path.exists() # Sanity check that both executables were present
@@ -160,7 +118,7 @@ class TestHelperFunctions:
self,
tmp_path: Path,
monkeypatch: pytest.MonkeyPatch,
set_platform: Callable[[str], None],
set_platform,
) -> None:
"""Fallback to claude-quality script when python executables are absent."""
@@ -169,14 +127,14 @@ class TestHelperFunctions:
cli_path = self._touch(tmp_path / ".venv" / "bin" / "claude-quality")
cmd = get_claude_quality_command()
cmd = get_claude_quality_command(repo_root=tmp_path)
assert cmd == [str(cli_path)]
def test_get_claude_quality_command_windows_cli_without_extension(
self,
tmp_path: Path,
monkeypatch: pytest.MonkeyPatch,
set_platform: Callable[[str], None],
set_platform,
) -> None:
"""Handle Windows when the claude-quality script lacks an .exe suffix."""
@@ -185,13 +143,14 @@ class TestHelperFunctions:
cli_path = self._touch(tmp_path / ".venv" / "Scripts" / "claude-quality")
cmd = get_claude_quality_command()
cmd = get_claude_quality_command(repo_root=tmp_path)
assert cmd == [str(cli_path)]
def test_get_claude_quality_command_system_python_fallback(
self,
tmp_path: Path,
monkeypatch: pytest.MonkeyPatch,
set_platform: Callable[[str], None],
set_platform,
) -> None:
"""Fallback to python3 on POSIX and python on Windows when venv tools absent."""
@@ -202,7 +161,7 @@ class TestHelperFunctions:
monkeypatch.setattr(shutil, "which", fake_which)
cmd = get_claude_quality_command()
cmd = get_claude_quality_command(repo_root=tmp_path)
assert cmd == ["python3", "-m", "quality.cli.main"]
set_platform("win32")
@@ -212,13 +171,14 @@ class TestHelperFunctions:
monkeypatch.setattr(shutil, "which", windows_which)
cmd = get_claude_quality_command()
cmd = get_claude_quality_command(repo_root=tmp_path)
assert cmd == ["python", "-m", "quality.cli.main"]
def test_get_claude_quality_command_cli_on_path(
self,
tmp_path: Path,
monkeypatch: pytest.MonkeyPatch,
set_platform: Callable[[str], None],
set_platform,
) -> None:
"""Use claude-quality from PATH when no virtualenv interpreters exist."""
@@ -229,13 +189,14 @@ class TestHelperFunctions:
monkeypatch.setattr(shutil, "which", fake_which)
cmd = get_claude_quality_command()
cmd = get_claude_quality_command(repo_root=tmp_path)
assert cmd == ["claude-quality"]
def test_get_claude_quality_command_unix_venv_missing(
def test_get_claude_quality_command_raises_when_missing(
self,
tmp_path: Path,
monkeypatch: pytest.MonkeyPatch,
set_platform: Callable[[str], None],
set_platform,
) -> None:
"""Raise a clear error when no interpreter or CLI can be located."""
@@ -243,7 +204,7 @@ class TestHelperFunctions:
monkeypatch.setattr(shutil, "which", lambda _name: None)
with pytest.raises(RuntimeError) as excinfo:
get_claude_quality_command()
get_claude_quality_command(repo_root=tmp_path)
assert "was not found on PATH" in str(excinfo.value)
@@ -275,7 +236,7 @@ class TestHelperFunctions:
def test_check_state_changes_no_pre_state(self):
"""Test state changes when no pre-state exists."""
test_path = f"{tempfile.gettempdir()}/test.py"
issues = check_state_changes(test_path, "test content")
issues = check_state_changes(test_path)
assert issues == []
def test_check_state_changes_with_degradation(self):
@@ -298,7 +259,7 @@ class TestHelperFunctions:
# First call reads pre-state, second reads current file
mock_read.side_effect = [json.dumps(pre_state), current_content]
issues = check_state_changes(test_path, current_content)
issues = check_state_changes(test_path)
# Should detect function reduction
assert len(issues) > 0
@@ -322,7 +283,7 @@ class TestHelperFunctions:
with patch("pathlib.Path.read_text") as mock_read:
mock_read.side_effect = [json.dumps(pre_state), current_content]
issues = check_state_changes(test_path, current_content)
issues = check_state_changes(test_path)
assert len(issues) > 0
assert any("size increased significantly" in issue for issue in issues)
@@ -368,20 +329,22 @@ class TestHelperFunctions:
issues = check_cross_file_duplicates(test_path, config)
assert issues == []
def test_verify_naming_conventions_violations(self, non_pep8_code: str):
def test_verify_naming_conventions_violations(self, non_pep8_code):
"""Test naming convention verification with violations."""
config = QualityConfig()
issues = verify_naming_conventions(non_pep8_code, config)
with patch("pathlib.Path.read_text", return_value=non_pep8_code):
test_path = f"{tempfile.gettempdir()}/test.py"
issues = verify_naming_conventions(test_path)
assert len(issues) == 2
assert any("Non-PEP8 function names" in issue for issue in issues)
assert any("Non-PEP8 class names" in issue for issue in issues)
assert len(issues) == 2
assert any("Non-PEP8 function names" in issue for issue in issues)
assert any("Non-PEP8 class names" in issue for issue in issues)
def test_verify_naming_conventions_clean(self, clean_code: str):
def test_verify_naming_conventions_clean(self, clean_code):
"""Test naming convention verification with clean code."""
config = QualityConfig()
issues = verify_naming_conventions(clean_code, config)
assert issues == []
with patch("pathlib.Path.read_text", return_value=clean_code):
test_path = f"{tempfile.gettempdir()}/test.py"
issues = verify_naming_conventions(test_path)
assert issues == []
def test_analyze_code_quality_all_checks(self):
"""Test analyze_code_quality with all checks enabled."""
@@ -393,7 +356,7 @@ class TestHelperFunctions:
test_content = "def test(): pass"
with patch("code_quality_guard.detect_internal_duplicates") as mock_dup:
with patch("quality.hooks.code_quality_guard.detect_internal_duplicates") as mock_dup:
with patch("subprocess.run") as mock_run:
# Setup mock returns
mock_dup.return_value = {"duplicates": []}
@@ -420,7 +383,7 @@ class TestHelperFunctions:
pyrefly_enabled=False,
)
with patch("code_quality_guard.detect_internal_duplicates") as mock_dup:
with patch("quality.hooks.code_quality_guard.detect_internal_duplicates") as mock_dup:
with patch("subprocess.run") as mock_run:
results = analyze_code_quality("def test(): pass", "test.py", config)
@@ -431,8 +394,9 @@ class TestHelperFunctions:
def test_check_code_issues_internal_duplicates(self):
"""Test issue detection for internal duplicates."""
config = QualityConfig()
results = cast(
"AnalysisResults",
AnalysisResults,
{
"internal_duplicates": {
"duplicates": [
@@ -449,7 +413,7 @@ class TestHelperFunctions:
},
)
has_issues, issues = _process_analysis_results(results)
has_issues, issues = check_code_issues(results, config)
assert has_issues is True
assert len(issues) > 0
@@ -458,8 +422,9 @@ class TestHelperFunctions:
def test_check_code_issues_complexity(self):
"""Test issue detection for complexity."""
config = QualityConfig(complexity_threshold=10)
results = cast(
"AnalysisResults",
AnalysisResults,
{
"complexity": {
"summary": {"average_cyclomatic_complexity": 15},
@@ -468,7 +433,7 @@ class TestHelperFunctions:
},
)
has_issues, issues = _process_analysis_results(results)
has_issues, issues = check_code_issues(results, config)
assert has_issues is True
assert any("High Code Complexity Detected" in issue for issue in issues)
@@ -476,8 +441,9 @@ class TestHelperFunctions:
def test_check_code_issues_modernization(self):
"""Test issue detection for modernization."""
config = QualityConfig(require_type_hints=True)
results = cast(
"AnalysisResults",
AnalysisResults,
{
"modernization": {
"files": {
@@ -491,18 +457,20 @@ class TestHelperFunctions:
},
)
has_issues, issues = _process_analysis_results(results)
has_issues, issues = check_code_issues(results, config)
assert has_issues is True
assert any("Modernization needed" in issue for issue in issues)
def test_check_code_issues_type_hints_threshold(self):
"""Test type hint threshold detection."""
config = QualityConfig(require_type_hints=True)
# Create 15 type hint issues
type_issues = [{"issue_type": "missing_return_type"} for _ in range(15)]
results = cast(
"AnalysisResults",
AnalysisResults,
{
"modernization": {
"files": {"test.py": type_issues},
@@ -510,7 +478,7 @@ class TestHelperFunctions:
},
)
has_issues, issues = _process_analysis_results(results)
has_issues, issues = check_code_issues(results, config)
assert has_issues is True
assert any("Many missing type hints" in issue for issue in issues)
@@ -518,9 +486,10 @@ class TestHelperFunctions:
def test_check_code_issues_no_issues(self):
"""Test when no issues are found."""
results = cast("AnalysisResults", {})
config = QualityConfig()
results = cast(AnalysisResults, {})
has_issues, issues = _process_analysis_results(results)
has_issues, issues = check_code_issues(results, config)
assert has_issues is False
assert issues == []
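
A compact sketch of the `check_code_issues` contract exercised above, with an illustrative complexity payload; only the `(has_issues, issues)` return shape and the "High Code Complexity Detected" message are taken from the tests.

```python
from typing import cast

from quality.hooks.code_quality_guard import (
    AnalysisResults,
    QualityConfig,
    check_code_issues,
)

results = cast(
    AnalysisResults,
    {
        "complexity": {
            "summary": {"average_cyclomatic_complexity": 15},
            "distribution": {"Very High": 2},
        },
    },
)

has_issues, issues = check_code_issues(results, QualityConfig(complexity_threshold=10))
assert has_issues is True
assert any("High Code Complexity Detected" in issue for issue in issues)
```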

View File

@@ -14,7 +14,7 @@ class TestHookIntegration:
def test_main_entry_pretooluse(self):
"""Ensure main dispatches to PreToolUse."""
from hooks.cli import main
from quality.hooks.code_quality_guard import main
hook_input = {
"tool_name": "Write",
@@ -28,24 +28,21 @@ class TestHookIntegration:
mock_stdin.read.return_value = json.dumps(hook_input)
mock_stdin.__iter__.return_value = [json.dumps(hook_input)]
with (
patch("json.load", return_value=hook_input),
patch(
"code_quality_guard.pretooluse_hook",
return_value={
"hookSpecificOutput": {
"hookEventName": "PreToolUse",
"permissionDecision": "allow",
},
with patch("json.load", return_value=hook_input), patch(
"quality.hooks.code_quality_guard.pretooluse_hook",
return_value={
"hookSpecificOutput": {
"hookEventName": "PreToolUse",
"permissionDecision": "allow",
},
) as mock_pre,
):
},
) as mock_pre:
main()
mock_pre.assert_called_once()
def test_main_entry_posttooluse(self):
"""Ensure main dispatches to PostToolUse."""
from hooks.cli import main
from quality.hooks.code_quality_guard import main
hook_input = {
"tool_name": "Write",
@@ -59,32 +56,25 @@ class TestHookIntegration:
mock_stdin.read.return_value = json.dumps(hook_input)
mock_stdin.__iter__.return_value = [json.dumps(hook_input)]
with (
patch("json.load", return_value=hook_input),
patch(
"code_quality_guard.posttooluse_hook",
return_value={
"hookSpecificOutput": {
"hookEventName": "PostToolUse",
},
"decision": "approve",
with patch("json.load", return_value=hook_input), patch(
"quality.hooks.code_quality_guard.posttooluse_hook",
return_value={
"hookSpecificOutput": {
"hookEventName": "PostToolUse",
},
) as mock_post,
):
"decision": "approve",
},
) as mock_post:
main()
mock_post.assert_called_once()
def test_main_invalid_json(self):
"""Invalid JSON falls back to allow."""
from hooks.cli import main
from quality.hooks.code_quality_guard import main
with (
patch("sys.stdin"),
patch("builtins.print") as mock_print,
patch(
"sys.stdout.write",
) as mock_write,
):
with patch("sys.stdin"), patch("builtins.print") as mock_print, patch(
"sys.stdout.write",
) as mock_write:
with patch(
"json.load",
side_effect=json.JSONDecodeError("test", "test", 0),
@@ -99,9 +89,9 @@ class TestHookIntegration:
response = json.loads(printed)
assert response["hookSpecificOutput"]["permissionDecision"] == "allow"
def test_full_flow_clean_code(self, clean_code: str):
def test_full_flow_clean_code(self, clean_code):
"""Clean code should pass both hook stages."""
from hooks.cli import main
from quality.hooks.code_quality_guard import main
pre_input = {
"tool_name": "Write",
@@ -112,12 +102,9 @@ class TestHookIntegration:
}
with patch("sys.stdin"), patch("builtins.print") as mock_print:
with (
patch("json.load", return_value=pre_input),
patch(
"code_quality_guard.analyze_code_quality",
return_value={},
),
with patch("json.load", return_value=pre_input), patch(
"quality.hooks.code_quality_guard.analyze_code_quality",
return_value={},
):
main()
@@ -150,7 +137,7 @@ class TestHookIntegration:
def test_environment_configuration_flow(self):
"""Environment settings change enforcement."""
from hooks.cli import main
from quality.hooks.code_quality_guard import main
env_overrides = {
"QUALITY_ENFORCEMENT": "strict",
@@ -185,31 +172,31 @@ class TestHookIntegration:
try:
with patch("sys.stdin"), patch("builtins.print") as mock_print:
with (
patch("json.load", return_value=hook_input),
patch(
"code_quality_guard.analyze_code_quality",
return_value={
"complexity": {
"summary": {"average_cyclomatic_complexity": 8},
"distribution": {"High": 1},
},
with patch("json.load", return_value=hook_input), patch(
"quality.hooks.code_quality_guard.analyze_code_quality",
return_value={
"complexity": {
"summary": {"average_cyclomatic_complexity": 8},
"distribution": {"High": 1},
},
),
},
):
with pytest.raises(SystemExit) as exc_info:
main()
assert exc_info.value.code == 2
response = json.loads(mock_print.call_args[0][0])
assert response["hookSpecificOutput"]["permissionDecision"] == "deny"
assert (
response["hookSpecificOutput"]["permissionDecision"]
== "deny"
)
finally:
for key in env_overrides:
os.environ.pop(key, None)
def test_skip_patterns_integration(self):
"""Skip patterns should bypass checks."""
from hooks.cli import main
from quality.hooks.code_quality_guard import main
hook_input = {
"tool_name": "Write",
@@ -226,9 +213,9 @@ class TestHookIntegration:
response = json.loads(mock_print.call_args[0][0])
assert response["hookSpecificOutput"]["permissionDecision"] == "allow"
def test_state_tracking_flow(self, temp_python_file: Path):
def test_state_tracking_flow(self, temp_python_file):
"""State tracking should flag regressions."""
from hooks.cli import main
from quality.hooks.code_quality_guard import main
os.environ["QUALITY_STATE_TRACKING"] = "true"
try:
@@ -237,18 +224,17 @@ class TestHookIntegration:
"tool_input": {
"file_path": str(temp_python_file),
"content": (
"def func1(): pass\ndef func2(): pass\ndef func3(): pass"
"def func1(): pass\n"
"def func2(): pass\n"
"def func3(): pass"
),
},
}
with patch("sys.stdin"), patch("builtins.print") as mock_print:
with (
patch("json.load", return_value=pre_input),
patch(
"code_quality_guard.analyze_code_quality",
return_value={},
),
with patch("json.load", return_value=pre_input), patch(
"quality.hooks.code_quality_guard.analyze_code_quality",
return_value={},
):
main()
@@ -274,7 +260,7 @@ class TestHookIntegration:
def test_cross_tool_handling(self):
"""Supported tools should respond with allow."""
from hooks.cli import main
from quality.hooks.code_quality_guard import main
tools = ["Write", "Edit", "MultiEdit", "Read", "Bash", "Task"]
@@ -291,21 +277,18 @@ class TestHookIntegration:
hook_input = {"tool_name": tool, "tool_input": {}}
with patch("sys.stdin"), patch("builtins.print") as mock_print:
with (
patch("json.load", return_value=hook_input),
patch(
"code_quality_guard.analyze_code_quality",
return_value={},
),
with patch("json.load", return_value=hook_input), patch(
"quality.hooks.code_quality_guard.analyze_code_quality",
return_value={},
):
main()
response = json.loads(mock_print.call_args[0][0])
assert response["hookSpecificOutput"]["permissionDecision"] == "allow"
def test_enforcement_mode_progression(self, complex_code: str):
def test_enforcement_mode_progression(self, complex_code):
"""Strict/warn/permissive modes map to deny/ask/allow."""
from hooks.cli import main
from quality.hooks.code_quality_guard import main
hook_input = {
"tool_name": "Write",
@@ -326,17 +309,14 @@ class TestHookIntegration:
os.environ["QUALITY_COMPLEXITY_THRESHOLD"] = "10"
try:
with patch("sys.stdin"), patch("builtins.print") as mock_print:
with (
patch("json.load", return_value=hook_input),
patch(
"code_quality_guard.analyze_code_quality",
return_value={
"complexity": {
"summary": {"average_cyclomatic_complexity": 25},
"distribution": {"High": 1},
},
with patch("json.load", return_value=hook_input), patch(
"quality.hooks.code_quality_guard.analyze_code_quality",
return_value={
"complexity": {
"summary": {"average_cyclomatic_complexity": 25},
"distribution": {"High": 1},
},
),
},
):
if expected in {"deny", "ask"}:
with pytest.raises(SystemExit) as exc_info:
@@ -347,7 +327,8 @@ class TestHookIntegration:
response = json.loads(mock_print.call_args[0][0])
assert (
response["hookSpecificOutput"]["permissionDecision"] == expected
response["hookSpecificOutput"]["permissionDecision"]
== expected
)
finally:
os.environ.pop("QUALITY_ENFORCEMENT", None)

View File

@@ -3,7 +3,7 @@
import tempfile
from unittest.mock import patch
from hooks.guards import QualityConfig, posttooluse_hook
from quality.hooks.code_quality_guard import QualityConfig, posttooluse_hook
class TestPostToolUseHook:
@@ -48,7 +48,7 @@ class TestPostToolUseHook:
result = posttooluse_hook(hook_data, config)
assert "decision" not in result
def test_tool_response_success_path(self, clean_code: str):
def test_tool_response_success_path(self, clean_code):
"""Use tool_response payload to drive successful post hook handling."""
config = QualityConfig(show_success=True, verify_naming=False)
hook_data = {
@@ -114,7 +114,7 @@ class TestPostToolUseHook:
with patch("pathlib.Path.exists", return_value=True):
with patch("pathlib.Path.read_text", return_value="def test(): pass"):
with patch("code_quality_guard.check_state_changes") as mock_check:
with patch("quality.hooks.code_quality_guard.check_state_changes") as mock_check:
mock_check.return_value = [
"⚠️ Reduced functions: 5 → 2",
"⚠️ File size increased significantly: 100 → 250 lines",
@@ -140,7 +140,7 @@ class TestPostToolUseHook:
with patch("pathlib.Path.exists", return_value=True):
with patch("pathlib.Path.read_text", return_value="def test(): pass"):
with patch(
"code_quality_guard.check_cross_file_duplicates",
"quality.hooks.code_quality_guard.check_cross_file_duplicates",
) as mock_check:
mock_check.return_value = ["⚠️ Cross-file duplication detected"]
@@ -151,7 +151,7 @@ class TestPostToolUseHook:
assert isinstance(reason, str)
assert "cross-file duplication" in reason.lower()
def test_naming_convention_check(self, non_pep8_code: str):
def test_naming_convention_violations(self, non_pep8_code):
"""Test naming convention verification."""
config = QualityConfig(verify_naming=True)
hook_data = {
@@ -170,7 +170,7 @@ class TestPostToolUseHook:
assert "non-pep8 function names" in reason_lower
assert "non-pep8 class names" in reason_lower
def test_clean_code_verification(self, clean_code: str):
def test_show_success_message(self, clean_code):
"""Test success message when enabled."""
config = QualityConfig(show_success=True, verify_naming=False)
hook_data = {
@@ -187,7 +187,7 @@ class TestPostToolUseHook:
assert isinstance(system_msg, str)
assert "passed post-write verification" in system_msg.lower()
def test_no_message_when_success_disabled(self, clean_code: str):
def test_no_message_when_success_disabled(self, clean_code):
"""Test no message when show_success is disabled."""
config = QualityConfig(show_success=False, verify_naming=False)
hook_data = {
@@ -216,12 +216,12 @@ class TestPostToolUseHook:
with patch("pathlib.Path.exists", return_value=True):
with patch("pathlib.Path.read_text", return_value="def test(): pass"):
with patch("code_quality_guard.check_state_changes") as mock_state:
with patch("quality.hooks.code_quality_guard.check_state_changes") as mock_state:
with patch(
"code_quality_guard.check_cross_file_duplicates",
"quality.hooks.code_quality_guard.check_cross_file_duplicates",
) as mock_cross:
with patch(
"code_quality_guard.verify_naming_conventions",
"quality.hooks.code_quality_guard.verify_naming_conventions",
) as mock_naming:
mock_state.return_value = ["⚠️ Issue 1"]
mock_cross.return_value = ["⚠️ Issue 2"]
@@ -285,12 +285,12 @@ class TestPostToolUseHook:
with patch("pathlib.Path.exists", return_value=True):
with patch("pathlib.Path.read_text", return_value="def test(): pass"):
# Should not call any check functions
with patch("code_quality_guard.check_state_changes") as mock_state:
with patch("quality.hooks.code_quality_guard.check_state_changes") as mock_state:
with patch(
"code_quality_guard.check_cross_file_duplicates",
"quality.hooks.code_quality_guard.check_cross_file_duplicates",
) as mock_cross:
with patch(
"code_quality_guard.verify_naming_conventions",
"quality.hooks.code_quality_guard.verify_naming_conventions",
) as mock_naming:
result = posttooluse_hook(hook_data, config)
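
A sketch of the happy path through `posttooluse_hook`, following the same mocking style as the tests above; the follow-up checks are stubbed to return no issues, which is an assumption about the analysed file rather than about the hook itself.

```python
from unittest.mock import patch

from quality.hooks.code_quality_guard import QualityConfig, posttooluse_hook

hook_data = {
    "tool_name": "Write",
    "tool_input": {
        "file_path": "src/example.py",
        "content": "def ok() -> None:\n    return None\n",
    },
}

config = QualityConfig(show_success=True, verify_naming=False)

# Stub filesystem access and the follow-up checks so the sketch stays offline.
with patch("pathlib.Path.exists", return_value=True), patch(
    "pathlib.Path.read_text", return_value=hook_data["tool_input"]["content"]
), patch(
    "quality.hooks.code_quality_guard.check_state_changes", return_value=[]
), patch(
    "quality.hooks.code_quality_guard.check_cross_file_duplicates", return_value=[]
):
    result = posttooluse_hook(hook_data, config)

assert "passed post-write verification" in str(result.get("systemMessage", "")).lower()
```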

View File

@@ -1,30 +1,21 @@
"""Test PreToolUse hook functionality."""
from collections.abc import Mapping
from unittest.mock import MagicMock, patch
from unittest.mock import patch
from hooks.guards import QualityConfig, pretooluse_hook
from hooks.models import HookResponse, HookResponseRequired
from quality.hooks.code_quality_guard import QualityConfig, pretooluse_hook
TEST_QUALITY_CONDITIONAL = (
"Test Quality: no-conditionals-in-tests - Conditional found in test"
)
def get_reason_str(result: HookResponse | HookResponseRequired) -> str:
def get_reason_str(result: dict[str, object]) -> str:
"""Extract and assert reason field as string."""
reason = result.get("reason", "")
reason = result["reason"]
assert isinstance(reason, str), f"Expected str, got {type(reason)}"
return reason
def get_permission_decision(result: HookResponse | HookResponseRequired) -> str:
"""Extract and assert permission decision field as string."""
decision = result.get("permissionDecision", "allow")
assert isinstance(decision, str), f"Expected str, got {type(decision)}"
return decision
class TestPreToolUseHook:
"""Test PreToolUse hook behavior."""
@@ -37,7 +28,7 @@ class TestPreToolUseHook:
}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
def test_tool_input_must_be_mapping(self):
"""Allow requests that provide a non-mapping tool_input payload."""
@@ -47,10 +38,10 @@ class TestPreToolUseHook:
"tool_input": ["unexpected", "structure"],
}
with patch("hooks.guards.utils.perform_quality_check") as mock_check:
with patch("quality.hooks.code_quality_guard._perform_quality_check") as mock_check:
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
mock_check.assert_not_called()
def test_non_python_file_allowed(self):
@@ -65,7 +56,7 @@ class TestPreToolUseHook:
}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
def test_test_file_skipped(self):
"""Test that test files are skipped when configured."""
@@ -79,9 +70,9 @@ class TestPreToolUseHook:
}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
def test_clean_code_allowed(self, clean_code: str):
def test_clean_code_allowed(self, clean_code):
"""Test that clean code is allowed."""
config = QualityConfig()
hook_data = {
@@ -92,12 +83,12 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
def test_complex_code_denied_strict(self, complex_code: str):
def test_complex_code_denied_strict(self, complex_code):
"""Test that complex code is denied in strict mode."""
config = QualityConfig(enforcement_mode="strict")
hook_data = {
@@ -108,7 +99,7 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {
"complexity": {
"summary": {"average_cyclomatic_complexity": 25},
@@ -117,12 +108,12 @@ class TestPreToolUseHook:
}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "deny"
reason = get_reason_str(result)
assert result["permissionDecision"] == "deny"
reason = result["reason"]
assert isinstance(reason, str)
assert "quality check failed" in reason.lower()
def test_complex_code_ask_warn_mode(self, complex_code: str):
def test_complex_code_ask_warn_mode(self, complex_code):
"""Test that complex code triggers ask in warn mode."""
config = QualityConfig(enforcement_mode="warn")
hook_data = {
@@ -133,7 +124,7 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {
"complexity": {
"summary": {"average_cyclomatic_complexity": 25},
@@ -142,9 +133,9 @@ class TestPreToolUseHook:
}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "ask"
assert result["permissionDecision"] == "ask"
def test_complex_code_allowed_permissive(self, complex_code: str):
def test_complex_code_allowed_permissive(self, complex_code):
"""Test that complex code is allowed with warning in permissive mode."""
config = QualityConfig(enforcement_mode="permissive")
hook_data = {
@@ -155,7 +146,7 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {
"complexity": {
"summary": {"average_cyclomatic_complexity": 25},
@@ -164,11 +155,11 @@ class TestPreToolUseHook:
}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
reason = str(result.get("reason", ""))
assert "warning" in reason.lower()
def test_duplicate_code_detection(self, duplicate_code: str):
def test_duplicate_code_detection(self, duplicate_code):
"""Test internal duplicate detection."""
config = QualityConfig(duplicate_enabled=True)
hook_data = {
@@ -179,7 +170,7 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.detect_internal_duplicates") as mock_dup:
with patch("quality.hooks.code_quality_guard.detect_internal_duplicates") as mock_dup:
mock_dup.return_value = {
"duplicates": [
{
@@ -193,13 +184,13 @@ class TestPreToolUseHook:
],
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {
"internal_duplicates": mock_dup.return_value,
}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
assert "duplicate" in get_reason_str(result).lower()
def test_edit_tool_handling(self):
@@ -214,10 +205,10 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
# Verify new_string was analyzed
call_args = mock_analyze.call_args[0]
@@ -237,10 +228,10 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
# Verify concatenated content was analyzed
call_args = mock_analyze.call_args[0]
@@ -266,12 +257,12 @@ class TestPreToolUseHook:
}
with patch(
"hooks.guards.utils.perform_quality_check",
"quality.hooks.code_quality_guard._perform_quality_check",
return_value=(False, []),
) as mock_check:
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
mock_check.assert_called_once()
analyzed_content = mock_check.call_args[0][1]
assert "def kept()" in analyzed_content
@@ -288,8 +279,8 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.store_pre_state") as mock_store:
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.store_pre_state") as mock_store:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
pretooluse_hook(hook_data, config)
@@ -308,11 +299,11 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.side_effect = Exception("Analysis failed")
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
reason = str(result.get("reason", ""))
assert "error" in reason.lower()
@@ -329,22 +320,14 @@ class TestPreToolUseHook:
},
}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
# Test path pattern match
new_hook_data = {
"tool_name": hook_data["tool_name"],
"tool_input": {
"file_path": "/ignored/file.py",
"content": hook_data["tool_input"]["content"]
if isinstance(hook_data["tool_input"], dict)
else "bad code",
},
}
result = pretooluse_hook(new_hook_data, config)
assert get_permission_decision(result) == "allow"
hook_data["tool_input"]["file_path"] = "/ignored/file.py"
result = pretooluse_hook(hook_data, config)
assert result["permissionDecision"] == "allow"
def test_modernization_issues(self, old_style_code: str):
def test_modernization_issues(self, old_style_code):
"""Test modernization issue detection."""
config = QualityConfig(modernization_enabled=True, require_type_hints=True)
hook_data = {
@@ -355,7 +338,7 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {
"modernization": {
"files": {
@@ -368,7 +351,7 @@ class TestPreToolUseHook:
}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
assert "modernization" in get_reason_str(result).lower()
def test_type_hint_threshold(self):
@@ -383,7 +366,7 @@ class TestPreToolUseHook:
}
# Test with many type hint issues
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {
"modernization": {
"files": {
@@ -396,7 +379,7 @@ class TestPreToolUseHook:
}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
assert "type hints" in get_reason_str(result).lower()
def test_any_usage_denied_on_analysis_failure(self):
@@ -414,12 +397,12 @@ class TestPreToolUseHook:
}
with patch(
"hooks.guards.utils.perform_quality_check",
"quality.hooks.code_quality_guard._perform_quality_check",
side_effect=RuntimeError("boom"),
):
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
assert "typing.any" in get_reason_str(result).lower()
assert "fix these issues" in get_reason_str(result).lower()
@@ -435,11 +418,11 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
assert "any" in get_reason_str(result).lower()
def test_any_usage_detected_in_multiedit(self):
@@ -457,18 +440,19 @@ class TestPreToolUseHook:
{
"old_string": "pass",
"new_string": (
"def handler(arg: Any) -> str:\n return str(arg)\n"
"def handler(arg: Any) -> str:\n"
" return str(arg)\n"
),
},
],
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
assert "any" in get_reason_str(result).lower()
def test_type_ignore_usage_denied_on_analysis_failure(self):
@@ -485,12 +469,12 @@ class TestPreToolUseHook:
}
with patch(
"hooks.guards.utils.perform_quality_check",
"quality.hooks.code_quality_guard._perform_quality_check",
side_effect=RuntimeError("boom"),
):
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
assert "type: ignore" in get_reason_str(result).lower()
assert "fix these issues" in get_reason_str(result).lower()
@@ -506,11 +490,11 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
assert "type: ignore" in get_reason_str(result).lower()
def test_type_ignore_usage_detected_in_multiedit(self):
@@ -537,11 +521,11 @@ class TestPreToolUseHook:
},
}
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
assert "type: ignore" in get_reason_str(result).lower()
@@ -550,7 +534,7 @@ class TestTestQualityChecks:
def test_is_test_file_detection(self):
"""Test test file path detection."""
from hooks.guards import is_test_file
from quality.hooks.code_quality_guard import is_test_file
# Test files in test directories
assert is_test_file("tests/test_example.py") is True
@@ -575,16 +559,16 @@ class TestTestQualityChecks:
},
}
with patch("code_quality_guard.run_test_quality_checks") as mock_test_check:
with patch("quality.hooks.code_quality_guard.run_test_quality_checks") as mock_test_check:
mock_test_check.return_value = [TEST_QUALITY_CONDITIONAL]
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
# Should be denied due to test quality issues
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
assert "test quality" in get_reason_str(result).lower()
mock_test_check.assert_called_once()
@@ -599,16 +583,16 @@ class TestTestQualityChecks:
},
}
with patch("code_quality_guard.run_test_quality_checks") as mock_test_check:
with patch("quality.hooks.code_quality_guard.run_test_quality_checks") as mock_test_check:
mock_test_check.return_value = []
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
# Should be allowed since it's not a test file
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
mock_test_check.assert_not_called()
def test_test_quality_checks_disabled_when_config_disabled(self):
@@ -622,16 +606,16 @@ class TestTestQualityChecks:
},
}
with patch("code_quality_guard.run_test_quality_checks") as mock_test_check:
with patch("quality.hooks.code_quality_guard.run_test_quality_checks") as mock_test_check:
mock_test_check.return_value = ["Test Quality: Issue found"]
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
# Should be allowed since test quality checks are disabled
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
mock_test_check.assert_not_called()
def test_test_quality_checks_with_clean_test_code(self):
@@ -645,16 +629,16 @@ class TestTestQualityChecks:
},
}
with patch("code_quality_guard.run_test_quality_checks") as mock_test_check:
with patch("quality.hooks.code_quality_guard.run_test_quality_checks") as mock_test_check:
mock_test_check.return_value = [] # No issues
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
# Should be allowed since no test quality issues
assert get_permission_decision(result) == "allow"
assert result["permissionDecision"] == "allow"
mock_test_check.assert_called_once()
def test_test_quality_checks_with_edit_tool(self):
@@ -669,16 +653,16 @@ class TestTestQualityChecks:
},
}
with patch("code_quality_guard.run_test_quality_checks") as mock_test_check:
with patch("quality.hooks.code_quality_guard.run_test_quality_checks") as mock_test_check:
mock_test_check.return_value = [TEST_QUALITY_CONDITIONAL]
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
# Should be denied due to test quality issues
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
assert "test quality" in get_reason_str(result).lower()
mock_test_check.assert_called_once()
@@ -692,28 +676,33 @@ class TestTestQualityChecks:
"edits": [
{
"old_string": "a",
"new_string": ("def test_func1():\n assert True"),
"new_string": (
"def test_func1():\n"
" assert True"
),
},
{
"old_string": "b",
"new_string": (
"def test_func2():\n if False:\n pass"
"def test_func2():\n"
" if False:\n"
" pass"
),
},
],
},
}
with patch("code_quality_guard.run_test_quality_checks") as mock_test_check:
with patch("quality.hooks.code_quality_guard.run_test_quality_checks") as mock_test_check:
mock_test_check.return_value = [TEST_QUALITY_CONDITIONAL]
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
# Should be denied due to test quality issues
assert get_permission_decision(result) == "deny"
assert result["permissionDecision"] == "deny"
assert "test quality" in get_reason_str(result).lower()
mock_test_check.assert_called_once()
@@ -733,33 +722,17 @@ class TestTestQualityChecks:
},
}
with patch("code_quality_guard.run_test_quality_checks") as mock_test_check:
with patch("quality.hooks.code_quality_guard.run_test_quality_checks") as mock_test_check:
mock_test_check.return_value = [TEST_QUALITY_CONDITIONAL]
with patch("code_quality_guard.analyze_code_quality") as mock_analyze:
self._extracted_from_test_test_quality_checks_combined_with_other_prechecks_21(
mock_analyze,
hook_data,
config,
mock_test_check,
)
with patch("quality.hooks.code_quality_guard.analyze_code_quality") as mock_analyze:
mock_analyze.return_value = {}
# TODO Rename this here and in
# `test_test_quality_checks_combined_with_other_prechecks`
def _extracted_from_test_test_quality_checks_combined_with_other_prechecks_21(
self,
mock_analyze: MagicMock,
hook_data: Mapping[str, object],
config: QualityConfig,
mock_test_check: MagicMock,
) -> None:
mock_analyze.return_value = {}
result = pretooluse_hook(hook_data, config)
result = pretooluse_hook(hook_data, config)
# Should be denied due to multiple precheck issues
assert get_permission_decision(result) == "deny"
assert "any" in get_reason_str(result).lower()
assert "type: ignore" in get_reason_str(result).lower()
assert "test quality" in get_reason_str(result).lower()
mock_test_check.assert_called_once()
# Should be denied due to multiple precheck issues
assert result["permissionDecision"] == "deny"
assert "any" in get_reason_str(result).lower()
assert "type: ignore" in get_reason_str(result).lower()
assert "test quality" in get_reason_str(result).lower()
mock_test_check.assert_called_once()

View File

@@ -5,20 +5,17 @@
from __future__ import annotations
# pyright: reportPrivateUsage=false, reportPrivateImportUsage=false, reportPrivateLocalImportUsage=false, reportUnknownArgumentType=false, reportUnknownLambdaType=false, reportUnknownMemberType=false, reportUnusedCallResult=false
import json
import subprocess
from collections.abc import Iterable
from typing import TYPE_CHECKING, cast
import pytest
from hooks.guards import utils as guard
from tests.hooks.test_config_compatibility import QualityConfig
# Type alias for analysis results
AnalysisResults = guard.AnalysisResultsDict
from quality.hooks import code_quality_guard as guard
if TYPE_CHECKING:
from collections.abc import Iterable
from pathlib import Path
@@ -41,7 +38,7 @@ def test_quality_config_from_env_parsing(
) -> None:
"""Ensure QualityConfig.from_env correctly parses environment overrides."""
monkeypatch.setenv(env_key, value)
config = QualityConfig.from_env()
config = guard.QualityConfig.from_env()
assert getattr(config, attr) == expected
@@ -133,21 +130,15 @@ def test_run_type_checker_known_tools(
elif isinstance(returncode_obj, str):
exit_code = int(returncode_obj)
else:
error_msg = f"Unexpected returncode type: {type(returncode_obj)!r}"
raise TypeError(error_msg)
raise AssertionError(f"Unexpected returncode type: {type(returncode_obj)!r}")
stdout_obj = run_payload.get("stdout", "")
stdout = str(stdout_obj)
return subprocess.CompletedProcess(
list(cmd),
exit_code,
stdout=stdout,
stderr="",
)
return subprocess.CompletedProcess(list(cmd), exit_code, stdout=stdout, stderr="")
monkeypatch.setattr(guard.subprocess, "run", fake_run)
success, message = guard.run_type_checker_with_config(
success, message = guard._run_type_checker(
tool_name,
"tmp.py",
guard.QualityConfig(),
@@ -174,13 +165,12 @@ def test_run_type_checker_runtime_exceptions(
"""Timeouts and OS errors surface as warnings."""
monkeypatch.setattr(guard.Path, "exists", lambda _path: True, raising=False)
def raise_exc(*_args: object, **_kwargs: object) -> None:
raise exception
monkeypatch.setattr(guard.subprocess, "run", raise_exc)
success, message = guard.run_type_checker_with_config(
success, message = guard._run_type_checker(
"sourcery",
"tmp.py",
guard.QualityConfig(),
@@ -195,7 +185,7 @@ def test_run_type_checker_tool_missing(monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setattr(guard.Path, "exists", lambda _path: False, raising=False)
monkeypatch.setattr(guard, "_ensure_tool_installed", lambda _name: False)
success, message = guard.run_type_checker_with_config(
success, message = guard._run_type_checker(
"pyrefly",
"tmp.py",
guard.QualityConfig(),
@@ -210,7 +200,7 @@ def test_run_type_checker_unknown_tool(monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setattr(guard.Path, "exists", lambda _path: True, raising=False)
success, message = guard.run_type_checker_with_config(
success, message = guard._run_type_checker(
"unknown",
"tmp.py",
guard.QualityConfig(),
@@ -277,7 +267,7 @@ def test_run_quality_analyses_invokes_cli(
monkeypatch.setattr(guard.subprocess, "run", fake_run)
results = guard.run_quality_analyses(
results = guard._run_quality_analyses(
content=script_path.read_text(encoding="utf-8"),
tmp_path=str(script_path),
config=dummy_config,
@@ -300,7 +290,7 @@ def test_run_quality_analyses_invokes_cli(
def test_detect_any_usage(content: str, expected: bool) -> None:
"""_detect_any_usage flags Any usage even on syntax errors."""
result = guard.detect_any_usage(content)
result = guard._detect_any_usage(content)
assert (len(result) > 0) is expected
@@ -323,16 +313,16 @@ def test_handle_quality_issues_modes(
config = guard.QualityConfig(enforcement_mode=mode)
issues = ["Issue one", "Issue two"]
response = guard.handle_quality_issues(
response = guard._handle_quality_issues(
"example.py",
issues,
config,
forced_permission=forced,
)
decision = cast("str", response["permissionDecision"])
decision = cast(str, response["permissionDecision"])
assert decision == expected_permission
if forced is None:
reason = cast("str", response.get("reason", ""))
reason = cast(str, response.get("reason", ""))
assert any(issue in reason for issue in issues)
@@ -342,13 +332,12 @@ def test_perform_quality_check_with_state_tracking(
"""_perform_quality_check stores state and reports detected issues."""
tracked_calls: list[str] = []
def record_state(path: str, _content: str) -> None:
tracked_calls.append(path)
monkeypatch.setattr(guard, "store_pre_state", record_state)
def fake_analyze(*_args: object, **_kwargs: object) -> AnalysisResults:
def fake_analyze(*_args: object, **_kwargs: object) -> guard.AnalysisResults:
return {
"modernization": {
"files": {"example.py": [{"issue_type": "use_enumerate"}]},
@@ -359,7 +348,7 @@ def test_perform_quality_check_with_state_tracking(
config = guard.QualityConfig(state_tracking_enabled=True)
has_issues, issues = guard.perform_quality_check(
has_issues, issues = guard._perform_quality_check(
"example.py",
"def old(): pass",
config,
@@ -368,7 +357,8 @@ def test_perform_quality_check_with_state_tracking(
assert tracked_calls == ["example.py"]
assert has_issues is True
assert any(
"Modernization" in issue or "modernization" in issue.lower() for issue in issues
"Modernization" in issue or "modernization" in issue.lower()
for issue in issues
)
@@ -398,7 +388,7 @@ def test_check_cross_file_duplicates_command(monkeypatch: pytest.MonkeyPatch) ->
def test_create_hook_response_includes_reason() -> None:
"""_create_hook_response embeds permission, reason, and system message."""
response = guard.create_hook_response(
response = guard._create_hook_response(
"PreToolUse",
permission="deny",
reason="Testing",
@@ -406,9 +396,9 @@ def test_create_hook_response_includes_reason() -> None:
additional_context="context",
decision="block",
)
assert cast("str", response["permissionDecision"]) == "deny"
assert cast("str", response["reason"]) == "Testing"
assert cast("str", response["systemMessage"]) == "System"
hook_output = cast("dict[str, object]", response["hookSpecificOutput"])
assert cast("str", hook_output["additionalContext"]) == "context"
assert cast("str", response["decision"]) == "block"
assert cast(str, response["permissionDecision"]) == "deny"
assert cast(str, response["reason"]) == "Testing"
assert cast(str, response["systemMessage"]) == "System"
hook_output = cast(dict[str, object], response["hookSpecificOutput"])
assert cast(str, hook_output["additionalContext"]) == "context"
assert cast(str, response["decision"]) == "block"

View File

@@ -3,6 +3,8 @@
from __future__ import annotations
# pyright: reportPrivateUsage=false, reportPrivateImportUsage=false, reportPrivateLocalImportUsage=false, reportUnknownArgumentType=false, reportUnknownVariableType=false, reportUnknownLambdaType=false, reportUnknownMemberType=false, reportUnusedCallResult=false
# ruff: noqa: SLF001
import json
import os
import subprocess
@@ -11,7 +13,7 @@ from pathlib import Path
import pytest
from hooks.guards import utils as guard
from quality.hooks import code_quality_guard as guard
class TestVenvDetection:
@@ -22,31 +24,26 @@ class TestVenvDetection:
# Use home directory to avoid /tmp check
root = Path.home() / f"test_proj_{os.getpid()}"
try:
self._extracted_from_test_finds_venv_from_file_path_6(root)
src_dir = root / "src/pkg"
src_dir.mkdir(parents=True)
venv_bin = root / ".venv/bin"
venv_bin.mkdir(parents=True)
# Create the file so path exists
test_file = src_dir / "mod.py"
test_file.write_text("# test")
result = guard._get_project_venv_bin(str(test_file))
assert result == venv_bin
finally:
import shutil
if root.exists():
shutil.rmtree(root)
# TODO Rename this here and in `test_finds_venv_from_file_path`
def _extracted_from_test_finds_venv_from_file_path_6(self, root: Path) -> None:
src_dir = root / "src/pkg"
src_dir.mkdir(parents=True)
venv_bin = root / ".venv/bin"
venv_bin.mkdir(parents=True)
# Create the file so path exists
test_file = src_dir / "mod.py"
test_file.write_text("# test")
result = guard.get_project_venv_bin(str(test_file))
assert result == venv_bin
def test_fallback_when_no_venv(self) -> None:
"""Should fallback to claude-scripts venv when no venv found."""
# Use a path that definitely has no .venv
result = guard.get_project_venv_bin("/etc/hosts")
result = guard._get_project_venv_bin("/etc/hosts")
# Should fall back to claude-scripts
expected = (Path(__file__).parent.parent.parent / ".venv" / "bin").resolve()
assert result.resolve() == expected
@@ -57,31 +54,27 @@ class TestErrorFormatting:
def test_basedpyright_formatting(self) -> None:
"""BasedPyright errors should be formatted."""
output = json.dumps(
{
"generalDiagnostics": [
{
"message": "Test error",
"rule": "testRule",
"range": {"start": {"line": 5}},
},
],
},
)
result = guard.format_basedpyright_errors(output)
output = json.dumps({
"generalDiagnostics": [{
"message": "Test error",
"rule": "testRule",
"range": {"start": {"line": 5}},
}],
})
result = guard._format_basedpyright_errors(output)
assert "Found 1 type error" in result
assert "Line 6:" in result
def test_pyrefly_formatting(self) -> None:
"""Pyrefly errors should be formatted."""
output = "ERROR Test error\nERROR Another error"
result = guard.format_pyrefly_errors(output)
result = guard._format_pyrefly_errors(output)
assert "Found 2 type error" in result
def test_sourcery_formatting(self) -> None:
"""Sourcery errors should be formatted."""
output = "file.py:1:1 - Issue\n✖ 1 issue detected"
result = guard.format_sourcery_errors(output)
result = guard._format_sourcery_errors(output)
assert "Found 1 code quality issue" in result
@@ -101,7 +94,6 @@ class TestPythonpathSetup:
try:
(root / "src").mkdir(parents=True)
(root / ".venv/bin").mkdir(parents=True)
(root / ".git").mkdir() # Add project marker
tool = root / ".venv/bin/basedpyright"
tool.write_text("#!/bin/bash\necho fake")
tool.chmod(0o755)
@@ -123,9 +115,10 @@ class TestPythonpathSetup:
test_file = root / "src/mod.py"
test_file.write_text("# test")
guard.run_type_checker(
guard._run_type_checker(
"basedpyright",
str(test_file),
guard.QualityConfig(),
original_file_path=str(test_file),
)
@@ -133,7 +126,6 @@ class TestPythonpathSetup:
assert str(root / "src") in captured_env["PYTHONPATH"]
finally:
import shutil
if root.exists():
shutil.rmtree(root)
@@ -145,64 +137,47 @@ class TestProjectRootAndTempFiles:
"""Should find project root from deeply nested file."""
root = Path.home() / f"test_root_{os.getpid()}"
try:
self._extracted_from_test_finds_project_root_from_nested_file_6(root)
# Create project structure
nested = root / "src/pkg/subpkg"
nested.mkdir(parents=True)
(root / ".git").mkdir()
test_file = nested / "module.py"
test_file.write_text("# test")
found_root = guard._find_project_root(str(test_file))
assert found_root == root
finally:
import shutil
if root.exists():
shutil.rmtree(root)
# TODO Rename this here and in `test_finds_project_root_from_nested_file`
def _extracted_from_test_finds_project_root_from_nested_file_6(
self,
root: Path,
) -> None:
# Create project structure
nested = root / "src/pkg/subpkg"
nested.mkdir(parents=True)
(root / ".git").mkdir()
test_file = nested / "module.py"
test_file.write_text("# test")
found_root = guard.find_project_root(str(test_file))
assert found_root == root
def test_creates_tmp_dir_in_project_root(self) -> None:
"""Should create .tmp directory in project root."""
root = Path.home() / f"test_tmp_{os.getpid()}"
try:
self._extracted_from_test_creates_tmp_dir_in_project_root_5(root)
(root / "src").mkdir(parents=True)
(root / "pyproject.toml").touch()
test_file = root / "src/module.py"
test_file.write_text("# test")
tmp_dir = guard._get_project_tmp_dir(str(test_file))
assert tmp_dir.exists()
assert tmp_dir == root / ".tmp"
assert tmp_dir.parent == root
finally:
import shutil
if root.exists():
shutil.rmtree(root)
# TODO Rename this here and in `test_creates_tmp_dir_in_project_root`
def _extracted_from_test_creates_tmp_dir_in_project_root_5(
self,
root: Path,
) -> None:
(root / "src").mkdir(parents=True)
(root / "pyproject.toml").touch()
test_file = root / "src/module.py"
test_file.write_text("# test")
tmp_dir = guard.get_project_tmp_dir(str(test_file))
assert tmp_dir.exists()
assert tmp_dir == root / ".tmp"
assert tmp_dir.parent == root
def test_runs_from_project_root(self, monkeypatch: pytest.MonkeyPatch) -> None:
"""Type checkers should run from project root to find configs."""
root = Path.home() / f"test_cwd_{os.getpid()}"
try:
(root / "src").mkdir(parents=True)
(root / ".venv/bin").mkdir(parents=True)
(root / ".git").mkdir() # Add project marker
tool = root / ".venv/bin/basedpyright"
tool.write_text("#!/bin/bash\necho fake")
tool.chmod(0o755)
@@ -226,9 +201,10 @@ class TestProjectRootAndTempFiles:
test_file = root / "src/mod.py"
test_file.write_text("# test")
guard.run_type_checker(
guard._run_type_checker(
"basedpyright",
str(test_file),
guard.QualityConfig(),
original_file_path=str(test_file),
)
@@ -237,6 +213,5 @@ class TestProjectRootAndTempFiles:
assert captured_cwd[0] == root
finally:
import shutil
if root.exists():
shutil.rmtree(root)

uv.lock generated
View File

@@ -38,6 +38,52 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/40/d1047a5addcade9291685d06ef42a63c1347517018bafd82747af9da0294/basedpyright-1.31.4-py3-none-any.whl", hash = "sha256:055e4a38024bd653be12d6216c1cfdbee49a1096d342b4d5f5b4560f7714b6fc", size = 11731440, upload-time = "2025-09-03T13:05:52.308Z" },
]
[[package]]
name = "certifi"
version = "2025.10.5"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" },
]
[[package]]
name = "cffi"
version = "2.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pycparser", marker = "implementation_name != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" },
{ url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" },
{ url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" },
{ url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" },
{ url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" },
{ url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" },
{ url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" },
{ url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" },
{ url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" },
{ url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" },
{ url = "https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" },
{ url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" },
{ url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" },
{ url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" },
{ url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" },
{ url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" },
{ url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" },
{ url = "https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" },
{ url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" },
{ url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" },
{ url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" },
{ url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" },
{ url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" },
{ url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" },
{ url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" },
{ url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" },
]
[[package]]
name = "cfgv"
version = "3.4.0"
@@ -47,9 +93,66 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" },
]
[[package]]
name = "charset-normalizer"
version = "3.4.4"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" },
{ url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" },
{ url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" },
{ url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" },
{ url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" },
{ url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" },
{ url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" },
{ url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" },
{ url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" },
{ url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" },
{ url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" },
{ url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" },
{ url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" },
{ url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" },
{ url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" },
{ url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" },
{ url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" },
{ url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" },
{ url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" },
{ url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" },
{ url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" },
{ url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" },
{ url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" },
{ url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" },
{ url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" },
{ url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" },
{ url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" },
{ url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" },
{ url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" },
{ url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" },
{ url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" },
{ url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" },
{ url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" },
{ url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" },
{ url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" },
{ url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" },
{ url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" },
{ url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" },
{ url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" },
{ url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" },
{ url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" },
{ url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" },
{ url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" },
{ url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" },
{ url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" },
{ url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" },
{ url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" },
{ url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" },
{ url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
]
[[package]]
name = "claude-scripts"
version = "0.1.0"
version = "0.1.1"
source = { editable = "." }
dependencies = [
{ name = "bandit" },
@@ -78,6 +181,7 @@ dev = [
{ name = "pyrefly" },
{ name = "pytest" },
{ name = "sourcery" },
{ name = "twine" },
]
[package.metadata]
@@ -106,6 +210,7 @@ dev = [
{ name = "pyrefly", specifier = ">=0.2.0" },
{ name = "pytest", specifier = ">=8.4.2" },
{ name = "sourcery", specifier = ">=1.37.0" },
{ name = "twine", specifier = ">=6.2.0" },
]
[[package]]
@@ -193,6 +298,50 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/44/0c/50db5379b615854b5cf89146f8f5bd1d5a9693d7f3a987e269693521c404/coverage-7.10.6-py3-none-any.whl", hash = "sha256:92c4ecf6bf11b2e85fd4d8204814dc26e6a19f0c9d938c207c5cb0eadfcabbe3", size = 208986, upload-time = "2025-08-29T15:35:14.506Z" },
]
[[package]]
name = "cryptography"
version = "46.0.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/9f/33/c00162f49c0e2fe8064a62cb92b93e50c74a72bc370ab92f86112b33ff62/cryptography-46.0.3.tar.gz", hash = "sha256:a8b17438104fed022ce745b362294d9ce35b4c2e45c1d958ad4a4b019285f4a1", size = 749258, upload-time = "2025-10-15T23:18:31.74Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1c/67/38769ca6b65f07461eb200e85fc1639b438bdc667be02cf7f2cd6a64601c/cryptography-46.0.3-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:09859af8466b69bc3c27bdf4f5d84a665e0f7ab5088412e9e2ec49758eca5cbc", size = 4296667, upload-time = "2025-10-15T23:16:54.369Z" },
{ url = "https://files.pythonhosted.org/packages/5c/49/498c86566a1d80e978b42f0d702795f69887005548c041636df6ae1ca64c/cryptography-46.0.3-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:01ca9ff2885f3acc98c29f1860552e37f6d7c7d013d7334ff2a9de43a449315d", size = 4450807, upload-time = "2025-10-15T23:16:56.414Z" },
{ url = "https://files.pythonhosted.org/packages/4b/0a/863a3604112174c8624a2ac3c038662d9e59970c7f926acdcfaed8d61142/cryptography-46.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6eae65d4c3d33da080cff9c4ab1f711b15c1d9760809dad6ea763f3812d254cb", size = 4299615, upload-time = "2025-10-15T23:16:58.442Z" },
{ url = "https://files.pythonhosted.org/packages/64/02/b73a533f6b64a69f3cd3872acb6ebc12aef924d8d103133bb3ea750dc703/cryptography-46.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5bf0ed4490068a2e72ac03d786693adeb909981cc596425d09032d372bcc849", size = 4016800, upload-time = "2025-10-15T23:17:00.378Z" },
{ url = "https://files.pythonhosted.org/packages/25/d5/16e41afbfa450cde85a3b7ec599bebefaef16b5c6ba4ec49a3532336ed72/cryptography-46.0.3-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:5ecfccd2329e37e9b7112a888e76d9feca2347f12f37918facbb893d7bb88ee8", size = 4984707, upload-time = "2025-10-15T23:17:01.98Z" },
{ url = "https://files.pythonhosted.org/packages/c9/56/e7e69b427c3878352c2fb9b450bd0e19ed552753491d39d7d0a2f5226d41/cryptography-46.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:a2c0cd47381a3229c403062f764160d57d4d175e022c1df84e168c6251a22eec", size = 4482541, upload-time = "2025-10-15T23:17:04.078Z" },
{ url = "https://files.pythonhosted.org/packages/78/f6/50736d40d97e8483172f1bb6e698895b92a223dba513b0ca6f06b2365339/cryptography-46.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:549e234ff32571b1f4076ac269fcce7a808d3bf98b76c8dd560e42dbc66d7d91", size = 4299464, upload-time = "2025-10-15T23:17:05.483Z" },
{ url = "https://files.pythonhosted.org/packages/00/de/d8e26b1a855f19d9994a19c702fa2e93b0456beccbcfe437eda00e0701f2/cryptography-46.0.3-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:c0a7bb1a68a5d3471880e264621346c48665b3bf1c3759d682fc0864c540bd9e", size = 4950838, upload-time = "2025-10-15T23:17:07.425Z" },
{ url = "https://files.pythonhosted.org/packages/8f/29/798fc4ec461a1c9e9f735f2fc58741b0daae30688f41b2497dcbc9ed1355/cryptography-46.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:10b01676fc208c3e6feeb25a8b83d81767e8059e1fe86e1dc62d10a3018fa926", size = 4481596, upload-time = "2025-10-15T23:17:09.343Z" },
{ url = "https://files.pythonhosted.org/packages/15/8d/03cd48b20a573adfff7652b76271078e3045b9f49387920e7f1f631d125e/cryptography-46.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:0abf1ffd6e57c67e92af68330d05760b7b7efb243aab8377e583284dbab72c71", size = 4426782, upload-time = "2025-10-15T23:17:11.22Z" },
{ url = "https://files.pythonhosted.org/packages/fa/b1/ebacbfe53317d55cf33165bda24c86523497a6881f339f9aae5c2e13e57b/cryptography-46.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a04bee9ab6a4da801eb9b51f1b708a1b5b5c9eb48c03f74198464c66f0d344ac", size = 4698381, upload-time = "2025-10-15T23:17:12.829Z" },
{ url = "https://files.pythonhosted.org/packages/73/dc/9aa866fbdbb95b02e7f9d086f1fccfeebf8953509b87e3f28fff927ff8a0/cryptography-46.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c8daeb2d2174beb4575b77482320303f3d39b8e81153da4f0fb08eb5fe86a6c5", size = 4288728, upload-time = "2025-10-15T23:17:21.527Z" },
{ url = "https://files.pythonhosted.org/packages/c5/fd/bc1daf8230eaa075184cbbf5f8cd00ba9db4fd32d63fb83da4671b72ed8a/cryptography-46.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:39b6755623145ad5eff1dab323f4eae2a32a77a7abef2c5089a04a3d04366715", size = 4435078, upload-time = "2025-10-15T23:17:23.042Z" },
{ url = "https://files.pythonhosted.org/packages/82/98/d3bd5407ce4c60017f8ff9e63ffee4200ab3e23fe05b765cab805a7db008/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:db391fa7c66df6762ee3f00c95a89e6d428f4d60e7abc8328f4fe155b5ac6e54", size = 4293460, upload-time = "2025-10-15T23:17:24.885Z" },
{ url = "https://files.pythonhosted.org/packages/26/e9/e23e7900983c2b8af7a08098db406cf989d7f09caea7897e347598d4cd5b/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:78a97cf6a8839a48c49271cdcbd5cf37ca2c1d6b7fdd86cc864f302b5e9bf459", size = 3995237, upload-time = "2025-10-15T23:17:26.449Z" },
{ url = "https://files.pythonhosted.org/packages/91/15/af68c509d4a138cfe299d0d7ddb14afba15233223ebd933b4bbdbc7155d3/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:dfb781ff7eaa91a6f7fd41776ec37c5853c795d3b358d4896fdbb5df168af422", size = 4967344, upload-time = "2025-10-15T23:17:28.06Z" },
{ url = "https://files.pythonhosted.org/packages/ca/e3/8643d077c53868b681af077edf6b3cb58288b5423610f21c62aadcbe99f4/cryptography-46.0.3-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:6f61efb26e76c45c4a227835ddeae96d83624fb0d29eb5df5b96e14ed1a0afb7", size = 4466564, upload-time = "2025-10-15T23:17:29.665Z" },
{ url = "https://files.pythonhosted.org/packages/0e/43/c1e8726fa59c236ff477ff2b5dc071e54b21e5a1e51aa2cee1676f1c986f/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:23b1a8f26e43f47ceb6d6a43115f33a5a37d57df4ea0ca295b780ae8546e8044", size = 4292415, upload-time = "2025-10-15T23:17:31.686Z" },
{ url = "https://files.pythonhosted.org/packages/42/f9/2f8fefdb1aee8a8e3256a0568cffc4e6d517b256a2fe97a029b3f1b9fe7e/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:b419ae593c86b87014b9be7396b385491ad7f320bde96826d0dd174459e54665", size = 4931457, upload-time = "2025-10-15T23:17:33.478Z" },
{ url = "https://files.pythonhosted.org/packages/79/30/9b54127a9a778ccd6d27c3da7563e9f2d341826075ceab89ae3b41bf5be2/cryptography-46.0.3-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:50fc3343ac490c6b08c0cf0d704e881d0d660be923fd3076db3e932007e726e3", size = 4466074, upload-time = "2025-10-15T23:17:35.158Z" },
{ url = "https://files.pythonhosted.org/packages/ac/68/b4f4a10928e26c941b1b6a179143af9f4d27d88fe84a6a3c53592d2e76bf/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:22d7e97932f511d6b0b04f2bfd818d73dcd5928db509460aaf48384778eb6d20", size = 4420569, upload-time = "2025-10-15T23:17:37.188Z" },
{ url = "https://files.pythonhosted.org/packages/a3/49/3746dab4c0d1979888f125226357d3262a6dd40e114ac29e3d2abdf1ec55/cryptography-46.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d55f3dffadd674514ad19451161118fd010988540cee43d8bc20675e775925de", size = 4681941, upload-time = "2025-10-15T23:17:39.236Z" },
{ url = "https://files.pythonhosted.org/packages/27/32/b68d27471372737054cbd34c84981f9edbc24fe67ca225d389799614e27f/cryptography-46.0.3-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:4b7387121ac7d15e550f5cb4a43aef2559ed759c35df7336c402bb8275ac9683", size = 4294089, upload-time = "2025-10-15T23:17:48.269Z" },
{ url = "https://files.pythonhosted.org/packages/26/42/fa8389d4478368743e24e61eea78846a0006caffaf72ea24a15159215a14/cryptography-46.0.3-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:15ab9b093e8f09daab0f2159bb7e47532596075139dd74365da52ecc9cb46c5d", size = 4440029, upload-time = "2025-10-15T23:17:49.837Z" },
{ url = "https://files.pythonhosted.org/packages/5f/eb/f483db0ec5ac040824f269e93dd2bd8a21ecd1027e77ad7bdf6914f2fd80/cryptography-46.0.3-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:46acf53b40ea38f9c6c229599a4a13f0d46a6c3fa9ef19fc1a124d62e338dfa0", size = 4297222, upload-time = "2025-10-15T23:17:51.357Z" },
{ url = "https://files.pythonhosted.org/packages/fd/cf/da9502c4e1912cb1da3807ea3618a6829bee8207456fbbeebc361ec38ba3/cryptography-46.0.3-cp38-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10ca84c4668d066a9878890047f03546f3ae0a6b8b39b697457b7757aaf18dbc", size = 4012280, upload-time = "2025-10-15T23:17:52.964Z" },
{ url = "https://files.pythonhosted.org/packages/6b/8f/9adb86b93330e0df8b3dcf03eae67c33ba89958fc2e03862ef1ac2b42465/cryptography-46.0.3-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:36e627112085bb3b81b19fed209c05ce2a52ee8b15d161b7c643a7d5a88491f3", size = 4978958, upload-time = "2025-10-15T23:17:54.965Z" },
{ url = "https://files.pythonhosted.org/packages/d1/a0/5fa77988289c34bdb9f913f5606ecc9ada1adb5ae870bd0d1054a7021cc4/cryptography-46.0.3-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1000713389b75c449a6e979ffc7dcc8ac90b437048766cef052d4d30b8220971", size = 4473714, upload-time = "2025-10-15T23:17:56.754Z" },
{ url = "https://files.pythonhosted.org/packages/14/e5/fc82d72a58d41c393697aa18c9abe5ae1214ff6f2a5c18ac470f92777895/cryptography-46.0.3-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:b02cf04496f6576afffef5ddd04a0cb7d49cf6be16a9059d793a30b035f6b6ac", size = 4296970, upload-time = "2025-10-15T23:17:58.588Z" },
{ url = "https://files.pythonhosted.org/packages/78/06/5663ed35438d0b09056973994f1aec467492b33bd31da36e468b01ec1097/cryptography-46.0.3-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:71e842ec9bc7abf543b47cf86b9a743baa95f4677d22baa4c7d5c69e49e9bc04", size = 4940236, upload-time = "2025-10-15T23:18:00.897Z" },
{ url = "https://files.pythonhosted.org/packages/fc/59/873633f3f2dcd8a053b8dd1d38f783043b5fce589c0f6988bf55ef57e43e/cryptography-46.0.3-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:402b58fc32614f00980b66d6e56a5b4118e6cb362ae8f3fda141ba4689bd4506", size = 4472642, upload-time = "2025-10-15T23:18:02.749Z" },
{ url = "https://files.pythonhosted.org/packages/3d/39/8e71f3930e40f6877737d6f69248cf74d4e34b886a3967d32f919cc50d3b/cryptography-46.0.3-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ef639cb3372f69ec44915fafcd6698b6cc78fbe0c2ea41be867f6ed612811963", size = 4423126, upload-time = "2025-10-15T23:18:04.85Z" },
{ url = "https://files.pythonhosted.org/packages/cd/c7/f65027c2810e14c3e7268353b1681932b87e5a48e65505d8cc17c99e36ae/cryptography-46.0.3-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b51b8ca4f1c6453d8829e1eb7299499ca7f313900dd4d89a24b8b87c0a780d4", size = 4686573, upload-time = "2025-10-15T23:18:06.908Z" },
]
[[package]]
name = "datasketch"
version = "1.6.5"
@@ -215,6 +364,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/33/6b/e0547afaf41bf2c42e52430072fa5658766e3d65bd4b03a563d1b6336f57/distlib-0.4.0-py2.py3-none-any.whl", hash = "sha256:9659f7d87e46584a30b5780e43ac7a2143098441670ff0a49d5f9034c54a6c16", size = 469047, upload-time = "2025-07-17T16:51:58.613Z" },
]
[[package]]
name = "docutils"
version = "0.22.2"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4a/c0/89fe6215b443b919cb98a5002e107cb5026854ed1ccb6b5833e0768419d1/docutils-0.22.2.tar.gz", hash = "sha256:9fdb771707c8784c8f2728b67cb2c691305933d68137ef95a75db5f4dfbc213d", size = 2289092, upload-time = "2025-09-20T17:55:47.994Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/66/dd/f95350e853a4468ec37478414fc04ae2d61dad7a947b3015c3dcc51a09b9/docutils-0.22.2-py3-none-any.whl", hash = "sha256:b0e98d679283fc3bb0ead8a5da7f501baa632654e7056e9c5846842213d674d8", size = 632667, upload-time = "2025-09-20T17:55:43.052Z" },
]
[[package]]
name = "filelock"
version = "3.19.1"
@@ -224,6 +382,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/42/14/42b2651a2f46b022ccd948bca9f2d5af0fd8929c4eec235b8d6d844fbe67/filelock-3.19.1-py3-none-any.whl", hash = "sha256:d38e30481def20772f5baf097c122c3babc4fcdb7e14e57049eb9d88c6dc017d", size = 15988, upload-time = "2025-08-14T16:56:01.633Z" },
]
[[package]]
name = "id"
version = "1.5.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/22/11/102da08f88412d875fa2f1a9a469ff7ad4c874b0ca6fed0048fe385bdb3d/id-1.5.0.tar.gz", hash = "sha256:292cb8a49eacbbdbce97244f47a97b4c62540169c976552e497fd57df0734c1d", size = 15237, upload-time = "2024-12-04T19:53:05.575Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9f/cb/18326d2d89ad3b0dd143da971e77afd1e6ca6674f1b1c3df4b6bec6279fc/id-1.5.0-py3-none-any.whl", hash = "sha256:f1434e1cef91f2cbb8a4ec64663d5a23b9ed43ef44c4c957d02583d61714c658", size = 13611, upload-time = "2024-12-04T19:53:03.02Z" },
]
[[package]]
name = "identify"
version = "2.6.14"
@@ -233,6 +403,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/e5/ae/2ad30f4652712c82f1c23423d79136fbce338932ad166d70c1efb86a5998/identify-2.6.14-py2.py3-none-any.whl", hash = "sha256:11a073da82212c6646b1f39bb20d4483bfb9543bd5566fec60053c4bb309bf2e", size = 99172, upload-time = "2025-09-06T19:30:51.759Z" },
]
[[package]]
name = "idna"
version = "3.11"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
]
[[package]]
name = "iniconfig"
version = "2.1.0"
@@ -242,6 +421,65 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" },
]
[[package]]
name = "jaraco-classes"
version = "3.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "more-itertools" },
]
sdist = { url = "https://files.pythonhosted.org/packages/06/c0/ed4a27bc5571b99e3cff68f8a9fa5b56ff7df1c2251cc715a652ddd26402/jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd", size = 11780, upload-time = "2024-03-31T07:27:36.643Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/7f/66/b15ce62552d84bbfcec9a4873ab79d993a1dd4edb922cbfccae192bd5b5f/jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790", size = 6777, upload-time = "2024-03-31T07:27:34.792Z" },
]
[[package]]
name = "jaraco-context"
version = "6.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/df/ad/f3777b81bf0b6e7bc7514a1656d3e637b2e8e15fab2ce3235730b3e7a4e6/jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3", size = 13912, upload-time = "2024-08-20T03:39:27.358Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ff/db/0c52c4cf5e4bd9f5d7135ec7669a3a767af21b3a308e1ed3674881e52b62/jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4", size = 6825, upload-time = "2024-08-20T03:39:25.966Z" },
]
[[package]]
name = "jaraco-functools"
version = "4.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "more-itertools" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f7/ed/1aa2d585304ec07262e1a83a9889880701079dde796ac7b1d1826f40c63d/jaraco_functools-4.3.0.tar.gz", hash = "sha256:cfd13ad0dd2c47a3600b439ef72d8615d482cedcff1632930d6f28924d92f294", size = 19755, upload-time = "2025-08-18T20:05:09.91Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b4/09/726f168acad366b11e420df31bf1c702a54d373a83f968d94141a8c3fde0/jaraco_functools-4.3.0-py3-none-any.whl", hash = "sha256:227ff8ed6f7b8f62c56deff101545fa7543cf2c8e7b82a7c2116e672f29c26e8", size = 10408, upload-time = "2025-08-18T20:05:08.69Z" },
]
[[package]]
name = "jeepney"
version = "0.9.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/7b/6f/357efd7602486741aa73ffc0617fb310a29b588ed0fd69c2399acbb85b0c/jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732", size = 106758, upload-time = "2025-02-27T18:51:01.684Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b2/a3/e137168c9c44d18eff0376253da9f1e9234d0239e0ee230d2fee6cea8e55/jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683", size = 49010, upload-time = "2025-02-27T18:51:00.104Z" },
]
[[package]]
name = "keyring"
version = "25.6.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "jaraco-classes" },
{ name = "jaraco-context" },
{ name = "jaraco-functools" },
{ name = "jeepney", marker = "sys_platform == 'linux'" },
{ name = "pywin32-ctypes", marker = "sys_platform == 'win32'" },
{ name = "secretstorage", marker = "sys_platform == 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/70/09/d904a6e96f76ff214be59e7aa6ef7190008f52a0ab6689760a98de0bf37d/keyring-25.6.0.tar.gz", hash = "sha256:0b39998aa941431eb3d9b0d4b2460bc773b9df6fed7621c2dfb291a7e0187a66", size = 62750, upload-time = "2024-12-25T15:26:45.782Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/d3/32/da7f44bcb1105d3e88a0b74ebdca50c59121d2ddf71c9e34ba47df7f3a56/keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd", size = 39085, upload-time = "2024-12-25T15:26:44.377Z" },
]
[[package]]
name = "levenshtein"
version = "0.27.1"
@@ -316,6 +554,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" },
]
[[package]]
name = "more-itertools"
version = "10.8.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ea/5d/38b681d3fce7a266dd9ab73c66959406d565b3e85f21d5e66e1181d93721/more_itertools-10.8.0.tar.gz", hash = "sha256:f638ddf8a1a0d134181275fb5d58b086ead7c6a72429ad725c67503f13ba30bd", size = 137431, upload-time = "2025-09-02T15:23:11.018Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a4/8e/469e5a4a2f5855992e425f3cb33804cc07bf18d48f2db061aec61ce50270/more_itertools-10.8.0-py3-none-any.whl", hash = "sha256:52d4362373dcf7c52546bc4af9a86ee7c4579df9a8dc268be0a2f949d376cc9b", size = 69667, upload-time = "2025-09-02T15:23:09.635Z" },
]
[[package]]
name = "mypy"
version = "1.18.1"
@@ -357,6 +604,39 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" },
]
[[package]]
name = "nh3"
version = "0.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/cf/a6/c6e942fc8dcadab08645f57a6d01d63e97114a30ded5f269dc58e05d4741/nh3-0.3.1.tar.gz", hash = "sha256:6a854480058683d60bdc7f0456105092dae17bef1f300642856d74bd4201da93", size = 18590, upload-time = "2025-10-07T03:27:58.217Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9c/24/4becaa61e066ff694c37627f5ef7528901115ffa17f7a6693c40da52accd/nh3-0.3.1-cp313-cp313t-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:80dc7563a2a3b980e44b221f69848e3645bbf163ab53e3d1add4f47b26120355", size = 1420887, upload-time = "2025-10-07T03:27:25.654Z" },
{ url = "https://files.pythonhosted.org/packages/94/49/16a6ec9098bb9bdf0fb9f09d6464865a3a48858d8d96e779a998ec3bdce0/nh3-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f600ad86114df21efc4a3592faa6b1d099c0eebc7e018efebb1c133376097da", size = 791700, upload-time = "2025-10-07T03:27:27.041Z" },
{ url = "https://files.pythonhosted.org/packages/1d/cc/1c024d7c23ad031dfe82ad59581736abcc403b006abb0d2785bffa768b54/nh3-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:669a908706cd28203d9cfce2f567575686e364a1bc6074d413d88d456066f743", size = 830225, upload-time = "2025-10-07T03:27:28.315Z" },
{ url = "https://files.pythonhosted.org/packages/89/08/4a87f9212373bd77bba01c1fd515220e0d263316f448d9c8e4b09732a645/nh3-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:a5721f59afa0ab3dcaa0d47e58af33a5fcd254882e1900ee4a8968692a40f79d", size = 999112, upload-time = "2025-10-07T03:27:29.782Z" },
{ url = "https://files.pythonhosted.org/packages/19/cf/94783911eb966881a440ba9641944c27152662a253c917a794a368b92a3c/nh3-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2cb6d9e192fbe0d451c7cb1350dadedbeae286207dbf101a28210193d019752e", size = 1070424, upload-time = "2025-10-07T03:27:31.2Z" },
{ url = "https://files.pythonhosted.org/packages/71/44/efb57b44e86a3de528561b49ed53803e5d42cd0441dcfd29b89422160266/nh3-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:474b176124c1b495ccfa1c20f61b7eb83ead5ecccb79ab29f602c148e8378489", size = 996129, upload-time = "2025-10-07T03:27:32.595Z" },
{ url = "https://files.pythonhosted.org/packages/ee/d3/87c39ea076510e57ee99a27fa4c2335e9e5738172b3963ee7c744a32726c/nh3-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4a2434668f4eef4eab17c128e565ce6bea42113ce10c40b928e42c578d401800", size = 980310, upload-time = "2025-10-07T03:27:34.282Z" },
{ url = "https://files.pythonhosted.org/packages/bc/30/00cfbd2a4d268e8d3bda9d1542ba4f7a20fbed37ad1e8e51beeee3f6fdae/nh3-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:0f454ba4c6aabafcaae964ae6f0a96cecef970216a57335fabd229a265fbe007", size = 584439, upload-time = "2025-10-07T03:27:36.103Z" },
{ url = "https://files.pythonhosted.org/packages/80/fa/39d27a62a2f39eb88c2bd50d9fee365a3645e456f3ec483c945a49c74f47/nh3-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:22b9e9c9eda497b02b7273b79f7d29e1f1170d2b741624c1b8c566aef28b1f48", size = 592388, upload-time = "2025-10-07T03:27:37.075Z" },
{ url = "https://files.pythonhosted.org/packages/7c/39/7df1c4ee13ef65ee06255df8101141793e97b4326e8509afbce5deada2b5/nh3-0.3.1-cp313-cp313t-win_arm64.whl", hash = "sha256:42e426f36e167ed29669b77ae3c4b9e185e4a1b130a86d7c3249194738a1d7b2", size = 579337, upload-time = "2025-10-07T03:27:38.055Z" },
{ url = "https://files.pythonhosted.org/packages/e1/28/a387fed70438d2810c8ac866e7b24bf1a5b6f30ae65316dfe4de191afa52/nh3-0.3.1-cp38-abi3-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:1de5c1a35bed19a1b1286bab3c3abfe42e990a8a6c4ce9bb9ab4bde49107ea3b", size = 1433666, upload-time = "2025-10-07T03:27:39.118Z" },
{ url = "https://files.pythonhosted.org/packages/c7/f9/500310c1f19cc80770a81aac3c94a0c6b4acdd46489e34019173b2b15a50/nh3-0.3.1-cp38-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eaba26591867f697cffdbc539faddeb1d75a36273f5bfe957eb421d3f87d7da1", size = 819897, upload-time = "2025-10-07T03:27:40.488Z" },
{ url = "https://files.pythonhosted.org/packages/d0/d4/ebb0965d767cba943793fa8f7b59d7f141bd322c86387a5e9485ad49754a/nh3-0.3.1-cp38-abi3-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:489ca5ecd58555c2865701e65f614b17555179e71ecc76d483b6f3886b813a9b", size = 803562, upload-time = "2025-10-07T03:27:41.86Z" },
{ url = "https://files.pythonhosted.org/packages/0a/9c/df037a13f0513283ecee1cf99f723b18e5f87f20e480582466b1f8e3a7db/nh3-0.3.1-cp38-abi3-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5a25662b392b06f251da6004a1f8a828dca7f429cd94ac07d8a98ba94d644438", size = 1050854, upload-time = "2025-10-07T03:27:43.29Z" },
{ url = "https://files.pythonhosted.org/packages/d0/9d/488fce56029de430e30380ec21f29cfaddaf0774f63b6aa2bf094c8b4c27/nh3-0.3.1-cp38-abi3-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38b4872499ab15b17c5c6e9f091143d070d75ddad4a4d1ce388d043ca556629c", size = 1002152, upload-time = "2025-10-07T03:27:44.358Z" },
{ url = "https://files.pythonhosted.org/packages/da/4a/24b0118de34d34093bf03acdeca3a9556f8631d4028814a72b9cc5216382/nh3-0.3.1-cp38-abi3-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:48425995d37880281b467f7cf2b3218c1f4750c55bcb1ff4f47f2320a2bb159c", size = 912333, upload-time = "2025-10-07T03:27:45.757Z" },
{ url = "https://files.pythonhosted.org/packages/11/0e/16b3886858b3953ef836dea25b951f3ab0c5b5a431da03f675c0e999afb8/nh3-0.3.1-cp38-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94292dd1bd2a2e142fa5bb94c0ee1d84433a5d9034640710132da7e0376fca3a", size = 796945, upload-time = "2025-10-07T03:27:47.169Z" },
{ url = "https://files.pythonhosted.org/packages/87/bb/aac139cf6796f2e0fec026b07843cea36099864ec104f865e2d802a25a30/nh3-0.3.1-cp38-abi3-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:dd6d1be301123a9af3263739726eeeb208197e5e78fc4f522408c50de77a5354", size = 837257, upload-time = "2025-10-07T03:27:48.243Z" },
{ url = "https://files.pythonhosted.org/packages/f8/d7/1d770876a288a3f5369fd6c816363a5f9d3a071dba24889458fdeb4f7a49/nh3-0.3.1-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b74bbd047b361c0f21d827250c865ff0895684d9fcf85ea86131a78cfa0b835b", size = 1004142, upload-time = "2025-10-07T03:27:49.278Z" },
{ url = "https://files.pythonhosted.org/packages/31/2a/c4259e8b94c2f4ba10a7560e0889a6b7d2f70dce7f3e93f6153716aaae47/nh3-0.3.1-cp38-abi3-musllinux_1_2_armv7l.whl", hash = "sha256:b222c05ae5139320da6caa1c5aed36dd0ee36e39831541d9b56e048a63b4d701", size = 1075896, upload-time = "2025-10-07T03:27:50.527Z" },
{ url = "https://files.pythonhosted.org/packages/59/06/b15ba9fea4773741acb3382dcf982f81e55f6053e8a6e72a97ac91928b1d/nh3-0.3.1-cp38-abi3-musllinux_1_2_i686.whl", hash = "sha256:b0d6c834d3c07366ecbdcecc1f4804c5ce0a77fa52ee4653a2a26d2d909980ea", size = 1003235, upload-time = "2025-10-07T03:27:51.673Z" },
{ url = "https://files.pythonhosted.org/packages/1d/13/74707f99221bbe0392d18611b51125d45f8bd5c6be077ef85575eb7a38b1/nh3-0.3.1-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:670f18b09f75c86c3865f79543bf5acd4bbe2a5a4475672eef2399dd8cdb69d2", size = 987308, upload-time = "2025-10-07T03:27:53.003Z" },
{ url = "https://files.pythonhosted.org/packages/ee/81/24bf41a5ce7648d7e954de40391bb1bcc4b7731214238c7138c2420f962c/nh3-0.3.1-cp38-abi3-win32.whl", hash = "sha256:d7431b2a39431017f19cd03144005b6c014201b3e73927c05eab6ca37bb1d98c", size = 591695, upload-time = "2025-10-07T03:27:54.43Z" },
{ url = "https://files.pythonhosted.org/packages/a5/ca/263eb96b6d32c61a92c1e5480b7f599b60db7d7fbbc0d944be7532d0ac42/nh3-0.3.1-cp38-abi3-win_amd64.whl", hash = "sha256:c0acef923a1c3a2df3ee5825ea79c149b6748c6449781c53ab6923dc75e87d26", size = 600564, upload-time = "2025-10-07T03:27:55.966Z" },
{ url = "https://files.pythonhosted.org/packages/34/67/d5e07efd38194f52b59b8af25a029b46c0643e9af68204ee263022924c27/nh3-0.3.1-cp38-abi3-win_arm64.whl", hash = "sha256:a3e810a92fb192373204456cac2834694440af73d749565b4348e30235da7f0b", size = 586369, upload-time = "2025-10-07T03:27:57.234Z" },
]
[[package]]
name = "nodeenv"
version = "1.9.1"
@@ -497,6 +777,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/5b/a5/987a405322d78a73b66e39e4a90e4ef156fd7141bf71df987e50717c321b/pre_commit-4.3.0-py2.py3-none-any.whl", hash = "sha256:2b0747ad7e6e967169136edffee14c16e148a778a54e4f967921aa1ebf2308d8", size = 220965, upload-time = "2025-08-09T18:56:13.192Z" },
]
[[package]]
name = "pycparser"
version = "2.23"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/fe/cf/d2d3b9f5699fb1e4615c8e32ff220203e43b248e1dfcc6736ad9057731ca/pycparser-2.23.tar.gz", hash = "sha256:78816d4f24add8f10a06d6f05b4d424ad9e96cfebf68a4ddc99c65c0720d00c2", size = 173734, upload-time = "2025-09-09T13:23:47.91Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a0/e3/59cd50310fc9b59512193629e1984c1f95e5c8ae6e5d8c69532ccc65a7fe/pycparser-2.23-py3-none-any.whl", hash = "sha256:e5c6e8d3fbad53479cab09ac03729e0a9faf2bee3db8208a550daf5af81a5934", size = 118140, upload-time = "2025-09-09T13:23:46.651Z" },
]
[[package]]
name = "pydantic"
version = "2.11.9"
@@ -621,6 +910,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/2a/95/8c8fd923b0a702388da4f9e0368f490d123cc5224279e6a083984304a15e/python_levenshtein-0.27.1-py3-none-any.whl", hash = "sha256:e1a4bc2a70284b2ebc4c505646142fecd0f831e49aa04ed972995895aec57396", size = 9426, upload-time = "2025-03-02T19:47:24.801Z" },
]
[[package]]
name = "pywin32-ctypes"
version = "0.2.3"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/85/9f/01a1a99704853cb63f253eea009390c88e7131c67e66a0a02099a8c917cb/pywin32-ctypes-0.2.3.tar.gz", hash = "sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755", size = 29471, upload-time = "2024-08-14T10:15:34.626Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/de/3d/8161f7711c017e01ac9f008dfddd9410dff3674334c233bde66e7ba65bbf/pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8", size = 30756, upload-time = "2024-08-14T10:15:33.187Z" },
]
[[package]]
name = "pyyaml"
version = "6.0.2"
@@ -723,6 +1021,56 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/f4/dfc7b8c46b1044a47f7ca55deceb5965985cff3193906cb32913121e6652/rapidfuzz-3.14.1-cp314-cp314t-win_arm64.whl", hash = "sha256:7cd312c380d3ce9d35c3ec9726b75eee9da50e8a38e89e229a03db2262d3d96b", size = 853771, upload-time = "2025-09-08T21:08:00.816Z" },
]
[[package]]
name = "readme-renderer"
version = "44.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "docutils" },
{ name = "nh3" },
{ name = "pygments" },
]
sdist = { url = "https://files.pythonhosted.org/packages/5a/a9/104ec9234c8448c4379768221ea6df01260cd6c2ce13182d4eac531c8342/readme_renderer-44.0.tar.gz", hash = "sha256:8712034eabbfa6805cacf1402b4eeb2a73028f72d1166d6f5cb7f9c047c5d1e1", size = 32056, upload-time = "2024-07-08T15:00:57.805Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e1/67/921ec3024056483db83953ae8e48079ad62b92db7880013ca77632921dd0/readme_renderer-44.0-py3-none-any.whl", hash = "sha256:2fbca89b81a08526aadf1357a8c2ae889ec05fb03f5da67f9769c9a592166151", size = 13310, upload-time = "2024-07-08T15:00:56.577Z" },
]
[[package]]
name = "requests"
version = "2.32.5"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "certifi" },
{ name = "charset-normalizer" },
{ name = "idna" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
]
[[package]]
name = "requests-toolbelt"
version = "1.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888, upload-time = "2023-05-01T04:11:33.229Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481, upload-time = "2023-05-01T04:11:28.427Z" },
]
[[package]]
name = "rfc3986"
version = "2.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/85/40/1520d68bfa07ab5a6f065a186815fb6610c86fe957bc065754e47f7b0840/rfc3986-2.0.0.tar.gz", hash = "sha256:97aacf9dbd4bfd829baad6e6309fa6573aaf1be3f6fa735c8ab05e46cecb261c", size = 49026, upload-time = "2022-01-10T00:52:30.832Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ff/9a/9afaade874b2fa6c752c36f1548f718b5b83af81ed9b76628329dab81c1b/rfc3986-2.0.0-py2.py3-none-any.whl", hash = "sha256:50b1502b60e289cb37883f3dfd34532b8873c7de9f49bb546641ce9cbd256ebd", size = 31326, upload-time = "2022-01-10T00:52:29.594Z" },
]
[[package]]
name = "rich"
version = "14.1.0"
@@ -823,6 +1171,19 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/97/30/2f9a5243008f76dfc5dee9a53dfb939d9b31e16ce4bd4f2e628bfc5d89d2/scipy-1.16.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d2a4472c231328d4de38d5f1f68fdd6d28a615138f842580a8a321b5845cf779", size = 26448374, upload-time = "2025-09-11T17:45:03.45Z" },
]
[[package]]
name = "secretstorage"
version = "3.4.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cryptography" },
{ name = "jeepney" },
]
sdist = { url = "https://files.pythonhosted.org/packages/31/9f/11ef35cf1027c1339552ea7bfe6aaa74a8516d8b5caf6e7d338daf54fd80/secretstorage-3.4.0.tar.gz", hash = "sha256:c46e216d6815aff8a8a18706a2fbfd8d53fcbb0dce99301881687a1b0289ef7c", size = 19748, upload-time = "2025-09-09T16:42:13.859Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/91/ff/2e2eed29e02c14a5cb6c57f09b2d5b40e65d6cc71f45b52e0be295ccbc2f/secretstorage-3.4.0-py3-none-any.whl", hash = "sha256:0e3b6265c2c63509fb7415717607e4b2c9ab767b7f344a57473b779ca13bd02e", size = 15272, upload-time = "2025-09-09T16:42:12.744Z" },
]
[[package]]
name = "six"
version = "1.17.0"
@@ -851,6 +1212,26 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/80/c5/0c06759b95747882bb50abda18f5fb48c3e9b0fbfc6ebc0e23550b52415d/stevedore-5.5.0-py3-none-any.whl", hash = "sha256:18363d4d268181e8e8452e71a38cd77630f345b2ef6b4a8d5614dac5ee0d18cf", size = 49518, upload-time = "2025-08-25T12:54:25.445Z" },
]
[[package]]
name = "twine"
version = "6.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "id" },
{ name = "keyring", marker = "platform_machine != 'ppc64le' and platform_machine != 's390x'" },
{ name = "packaging" },
{ name = "readme-renderer" },
{ name = "requests" },
{ name = "requests-toolbelt" },
{ name = "rfc3986" },
{ name = "rich" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e0/a8/949edebe3a82774c1ec34f637f5dd82d1cf22c25e963b7d63771083bbee5/twine-6.2.0.tar.gz", hash = "sha256:e5ed0d2fd70c9959770dce51c8f39c8945c574e18173a7b81802dab51b4b75cf", size = 172262, upload-time = "2025-09-04T15:43:17.255Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3a/7a/882d99539b19b1490cac5d77c67338d126e4122c8276bf640e411650c830/twine-6.2.0-py3-none-any.whl", hash = "sha256:418ebf08ccda9a8caaebe414433b0ba5e25eb5e4a927667122fbe8f829f985d8", size = 42727, upload-time = "2025-09-04T15:43:15.994Z" },
]
[[package]]
name = "types-pyyaml"
version = "6.0.12.20250915"
@@ -881,6 +1262,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" },
]
[[package]]
name = "urllib3"
version = "2.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
]
[[package]]
name = "virtualenv"
version = "20.34.0"