chore: update submodule and refine logging configuration

- Updated the client submodule to the latest commit for improved compatibility.
- Adjusted logging configuration in tests to use the new `log_format` setting for consistent renderer selection.
- Cleaned up test assertions by removing no-op trailing messages: `mock.assert_called_once(), "..."` builds a tuple instead of attaching a message, so the strings were dead weight.
- Removed the obsolete test suite for the one-off logging migration script.

All quality checks pass.
2026-01-01 21:03:09 +00:00
parent 5c5f2c5556
commit 8df1aa0c03
7 changed files with 47 additions and 628 deletions

client

Submodule client updated: 1655334a00...d5d1a23b83


@@ -226,8 +226,8 @@ venv = ".venv"
[tool.pyrefly]
pythonVersion = "3.12"
python-interpreter-path = "/home/vasceannie/repos/noteflow/.venv/bin/python"
site-package-path = ["/home/vasceannie/repos/noteflow/.venv/lib/python3.12/site-packages"]
python-interpreter-path = ".venv/bin/python"
site-package-path = [".venv/lib/python3.12/site-packages"]
exclude = ["**/proto/*_pb2*.py", "**/proto/*_pb2*.pyi", ".venv"]
[tool.pytest.ini_options]


@@ -101,17 +101,20 @@ class WebhooksMixin:
if not request.url or not request.url.startswith(("http://", "https://")):
logger.error(LOG_EVENT_WEBHOOK_REGISTRATION_FAILED, reason="invalid_url", url=request.url)
await abort_invalid_argument(context, "URL must start with http:// or https://")
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
# Validate events
if not request.events:
logger.error(LOG_EVENT_WEBHOOK_REGISTRATION_FAILED, reason="no_events", url=request.url)
await abort_invalid_argument(context, "At least one event type required")
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
try:
events = _parse_events(list(request.events))
except ValueError as exc:
logger.error(LOG_EVENT_WEBHOOK_REGISTRATION_FAILED, reason="invalid_event_type", url=request.url, error=str(exc))
await abort_invalid_argument(context, f"Invalid event type: {exc}")
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
try:
workspace_id = UUID(request.workspace_id)
@@ -119,18 +122,20 @@ class WebhooksMixin:
from noteflow.config.constants import ERROR_INVALID_WORKSPACE_ID_FORMAT
logger.error(LOG_EVENT_WEBHOOK_REGISTRATION_FAILED, reason="invalid_workspace_id", workspace_id=request.workspace_id)
await abort_invalid_argument(context, ERROR_INVALID_WORKSPACE_ID_FORMAT)
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
async with self._create_repository_provider() as uow:
if not uow.supports_webhooks:
logger.error(LOG_EVENT_WEBHOOK_REGISTRATION_FAILED, reason=ErrorCode.DATABASE_REQUIRED.code)
await abort_database_required(context, _ENTITY_WEBHOOKS)
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
config = WebhookConfig.create(
workspace_id=workspace_id,
url=request.url,
events=list(events),
name=request.name or "Webhook",
-secret=request.secret if request.secret else None,
+secret=request.secret or None,
timeout_ms=request.timeout_ms or 10000,
max_retries=request.max_retries or 3,
)
@@ -152,6 +157,7 @@ class WebhooksMixin:
reason=ErrorCode.DATABASE_REQUIRED.code,
)
await abort_database_required(context, _ENTITY_WEBHOOKS)
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
if request.enabled_only:
webhooks = await uow.webhooks.get_all_enabled()
@@ -183,6 +189,7 @@ class WebhooksMixin:
webhook_id=request.webhook_id,
)
await abort_invalid_argument(context, _ERR_INVALID_WEBHOOK_ID)
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
async with self._create_repository_provider() as uow:
if not uow.supports_webhooks:
@@ -192,6 +199,7 @@ class WebhooksMixin:
webhook_id=str(webhook_id),
)
await abort_database_required(context, _ENTITY_WEBHOOKS)
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
config = await uow.webhooks.get_by_id(webhook_id)
if config is None:
@@ -201,35 +209,26 @@ class WebhooksMixin:
webhook_id=str(webhook_id),
)
await abort_not_found(context, _ENTITY_WEBHOOK, request.webhook_id)
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
-# Build updates dict with proper typing
-updates: dict[
-str, str | frozenset[WebhookEventType] | bool | int | None
-] = {}
-if request.HasField("url"):
-updates["url"] = request.url
-if request.events: # repeated fields don't use HasField
-updates["events"] = _parse_events(list(request.events))
-if request.HasField("name"):
-updates["name"] = request.name
-if request.HasField("enabled"):
-updates["enabled"] = request.enabled
-if request.HasField("timeout_ms"):
-updates["timeout_ms"] = request.timeout_ms
-if request.HasField("max_retries"):
-updates["max_retries"] = request.max_retries
-if request.HasField("secret"):
-updates["secret"] = request.secret
-updated = replace(config, **updates, updated_at=utc_now())
+# Build updated config with explicit field assignments to satisfy type checker
+updated = replace(
+config,
+url=request.url if request.HasField("url") else config.url,
+events=_parse_events(list(request.events)) if request.events else config.events,
+name=request.name if request.HasField("name") else config.name,
+enabled=request.enabled if request.HasField("enabled") else config.enabled,
+timeout_ms=request.timeout_ms if request.HasField("timeout_ms") else config.timeout_ms,
+max_retries=request.max_retries if request.HasField("max_retries") else config.max_retries,
+secret=request.secret if request.HasField("secret") else config.secret,
+updated_at=utc_now(),
+)
saved = await uow.webhooks.update(updated)
await uow.commit()
logger.info(
"webhook_updated",
webhook_id=str(webhook_id),
-updated_fields=list(updates.keys()),
)
return _webhook_config_to_proto(saved)
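
Why the rewrite above: with a dict typed dict[str, str | frozenset[WebhookEventType] | bool | int | None], every value expanded via replace(config, **updates) carries the full union type, so a strict checker cannot prove that, say, timeout_ms receives an int. Spelling out each field keeps precise types, at the cost of no longer having a dict of changed keys to report. A minimal standalone sketch of the distinction (the Example class and field names are illustrative, not from the codebase):

    from dataclasses import dataclass, replace

    @dataclass(frozen=True)
    class Example:
        url: str
        timeout_ms: int

    cfg = Example(url="https://example.com", timeout_ms=10_000)

    # Union-valued dict: each **-expanded value is typed str | int, so the
    # checker cannot match it against the int-typed timeout_ms field.
    updates: dict[str, str | int] = {"timeout_ms": 5_000}
    # replace(cfg, **updates)  # rejected under strict type checking

    # Explicit keywords preserve each field's precise type.
    new_timeout: int | None = 5_000
    updated = replace(
        cfg,
        timeout_ms=new_timeout if new_timeout is not None else cfg.timeout_ms,
    )
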
@@ -248,6 +247,7 @@ class WebhooksMixin:
webhook_id=request.webhook_id,
)
await abort_invalid_argument(context, _ERR_INVALID_WEBHOOK_ID)
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
async with self._create_repository_provider() as uow:
if not uow.supports_webhooks:
@@ -257,6 +257,7 @@ class WebhooksMixin:
webhook_id=str(webhook_id),
)
await abort_database_required(context, _ENTITY_WEBHOOKS)
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
deleted = await uow.webhooks.delete(webhook_id)
await uow.commit()
@@ -289,6 +290,7 @@ class WebhooksMixin:
webhook_id=request.webhook_id,
)
await abort_invalid_argument(context, _ERR_INVALID_WEBHOOK_ID)
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
limit = min(request.limit or DEFAULT_WEBHOOK_DELIVERY_HISTORY_LIMIT, MAX_WEBHOOK_DELIVERIES_LIMIT)
@@ -300,6 +302,7 @@ class WebhooksMixin:
webhook_id=str(webhook_id),
)
await abort_database_required(context, _ENTITY_WEBHOOKS)
+raise # Unreachable: abort raises, but helps Pyrefly control flow analysis
deliveries = await uow.webhooks.get_deliveries(webhook_id, limit=limit)
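
A note on the repeated additions in this file: each bare raise after an abort_* call never executes, because gRPC's context.abort() always raises. The helpers are evidently not annotated NoReturn (otherwise Pyrefly would already treat the calls as terminal), so the sentinel raise marks the branch as terminated and lets type narrowing proceed. A minimal sketch of the pattern, with hypothetical names:

    from grpc import StatusCode
    from grpc.aio import ServicerContext

    async def abort_invalid_argument(context: ServicerContext, message: str) -> None:
        # context.abort() always raises, but the -> None annotation means a
        # type checker assumes control can return from this helper.
        await context.abort(StatusCode.INVALID_ARGUMENT, message)

    async def require_url(request, context: ServicerContext) -> str:
        if not request.url:
            await abort_invalid_argument(context, "URL required")
            raise  # Unreachable at runtime; tells the checker this branch ends.
        return request.url  # the checker now knows request.url is truthy here

Annotating the helpers -> NoReturn would give the same narrowing without the sentinel lines; this commit keeps the explicit raise instead.
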


@@ -105,7 +105,7 @@ class TestRegisterOidcProvider:
assert response.id == str(sample_provider.id), "should return provider id"
assert response.name == "Test Authentik", "should return provider name"
assert response.preset == "authentik", "should return preset"
-mock_service.register_provider.assert_called_once(), "register_provider should be called exactly once"
+mock_service.register_provider.assert_called_once()
async def test_returns_warnings_from_validation(
self,
@@ -149,7 +149,7 @@ class TestRegisterOidcProvider:
with pytest.raises(AssertionError, match="Unreachable"):
await oidc_servicer.RegisterOidcProvider(request, mock_grpc_context)
-mock_grpc_context.abort.assert_called_once(), "abort should be called for missing name"
+mock_grpc_context.abort.assert_called_once()
async def test_rejects_missing_issuer_url(
self,
@@ -165,7 +165,7 @@ class TestRegisterOidcProvider:
with pytest.raises(AssertionError, match="Unreachable"):
await oidc_servicer.RegisterOidcProvider(request, mock_grpc_context)
-mock_grpc_context.abort.assert_called_once(), "abort should be called for missing issuer_url"
+mock_grpc_context.abort.assert_called_once()
async def test_rejects_invalid_issuer_url_scheme(
self,
@@ -182,7 +182,7 @@ class TestRegisterOidcProvider:
with pytest.raises(AssertionError, match="Unreachable"):
await oidc_servicer.RegisterOidcProvider(request, mock_grpc_context)
-mock_grpc_context.abort.assert_called_once(), "abort should be called for invalid issuer URL scheme"
+mock_grpc_context.abort.assert_called_once()
async def test_rejects_missing_client_id(
self,
@@ -198,7 +198,7 @@ class TestRegisterOidcProvider:
with pytest.raises(AssertionError, match="Unreachable"):
await oidc_servicer.RegisterOidcProvider(request, mock_grpc_context)
-mock_grpc_context.abort.assert_called_once(), "abort should be called for missing client_id"
+mock_grpc_context.abort.assert_called_once()
async def test_handles_discovery_error(
self,
@@ -222,7 +222,7 @@ class TestRegisterOidcProvider:
with pytest.raises(AssertionError, match="Unreachable"):
await oidc_servicer.RegisterOidcProvider(request, mock_grpc_context)
-mock_grpc_context.abort.assert_called_once(), "abort should be called for discovery error"
+mock_grpc_context.abort.assert_called_once()
class TestListOidcProviders:
@@ -348,7 +348,7 @@ class TestGetOidcProvider:
with pytest.raises(AssertionError, match="Unreachable"):
await oidc_servicer.GetOidcProvider(request, mock_grpc_context)
-mock_grpc_context.abort.assert_called_once(), "abort should be called when provider not found"
+mock_grpc_context.abort.assert_called_once()
async def test_aborts_on_invalid_provider_id_format(
self,
@@ -361,7 +361,7 @@ class TestGetOidcProvider:
with pytest.raises(AssertionError, match="Unreachable"):
await oidc_servicer.GetOidcProvider(request, mock_grpc_context)
-mock_grpc_context.abort.assert_called_once(), "abort should be called for invalid UUID format"
+mock_grpc_context.abort.assert_called_once()
class TestUpdateOidcProvider:
@@ -466,7 +466,7 @@ class TestUpdateOidcProvider:
with pytest.raises(AssertionError, match="Unreachable"):
await oidc_servicer.UpdateOidcProvider(request, mock_grpc_context)
-mock_grpc_context.abort.assert_called_once(), "abort should be called when provider not found"
+mock_grpc_context.abort.assert_called_once()
class TestDeleteOidcProvider:
@@ -491,7 +491,7 @@ class TestDeleteOidcProvider:
response = await oidc_servicer.DeleteOidcProvider(request, mock_grpc_context)
assert response.success is True, "delete should return success=True"
-mock_service.registry.remove_provider.assert_called_once_with(provider_id), "remove_provider should be called with correct ID"
+mock_service.registry.remove_provider.assert_called_once_with(provider_id)
async def test_delete_aborts_when_provider_not_found(
self,
@@ -509,7 +509,7 @@ class TestDeleteOidcProvider:
with pytest.raises(AssertionError, match="Unreachable"):
await oidc_servicer.DeleteOidcProvider(request, mock_grpc_context)
-mock_grpc_context.abort.assert_called_once(), "abort should be called when provider not found"
+mock_grpc_context.abort.assert_called_once()
async def test_aborts_on_invalid_provider_id(
self,
@@ -522,7 +522,7 @@ class TestDeleteOidcProvider:
with pytest.raises(AssertionError, match="Unreachable"):
await oidc_servicer.DeleteOidcProvider(request, mock_grpc_context)
-mock_grpc_context.abort.assert_called_once(), "abort should be called for invalid UUID format"
+mock_grpc_context.abort.assert_called_once()
class TestRefreshOidcDiscovery:
@@ -619,7 +619,7 @@ class TestRefreshOidcDiscovery:
with pytest.raises(AssertionError, match="Unreachable"):
await oidc_servicer.RefreshOidcDiscovery(request, mock_grpc_context)
-mock_grpc_context.abort.assert_called_once(), "abort should be called when provider not found"
+mock_grpc_context.abort.assert_called_once()
class TestListOidcPresets:
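
The trailing strings stripped throughout this file were silent no-ops: mock.assert_called_once(), "message" is a tuple expression, so the assertion method runs normally and the string is packed into a throwaway tuple rather than attached to anything. A short demonstration:

    from unittest.mock import Mock

    m = Mock()
    m()  # exactly one call, so the assertion passes

    # Evaluates assert_called_once() (which returns None), then builds the
    # tuple (None, "never shown") and immediately discards it.
    m.assert_called_once(), "never shown"

    # assert_called_once() takes no message argument; on failure it raises
    # AssertionError with its own diagnostic, so the bare call is correct.
    m.assert_called_once()
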


@@ -96,9 +96,9 @@ class TestGetLogLevel:
class TestCreateRenderer:
"""Tests for _create_renderer helper function."""
-def test_returns_json_renderer_when_json_console_enabled(self) -> None:
-"""_create_renderer returns JSONRenderer when enable_json_console is True."""
-config = LoggingConfig(enable_json_console=True)
+def test_returns_json_renderer_when_json_format_enabled(self) -> None:
+"""_create_renderer returns JSONRenderer when log_format is 'json'."""
+config = LoggingConfig(log_format="json")
renderer = _create_renderer(config)
assert isinstance(
renderer, structlog.processors.JSONRenderer
@@ -106,7 +106,7 @@ class TestCreateRenderer:
def test_returns_json_renderer_when_not_tty(self) -> None:
"""_create_renderer returns JSONRenderer when stderr is not a TTY."""
-config = LoggingConfig(enable_json_console=False)
+config = LoggingConfig(log_format="auto")
with patch("sys.stderr") as mock_stderr:
mock_stderr.isatty.return_value = False
renderer = _create_renderer(config)
@@ -116,7 +116,7 @@ class TestCreateRenderer:
def test_returns_console_renderer_for_tty(self) -> None:
"""_create_renderer returns ConsoleRenderer for TTY with colors."""
-config = LoggingConfig(enable_json_console=False, console_colors=True)
+config = LoggingConfig(log_format="auto", console_colors=True)
with patch("sys.stderr") as mock_stderr:
mock_stderr.isatty.return_value = True
renderer = _create_renderer(config)
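
These tests track a config change from the boolean enable_json_console to a string log_format field, with "json" forcing JSON output and "auto" deferring to a TTY check. A minimal sketch of the selection logic the three tests imply (a hypothetical reconstruction; the real _create_renderer may differ):

    import sys

    import structlog

    def create_renderer(log_format: str, console_colors: bool = True):
        # "json" always renders JSON; "auto" renders JSON when stderr is not
        # an interactive TTY, and the colorized console renderer otherwise.
        if log_format == "json" or (log_format == "auto" and not sys.stderr.isatty()):
            return structlog.processors.JSONRenderer()
        return structlog.dev.ConsoleRenderer(colors=console_colors)
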


@@ -1,584 +0,0 @@
"""Comprehensive tests for logging migration script.
Tests cover:
- Pattern detection (needs_migration)
- Transformation correctness (transform_file)
- Validation logic (validate_transformation)
- Edge cases and error handling
- End-to-end scenarios with real file patterns
"""
from __future__ import annotations
import sys
from pathlib import Path
from textwrap import dedent
# Import the migration module - scripts/ added to extraPaths in pyproject.toml
sys.path.insert(0, str(Path(__file__).parent.parent.parent / 'scripts'))
from migrate_logging import (
TransformResult,
has_infrastructure_import,
needs_migration,
should_skip_file,
transform_file,
uses_logging_constants,
validate_transformation,
)
# Test constants
SAMPLE_STDLIB_LOGGER = dedent('''
"""Module with stdlib logging."""
import logging
logger = logging.getLogger(__name__)
def do_work():
logger.info("Doing work")
''').strip()
SAMPLE_ALREADY_MIGRATED = dedent('''
"""Module already using infrastructure logging."""
import logging
from noteflow.infrastructure.logging import get_logger
logger = get_logger(__name__)
def do_work():
logger.info("Doing work")
''').strip()
SAMPLE_NO_LOGGING = dedent('''
"""Module without logging."""
def simple_function():
return 42
''').strip()
SAMPLE_COMPLEX_MODULE = dedent('''
"""Complex module with various patterns."""
from __future__ import annotations
import logging
import os
from pathlib import Path
from typing import TYPE_CHECKING
if TYPE_CHECKING:
from collections.abc import Sequence
logger = logging.getLogger(__name__)
# Some constants
DEFAULT_TIMEOUT = 30
class MyService:
"""A service class."""
def __init__(self):
self._initialized = False
def start(self):
logger.info("Starting service")
self._initialized = True
def process(self, data: str) -> str:
logger.debug("Processing: %s", data)
return data.upper()
''').strip()
SAMPLE_MULTIPLE_LOGGERS = dedent('''
"""Module with multiple logger definitions."""
import logging
# Main logger
logger = logging.getLogger(__name__)
# Also create a child logger (should not be transformed)
child_logger = logging.getLogger(__name__ + ".child")
def work():
logger.info("Main work")
child_logger.debug("Child work")
''').strip()
SAMPLE_WITH_LOGGING_CONSTANTS = dedent('''
"""Module using logging constants."""
import logging
logger = logging.getLogger(__name__)
def set_debug():
logger.setLevel(logging.DEBUG)
''').strip()
class TestNeedsMigration:
"""Tests for needs_migration function."""
def test_detects_stdlib_pattern(self) -> None:
"""Detects standard stdlib logging pattern."""
assert needs_migration(SAMPLE_STDLIB_LOGGER) is True, "stdlib logging pattern should require migration"
def test_ignores_already_migrated(self) -> None:
"""Already migrated files don't need migration."""
# File has get_logger instead of logging.getLogger, so pattern won't match
assert needs_migration(SAMPLE_ALREADY_MIGRATED) is False, "already migrated file should not need migration"
def test_ignores_no_logging(self) -> None:
"""Files without logging don't need migration."""
assert needs_migration(SAMPLE_NO_LOGGING) is False, "file without logging should not need migration"
def test_detects_complex_module(self) -> None:
"""Detects logging in complex modules."""
assert needs_migration(SAMPLE_COMPLEX_MODULE) is True, "complex module with logging should require migration"
def test_requires_both_import_and_getlogger(self) -> None:
"""Requires both import logging AND getLogger call."""
# Only import, no getLogger
only_import = "import logging\n\ndef foo(): pass"
assert needs_migration(only_import) is False, "file with only import (no getLogger) should not need migration"
# Only getLogger, no import (invalid but should handle)
only_getlogger = "logger = logging.getLogger(__name__)"
assert needs_migration(only_getlogger) is False, "file with only getLogger (no import) should not need migration"
class TestHasInfrastructureImport:
"""Tests for has_infrastructure_import function."""
def test_detects_get_logger_import(self) -> None:
"""Detects get_logger import."""
content = "from noteflow.infrastructure.logging import get_logger"
assert has_infrastructure_import(content) is True, "should detect get_logger import from infrastructure.logging"
def test_detects_multi_import(self) -> None:
"""Detects multi-symbol import."""
content = "from noteflow.infrastructure.logging import get_logger, configure_logging"
assert has_infrastructure_import(content) is True, "should detect multi-symbol import from infrastructure.logging"
def test_no_import(self) -> None:
"""No import returns False."""
content = "import logging"
assert has_infrastructure_import(content) is False, "stdlib logging import should not be detected as infrastructure import"
class TestShouldSkipFile:
"""Tests for should_skip_file function."""
def test_skips_proto_files(self) -> None:
"""Skips protobuf generated files."""
assert should_skip_file(Path('foo_pb2.py')) is True, "should skip *_pb2.py protobuf files"
assert should_skip_file(Path('foo_pb2_grpc.py')) is True, "should skip *_pb2_grpc.py protobuf files"
def test_skips_logging_module_files(self) -> None:
"""Skips files in logging module itself."""
assert should_skip_file(
Path('src/noteflow/infrastructure/logging/config.py')
) is True, "should skip files within infrastructure/logging module"
assert should_skip_file(
Path('src/noteflow/infrastructure/logging/__init__.py')
) is True, "should skip __init__.py within infrastructure/logging module"
def test_allows_normal_files(self) -> None:
"""Allows normal Python files."""
assert should_skip_file(Path('src/noteflow/grpc/client.py')) is False, "should not skip normal grpc files"
assert should_skip_file(Path('src/noteflow/application/services/foo.py')) is False, "should not skip normal application service files"
class TestTransformFile:
"""Tests for transform_file function."""
def test_transforms_simple_module(self, tmp_path: Path) -> None:
"""Transforms a simple module correctly."""
test_file = tmp_path / 'simple.py'
test_file.write_text(SAMPLE_STDLIB_LOGGER)
result = transform_file(test_file)
assert result.has_changes is True, "simple module with logging should have changes"
assert 'from noteflow.infrastructure.logging import get_logger' in result.transformed, "transformed code should include infrastructure import"
assert 'logger = get_logger(__name__)' in result.transformed, "transformed code should use get_logger(__name__)"
assert 'logging.getLogger(__name__)' not in result.transformed, "transformed code should not contain logging.getLogger"
def test_transforms_complex_module(self, tmp_path: Path) -> None:
"""Transforms complex module preserving structure, removes unused import logging."""
test_file = tmp_path / 'complex.py'
test_file.write_text(SAMPLE_COMPLEX_MODULE)
result = transform_file(test_file)
assert result.has_changes is True, "complex module with logging should have changes"
# import logging should be removed (not using constants)
assert 'import logging' not in result.transformed, "unused import logging should be removed"
# Infrastructure import should be present
assert 'from noteflow.infrastructure.logging import get_logger' in result.transformed, "transformed code should include infrastructure import"
# Check getLogger replaced
assert 'logger = get_logger(__name__)' in result.transformed, "transformed code should use get_logger(__name__)"
# Changes should include removal
assert 'Removed unused import logging' in result.changes, "changes should document removal of unused import"
def test_handles_multiple_loggers(self, tmp_path: Path) -> None:
"""Only transforms main __name__ logger, not child loggers."""
test_file = tmp_path / 'multiple.py'
test_file.write_text(SAMPLE_MULTIPLE_LOGGERS)
result = transform_file(test_file)
assert result.has_changes is True, "module with multiple loggers should have changes"
# Main logger transformed
assert 'logger = get_logger(__name__)' in result.transformed, "main logger should be transformed to get_logger"
# Child logger NOT transformed (different pattern)
assert 'child_logger = logging.getLogger(__name__ + ".child")' in result.transformed, "child logger with custom name should not be transformed"
def test_no_change_for_already_migrated(self, tmp_path: Path) -> None:
"""No changes for already migrated files."""
test_file = tmp_path / 'migrated.py'
test_file.write_text(SAMPLE_ALREADY_MIGRATED)
result = transform_file(test_file)
assert result.has_changes is False, "already migrated file should have no changes"
def test_no_change_for_no_logging(self, tmp_path: Path) -> None:
"""No changes for files without logging."""
test_file = tmp_path / 'no_logging.py'
test_file.write_text(SAMPLE_NO_LOGGING)
result = transform_file(test_file)
assert result.has_changes is False, "file without logging should have no changes"
def test_preserves_file_structure(self, tmp_path: Path) -> None:
"""Preserves docstrings, imports order, and code structure."""
test_file = tmp_path / 'structured.py'
test_file.write_text(SAMPLE_COMPLEX_MODULE)
result = transform_file(test_file)
# Docstring preserved
assert '"""Complex module with various patterns."""' in result.transformed, "module docstring should be preserved"
# Class preserved
assert 'class MyService:' in result.transformed, "class definition should be preserved"
# Type checking block preserved
assert 'if TYPE_CHECKING:' in result.transformed, "TYPE_CHECKING block should be preserved"
class TestValidateTransformation:
"""Tests for validate_transformation function."""
def test_valid_transformation_passes(self, tmp_path: Path) -> None:
"""Valid transformation has no errors."""
test_file = tmp_path / 'valid.py'
test_file.write_text(SAMPLE_STDLIB_LOGGER)
result = transform_file(test_file)
errors = validate_transformation(result)
assert errors == [], f'Expected no errors, got: {errors}'
def test_detects_missing_import(self, tmp_path: Path) -> None:
"""Detects missing infrastructure import."""
result = TransformResult(
file_path=tmp_path / 'bad.py',
original=SAMPLE_STDLIB_LOGGER,
transformed=SAMPLE_STDLIB_LOGGER.replace(
'logger = logging.getLogger(__name__)',
'logger = get_logger(__name__)',
),
)
errors = validate_transformation(result)
assert any('Missing infrastructure.logging import' in e for e in errors), f"should detect missing import error, got: {errors}"
def test_detects_syntax_error(self, tmp_path: Path) -> None:
"""Detects syntax errors in transformed code."""
result = TransformResult(
file_path=tmp_path / 'syntax_error.py',
original=SAMPLE_STDLIB_LOGGER,
transformed='def broken(\n', # Invalid syntax
)
errors = validate_transformation(result)
assert any('Syntax error' in e for e in errors), f"should detect syntax error, got: {errors}"
def test_no_errors_for_unchanged(self, tmp_path: Path) -> None:
"""No errors for unchanged files."""
result = TransformResult(
file_path=tmp_path / 'unchanged.py',
original=SAMPLE_NO_LOGGING,
transformed=SAMPLE_NO_LOGGING,
)
errors = validate_transformation(result)
assert errors == [], f"unchanged file should have no validation errors, got: {errors}"
class TestTransformResultDiff:
"""Tests for TransformResult diff generation."""
def test_diff_shows_changes(self, tmp_path: Path) -> None:
"""Diff correctly shows added and removed lines."""
test_file = tmp_path / 'diff_test.py'
test_file.write_text(SAMPLE_STDLIB_LOGGER)
result = transform_file(test_file)
diff = result.get_diff()
assert '-logger = logging.getLogger(__name__)' in diff, "diff should show removed logging.getLogger line"
assert '+logger = get_logger(__name__)' in diff, "diff should show added get_logger line"
assert '+from noteflow.infrastructure.logging import get_logger' in diff, "diff should show added infrastructure import"
def test_no_diff_for_unchanged(self, tmp_path: Path) -> None:
"""No diff for unchanged files."""
test_file = tmp_path / 'no_diff.py'
test_file.write_text(SAMPLE_NO_LOGGING)
result = transform_file(test_file)
diff = result.get_diff()
assert diff == '', f"unchanged file should have empty diff, got: {diff!r}"
class TestEndToEndScenarios:
"""End-to-end tests with realistic file patterns."""
def test_grpc_service_pattern(self, tmp_path: Path) -> None:
"""Transforms typical gRPC service file."""
content = dedent('''
"""gRPC service implementation."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING
import grpc
if TYPE_CHECKING:
from noteflow.grpc.proto import noteflow_pb2
logger = logging.getLogger(__name__)
class MyServicer:
def __init__(self):
logger.info("Servicer initialized")
def DoSomething(self, request, context):
logger.debug("Processing request")
return noteflow_pb2.Response()
''').strip()
test_file = tmp_path / 'service.py'
test_file.write_text(content)
result = transform_file(test_file)
errors = validate_transformation(result)
assert result.has_changes is True, "gRPC service file should have changes"
assert errors == [], f"gRPC service transformation should have no errors, got: {errors}"
assert 'from noteflow.infrastructure.logging import get_logger' in result.transformed, "transformed gRPC service should include infrastructure import"
assert 'logger = get_logger(__name__)' in result.transformed, "transformed gRPC service should use get_logger"
def test_application_service_pattern(self, tmp_path: Path) -> None:
"""Transforms typical application service file."""
content = dedent('''
"""Application service with logging."""
import logging
from dataclasses import dataclass
from noteflow.domain.entities import Meeting
logger = logging.getLogger(__name__)
@dataclass
class ServiceResult:
success: bool
message: str
class MeetingService:
def create_meeting(self, title: str) -> ServiceResult:
logger.info("Creating meeting: %s", title)
meeting = Meeting(title=title)
logger.debug("Meeting created with ID: %s", meeting.id)
return ServiceResult(success=True, message="Created")
''').strip()
test_file = tmp_path / 'meeting_service.py'
test_file.write_text(content)
result = transform_file(test_file)
errors = validate_transformation(result)
assert result.has_changes is True, "application service file should have changes"
assert errors == [], f"application service transformation should have no errors, got: {errors}"
# Percent-style formatting should be preserved
assert 'logger.info("Creating meeting: %s", title)' in result.transformed, "percent-style formatting should be preserved"
def test_infrastructure_adapter_pattern(self, tmp_path: Path) -> None:
"""Transforms typical infrastructure adapter file."""
content = dedent('''
"""Infrastructure adapter with exception logging."""
from __future__ import annotations
import logging
from typing import Any
logger = logging.getLogger(__name__)
class DatabaseAdapter:
def __init__(self, connection_string: str):
self._conn_str = connection_string
logger.info("Database adapter initialized")
def execute(self, query: str) -> Any:
try:
logger.debug("Executing query: %s", query[:50])
# ... execute query
return None
except Exception:
logger.exception("Query failed")
raise
''').strip()
test_file = tmp_path / 'database.py'
test_file.write_text(content)
result = transform_file(test_file)
errors = validate_transformation(result)
assert result.has_changes is True, "infrastructure adapter file should have changes"
assert errors == [], f"infrastructure adapter transformation should have no errors, got: {errors}"
# Exception logging preserved
assert 'logger.exception("Query failed")' in result.transformed, "exception logging should be preserved"
class TestEdgeCases:
"""Tests for edge cases and unusual patterns."""
def test_logger_with_custom_name(self, tmp_path: Path) -> None:
"""Handles logger with custom name (not __name__)."""
content = dedent('''
import logging
# Custom logger name - should NOT be transformed
logger = logging.getLogger("custom.logger.name")
''').strip()
test_file = tmp_path / 'custom_logger.py'
test_file.write_text(content)
result = transform_file(test_file)
# Custom name pattern doesn't match, so no migration needed
assert result.has_changes is False, "logger with custom name should not be transformed"
def test_multiline_getlogger(self, tmp_path: Path) -> None:
"""Handles multiline getLogger call."""
content = dedent('''
import logging
logger = logging.getLogger(
__name__
)
def work():
logger.info("Working")
''').strip()
test_file = tmp_path / 'multiline.py'
test_file.write_text(content)
result = transform_file(test_file)
# Our regex is simple and won't match multiline
# This is acceptable - user can migrate manually
# The key is we don't break anything
assert 'logging' in result.transformed, "multiline getLogger should be preserved (not broken)"
def test_empty_file(self, tmp_path: Path) -> None:
"""Handles empty file gracefully."""
test_file = tmp_path / 'empty.py'
test_file.write_text('')
result = transform_file(test_file)
assert result.has_changes is False, "empty file should have no changes"
assert result.transformed == '', "empty file transformation should produce empty string"
def test_file_with_only_comments(self, tmp_path: Path) -> None:
"""Handles file with only comments."""
content = '# Just a comment\n# Another comment\n'
test_file = tmp_path / 'comments.py'
test_file.write_text(content)
result = transform_file(test_file)
assert result.has_changes is False, "file with only comments should have no changes"
def test_keeps_import_logging_when_constants_used(self, tmp_path: Path) -> None:
"""Keeps import logging when logging constants are used."""
test_file = tmp_path / 'with_constants.py'
test_file.write_text(SAMPLE_WITH_LOGGING_CONSTANTS)
result = transform_file(test_file)
assert result.has_changes is True, "file with logging constants should have changes"
# import logging should be KEPT (logging.DEBUG is used)
assert 'import logging' in result.transformed, "import logging should be kept when constants are used"
assert 'from noteflow.infrastructure.logging import get_logger' in result.transformed, "infrastructure import should be added"
assert 'logger = get_logger(__name__)' in result.transformed, "logger should use get_logger"
# Should NOT have the removal message
assert 'Removed unused import logging' not in result.changes, "should not remove import logging when constants are used"
def test_uses_logging_constants_detection(self) -> None:
"""Tests uses_logging_constants function directly."""
assert uses_logging_constants("logging.DEBUG") is True, "should detect logging.DEBUG constant"
assert uses_logging_constants("logging.StreamHandler()") is True, "should detect logging.StreamHandler usage"
assert uses_logging_constants("logging.basicConfig()") is True, "should detect logging.basicConfig usage"
assert uses_logging_constants("logger.info('hello')") is False, "logger.info should not be detected as constant usage"
def test_adds_get_logger_to_existing_import(self, tmp_path: Path) -> None:
"""Adds get_logger to existing infrastructure import."""
content = dedent('''
import logging
from noteflow.infrastructure.logging import configure_logging
configure_logging()
logger = logging.getLogger(__name__)
def work():
logger.info("Working")
''').strip()
test_file = tmp_path / 'partial_import.py'
test_file.write_text(content)
result = transform_file(test_file)
assert result.has_changes is True, "file with partial infrastructure import should have changes"
assert 'get_logger, configure_logging' in result.transformed, "get_logger should be added to existing import"
assert 'logger = get_logger(__name__)' in result.transformed, "logger should use get_logger"
assert 'Added get_logger to existing import' in result.changes, "changes should document adding get_logger to existing import"
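
The deleted suite above pins down the migration script's contract in detail. For reference, a minimal reconstruction of the two detection helpers those assertions describe (hypothetical code; the actual migrate_logging.py implementation may have differed):

    import re

    # Single-line form only: the multiline-getLogger test above documents
    # that calls spanning lines are deliberately left for manual migration.
    _STDLIB_LOGGER = re.compile(
        r"^logger\s*=\s*logging\.getLogger\(__name__\)\s*$", re.MULTILINE
    )
    _IMPORT_LOGGING = re.compile(r"^import logging\b", re.MULTILINE)
    _INFRA_IMPORT = re.compile(
        r"^from noteflow\.infrastructure\.logging import\b", re.MULTILINE
    )

    def needs_migration(content: str) -> bool:
        # Both the stdlib import and the __name__-based getLogger call must
        # be present, matching test_requires_both_import_and_getlogger.
        return bool(_IMPORT_LOGGING.search(content)) and bool(_STDLIB_LOGGER.search(content))

    def has_infrastructure_import(content: str) -> bool:
        return bool(_INFRA_IMPORT.search(content))
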