"""Global test fixtures to mock optional extra dependencies.
|
|
|
|
These stubs allow running the suite without installing heavy/optional packages
|
|
like openai/anthropic/ollama/pywinctl, while individual tests can still
|
|
override with more specific monkeypatches when needed.
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
import asyncio
|
|
import os
|
|
import sys
|
|
import types
|
|
from collections.abc import Coroutine, Generator, Sequence
|
|
from datetime import datetime
|
|
from pathlib import Path
|
|
from types import SimpleNamespace
|
|
from typing import Protocol, cast
|
|
from unittest.mock import AsyncMock, MagicMock
|
|
from uuid import uuid4
|
|
|
|
import pytest
|
|
|
|
from noteflow.config.constants import DEFAULT_SAMPLE_RATE
|
|
from noteflow.config.settings import CalendarIntegrationSettings
|
|
from noteflow.domain.entities import Meeting
|
|
from noteflow.domain.value_objects import MeetingId
|
|
from noteflow.domain.webhooks import WebhookConfig, WebhookEventType
|
|
from noteflow.grpc.service import NoteFlowServicer
|
|
from noteflow.infrastructure.security.crypto import AesGcmCryptoBox
|
|
from noteflow.infrastructure.security.keystore import InMemoryKeyStore
|
|
|
|
# Re-export for test convenience — tests can import SAMPLE_RATE from conftest
# instead of repeating the magic number 16000.
SAMPLE_RATE = DEFAULT_SAMPLE_RATE
"""Standard test sample rate (16 kHz). Import this instead of using magic number 16000."""
|
|
|
|
|
|
# ============================================================================
|
|
# Shared logging context fixtures
|
|
# ============================================================================
|
|
|
|
|
|
@pytest.fixture
def reset_context_vars() -> Generator[None, None, None]:
    """Reset logging context variables before and after each test.

    Clears request_id_var, user_id_var, and workspace_id_var to None so
    state from one test cannot leak into the next. Use via
    pytest.mark.usefixtures or as a fixture parameter.
    """
    from noteflow.infrastructure.logging import (
        request_id_var,
        user_id_var,
        workspace_id_var,
    )

    context_vars = (request_id_var, user_id_var, workspace_id_var)
    # Clear before the test body runs.
    for var in context_vars:
        var.set(None)
    yield
    # Clear again on teardown.
    for var in context_vars:
        var.set(None)
|
|
|
|
|
|
@pytest.fixture
def mock_oauth_manager() -> MagicMock:
    """Create a mock OAuthManager for testing OAuth flows.

    Exposes initiate_auth (sync, returns a fixed auth URL and state) plus
    complete_auth, refresh_tokens, and revoke_tokens (async). Tests needing
    specific return values should configure the mock in their own setup.
    """
    manager = MagicMock()
    auth_url = "https://auth.example.com/authorize"
    manager.initiate_auth = MagicMock(return_value=(auth_url, "state123"))
    for async_method in ("complete_auth", "refresh_tokens", "revoke_tokens"):
        setattr(manager, async_method, AsyncMock())
    return manager
|
|
|
|
|
|
# ============================================================================
# Test database URL fixture (required for Settings validation)
# ============================================================================

# Dummy asyncpg DSN — never actually connected to; it only satisfies the
# Settings validation performed by get_settings().
_TEST_DATABASE_URL = "postgresql+asyncpg://test:test@localhost:5432/noteflow_test"
|
|
|
|
|
|
@pytest.fixture(autouse=True, scope="session")
def set_test_database_url() -> Generator[None, None, None]:
    """Point NOTEFLOW_DATABASE_URL at a dummy value for the whole session.

    Many tests create NoteFlowServicer, which calls get_settings(), which
    requires NOTEFLOW_DATABASE_URL. This sets a placeholder and restores the
    prior value (or removes the key) on session teardown.
    """
    key = "NOTEFLOW_DATABASE_URL"
    saved = os.environ.get(key)
    os.environ[key] = _TEST_DATABASE_URL
    yield
    if saved is None:
        del os.environ[key]
    else:
        os.environ[key] = saved
|
|
|
|
|
|
@pytest.fixture(autouse=True)
def clear_settings_cache() -> Generator[None, None, None]:
    """Clear cached settings before and after each test for isolation.

    get_settings() and get_trigger_settings() are lru_cache-backed, so values
    loaded under one test's env vars would otherwise pollute later tests.
    """
    from noteflow.config.settings._loaders import get_settings, get_trigger_settings

    cached_loaders = (get_settings, get_trigger_settings)
    for loader in cached_loaders:
        loader.cache_clear()
    yield
    for loader in cached_loaders:
        loader.cache_clear()
|
|
|
|
|
|
# ============================================================================
# Platform-specific library path setup (run before pytest collection)
# ============================================================================

# macOS Homebrew: expose GLib/GTK libraries for WeasyPrint via DYLD_LIBRARY_PATH.
_homebrew_lib = Path("/opt/homebrew/lib")
if sys.platform == "darwin" and _homebrew_lib.exists():
    _homebrew_str = str(_homebrew_lib)
    _current_path = os.environ.get("DYLD_LIBRARY_PATH", "")
    # Prepend only when not already present, preserving any existing entries.
    if _homebrew_str not in _current_path:
        if _current_path:
            os.environ["DYLD_LIBRARY_PATH"] = f"{_homebrew_str}:{_current_path}"
        else:
            os.environ["DYLD_LIBRARY_PATH"] = _homebrew_str
|
|
|
|
# ============================================================================
# Module-level mocks (run before pytest collection)
# ============================================================================

# Mock sounddevice when it is unusable (must be done before collection).
# The stub is only installed if the real import fails.
if "sounddevice" not in sys.modules:
    try:
        import sounddevice as _sounddevice

        del _sounddevice
    except (OSError, ImportError):
        # OSError: package installed but the PortAudio shared library is absent.
        # ImportError: package not installed at all — previously uncaught, which
        # broke collection on hosts without the optional audio extra.
        def _mock_query_devices() -> list[dict[str, object]]:
            """Return an empty device list, mimicking a host with no audio devices."""
            return []

        sounddevice_module = types.ModuleType("sounddevice")
        sounddevice_module.__dict__["InputStream"] = MagicMock
        sounddevice_module.__dict__["OutputStream"] = MagicMock
        sounddevice_module.__dict__["query_devices"] = _mock_query_devices
        sounddevice_module.__dict__["default"] = SimpleNamespace(device=(0, 0))
        sys.modules["sounddevice"] = sounddevice_module
|
|
|
|
|
|
@pytest.fixture(autouse=True, scope="session")
def mock_optional_extras() -> None:
    """Install lightweight stubs for optional extra deps if absent.

    Each optional dependency (openai, anthropic, ollama, pymonctl, pywinctl)
    is probed with a real import; when that fails, a minimal stand-in module
    is registered in sys.modules so the suite can run without the package.
    Individual tests can still monkeypatch more specific behavior on top.
    """
    import importlib

    def _needs_stub(name: str, exceptions: tuple[type[BaseException], ...]) -> bool:
        """Return True when *name* is absent/unimportable and needs a stub."""
        if name in sys.modules:
            return False
        try:
            importlib.import_module(name)
        except exceptions:
            return True
        return False

    def _install(name: str, attrs: dict[str, object]) -> None:
        """Register a stub module *name* exposing *attrs* in sys.modules."""
        module = types.ModuleType(name)
        module.__dict__.update(attrs)
        sys.modules[name] = module

    if _needs_stub("openai", (ImportError,)):

        def _default_create(**_: object) -> SimpleNamespace:
            return SimpleNamespace(
                choices=[SimpleNamespace(message=SimpleNamespace(content="{}"))],
                usage=SimpleNamespace(total_tokens=0),
            )

        def _mock_openai_client(**_: object) -> SimpleNamespace:
            return SimpleNamespace(
                chat=SimpleNamespace(completions=SimpleNamespace(create=_default_create))
            )

        _install("openai", {"OpenAI": _mock_openai_client})

    if _needs_stub("anthropic", (ImportError,)):

        def _default_messages_create(**_: object) -> SimpleNamespace:
            return SimpleNamespace(
                content=[SimpleNamespace(text="{}")],
                usage=SimpleNamespace(input_tokens=0, output_tokens=0),
            )

        def _mock_anthropic_client(**_: object) -> SimpleNamespace:
            return SimpleNamespace(messages=SimpleNamespace(create=_default_messages_create))

        _install("anthropic", {"Anthropic": _mock_anthropic_client})

    if _needs_stub("ollama", (ImportError,)):

        def _default_chat(**_: object) -> dict[str, object]:
            return {
                "message": {
                    "content": '{"executive_summary": "", "key_points": [], "action_items": []}'
                },
                "eval_count": 0,
                "prompt_eval_count": 0,
            }

        def _mock_list() -> dict[str, object]:
            return {}

        def _mock_ollama_client(**_: object) -> SimpleNamespace:
            return SimpleNamespace(list=_mock_list, chat=_default_chat)

        _install("ollama", {"Client": _mock_ollama_client})

    # pywinctl depends on pymonctl; both can raise non-ImportError errors in
    # headless environments (e.g. Xlib.error.DisplayNameError), so these two
    # deliberately catch Exception rather than just ImportError.
    if _needs_stub("pymonctl", (Exception,)):

        def _mock_get_all_monitors() -> list[object]:
            return []

        _install("pymonctl", {"getAllMonitors": _mock_get_all_monitors})

    if _needs_stub("pywinctl", (Exception,)):

        def _mock_get_active_window() -> None:
            return None

        def _mock_get_all_windows() -> list[object]:
            return []

        def _mock_get_all_titles() -> list[str]:
            return []

        _install(
            "pywinctl",
            {
                "getActiveWindow": _mock_get_active_window,
                "getAllWindows": _mock_get_all_windows,
                "getAllTitles": _mock_get_all_titles,
            },
        )
|
|
|
|
|
|
@pytest.fixture
def mock_uow() -> MagicMock:
    """Create a mock UnitOfWork for service tests.

    Returns a MagicMock configured as an async context manager, with async
    commit/rollback, one MagicMock per repository attribute, an async
    assets.delete_meeting_assets, and every supports_* capability flag True.
    """
    uow = MagicMock()
    uow.__aenter__ = AsyncMock(return_value=uow)
    uow.__aexit__ = AsyncMock(return_value=None)
    uow.commit = AsyncMock()
    uow.rollback = AsyncMock()

    repositories = (
        "meetings",
        "segments",
        "summaries",
        "annotations",
        "preferences",
        "diarization_jobs",
        "entities",
        "webhooks",
        "integrations",
        "assets",
        "tasks",
        "analytics",
    )
    for repo in repositories:
        setattr(uow, repo, MagicMock())
    # Asset deletion is awaited by services, so it needs an AsyncMock.
    uow.assets.delete_meeting_assets = AsyncMock()

    for capability in ("webhooks", "integrations", "tasks", "analytics"):
        setattr(uow, f"supports_{capability}", True)
    return uow
|
|
|
|
|
|
@pytest.fixture
def crypto() -> AesGcmCryptoBox:
    """Provide an AES-GCM crypto box backed by an in-memory keystore."""
    keystore = InMemoryKeyStore()
    return AesGcmCryptoBox(keystore)
|
|
|
|
|
|
@pytest.fixture
def meetings_dir(tmp_path: Path) -> Path:
    """Return a per-test path for a temporary meetings directory."""
    return tmp_path.joinpath("meetings")
|
|
|
|
|
|
@pytest.fixture
def webhook_config() -> WebhookConfig:
    """Create a webhook config subscribed only to MEETING_COMPLETED."""
    subscribed_events = [WebhookEventType.MEETING_COMPLETED]
    return WebhookConfig.create(
        workspace_id=uuid4(),
        url="https://example.com/webhook",
        events=subscribed_events,
        name="Test Webhook",
    )
|
|
|
|
|
|
@pytest.fixture
def webhook_config_all_events() -> WebhookConfig:
    """Create a webhook config subscribed to all events, with a secret."""
    all_events = [
        WebhookEventType.MEETING_COMPLETED,
        WebhookEventType.SUMMARY_GENERATED,
        WebhookEventType.RECORDING_STARTED,
        WebhookEventType.RECORDING_STOPPED,
    ]
    return WebhookConfig.create(
        workspace_id=uuid4(),
        url="https://example.com/webhook",
        events=all_events,
        name="All Events Webhook",
        secret="test-secret-key",
    )
|
|
|
|
|
|
@pytest.fixture
def sample_datetime() -> datetime:
    """Return a fixed, timezone-aware (UTC) datetime for deterministic tests."""
    from datetime import UTC, datetime

    return datetime(year=2024, month=1, day=15, hour=10, minute=30, second=0, tzinfo=UTC)
|
|
|
|
|
|
@pytest.fixture
def calendar_settings() -> CalendarIntegrationSettings:
    """Build calendar integration settings with test OAuth credentials."""
    settings_kwargs = {
        "google_client_id": "test-google-client-id",
        "google_client_secret": "test-google-client-secret",
        "outlook_client_id": "test-outlook-client-id",
        "outlook_client_secret": "test-outlook-client-secret",
        "redirect_uri": "http://localhost:8080/callback",
        "sync_hours_ahead": 24,
        "max_events": 20,
        "sync_interval_minutes": 15,
    }
    return CalendarIntegrationSettings(**settings_kwargs)
|
|
|
|
|
|
# ============================================================================
|
|
# Common domain fixtures
|
|
# ============================================================================
|
|
|
|
|
|
@pytest.fixture
def meeting_id() -> MeetingId:
    """Create a fresh random MeetingId for a test."""
    # MeetingId is already imported at module level; the previous
    # function-scope re-import was redundant.
    return MeetingId(uuid4())
|
|
|
|
|
|
@pytest.fixture
def sample_meeting() -> Meeting:
    """Create a sample meeting for testing."""
    # Meeting is already imported at module level; the previous
    # function-scope re-import was redundant.
    return Meeting.create(title="Test Meeting")
|
|
|
|
|
|
@pytest.fixture
def recording_meeting() -> Meeting:
    """Create a meeting that has been moved into the RECORDING state."""
    # Meeting is already imported at module level; the previous
    # function-scope re-import was redundant.
    meeting = Meeting.create(title="Recording Meeting")
    meeting.start_recording()
    return meeting
|
|
|
|
|
|
@pytest.fixture
def sample_rate() -> int:
    """Default audio sample rate (16 kHz) for testing."""
    # SAMPLE_RATE is the module-level re-export of DEFAULT_SAMPLE_RATE.
    return SAMPLE_RATE
|
|
|
|
|
|
# ============================================================================
|
|
# gRPC context mock
|
|
# ============================================================================
|
|
|
|
|
|
@pytest.fixture
def mock_grpc_context() -> MagicMock:
    """Create mock gRPC context for servicer tests.

    ctx.abort hands back a fresh real awaitable (asyncio.sleep(0)) via
    side_effect so garbage collection never emits 'coroutine was never
    awaited' warnings.
    """
    # asyncio is already imported at module level; the previous
    # function-scope re-import was redundant. grpc stays a local import
    # so merely importing conftest does not require grpc.
    import grpc.aio

    def _abort_side_effect(*args: object, **kwargs: object) -> Coroutine[object, object, None]:
        # Fresh awaitable per call — callers do `await ctx.abort(...)`.
        return asyncio.sleep(0)

    ctx = MagicMock(spec=grpc.aio.ServicerContext)
    ctx.abort = MagicMock(side_effect=_abort_side_effect)
    return ctx
|
|
|
|
|
|
# ============================================================================
|
|
# ASR engine mock
|
|
# ============================================================================
|
|
|
|
|
|
@pytest.fixture
def mockasr_engine() -> MagicMock:
    """Create default mock ASR engine for testing.

    Returns:
        Mock ASR engine whose sync and async transcribe methods always yield
        a single "Test transcription" result.
    """
    from dataclasses import dataclass

    import numpy as np
    from numpy.typing import NDArray

    @dataclass
    class MockAsrResult:
        """Mock ASR transcription result."""

        text: str
        start: float = 0.0
        end: float = 1.0
        language: str = "en"
        language_probability: float = 0.99
        avg_logprob: float = -0.5
        no_speech_prob: float = 0.01

    def _sync_transcribe(_audio: NDArray[np.float32]) -> list[MockAsrResult]:
        return [MockAsrResult(text="Test transcription")]

    async def _async_transcribe(
        _audio: NDArray[np.float32],
        _language: str | None = None,
    ) -> list[MockAsrResult]:
        # Yield control once so the coroutine behaves like real async work.
        await asyncio.sleep(0)
        return [MockAsrResult(text="Test transcription")]

    engine = MagicMock()
    engine.is_loaded = True
    engine.model_size = "base"
    engine.transcribe = _sync_transcribe
    engine.transcribe_async = _async_transcribe
    return engine
|
|
|
|
|
|
# ============================================================================
|
|
# gRPC Servicer fixtures
|
|
# ============================================================================
|
|
|
|
|
|
@pytest.fixture
def memory_servicer(mockasr_engine: MagicMock, tmp_path: Path) -> NoteFlowServicer:
    """Create NoteFlowServicer with in-memory backend for testing.

    session_factory=None selects the memory store (no database), which is
    fast enough for unit tests of concurrency and state management.
    """
    meetings_path = tmp_path / "meetings"
    return NoteFlowServicer(
        asr_engine=mockasr_engine,
        session_factory=None,
        meetings_dir=meetings_path,
    )
|
|
|
|
|
|
# ============================================================================
|
|
# Typed pytest.approx helper
|
|
# ============================================================================
|
|
|
|
|
|
class _ApproxCallable(Protocol):
    """Protocol for pytest.approx with explicit types.

    Models the scalar-float call shape of pytest.approx so approx_float can
    cast to a precisely-typed callable.
    """

    def __call__(
        self,
        expected: float,
        *,
        rel: float | None = None,
        abs: float | None = None,
        nan_ok: bool = False,
    ) -> object: ...
|
|
|
|
|
|
class _ApproxSequenceCallable(Protocol):
    """Protocol for pytest.approx with sequence types.

    Models the sequence-of-floats call shape of pytest.approx so
    approx_sequence can cast to a precisely-typed callable.
    """

    def __call__(
        self,
        expected: Sequence[float],
        *,
        rel: float | None = None,
        abs: float | None = None,
        nan_ok: bool = False,
    ) -> object: ...
|
|
|
|
|
|
def approx_float(
    expected: float,
    *,
    rel: float | None = None,
    abs: float | None = None,
) -> object:
    """Typed wrapper for pytest.approx to satisfy type checkers.

    pytest.approx lacks proper type stubs, leading to reportUnknownMemberType
    errors. This wrapper provides explicit typing while delegating to the
    underlying pytest.approx functionality.

    Args:
        expected: The expected float value.
        rel: Relative tolerance (as a fraction).
        abs: Absolute tolerance.

    Returns:
        ApproxBase instance for comparison.

    Example:
        assert result == approx_float(1.5, rel=0.01)
    """
    typed_approx: _ApproxCallable = cast(_ApproxCallable, pytest.approx)
    return typed_approx(expected, rel=rel, abs=abs)
|
|
|
|
|
|
def approx_sequence(
    expected: Sequence[float],
    *,
    rel: float | None = None,
    abs: float | None = None,
) -> object:
    """Typed wrapper for pytest.approx with sequence types.

    Similar to approx_float but for sequences of floats (e.g., embeddings).
    pytest.approx lacks proper type stubs, leading to reportUnknownMemberType
    errors. This wrapper provides explicit typing while delegating to the
    underlying pytest.approx functionality.

    Args:
        expected: The expected sequence of float values.
        rel: Relative tolerance (as a fraction).
        abs: Absolute tolerance.

    Returns:
        ApproxBase instance for comparison.

    Example:
        assert embedding == approx_sequence([0.1, 0.2, 0.3], rel=0.01)
    """
    typed_approx: _ApproxSequenceCallable = cast(_ApproxSequenceCallable, pytest.approx)
    return typed_approx(expected, rel=rel, abs=abs)
|