"""Tests for server auto-enable functionality.
|
|
|
|
Validates that the server automatically enables cloud LLM summarization and
|
|
calendar services based on app configuration stored in the database.
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
from unittest.mock import AsyncMock, MagicMock, patch
|
|
from uuid import uuid4
|
|
|
|
import pytest
|
|
|
|
from noteflow.application.services.summarization import (
|
|
SummarizationMode,
|
|
SummarizationService,
|
|
SummarizationServiceSettings,
|
|
)
|
|
from noteflow.domain.entities.integration import Integration, IntegrationStatus, IntegrationType
|
|
from noteflow.grpc.startup.startup import (
|
|
auto_enable_cloud_llm,
|
|
check_calendar_needed_from_db,
|
|
)
|
|
from noteflow.infrastructure.persistence.unit_of_work import SqlAlchemyUnitOfWork
|
|
from noteflow.infrastructure.summarization.cloud_provider import CloudBackend
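
# Shape of the "ai_config" preference these tests exercise, reconstructed from the
# cases below (illustrative only, not an authoritative schema):
#
#     {
#         "summary": {
#             "provider": "openai" | "anthropic" | "ollama",
#             "api_key": "sk-...",
#             "test_status": "success" | "untested" | "error",
#             "model": "gpt-4o",  # optional; falsy values become None
#         }
#     }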


def _create_mock_uow(
    ai_config: object | None = None,
    calendar_integrations: list[Integration] | None = None,
    supports_integrations: bool = True,
) -> MagicMock:
    """Create a mock UnitOfWork with configurable preferences and integrations."""
    uow = MagicMock(spec=SqlAlchemyUnitOfWork)

    # Mock preferences repository
    uow.preferences = MagicMock()

    async def mock_get(key: str) -> object | None:
        return ai_config if key == "ai_config" else None

    uow.preferences.get = AsyncMock(side_effect=mock_get)

    # Mock integrations repository
    uow.supports_integrations = supports_integrations
    uow.integrations = MagicMock()
    uow.integrations.list_by_type = AsyncMock(return_value=calendar_integrations or [])

    return uow


def _create_mock_summarization_service() -> MagicMock:
    """Create a mock summarization service."""
    service = MagicMock()
    service.settings = SummarizationServiceSettings()
    service.register_provider = MagicMock()
    return service


def _create_integration(status: IntegrationStatus) -> Integration:
    """Create a test integration with the given status."""
    integration = Integration.create(
        workspace_id=uuid4(),
        name="Google Calendar",
        integration_type=IntegrationType.CALENDAR,
        config={"provider": "google"},
    )
    # Simulate status changes
    if status == IntegrationStatus.CONNECTED:
        integration.connect(provider_email="test@example.com")
    elif status == IntegrationStatus.ERROR:
        integration.mark_error("Test error")
    elif status == IntegrationStatus.DISCONNECTED:
        integration.disconnect()
    return integration
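
# Illustrative sketch only (not asserted here): at server startup these helpers are
# presumably invoked with the real SqlAlchemyUnitOfWork and SummarizationService,
# roughly as:
#
#     provider = await auto_enable_cloud_llm(uow, summarization_service)  # "openai" | "anthropic" | None
#     calendar_needed = await check_calendar_needed_from_db(uow)  # True only for a connected calendar integration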


class TestAutoEnableCloudLlm:
    """Tests for auto_enable_cloud_llm helper function."""

    @pytest.mark.asyncio
    @pytest.mark.parametrize(
        ("ai_config", "description"),
        [
            (None, "ai_config preference doesn't exist"),
            ("not a dict", "ai_config is not a dictionary"),
            ({"transcription": {}}, "ai_config has no summary section"),
            ({"summary": "not a dict"}, "summary config is not a dictionary"),
            ({"summary": {"provider": "ollama"}}, "provider is local/ollama (not cloud)"),
            (
                {"summary": {"provider": "openai", "api_key": ""}},
                "provider is cloud but API key is empty string",
            ),
            ({"summary": {"provider": "openai", "api_key": None}}, "API key is None"),
            (
                {
                    "summary": {
                        "provider": "openai",
                        "api_key": "sk-test-key",
                        "test_status": "untested",
                    }
                },
                "test_status is 'untested'",
            ),
            (
                {
                    "summary": {
                        "provider": "anthropic",
                        "api_key": "sk-ant-test-key",
                        "test_status": "error",
                    }
                },
                "test_status is 'error'",
            ),
        ],
        ids=[
            "no_ai_config",
            "ai_config_not_dict",
            "no_summary_section",
            "summary_not_dict",
            "local_provider",
            "empty_api_key",
            "api_key_none",
            "test_status_untested",
            "test_status_error",
        ],
    )
    async def test_returns_none_when_config_invalid(
        self,
        ai_config: dict[str, object] | None,
        description: str,
    ) -> None:
        """Should return None when the config is invalid (case described by ``description``)."""
        uow = _create_mock_uow(ai_config=ai_config)
        service = _create_mock_summarization_service()

        result = await auto_enable_cloud_llm(uow, service)

        assert result is None, f"Expected None when {description}"
        service.register_provider.assert_not_called()

    @pytest.mark.asyncio
    async def test_enables_openai_when_valid_config(self) -> None:
        """Should enable OpenAI cloud provider when config is valid."""
        uow = _create_mock_uow(
            ai_config={
                "summary": {
                    "provider": "openai",
                    "api_key": "sk-test-openai-key",
                    "test_status": "success",
                    "model": "gpt-4o",
                }
            }
        )
        service = _create_mock_summarization_service()

        with patch("noteflow.grpc.startup.startup.CloudSummarizer") as mock_cloud_summarizer_class:
            mock_summarizer = MagicMock()
            mock_cloud_summarizer_class.return_value = mock_summarizer

            result = await auto_enable_cloud_llm(uow, service)

            assert result == "openai", "Expected 'openai' provider to be enabled"
            mock_cloud_summarizer_class.assert_called_once_with(
                backend=CloudBackend.OPENAI,
                api_key="sk-test-openai-key",
                model="gpt-4o",
                base_url=None,
            )
            service.register_provider.assert_called_once_with(
                SummarizationMode.CLOUD, mock_summarizer
            )
            assert service.settings.cloud_consent_granted is True, (
                "Cloud consent should be granted after successful enable"
            )

    @pytest.mark.asyncio
    async def test_enables_anthropic_when_valid_config(self) -> None:
        """Should enable Anthropic cloud provider when config is valid."""
        uow = _create_mock_uow(
            ai_config={
                "summary": {
                    "provider": "anthropic",
                    "api_key": "sk-ant-test-key",
                    "test_status": "success",
                    "model": "claude-3-5-sonnet-20241022",
                }
            }
        )
        service = _create_mock_summarization_service()

        with patch("noteflow.grpc.startup.startup.CloudSummarizer") as mock_cloud_summarizer_class:
            mock_summarizer = MagicMock()
            mock_cloud_summarizer_class.return_value = mock_summarizer

            result = await auto_enable_cloud_llm(uow, service)

            assert result == "anthropic", "Expected 'anthropic' provider to be enabled"
            mock_cloud_summarizer_class.assert_called_once_with(
                backend=CloudBackend.ANTHROPIC,
                api_key="sk-ant-test-key",
                model="claude-3-5-sonnet-20241022",
                base_url=None,
            )
            service.register_provider.assert_called_once_with(
                SummarizationMode.CLOUD, mock_summarizer
            )
            assert service.settings.cloud_consent_granted is True, (
                "Cloud consent should be granted after successful enable"
            )

    @pytest.mark.asyncio
    async def test_uses_none_model_when_not_specified(self) -> None:
        """Should pass None for model when not specified in config."""
        uow = _create_mock_uow(
            ai_config={
                "summary": {
                    "provider": "openai",
                    "api_key": "sk-test-key",
                    "test_status": "success",
                    # No model specified
                }
            }
        )
        service = _create_mock_summarization_service()

        with patch("noteflow.grpc.startup.startup.CloudSummarizer") as mock_cloud_summarizer_class:
            await auto_enable_cloud_llm(uow, service)

            mock_cloud_summarizer_class.assert_called_once_with(
                backend=CloudBackend.OPENAI,
                api_key="sk-test-key",
                model=None,
                base_url=None,
            )

    @pytest.mark.asyncio
    async def test_uses_none_model_when_empty_string(self) -> None:
        """Should pass None for model when it's an empty string."""
        uow = _create_mock_uow(
            ai_config={
                "summary": {
                    "provider": "anthropic",
                    "api_key": "sk-ant-key",
                    "test_status": "success",
                    "model": "",
                }
            }
        )
        service = _create_mock_summarization_service()

        with patch("noteflow.grpc.startup.startup.CloudSummarizer") as mock_cloud_summarizer_class:
            await auto_enable_cloud_llm(uow, service)

            # Empty string is falsy, so model should be None
            mock_cloud_summarizer_class.assert_called_once_with(
                backend=CloudBackend.ANTHROPIC,
                api_key="sk-ant-key",
                model=None,
                base_url=None,
            )


class TestCheckCalendarNeededFromDb:
    """Tests for check_calendar_needed_from_db helper function."""

    @pytest.mark.asyncio
    async def test_returns_false_when_integrations_not_supported(self) -> None:
        """Should return False when UoW doesn't support integrations."""
        uow = _create_mock_uow(supports_integrations=False)

        result = await check_calendar_needed_from_db(uow)

        assert result is False, "Expected False when UoW doesn't support integrations"
        uow.integrations.list_by_type.assert_not_called()

    @pytest.mark.asyncio
    async def test_returns_false_when_no_calendar_integrations(self) -> None:
        """Should return False when no calendar integrations exist."""
        uow = _create_mock_uow(calendar_integrations=[])

        result = await check_calendar_needed_from_db(uow)

        assert result is False, "Expected False when no calendar integrations exist"
        uow.integrations.list_by_type.assert_awaited_once_with("calendar")

    @pytest.mark.asyncio
    async def test_returns_false_when_all_integrations_disconnected(self) -> None:
        """Should return False when all calendar integrations are disconnected."""
        disconnected = _create_integration(IntegrationStatus.DISCONNECTED)
        uow = _create_mock_uow(calendar_integrations=[disconnected])

        result = await check_calendar_needed_from_db(uow)

        assert result is False, "Expected False when all integrations are disconnected"

    @pytest.mark.asyncio
    async def test_returns_false_when_integration_newly_created(self) -> None:
        """Should return False when calendar integration is newly created (disconnected)."""
        # A newly created integration starts in DISCONNECTED status
        new_integration = Integration.create(
            workspace_id=uuid4(),
            name="Google Calendar",
            integration_type=IntegrationType.CALENDAR,
            config={"provider": "google"},
        )
        uow = _create_mock_uow(calendar_integrations=[new_integration])

        result = await check_calendar_needed_from_db(uow)

        assert result is False, (
            "Expected False when integration is newly created (disconnected status)"
        )

    @pytest.mark.asyncio
    async def test_returns_false_when_all_integrations_errored(self) -> None:
        """Should return False when all calendar integrations have errors."""
        errored = _create_integration(IntegrationStatus.ERROR)
        uow = _create_mock_uow(calendar_integrations=[errored])

        result = await check_calendar_needed_from_db(uow)

        assert result is False, "Expected False when all integrations have error status"

    @pytest.mark.asyncio
    async def test_returns_true_when_connected_integration_exists(self) -> None:
        """Should return True when at least one connected integration exists."""
        connected = _create_integration(IntegrationStatus.CONNECTED)
        uow = _create_mock_uow(calendar_integrations=[connected])

        result = await check_calendar_needed_from_db(uow)

        assert result is True, "Expected True when at least one connected integration exists"

    @pytest.mark.asyncio
    async def test_returns_true_with_mixed_statuses(self) -> None:
        """Should return True when mixed statuses include a connected one."""
        disconnected = _create_integration(IntegrationStatus.DISCONNECTED)
        connected = _create_integration(IntegrationStatus.CONNECTED)
        errored = _create_integration(IntegrationStatus.ERROR)

        uow = _create_mock_uow(calendar_integrations=[disconnected, connected, errored])

        result = await check_calendar_needed_from_db(uow)

        assert result is True, "Expected True when mixed statuses include a connected integration"

    @pytest.mark.asyncio
    async def test_returns_true_with_multiple_connected(self) -> None:
        """Should return True when multiple connected integrations exist."""
        google = _create_integration(IntegrationStatus.CONNECTED)
        outlook = _create_integration(IntegrationStatus.CONNECTED)

        uow = _create_mock_uow(calendar_integrations=[google, outlook])

        result = await check_calendar_needed_from_db(uow)

        assert result is True, "Expected True when multiple connected integrations exist"


class TestAutoEnableEdgeCases:
    """Edge case tests for auto-enable functionality."""

    @pytest.mark.asyncio
    async def test_cloud_llm_handles_missing_test_status_key(self) -> None:
        """Should not enable when test_status key is missing entirely."""
        uow = _create_mock_uow(
            ai_config={
                "summary": {
                    "provider": "openai",
                    "api_key": "sk-test-key",
                    # No test_status key at all
                }
            }
        )
        service = _create_mock_summarization_service()

        result = await auto_enable_cloud_llm(uow, service)

        assert result is None, "Expected None when test_status key is missing"
        service.register_provider.assert_not_called()

    @pytest.mark.asyncio
    async def test_cloud_llm_handles_missing_provider_key(self) -> None:
        """Should not enable when provider key is missing."""
        uow = _create_mock_uow(
            ai_config={
                "summary": {
                    "api_key": "sk-test-key",
                    "test_status": "success",
                    # No provider key
                }
            }
        )
        service = _create_mock_summarization_service()

        result = await auto_enable_cloud_llm(uow, service)

        assert result is None, "Expected None when provider key is missing"
        service.register_provider.assert_not_called()

    @pytest.mark.asyncio
    async def test_cloud_llm_handles_missing_api_key_field(self) -> None:
        """Should not enable when api_key field is missing."""
        uow = _create_mock_uow(
            ai_config={
                "summary": {
                    "provider": "openai",
                    "test_status": "success",
                    # No api_key key
                }
            }
        )
        service = _create_mock_summarization_service()

        result = await auto_enable_cloud_llm(uow, service)

        assert result is None, "Expected None when api_key field is missing"
        service.register_provider.assert_not_called()

    @pytest.mark.asyncio
    async def test_cloud_llm_rejects_uppercase_provider(self) -> None:
        """Should reject uppercase provider names (only lowercase accepted)."""
        # The frontend stores lowercase, so uppercase should not match
        uow = _create_mock_uow(
            ai_config={
                "summary": {
                    "provider": "OpenAI",  # Uppercase - should not match
                    "api_key": "sk-test-key",
                    "test_status": "success",
                }
            }
        )
        service = _create_mock_summarization_service()

        result = await auto_enable_cloud_llm(uow, service)

        assert result is None, "Expected None for uppercase provider name (only lowercase accepted)"
        service.register_provider.assert_not_called()

    @pytest.mark.asyncio
    async def test_cloud_llm_empty_ai_config_dict(self) -> None:
        """Should handle empty ai_config dictionary."""
        uow = _create_mock_uow(ai_config={})
        service = _create_mock_summarization_service()

        result = await auto_enable_cloud_llm(uow, service)

        assert result is None, "Expected None for empty ai_config dictionary"
        service.register_provider.assert_not_called()

    @pytest.mark.asyncio
    async def test_cloud_llm_empty_summary_config_dict(self) -> None:
        """Should handle empty summary config dictionary."""
        uow = _create_mock_uow(ai_config={"summary": {}})
        service = _create_mock_summarization_service()

        result = await auto_enable_cloud_llm(uow, service)

        assert result is None, "Expected None for empty summary config dictionary"
        service.register_provider.assert_not_called()

    @pytest.mark.asyncio
    async def test_cloud_llm_passes_non_string_model_to_summarizer(self) -> None:
        """Should pass non-string model values through to CloudSummarizer."""
        # Edge case: model could be a list or dict from malformed config
        uow = _create_mock_uow(
            ai_config={
                "summary": {
                    "provider": "openai",
                    "api_key": "sk-test-key",
                    "test_status": "success",
                    "model": ["gpt-4", "gpt-3.5"],  # Invalid: list instead of string
                }
            }
        )
        service = _create_mock_summarization_service()

        with patch("noteflow.grpc.startup.startup.CloudSummarizer") as mock_cloud_summarizer_class:
            mock_summarizer = MagicMock()
            mock_cloud_summarizer_class.return_value = mock_summarizer

            result = await auto_enable_cloud_llm(uow, service)

            # Non-string truthy values are passed through (CloudSummarizer handles validation)
            assert result == "openai", "Expected 'openai' provider with non-string model value"
            mock_cloud_summarizer_class.assert_called_once_with(
                backend=CloudBackend.OPENAI,
                api_key="sk-test-key",
                model=["gpt-4", "gpt-3.5"],
                base_url=None,
            )

    @pytest.mark.asyncio
    async def test_cloud_llm_handles_summarizer_instantiation_failure(self) -> None:
        """Should propagate exception when CloudSummarizer instantiation fails."""
        uow = _create_mock_uow(
            ai_config={
                "summary": {
                    "provider": "openai",
                    "api_key": "sk-invalid-key",
                    "test_status": "success",
                }
            }
        )
        service = _create_mock_summarization_service()

        with patch("noteflow.grpc.startup.startup.CloudSummarizer") as mock_cloud_summarizer_class:
            mock_cloud_summarizer_class.side_effect = ValueError("Invalid API key format")

            with pytest.raises(ValueError, match="Invalid API key format"):
                await auto_enable_cloud_llm(uow, service)

            service.register_provider.assert_not_called()


class TestAutoEnableWithRealSummarizationService:
    """Tests using real SummarizationService to verify provider registration."""

    @pytest.mark.asyncio
    async def test_openai_provider_actually_registered(self) -> None:
        """Verify OpenAI provider is actually registered with real service."""
        uow = _create_mock_uow(
            ai_config={
                "summary": {
                    "provider": "openai",
                    "api_key": "sk-test-real-key",
                    "test_status": "success",
                    "model": "gpt-4",
                }
            }
        )
        # Use real service, not mock
        service = SummarizationService(settings=SummarizationServiceSettings())

        with patch("noteflow.grpc.startup.startup.CloudSummarizer") as mock_cloud_summarizer_class:
            mock_summarizer = MagicMock()
            mock_summarizer.is_available = True
            mock_cloud_summarizer_class.return_value = mock_summarizer

            result = await auto_enable_cloud_llm(uow, service)

            assert result == "openai", "Expected 'openai' provider to be enabled"
            # Verify the provider was actually registered
            assert SummarizationMode.CLOUD in service.providers, (
                "CLOUD mode should be registered in providers"
            )
            assert service.settings.cloud_consent_granted is True, "Cloud consent should be granted"

    @pytest.mark.asyncio
    async def test_anthropic_provider_actually_registered(self) -> None:
        """Verify Anthropic provider is actually registered with real service."""
        uow = _create_mock_uow(
            ai_config={
                "summary": {
                    "provider": "anthropic",
                    "api_key": "sk-ant-real-key",
                    "test_status": "success",
                }
            }
        )
        service = SummarizationService(settings=SummarizationServiceSettings())

        with patch("noteflow.grpc.startup.startup.CloudSummarizer") as mock_cloud_summarizer_class:
            mock_summarizer = MagicMock()
            mock_summarizer.is_available = True
            mock_cloud_summarizer_class.return_value = mock_summarizer

            result = await auto_enable_cloud_llm(uow, service)

            assert result == "anthropic", "Expected 'anthropic' provider to be enabled"
            assert SummarizationMode.CLOUD in service.providers, (
                "CLOUD mode should be registered in providers"
            )
            assert service.settings.cloud_consent_granted is True, "Cloud consent should be granted"


class TestAutoEnableIntegrationStatus:
    """Tests verifying integration status enum handling."""

    @pytest.mark.asyncio
    @pytest.mark.parametrize(
        ("status", "expected"),
        [
            pytest.param(IntegrationStatus.CONNECTED, True, id="connected_enables"),
            pytest.param(IntegrationStatus.ERROR, False, id="error_disables"),
            pytest.param(IntegrationStatus.DISCONNECTED, False, id="disconnected_disables"),
        ],
    )
    async def test_only_connected_status_triggers_calendar_enable(
        self, status: IntegrationStatus, expected: bool
    ) -> None:
        """Verify that only IntegrationStatus.CONNECTED triggers calendar enable."""
        integration = _create_integration(status)
        uow = _create_mock_uow(calendar_integrations=[integration])

        result = await check_calendar_needed_from_db(uow)

        assert result is expected, f"Expected {expected} for IntegrationStatus.{status.name}"