chore: update client submodule and enhance webhook service interfaces

- Updated the client submodule reference to the latest commit for improved integration.
- Introduced `WebhookExecutorLike` and `SummarizationServiceLike` protocols to define minimal interfaces for webhook delivery and summarization services.
- Refactored type hints in various service methods to improve type safety and clarity.
- Enhanced the cleanup and error handling in the meeting stop operations and summarization consent functions.
This commit is contained in:
2026-01-13 01:00:36 +00:00
parent bed20ce682
commit 103ed09f32
11 changed files with 116 additions and 57 deletions

2
client

Submodule client updated: e26bf54ad8...d8e84e27c3

View File

@@ -2,7 +2,7 @@
from __future__ import annotations
from typing import TYPE_CHECKING
from typing import TYPE_CHECKING, Protocol
from noteflow.config.constants import DEFAULT_MEETING_TITLE
from noteflow.domain.utils.time import utc_now
@@ -47,6 +47,23 @@ _DELIVERY_LOG_TEMPLATES: dict[str, str] = {
}
class WebhookExecutorLike(Protocol):
    """Minimal executor interface for webhook delivery.

    Structural (duck-typed) protocol so that ``WebhookService`` can accept
    either the real ``WebhookExecutor`` or a lightweight test double without
    a nominal subclass relationship (see the test executors that are now
    passed to ``WebhookService`` without ``# type: ignore[arg-type]``).
    """

    async def deliver(
        self,
        config: WebhookConfig,
        event_type: WebhookEventType,
        payload: WebhookPayloadDict,
    ) -> WebhookDelivery:
        """Deliver a webhook payload.

        Args:
            config: Webhook registration to deliver to.
            event_type: Event being delivered (e.g. recording stopped,
                meeting completed).
            payload: Event payload dictionary.

        Returns:
            A ``WebhookDelivery`` record describing the delivery attempt.
        """
        ...

    async def close(self) -> None:
        """Release executor resources."""
        ...
class WebhookService:
"""Orchestrate webhook delivery for meeting events.
@@ -56,7 +73,7 @@ class WebhookService:
def __init__(
self,
executor: WebhookExecutor | None = None,
executor: WebhookExecutorLike | None = None,
) -> None:
"""Initialize webhook service.

View File

@@ -3,6 +3,7 @@
from __future__ import annotations
import asyncio
from collections.abc import Callable
from typing import TYPE_CHECKING
from uuid import UUID
@@ -15,6 +16,9 @@ from ...proto import noteflow_pb2
from ..errors import abort_invalid_argument, parse_optional_uuid_or_abort
if TYPE_CHECKING:
from noteflow.application.services.webhook_service import WebhookService
from noteflow.infrastructure.audio.writer import MeetingAudioWriter
from .._types import GrpcContext
logger = get_logger(__name__)
@@ -42,13 +46,13 @@ async def wait_for_stream_exit(
def cleanup_audio_writer(
audio_writers: dict[str, object],
close_audio_writer_func: object,
audio_writers: dict[str, MeetingAudioWriter],
close_audio_writer_func: Callable[[str], None],
meeting_id: str,
) -> None:
"""Close audio writer if open."""
if meeting_id in audio_writers:
close_audio_writer_func(meeting_id) # type: ignore[operator]
close_audio_writer_func(meeting_id)
async def transition_to_stopped(
@@ -70,14 +74,14 @@ async def transition_to_stopped(
async def fire_stop_webhooks(
webhook_service: object | None,
webhook_service: WebhookService | None,
meeting: Meeting,
) -> None:
"""Trigger webhooks for meeting stop (fire-and-forget)."""
if webhook_service is None:
return
try:
await webhook_service.trigger_recording_stopped( # type: ignore[attr-defined]
await webhook_service.trigger_recording_stopped(
meeting_id=str(meeting.id),
title=meeting.title or DEFAULT_MEETING_TITLE,
duration_seconds=meeting.duration_seconds or 0.0,
@@ -85,7 +89,7 @@ async def fire_stop_webhooks(
except Exception:
logger.exception("Failed to trigger recording.stopped webhooks")
try:
await webhook_service.trigger_meeting_completed(meeting) # type: ignore[attr-defined]
await webhook_service.trigger_meeting_completed(meeting)
except Exception:
logger.exception("Failed to trigger meeting.completed webhooks")

View File

@@ -2,15 +2,32 @@
from __future__ import annotations
from typing import TYPE_CHECKING, Protocol
from ...proto import noteflow_pb2
from .._types import GrpcContext
from ..errors import abort_failed_precondition
from ..errors._constants import UNREACHABLE_ERROR
if TYPE_CHECKING:
from noteflow.application.services.summarization.summarization_service import (
SummarizationServiceSettings,
)
class SummarizationServiceLike(Protocol):
    """Minimal summarization-service interface for the consent handlers.

    Structural protocol covering only what the cloud-consent RPC handlers
    need: the two consent mutators and a ``settings`` attribute whose
    ``cloud_consent_granted`` flag is read by ``get_cloud_consent_status``.
    """

    # Settings exposing `cloud_consent_granted`; imported under TYPE_CHECKING
    # to avoid a runtime dependency on the summarization service module.
    settings: "SummarizationServiceSettings"

    async def grant_cloud_consent(self) -> None:
        """Grant consent for cloud summarization."""

    async def revoke_cloud_consent(self) -> None:
        """Revoke consent for cloud summarization."""
async def grant_cloud_consent(
summarization_service: object | None,
request: object,
summarization_service: SummarizationServiceLike | None,
request: noteflow_pb2.GrantCloudConsentRequest,
context: GrpcContext,
) -> noteflow_pb2.GrantCloudConsentResponse:
"""Grant consent for cloud-based summarization."""
@@ -21,7 +38,7 @@ async def grant_cloud_consent(
)
raise RuntimeError(UNREACHABLE_ERROR)
await summarization_service.grant_cloud_consent() # type: ignore[attr-defined]
await summarization_service.grant_cloud_consent()
from noteflow.infrastructure.logging import get_logger
logger = get_logger(__name__)
@@ -30,8 +47,8 @@ async def grant_cloud_consent(
async def revoke_cloud_consent(
summarization_service: object | None,
request: object,
summarization_service: SummarizationServiceLike | None,
request: noteflow_pb2.RevokeCloudConsentRequest,
context: GrpcContext,
) -> noteflow_pb2.RevokeCloudConsentResponse:
"""Revoke consent for cloud-based summarization."""
@@ -42,7 +59,7 @@ async def revoke_cloud_consent(
)
raise RuntimeError(UNREACHABLE_ERROR)
await summarization_service.revoke_cloud_consent() # type: ignore[attr-defined]
await summarization_service.revoke_cloud_consent()
from noteflow.infrastructure.logging import get_logger
logger = get_logger(__name__)
@@ -51,8 +68,8 @@ async def revoke_cloud_consent(
async def get_cloud_consent_status(
summarization_service: object | None,
request: object,
summarization_service: SummarizationServiceLike | None,
request: noteflow_pb2.GetCloudConsentStatusRequest,
context: GrpcContext,
) -> noteflow_pb2.GetCloudConsentStatusResponse:
"""Return current cloud consent status."""
@@ -60,5 +77,5 @@ async def get_cloud_consent_status(
# Default to not granted if service unavailable
return noteflow_pb2.GetCloudConsentStatusResponse(consent_granted=False)
return noteflow_pb2.GetCloudConsentStatusResponse(
consent_granted=summarization_service.settings.cloud_consent_granted, # type: ignore[attr-defined]
consent_granted=summarization_service.settings.cloud_consent_granted,
)

View File

@@ -1,15 +1,14 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings
from . import noteflow_pb2 as noteflow__pb2
import noteflow_pb2 as noteflow__pb2
GRPC_GENERATED_VERSION = '1.76.0'
GRPC_VERSION = grpc.__version__
_version_not_supported = False
GRPC_GENERATED_VERSION = '1.76.0'
try:
from grpc._utilities import first_version_is_lower
_version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
@@ -18,7 +17,8 @@ except ImportError:
if _version_not_supported:
raise RuntimeError(
f'The grpc package installed is at version {GRPC_VERSION}, but the generated code in noteflow_pb2_grpc.py depends on'
f'The grpc package installed is at version {GRPC_VERSION},'
+ ' but the generated code in noteflow_pb2_grpc.py depends on'
+ f' grpcio>={GRPC_GENERATED_VERSION}.'
+ f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
+ f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'

View File

@@ -6,6 +6,7 @@ import time
from noteflow.infrastructure.asr import FasterWhisperEngine
from noteflow.infrastructure.logging import get_logger
from sqlalchemy.ext.asyncio import AsyncEngine
from ..service import NoteFlowServicer
@@ -38,7 +39,7 @@ def load_asr_engine(
async def stop_server(
server: object | None,
servicer: NoteFlowServicer | None,
db_engine: object | None,
db_engine: AsyncEngine | None,
grace_period: float = 5.0,
) -> None:
"""Stop the server gracefully.
@@ -67,7 +68,6 @@ async def stop_server(
# Dispose database engine to release connection pool
if db_engine is not None:
logger.debug("Disposing database engine connection pool")
if hasattr(db_engine, "dispose"):
await db_engine.dispose() # type: ignore[attr-defined]
await db_engine.dispose()
logger.info("Server stopped")

View File

@@ -3,13 +3,25 @@
from __future__ import annotations
from collections.abc import Sequence
from datetime import datetime
from typing import Protocol
from uuid import UUID
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import Mapped
class GetByIdMixin[TModel, TEntity]:
class _HasId(Protocol):
id: Mapped[UUID]
class _HasMeetingFields(Protocol):
meeting_id: Mapped[UUID]
created_at: Mapped[datetime]
class GetByIdMixin[TModel: _HasId, TEntity]:
"""Mixin providing standardized get_by_id implementation.
Requires class attributes:
@@ -37,13 +49,13 @@ class GetByIdMixin[TModel, TEntity]:
Returns:
Domain entity if found, None otherwise.
"""
stmt = select(self._model_class).where(self._model_class.id == entity_id) # type: ignore[attr-defined]
stmt = select(self._model_class).where(self._model_class.id == entity_id)
result = await self._session.execute(stmt)
model = result.scalar_one_or_none()
return self._to_domain(model) if model else None
class DeleteByIdMixin[TModel]:
class DeleteByIdMixin[TModel: _HasId]:
"""Mixin providing standardized delete implementation.
Requires class attributes:
@@ -64,7 +76,7 @@ class DeleteByIdMixin[TModel]:
Returns:
True if deleted, False if not found.
"""
stmt = select(self._model_class).where(self._model_class.id == entity_id) # type: ignore[attr-defined]
stmt = select(self._model_class).where(self._model_class.id == entity_id)
result = await self._session.execute(stmt)
model = result.scalar_one_or_none()
if not model:
@@ -74,7 +86,7 @@ class DeleteByIdMixin[TModel]:
return True
class GetByMeetingMixin[TModel, TEntity]:
class GetByMeetingMixin[TModel: _HasMeetingFields, TEntity]:
"""Mixin for repositories with meeting-scoped entities.
Requires class attributes:
@@ -104,8 +116,8 @@ class GetByMeetingMixin[TModel, TEntity]:
"""
stmt = (
select(self._model_class)
.where(self._model_class.meeting_id == meeting_id) # type: ignore[attr-defined]
.order_by(self._model_class.created_at.asc()) # type: ignore[attr-defined]
.where(self._model_class.meeting_id == meeting_id)
.order_by(self._model_class.created_at.asc())
)
result = await self._session.execute(stmt)
models = list(result.scalars().all())

View File

@@ -162,7 +162,7 @@ class SqlAlchemyProjectRepository(
stmt = select(ProjectModel).where(
and_(
ProjectModel.workspace_id == workspace_id,
ProjectModel.is_default == True, # noqa: E712
ProjectModel.is_default.is_(True),
),
)
model = await self._execute_scalar(stmt)

View File

@@ -6,7 +6,6 @@ meetings with segments, summaries, and mock executors.
from __future__ import annotations
from typing import Any
from unittest.mock import AsyncMock, MagicMock
from uuid import uuid4
@@ -16,7 +15,13 @@ from noteflow.application.services.webhook_service import WebhookService
from noteflow.domain.entities import Meeting, Segment, Summary
from noteflow.domain.entities.summary import ActionItem, KeyPoint
from noteflow.domain.utils.time import utc_now
from noteflow.domain.webhooks import WebhookConfig, WebhookDelivery, WebhookEventType
from noteflow.domain.value_objects import MeetingId
from noteflow.domain.webhooks import (
WebhookConfig,
WebhookDelivery,
WebhookEventType,
WebhookPayloadDict,
)
from noteflow.infrastructure.webhooks import WebhookExecutor
# =============================================================================
@@ -24,7 +29,7 @@ from noteflow.infrastructure.webhooks import WebhookExecutor
# =============================================================================
def create_segment(segment_id: int, meeting_id: Any, text: str | None = None) -> Segment:
def create_segment(segment_id: int, meeting_id: MeetingId, text: str | None = None) -> Segment:
"""Create a single segment with deterministic timing."""
segment_text = text or f"Segment {segment_id} content"
return Segment(
@@ -81,20 +86,20 @@ def meeting_with_summary(completed_meeting: Meeting) -> Meeting:
@pytest.fixture
def captured_payloads() -> list[dict[str, Any]]:
def captured_payloads() -> list[WebhookPayloadDict]:
"""Store payloads passed to executor for verification."""
return []
@pytest.fixture
def mock_executor(captured_payloads: list[dict[str, Any]]) -> MagicMock:
def mock_executor(captured_payloads: list[WebhookPayloadDict]) -> MagicMock:
"""Create a mock executor that captures delivered payloads."""
executor = MagicMock(spec=WebhookExecutor)
async def capture_delivery(
config: WebhookConfig,
event_type: WebhookEventType,
payload: dict[str, Any],
payload: WebhookPayloadDict,
) -> WebhookDelivery:
captured_payloads.append(payload)
return WebhookDelivery(

View File

@@ -2,7 +2,6 @@
from __future__ import annotations
from typing import Any
from unittest.mock import MagicMock
from uuid import uuid4
@@ -11,7 +10,12 @@ import pytest
from noteflow.application.services.webhook_service import WebhookService
from noteflow.domain.entities import Meeting
from noteflow.domain.utils.time import utc_now
from noteflow.domain.webhooks import WebhookConfig, WebhookDelivery, WebhookEventType
from noteflow.domain.webhooks import (
WebhookConfig,
WebhookDelivery,
WebhookEventType,
WebhookPayloadDict,
)
from .conftest import (
create_failing_webhook,
@@ -32,7 +36,7 @@ class FailingExecutor:
self,
config: WebhookConfig,
event_type: WebhookEventType,
payload: dict[str, Any],
payload: WebhookPayloadDict,
) -> WebhookDelivery:
"""Always raise RuntimeError."""
del config, event_type, payload # Unused - always raises
@@ -53,7 +57,7 @@ class FailFirstSucceedSecondExecutor:
self,
config: WebhookConfig,
event_type: WebhookEventType,
payload: dict[str, Any],
payload: WebhookPayloadDict,
) -> WebhookDelivery:
"""First call fails, subsequent calls succeed."""
self.call_count += 1
@@ -68,7 +72,7 @@ class FailFirstSucceedSecondExecutor:
self,
config: WebhookConfig,
event_type: WebhookEventType,
payload: dict[str, Any],
payload: WebhookPayloadDict,
) -> WebhookDelivery:
"""Return result based on whether first call was done."""
del config, event_type, payload # Unused - always raises
@@ -90,7 +94,7 @@ class SuccessAfterFirstExecutor:
self,
config: WebhookConfig,
event_type: WebhookEventType,
payload: dict[str, Any],
payload: WebhookPayloadDict,
) -> WebhookDelivery:
"""Fail for specific configs, succeed for others."""
# Check membership - this is O(1) lookup, not conditional logic
@@ -104,7 +108,7 @@ class SuccessAfterFirstExecutor:
should_fail: bool,
config: WebhookConfig,
event_type: WebhookEventType,
payload: dict[str, Any],
payload: WebhookPayloadDict,
) -> WebhookDelivery:
"""Handle delivery based on pre-configured failure status."""
# Using polymorphism instead of conditional
@@ -118,7 +122,7 @@ class SuccessAfterFirstExecutor:
self,
config: WebhookConfig,
event_type: WebhookEventType,
payload: dict[str, Any],
payload: WebhookPayloadDict,
) -> WebhookDelivery:
"""Fail the delivery."""
del config, event_type, payload # Unused - always raises
@@ -128,7 +132,7 @@ class SuccessAfterFirstExecutor:
self,
config: WebhookConfig,
event_type: WebhookEventType,
payload: dict[str, Any],
payload: WebhookPayloadDict,
) -> WebhookDelivery:
"""Succeed the delivery."""
return WebhookDelivery(
@@ -227,7 +231,7 @@ class TestMeetingCompletedPayload:
webhook_service: WebhookService,
webhook_config: WebhookConfig,
completed_meeting: Meeting,
captured_payloads: list[dict[str, Any]],
captured_payloads: list[WebhookPayloadDict],
) -> None:
"""Payload includes meeting ID, title, duration, and segment count."""
webhook_service.register_webhook(webhook_config)
@@ -252,7 +256,7 @@ class TestMeetingCompletedPayload:
webhook_service: WebhookService,
webhook_config: WebhookConfig,
meeting_with_summary: Meeting,
captured_payloads: list[dict[str, Any]],
captured_payloads: list[WebhookPayloadDict],
) -> None:
"""Payload correctly indicates whether meeting has a summary."""
webhook_service.register_webhook(webhook_config)
@@ -267,7 +271,7 @@ class TestMeetingCompletedPayload:
self,
webhook_service: WebhookService,
completed_meeting: Meeting,
captured_payloads: list[dict[str, Any]],
captured_payloads: list[WebhookPayloadDict],
) -> None:
"""No payloads are delivered when no webhooks are registered."""
deliveries = await webhook_service.trigger_meeting_completed(completed_meeting)
@@ -285,7 +289,7 @@ class TestSummaryGeneratedPayload:
webhook_service: WebhookService,
webhook_config_all_events: WebhookConfig,
meeting_with_summary: Meeting,
captured_payloads: list[dict[str, Any]],
captured_payloads: list[WebhookPayloadDict],
) -> None:
"""Payload includes summary content and item counts."""
webhook_service.register_webhook(webhook_config_all_events)
@@ -317,7 +321,7 @@ class TestRecordingPayloads:
self,
webhook_service: WebhookService,
webhook_config_all_events: WebhookConfig,
captured_payloads: list[dict[str, Any]],
captured_payloads: list[WebhookPayloadDict],
) -> None:
"""Recording started payload contains meeting ID and title."""
webhook_service.register_webhook(webhook_config_all_events)
@@ -341,7 +345,7 @@ class TestRecordingPayloads:
self,
webhook_service: WebhookService,
webhook_config_all_events: WebhookConfig,
captured_payloads: list[dict[str, Any]],
captured_payloads: list[WebhookPayloadDict],
) -> None:
"""Recording stopped payload includes duration."""
webhook_service.register_webhook(webhook_config_all_events)
@@ -392,7 +396,7 @@ class TestErrorResilience:
"""Executor exceptions are caught and don't crash the trigger call."""
# Use the always-failing executor
failing_executor = FailingExecutor()
service = WebhookService(executor=failing_executor) # type: ignore[arg-type]
service = WebhookService(executor=failing_executor)
service.register_webhook(webhook_config)
# Should complete without raising
@@ -409,7 +413,7 @@ class TestErrorResilience:
# Use executor that fails for specific URL
failing_url = "https://failing.example.com/hook"
executor = SuccessAfterFirstExecutor(configs_to_fail={failing_url})
service = WebhookService(executor=executor) # type: ignore[arg-type]
service = WebhookService(executor=executor)
# Register webhooks explicitly (no loop)
failing_webhook = create_failing_webhook()

View File

@@ -137,7 +137,7 @@ class TestViolation:
)
with pytest.raises(FrozenInstanceError):
v.rule = "changed" # type: ignore[misc]
setattr(v, "rule", "changed")
class TestContentHash: