chore: update client submodule and enhance logging functionality
- Updated the client submodule to the latest commit for improved features and stability.
- Enhanced logging by introducing a new LogEntry structure with trace context and semantic event types for better observability.
- Updated gRPC proto definitions to carry the new logging fields, ensuring comprehensive logging data is captured.
- Updated the observability mixin to use the new log entry structure, streamlining log retrieval and formatting.
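For orientation, a minimal sketch of the new round-trip using the names introduced in this diff. The absolute import path for the converter is an assumption inferred from the relative `from .converters import ...` inside `noteflow.grpc._mixins.observability`:

```python
from datetime import UTC, datetime

from noteflow.infrastructure.logging import LogEntry

# Path inferred from the relative imports in this diff; not shown verbatim anywhere.
from noteflow.grpc._mixins.converters import log_entry_to_proto

# Build a log entry carrying the new trace/semantic metadata.
entry = LogEntry(
    timestamp=datetime(2024, 1, 15, 10, 30, 0, tzinfo=UTC),
    level="info",
    source="app",
    message="Meeting started",
    details={"host": "example.com"},
    trace_id="abc123def456",       # OpenTelemetry correlation ID
    event_type="meeting.started",  # semantic type (see LogEventType below)
    operation_id="op-12345",       # groups all events for one session
    entity_id="meeting-67890",
)

proto = log_entry_to_proto(entry)
assert proto.event_type == "meeting.started"
assert proto.span_id == ""  # unset optional fields are coerced to empty strings
```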
.hygeine/clippy.json: 1460 lines changed (diff suppressed because one or more lines are too long)
@@ -34,6 +34,7 @@ from ._domain import (
 # External service converters
 from ._external import (
     entity_to_proto,
+    log_entry_to_proto,
     metrics_to_proto,
     sync_run_to_proto,
     webhook_config_to_proto,
@@ -82,6 +83,7 @@ __all__ = [
     "epoch_seconds_to_datetime",
     "export_format_to_proto",
     "iso_string_to_datetime",
+    "log_entry_to_proto",
     "meeting_to_proto",
     "metrics_to_proto",
     "oidc_provider_to_proto",
@@ -1,10 +1,11 @@
-"""External service converters: webhooks, sync, entity, metrics."""
+"""External service converters: webhooks, sync, entity, metrics, logs."""

 from __future__ import annotations

 from noteflow.domain.entities import SyncRun
 from noteflow.domain.entities.named_entity import NamedEntity
 from noteflow.domain.webhooks.events import WebhookConfig, WebhookDelivery
+from noteflow.infrastructure.logging import LogEntry
 from noteflow.infrastructure.metrics import PerformanceMetrics

 from ...proto import noteflow_pb2
@@ -110,3 +111,26 @@ def metrics_to_proto(
         process_memory_mb=metrics.process_memory_mb,
         active_connections=metrics.active_connections,
     )
+
+
+def log_entry_to_proto(entry: LogEntry) -> noteflow_pb2.LogEntryProto:
+    """Convert LogEntry to proto message.
+
+    Args:
+        entry: LogEntry from the log buffer.
+
+    Returns:
+        Proto LogEntryProto message.
+    """
+    return noteflow_pb2.LogEntryProto(
+        timestamp=entry.timestamp.isoformat(),
+        level=entry.level,
+        source=entry.source,
+        message=entry.message,
+        details=entry.details,
+        trace_id=entry.trace_id or "",
+        span_id=entry.span_id or "",
+        event_type=entry.event_type or "",
+        operation_id=entry.operation_id or "",
+        entity_id=entry.entity_id or "",
+    )
@@ -10,7 +10,7 @@ from noteflow.infrastructure.persistence.constants import DEFAULT_LOG_LIMIT, MAX

 from ..proto import noteflow_pb2
 from ._types import GrpcContext
-from .converters import metrics_to_proto
+from .converters import log_entry_to_proto, metrics_to_proto


 class ObservabilityServicer(Protocol):
@@ -47,16 +47,7 @@ class ObservabilityMixin:
             )

         return noteflow_pb2.GetRecentLogsResponse(
-            logs=[
-                noteflow_pb2.LogEntryProto(
-                    timestamp=entry.timestamp.isoformat(),
-                    level=entry.level,
-                    source=entry.source,
-                    message=entry.message,
-                    details=entry.details,
-                )
-                for entry in entries
-            ],
+            logs=[log_entry_to_proto(entry) for entry in entries],
         )

     async def GetPerformanceMetrics(
@@ -129,8 +129,8 @@ class SummarizationMixin:
             style_prompt=style_prompt,
         )
         summary = result.summary
-        provider_name = result.result.provider_name
-        model_name = result.result.model_name
+        provider_name = summary.provider_name
+        model_name = summary.model_name
         logger.info(
             "Generated summary using %s/%s",
             provider_name,
@@ -1373,6 +1373,21 @@ message LogEntryProto {

   // Additional details (key-value pairs)
   map<string, string> details = 5;
+
+  // Distributed tracing correlation ID
+  string trace_id = 6;
+
+  // Span ID within trace
+  string span_id = 7;
+
+  // Semantic event type (e.g., "meeting.created", "summary.generated")
+  string event_type = 8;
+
+  // Groups related events (e.g., all events for one meeting session)
+  string operation_id = 9;
+
+  // Primary entity ID (e.g., meeting_id for meeting events)
+  string entity_id = 10;
 }

 message GetPerformanceMetricsRequest {
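Because these are proto3 scalar fields, an unset field reads back as an empty string rather than None, which is why log_entry_to_proto maps None to "". A minimal check; the generated module's import path is an assumption inferred from the diff's `from ..proto import noteflow_pb2`:

```python
# Assumed absolute path for the generated module.
from noteflow.grpc.proto import noteflow_pb2

proto = noteflow_pb2.LogEntryProto(message="Basic log")
assert proto.trace_id == ""        # proto3: unset string fields default to ""
assert len(proto.details) == 0     # map fields default to empty
```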
File diff suppressed because one or more lines are too long
@@ -1158,7 +1158,7 @@ class GetRecentLogsResponse(_message.Message):
     def __init__(self, logs: _Optional[_Iterable[_Union[LogEntryProto, _Mapping]]] = ...) -> None: ...

 class LogEntryProto(_message.Message):
-    __slots__ = ("timestamp", "level", "source", "message", "details")
+    __slots__ = ("timestamp", "level", "source", "message", "details", "trace_id", "span_id", "event_type", "operation_id", "entity_id")
     class DetailsEntry(_message.Message):
         __slots__ = ("key", "value")
         KEY_FIELD_NUMBER: _ClassVar[int]
@@ -1171,12 +1171,22 @@ class LogEntryProto(_message.Message):
     SOURCE_FIELD_NUMBER: _ClassVar[int]
     MESSAGE_FIELD_NUMBER: _ClassVar[int]
     DETAILS_FIELD_NUMBER: _ClassVar[int]
+    TRACE_ID_FIELD_NUMBER: _ClassVar[int]
+    SPAN_ID_FIELD_NUMBER: _ClassVar[int]
+    EVENT_TYPE_FIELD_NUMBER: _ClassVar[int]
+    OPERATION_ID_FIELD_NUMBER: _ClassVar[int]
+    ENTITY_ID_FIELD_NUMBER: _ClassVar[int]
     timestamp: str
     level: str
     source: str
     message: str
     details: _containers.ScalarMap[str, str]
-    def __init__(self, timestamp: _Optional[str] = ..., level: _Optional[str] = ..., source: _Optional[str] = ..., message: _Optional[str] = ..., details: _Optional[_Mapping[str, str]] = ...) -> None: ...
+    trace_id: str
+    span_id: str
+    event_type: str
+    operation_id: str
+    entity_id: str
+    def __init__(self, timestamp: _Optional[str] = ..., level: _Optional[str] = ..., source: _Optional[str] = ..., message: _Optional[str] = ..., details: _Optional[_Mapping[str, str]] = ..., trace_id: _Optional[str] = ..., span_id: _Optional[str] = ..., event_type: _Optional[str] = ..., operation_id: _Optional[str] = ..., entity_id: _Optional[str] = ...) -> None: ...

 class GetPerformanceMetricsRequest(_message.Message):
     __slots__ = ("history_limit",)
@@ -16,6 +16,7 @@ from .config import (
     get_log_level,
     get_logger,
 )
+from .events import LogEventType, get_event_domain
 from .log_buffer import LogBuffer, LogBufferHandler, LogEntry, get_log_buffer
 from .processors import add_noteflow_context, add_otel_trace_context
 from .structured import (
@@ -38,12 +39,14 @@ from .transitions import log_state_transition

 __all__ = [
     "DEFAULT_RATE_LIMIT_SECONDS",
-    "RateLimitedLogger",
-    "get_client_rate_limiter",
     "LogBuffer",
     "LogBufferHandler",
     "LogEntry",
+    "LogEventType",
     "LoggingConfig",
+    "RateLimitedLogger",
+    "get_client_rate_limiter",
+    "get_event_domain",
     "add_noteflow_context",
     "add_otel_trace_context",
     "configure_logging",
src/noteflow/infrastructure/logging/events.py (new file, 93 lines)
@@ -0,0 +1,93 @@
+"""Log event type enumeration for semantic classification.
+
+Provides a standardized vocabulary for categorizing log events,
+enabling better grouping, filtering, and summarization in the UI.
+"""
+
+from enum import StrEnum
+
+
+class LogEventType(StrEnum):
+    """Semantic log event types for classification.
+
+    Event types follow the pattern: <domain>.<action>
+    where domain identifies the subsystem and action describes what happened.
+    """
+
+    # Meeting lifecycle
+    MEETING_CREATED = "meeting.created"
+    MEETING_STARTED = "meeting.started"
+    MEETING_STOPPED = "meeting.stopped"
+    MEETING_DELETED = "meeting.deleted"
+    MEETING_RECOVERED = "meeting.recovered"
+
+    # Recording
+    RECORDING_STARTED = "recording.started"
+    RECORDING_STOPPED = "recording.stopped"
+    RECORDING_PAUSED = "recording.paused"
+    RECORDING_RESUMED = "recording.resumed"
+
+    # Transcription
+    SEGMENT_PROCESSED = "segment.processed"
+    TRANSCRIPT_EXPORTED = "transcript.exported"
+
+    # Diarization
+    DIARIZATION_STARTED = "diarization.started"
+    DIARIZATION_COMPLETED = "diarization.completed"
+    DIARIZATION_FAILED = "diarization.failed"
+    SPEAKER_RENAMED = "speaker.renamed"
+
+    # Summarization
+    SUMMARY_STARTED = "summary.started"
+    SUMMARY_GENERATED = "summary.generated"
+    SUMMARY_FAILED = "summary.failed"
+
+    # Entity extraction
+    ENTITIES_EXTRACTED = "entities.extracted"
+    ENTITY_PINNED = "entity.pinned"
+
+    # Webhooks
+    WEBHOOK_REGISTERED = "webhook.registered"
+    WEBHOOK_DELIVERED = "webhook.delivered"
+    WEBHOOK_FAILED = "webhook.failed"
+
+    # Calendar
+    CALENDAR_SYNCED = "calendar.synced"
+    CALENDAR_EVENT_DETECTED = "calendar.event_detected"
+
+    # Triggers
+    TRIGGER_DETECTED = "trigger.detected"
+    TRIGGER_SNOOZED = "trigger.snoozed"
+    TRIGGER_DISMISSED = "trigger.dismissed"
+
+    # User preferences
+    CONSENT_GRANTED = "consent.granted"
+    CONSENT_REVOKED = "consent.revoked"
+    PREFERENCE_UPDATED = "preference.updated"
+
+    # Connection
+    CONNECTION_ESTABLISHED = "connection.established"
+    CONNECTION_LOST = "connection.lost"
+    CONNECTION_TIMEOUT = "connection.timeout"
+
+    # Server
+    SERVER_STARTED = "server.started"
+    SERVER_STOPPED = "server.stopped"
+
+    # RPC operations
+    RPC_STARTED = "rpc.started"
+    RPC_COMPLETED = "rpc.completed"
+    RPC_FAILED = "rpc.failed"
+
+
+def get_event_domain(event_type: LogEventType | str) -> str:
+    """Extract the domain prefix from an event type.
+
+    Args:
+        event_type: The event type to extract domain from.
+
+    Returns:
+        The domain portion (e.g., "meeting" from "meeting.created").
+    """
+    value = event_type.value if isinstance(event_type, LogEventType) else event_type
+    return value.split(".")[0] if "." in value else value
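To make the helper's behavior concrete, a self-contained sketch that mirrors the enum and function above (trimmed to two members):

```python
from enum import StrEnum


class LogEventType(StrEnum):
    MEETING_CREATED = "meeting.created"
    SERVER_STARTED = "server.started"


def get_event_domain(event_type: LogEventType | str) -> str:
    value = event_type.value if isinstance(event_type, LogEventType) else event_type
    return value.split(".")[0] if "." in value else value


# StrEnum members compare equal to their string values.
assert LogEventType.MEETING_CREATED == "meeting.created"
assert get_event_domain(LogEventType.MEETING_CREATED) == "meeting"
assert get_event_domain("server.started") == "server"
assert get_event_domain("heartbeat") == "heartbeat"  # no ".": value is its own domain
```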
@@ -23,6 +23,9 @@ class LogEntry:
         details: Optional key-value details.
         trace_id: OpenTelemetry trace ID (if available).
         span_id: OpenTelemetry span ID (if available).
+        event_type: Semantic event type (e.g., "meeting.created").
+        operation_id: Groups related events (e.g., meeting session ID).
+        entity_id: Primary entity ID (e.g., meeting_id).
     """

     timestamp: datetime
@@ -32,6 +35,9 @@ class LogEntry:
     details: dict[str, str] = field(default_factory=dict)
     trace_id: str | None = None
     span_id: str | None = None
+    event_type: str | None = None
+    operation_id: str | None = None
+    entity_id: str | None = None


 class LogBuffer:
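For reference, a standalone sketch of the dataclass shape these hunks imply. Only the fields visible in the diff are included; field order and any other fields on the real LogEntry are assumptions:

```python
from dataclasses import dataclass, field
from datetime import UTC, datetime


@dataclass
class LogEntry:
    timestamp: datetime
    level: str
    source: str
    message: str
    details: dict[str, str] = field(default_factory=dict)
    trace_id: str | None = None
    span_id: str | None = None
    event_type: str | None = None    # new in this commit
    operation_id: str | None = None  # new in this commit
    entity_id: str | None = None     # new in this commit


entry = LogEntry(datetime.now(UTC), "info", "app", "Basic log")
assert entry.event_type is None        # optional metadata defaults to None...
assert (entry.event_type or "") == ""  # ...and is coerced to "" at the proto boundary
```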
@@ -317,6 +317,101 @@ class TestGetRecentLogs:
         assert proto.details["host"] == "example.com", "Details should match"
         assert proto.details["port"] == "443", "Details should match"

+    async def test_log_entry_proto_includes_trace_context(
+        self,
+        observability_servicer: MockServicerHost,
+        mock_grpc_context: MagicMock,
+    ) -> None:
+        """GetRecentLogs includes trace and span IDs in proto."""
+        entry = LogEntry(
+            timestamp=datetime(2024, 1, 15, 10, 30, 0, tzinfo=UTC),
+            level="info",
+            source="app",
+            message="Traced operation",
+            details={},
+            trace_id="abc123def456",
+            span_id="span789",
+        )
+        mock_buffer = MagicMock(spec=LogBuffer)
+        mock_buffer.get_recent.return_value = [entry]
+
+        with patch(
+            "noteflow.grpc._mixins.observability.get_log_buffer",
+            return_value=mock_buffer,
+        ):
+            request = noteflow_pb2.GetRecentLogsRequest()
+            response = await observability_servicer.GetRecentLogs(request, mock_grpc_context)
+
+        proto = response.logs[0]
+        assert proto.trace_id == "abc123def456", "Trace ID should be included"
+        assert proto.span_id == "span789", "Span ID should be included"
+
+    async def test_log_entry_proto_includes_event_metadata(
+        self,
+        observability_servicer: MockServicerHost,
+        mock_grpc_context: MagicMock,
+    ) -> None:
+        """GetRecentLogs includes event_type, operation_id, entity_id in proto."""
+        entry = LogEntry(
+            timestamp=datetime(2024, 1, 15, 10, 30, 0, tzinfo=UTC),
+            level="info",
+            source="app",
+            message="Meeting started",
+            details={},
+            event_type="meeting.started",
+            operation_id="op-12345",
+            entity_id="meeting-67890",
+        )
+        mock_buffer = MagicMock(spec=LogBuffer)
+        mock_buffer.get_recent.return_value = [entry]
+
+        with patch(
+            "noteflow.grpc._mixins.observability.get_log_buffer",
+            return_value=mock_buffer,
+        ):
+            request = noteflow_pb2.GetRecentLogsRequest()
+            response = await observability_servicer.GetRecentLogs(request, mock_grpc_context)
+
+        proto = response.logs[0]
+        assert proto.event_type == "meeting.started", "Event type should be included"
+        assert proto.operation_id == "op-12345", "Operation ID should be included"
+        assert proto.entity_id == "meeting-67890", "Entity ID should be included"
+
+    async def test_log_entry_proto_handles_none_optional_fields(
+        self,
+        observability_servicer: MockServicerHost,
+        mock_grpc_context: MagicMock,
+    ) -> None:
+        """GetRecentLogs converts None optional fields to empty strings in proto."""
+        entry = LogEntry(
+            timestamp=datetime(2024, 1, 15, 10, 30, 0, tzinfo=UTC),
+            level="info",
+            source="app",
+            message="Basic log",
+            details={},
+            trace_id=None,
+            span_id=None,
+            event_type=None,
+            operation_id=None,
+            entity_id=None,
+        )
+        mock_buffer = MagicMock(spec=LogBuffer)
+        mock_buffer.get_recent.return_value = [entry]
+
+        with patch(
+            "noteflow.grpc._mixins.observability.get_log_buffer",
+            return_value=mock_buffer,
+        ):
+            request = noteflow_pb2.GetRecentLogsRequest()
+            response = await observability_servicer.GetRecentLogs(request, mock_grpc_context)
+
+        proto = response.logs[0]
+        assert proto.trace_id == "", "None trace_id should be empty string"
+        assert proto.span_id == "", "None span_id should be empty string"
+        assert proto.event_type == "", "None event_type should be empty string"
+        assert proto.operation_id == "", "None operation_id should be empty string"
+        assert proto.entity_id == "", "None entity_id should be empty string"
+
+
 # ============================================================================
 # TestGetPerformanceMetrics